aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--bot/bot.py25
-rw-r--r--bot/constants.py1
-rw-r--r--bot/converters.py50
-rw-r--r--bot/exts/filtering/README.md (renamed from bot/exts/filters/__init__.py)0
-rw-r--r--bot/exts/filtering/__init__.py (renamed from tests/bot/exts/filters/__init__.py)0
-rw-r--r--bot/exts/filtering/_filter_context.py40
-rw-r--r--bot/exts/filtering/_filter_lists/__init__.py9
-rw-r--r--bot/exts/filtering/_filter_lists/domain.py74
-rw-r--r--bot/exts/filtering/_filter_lists/extension.py109
-rw-r--r--bot/exts/filtering/_filter_lists/filter_list.py106
-rw-r--r--bot/exts/filtering/_filter_lists/invite.py142
-rw-r--r--bot/exts/filtering/_filter_lists/token.py82
-rw-r--r--bot/exts/filtering/_filters/__init__.py0
-rw-r--r--bot/exts/filtering/_filters/domain.py45
-rw-r--r--bot/exts/filtering/_filters/extension.py16
-rw-r--r--bot/exts/filtering/_filters/filter.py40
-rw-r--r--bot/exts/filtering/_filters/invite.py20
-rw-r--r--bot/exts/filtering/_filters/token.py20
-rw-r--r--bot/exts/filtering/_settings.py197
-rw-r--r--bot/exts/filtering/_settings_types/__init__.py14
-rw-r--r--bot/exts/filtering/_settings_types/bypass_roles.py33
-rw-r--r--bot/exts/filtering/_settings_types/channel_scope.py66
-rw-r--r--bot/exts/filtering/_settings_types/delete_messages.py35
-rw-r--r--bot/exts/filtering/_settings_types/enabled.py19
-rw-r--r--bot/exts/filtering/_settings_types/filter_dm.py17
-rw-r--r--bot/exts/filtering/_settings_types/infraction_and_notification.py192
-rw-r--r--bot/exts/filtering/_settings_types/ping.py70
-rw-r--r--bot/exts/filtering/_settings_types/send_alert.py24
-rw-r--r--bot/exts/filtering/_settings_types/settings_entry.py68
-rw-r--r--bot/exts/filtering/_ui.py68
-rw-r--r--bot/exts/filtering/_utils.py120
-rw-r--r--bot/exts/filtering/filtering.py494
-rw-r--r--bot/exts/filters/antimalware.py106
-rw-r--r--bot/exts/filters/antispam.py324
-rw-r--r--bot/exts/filters/filter_lists.py297
-rw-r--r--bot/exts/filters/filtering.py735
-rw-r--r--bot/exts/filters/security.py30
-rw-r--r--bot/exts/filters/token_remover.py233
-rw-r--r--bot/exts/filters/webhook_remover.py94
-rw-r--r--bot/exts/info/codeblock/_cog.py4
-rw-r--r--bot/exts/moderation/infraction/infractions.py22
-rw-r--r--bot/exts/moderation/watchchannels/_watchchannel.py6
-rw-r--r--bot/rules/__init__.py12
-rw-r--r--bot/rules/attachments.py26
-rw-r--r--bot/rules/burst.py23
-rw-r--r--bot/rules/burst_shared.py18
-rw-r--r--bot/rules/chars.py24
-rw-r--r--bot/rules/discord_emojis.py34
-rw-r--r--bot/rules/duplicates.py28
-rw-r--r--bot/rules/links.py36
-rw-r--r--bot/rules/newlines.py45
-rw-r--r--bot/rules/role_mentions.py24
-rw-r--r--bot/utils/messages.py9
-rw-r--r--config-default.yml1
-rw-r--r--poetry.lock1190
-rw-r--r--pyproject.toml2
-rw-r--r--tests/bot/exts/filtering/__init__.py0
-rw-r--r--tests/bot/exts/filtering/test_filters.py41
-rw-r--r--tests/bot/exts/filtering/test_settings.py20
-rw-r--r--tests/bot/exts/filtering/test_settings_entries.py272
-rw-r--r--tests/bot/exts/filters/test_antimalware.py202
-rw-r--r--tests/bot/exts/filters/test_antispam.py35
-rw-r--r--tests/bot/exts/filters/test_filtering.py40
-rw-r--r--tests/bot/exts/filters/test_security.py53
-rw-r--r--tests/bot/exts/filters/test_token_remover.py409
-rw-r--r--tests/bot/rules/__init__.py76
-rw-r--r--tests/bot/rules/test_attachments.py69
-rw-r--r--tests/bot/rules/test_burst.py54
-rw-r--r--tests/bot/rules/test_burst_shared.py57
-rw-r--r--tests/bot/rules/test_chars.py64
-rw-r--r--tests/bot/rules/test_discord_emojis.py73
-rw-r--r--tests/bot/rules/test_duplicates.py64
-rw-r--r--tests/bot/rules/test_links.py67
-rw-r--r--tests/bot/rules/test_newlines.py102
-rw-r--r--tests/bot/rules/test_role_mentions.py55
-rw-r--r--tests/helpers.py2
76 files changed, 3570 insertions, 3704 deletions
diff --git a/bot/bot.py b/bot/bot.py
index aff07cd32..e40c3f8c1 100644
--- a/bot/bot.py
+++ b/bot/bot.py
@@ -27,8 +27,6 @@ class Bot(BotBase):
super().__init__(*args, **kwargs)
- self.filter_list_cache = defaultdict(dict)
-
async def ping_services(self) -> None:
"""A helper to make sure all the services the bot relies on are available on startup."""
# Connect Site/API
@@ -45,33 +43,10 @@ class Bot(BotBase):
raise
await asyncio.sleep(constants.URLs.connect_cooldown)
- def insert_item_into_filter_list_cache(self, item: dict[str, str]) -> None:
- """Add an item to the bots filter_list_cache."""
- type_ = item["type"]
- allowed = item["allowed"]
- content = item["content"]
-
- self.filter_list_cache[f"{type_}.{allowed}"][content] = {
- "id": item["id"],
- "comment": item["comment"],
- "created_at": item["created_at"],
- "updated_at": item["updated_at"],
- }
-
- async def cache_filter_list_data(self) -> None:
- """Cache all the data in the FilterList on the site."""
- full_cache = await self.api_client.get('bot/filter-lists')
-
- for item in full_cache:
- self.insert_item_into_filter_list_cache(item)
-
async def setup_hook(self) -> None:
"""Default async initialisation method for discord.py."""
await super().setup_hook()
- # Build the FilterList cache
- await self.cache_filter_list_data()
-
# This is not awaited to avoid a deadlock with any cogs that have
# wait_until_guild_available in their cog_load method.
scheduling.create_task(self.load_extensions(exts))
diff --git a/bot/constants.py b/bot/constants.py
index 68a96876f..66c6fed4f 100644
--- a/bot/constants.py
+++ b/bot/constants.py
@@ -478,6 +478,7 @@ class Webhooks(metaclass=YAMLGetter):
duck_pond: int
incidents: int
incidents_archive: int
+ filters: int
class Roles(metaclass=YAMLGetter):
diff --git a/bot/converters.py b/bot/converters.py
index e97a25bdd..3db5c6e10 100644
--- a/bot/converters.py
+++ b/bot/converters.py
@@ -12,7 +12,7 @@ from botcore.site_api import ResponseCodeError
from botcore.utils import unqualify
from botcore.utils.regex import DISCORD_INVITE
from dateutil.relativedelta import relativedelta
-from discord.ext.commands import BadArgument, Bot, Context, Converter, IDConverter, MemberConverter, UserConverter
+from discord.ext.commands import BadArgument, Context, Converter, IDConverter, MemberConverter, UserConverter
from discord.utils import escape_markdown, snowflake_time
from bot import exts, instance as bot_instance
@@ -68,54 +68,6 @@ class ValidDiscordServerInvite(Converter):
raise BadArgument("This does not appear to be a valid Discord server invite.")
-class ValidFilterListType(Converter):
- """
- A converter that checks whether the given string is a valid FilterList type.
-
- Raises `BadArgument` if the argument is not a valid FilterList type, and simply
- passes through the given argument otherwise.
- """
-
- @staticmethod
- async def get_valid_types(bot: Bot) -> list:
- """
- Try to get a list of valid filter list types.
-
- Raise a BadArgument if the API can't respond.
- """
- try:
- valid_types = await bot.api_client.get('bot/filter-lists/get-types')
- except ResponseCodeError:
- raise BadArgument("Cannot validate list_type: Unable to fetch valid types from API.")
-
- return [enum for enum, classname in valid_types]
-
- async def convert(self, ctx: Context, list_type: str) -> str:
- """Checks whether the given string is a valid FilterList type."""
- valid_types = await self.get_valid_types(ctx.bot)
- list_type = list_type.upper()
-
- if list_type not in valid_types:
-
- # Maybe the user is using the plural form of this type,
- # e.g. "guild_invites" instead of "guild_invite".
- #
- # This code will support the simple plural form (a single 's' at the end),
- # which works for all current list types, but if a list type is added in the future
- # which has an irregular plural form (like 'ies'), this code will need to be
- # refactored to support this.
- if list_type.endswith("S") and list_type[:-1] in valid_types:
- list_type = list_type[:-1]
-
- else:
- valid_types_list = '\n'.join([f"• {type_.lower()}" for type_ in valid_types])
- raise BadArgument(
- f"You have provided an invalid list type!\n\n"
- f"Please provide one of the following: \n{valid_types_list}"
- )
- return list_type
-
-
class Extension(Converter):
"""
Fully qualify the name of an extension and ensure it exists.
diff --git a/bot/exts/filters/__init__.py b/bot/exts/filtering/README.md
index e69de29bb..e69de29bb 100644
--- a/bot/exts/filters/__init__.py
+++ b/bot/exts/filtering/README.md
diff --git a/tests/bot/exts/filters/__init__.py b/bot/exts/filtering/__init__.py
index e69de29bb..e69de29bb 100644
--- a/tests/bot/exts/filters/__init__.py
+++ b/bot/exts/filtering/__init__.py
diff --git a/bot/exts/filtering/_filter_context.py b/bot/exts/filtering/_filter_context.py
new file mode 100644
index 000000000..02738d452
--- /dev/null
+++ b/bot/exts/filtering/_filter_context.py
@@ -0,0 +1,40 @@
+from __future__ import annotations
+
+from dataclasses import dataclass, field, replace
+from enum import Enum, auto
+from typing import Optional, Union
+
+from discord import DMChannel, Message, TextChannel, Thread, User
+
+
+class Event(Enum):
+ """Types of events that can trigger filtering. Note this does not have to align with gateway event types."""
+
+ MESSAGE = auto()
+ MESSAGE_EDIT = auto()
+
+
+@dataclass
+class FilterContext:
+ """A dataclass containing the information that should be filtered, and output information of the filtering."""
+
+ # Input context
+ event: Event # The type of event
+ author: User # Who triggered the event
+ channel: Union[TextChannel, Thread, DMChannel] # The channel involved
+ content: Union[str, set] # What actually needs filtering
+ message: Optional[Message] # The message involved
+ embeds: list = field(default_factory=list) # Any embeds involved
+ # Output context
+ dm_content: str = field(default_factory=str) # The content to DM the invoker
+ dm_embed: str = field(default_factory=str) # The embed description to DM the invoker
+ send_alert: bool = field(default=False) # Whether to send an alert for the moderators
+ alert_content: str = field(default_factory=str) # The content of the alert
+ alert_embeds: list = field(default_factory=list) # Any embeds to add to the alert
+ action_descriptions: list = field(default_factory=list) # What actions were taken
+ matches: list = field(default_factory=list) # What exactly was found
+ notification_domain: str = field(default_factory=str) # A domain to send the user for context
+
+ def replace(self, **changes) -> FilterContext:
+ """Return a new context object assigning new values to the specified fields."""
+ return replace(self, **changes)
diff --git a/bot/exts/filtering/_filter_lists/__init__.py b/bot/exts/filtering/_filter_lists/__init__.py
new file mode 100644
index 000000000..82e0452f9
--- /dev/null
+++ b/bot/exts/filtering/_filter_lists/__init__.py
@@ -0,0 +1,9 @@
+from os.path import dirname
+
+from bot.exts.filtering._filter_lists.filter_list import FilterList, ListType, list_type_converter
+from bot.exts.filtering._utils import subclasses_in_package
+
+filter_list_types = subclasses_in_package(dirname(__file__), f"{__name__}.", FilterList)
+filter_list_types = {filter_list.name: filter_list for filter_list in filter_list_types}
+
+__all__ = [filter_list_types, FilterList, ListType, list_type_converter]
diff --git a/bot/exts/filtering/_filter_lists/domain.py b/bot/exts/filtering/_filter_lists/domain.py
new file mode 100644
index 000000000..7f92b62e8
--- /dev/null
+++ b/bot/exts/filtering/_filter_lists/domain.py
@@ -0,0 +1,74 @@
+from __future__ import annotations
+
+import re
+import typing
+from functools import reduce
+from operator import or_
+from typing import Optional, Type
+
+from bot.exts.filtering._filter_context import Event, FilterContext
+from bot.exts.filtering._filter_lists.filter_list import FilterList, ListType
+from bot.exts.filtering._filters.domain import DomainFilter
+from bot.exts.filtering._filters.filter import Filter
+from bot.exts.filtering._settings import ActionSettings
+from bot.exts.filtering._utils import clean_input
+
+if typing.TYPE_CHECKING:
+ from bot.exts.filtering.filtering import Filtering
+
+URL_RE = re.compile(r"https?://([^\s]+)", flags=re.IGNORECASE)
+
+
+class DomainsList(FilterList):
+ """
+ A list of filters, each looking for a specific domain given by URL.
+
+ The blacklist defaults dictate what happens by default when a filter is matched, and can be overridden by
+ individual filters.
+
+ Domains are found by looking for a URL schema (http or https).
+ Filters will also trigger for subdomains unless set otherwise.
+ """
+
+ name = "domain"
+
+ def __init__(self, filtering_cog: Filtering):
+ super().__init__(DomainFilter)
+ filtering_cog.subscribe(self, Event.MESSAGE, Event.MESSAGE_EDIT)
+
+ @property
+ def filter_types(self) -> set[Type[Filter]]:
+ """Return the types of filters used by this list."""
+ return {DomainFilter}
+
+ async def actions_for(self, ctx: FilterContext) -> tuple[Optional[ActionSettings], Optional[str]]:
+ """Dispatch the given event to the list's filters, and return actions to take and a message to relay to mods."""
+ text = ctx.content
+ if not text:
+ return None, ""
+
+ text = clean_input(text)
+ urls = {match.group(1).lower().rstrip("/") for match in URL_RE.finditer(text)}
+ new_ctx = ctx.replace(content=urls)
+
+ triggers = self.filter_list_result(
+ new_ctx, self.filter_lists[ListType.DENY], self.defaults[ListType.DENY]["validations"]
+ )
+ ctx.notification_domain = new_ctx.notification_domain
+ actions = None
+ message = ""
+ if triggers:
+ action_defaults = self.defaults[ListType.DENY]["actions"]
+ actions = reduce(
+ or_,
+ (filter_.actions.fallback_to(action_defaults) if filter_.actions else action_defaults
+ for filter_ in triggers
+ )
+ )
+ if len(triggers) == 1:
+ message = f"#{triggers[0].id} (`{triggers[0].content}`)"
+ if triggers[0].description:
+ message += f" - {triggers[0].description}"
+ else:
+ message = ", ".join(f"#{filter_.id} (`{filter_.content}`)" for filter_ in triggers)
+ return actions, message
diff --git a/bot/exts/filtering/_filter_lists/extension.py b/bot/exts/filtering/_filter_lists/extension.py
new file mode 100644
index 000000000..2447bebde
--- /dev/null
+++ b/bot/exts/filtering/_filter_lists/extension.py
@@ -0,0 +1,109 @@
+from __future__ import annotations
+
+import typing
+from os.path import splitext
+from typing import Optional, Type
+
+import bot
+from bot.constants import Channels, URLs
+from bot.exts.filtering._filter_context import Event, FilterContext
+from bot.exts.filtering._filter_lists.filter_list import FilterList, ListType
+from bot.exts.filtering._filters.extension import ExtensionFilter
+from bot.exts.filtering._filters.filter import Filter
+from bot.exts.filtering._settings import ActionSettings
+
+if typing.TYPE_CHECKING:
+ from bot.exts.filtering.filtering import Filtering
+
+
+PY_EMBED_DESCRIPTION = (
+ "It looks like you tried to attach a Python file - "
+ f"please use a code-pasting service such as {URLs.site_schema}{URLs.site_paste}"
+)
+
+TXT_LIKE_FILES = {".txt", ".csv", ".json"}
+TXT_EMBED_DESCRIPTION = (
+ "You either uploaded a `{blocked_extension}` file or entered a message that was too long. "
+ f"Please use our [paste bin]({URLs.site_schema}{URLs.site_paste}) instead."
+)
+
+DISALLOWED_EMBED_DESCRIPTION = (
+ "It looks like you tried to attach file type(s) that we do not allow ({blocked_extensions_str}). "
+ "We currently allow the following file types: **{joined_whitelist}**.\n\n"
+ "Feel free to ask in {meta_channel_mention} if you think this is a mistake."
+)
+
+
+class ExtensionsList(FilterList):
+ """
+ A list of filters, each looking for a file attachment with a specific extension.
+
+ If an extension is not explicitly allowed, it will be blocked.
+
+ Whitelist defaults dictate what happens when an extension is *not* explicitly allowed,
+ and whitelist filters overrides have no effect.
+
+ Items should be added as file extensions preceded by a dot.
+ """
+
+ name = "extension"
+
+ def __init__(self, filtering_cog: Filtering):
+ super().__init__(ExtensionFilter)
+ filtering_cog.subscribe(self, Event.MESSAGE)
+ self._whitelisted_description = None
+
+ @property
+ def filter_types(self) -> set[Type[Filter]]:
+ """Return the types of filters used by this list."""
+ return {ExtensionFilter}
+
+ async def actions_for(self, ctx: FilterContext) -> tuple[Optional[ActionSettings], Optional[str]]:
+ """Dispatch the given event to the list's filters, and return actions to take and a message to relay to mods."""
+ # Return early if the message doesn't have attachments.
+ if not ctx.message.attachments:
+ return None, ""
+
+ _, failed = self.defaults[ListType.ALLOW]["validations"].evaluate(ctx)
+ if failed: # There's no extension filtering in this context.
+ return None, ""
+
+ # Find all extensions in the message.
+ all_ext = {
+ (splitext(attachment.filename.lower())[1], attachment.filename) for attachment in ctx.message.attachments
+ }
+ new_ctx = ctx.replace(content={ext for ext, _ in all_ext}) # And prepare the context for the filters to read.
+ triggered = [filter_ for filter_ in self.filter_lists[ListType.ALLOW].values() if filter_.triggered_on(new_ctx)]
+ allowed_ext = {filter_.content for filter_ in triggered} # Get the extensions in the message that are allowed.
+
+ # See if there are any extensions left which aren't allowed.
+ not_allowed = {ext: filename for ext, filename in all_ext if ext not in allowed_ext}
+
+ if not not_allowed: # Yes, it's a double negative. Meaning all attachments are allowed :)
+ return None, ""
+
+ # Something is disallowed.
+ if ".py" in not_allowed:
+ # Provide a pastebin link for .py files.
+ ctx.dm_embed = PY_EMBED_DESCRIPTION
+ elif txt_extensions := {ext for ext in TXT_LIKE_FILES if ext in not_allowed}:
+ # Work around Discord auto-conversion of messages longer than 2000 chars to .txt
+ cmd_channel = bot.instance.get_channel(Channels.bot_commands)
+ ctx.dm_embed = TXT_EMBED_DESCRIPTION.format(
+ blocked_extension=txt_extensions.pop(),
+ cmd_channel_mention=cmd_channel.mention
+ )
+ else:
+ meta_channel = bot.instance.get_channel(Channels.meta)
+ if not self._whitelisted_description:
+ self._whitelisted_description = ', '.join(
+ filter_.content for filter_ in self.filter_lists[ListType.ALLOW].values()
+ )
+ ctx.dm_embed = DISALLOWED_EMBED_DESCRIPTION.format(
+ joined_whitelist=self._whitelisted_description,
+ blocked_extensions_str=", ".join(not_allowed),
+ meta_channel_mention=meta_channel.mention,
+ )
+
+ ctx.matches += not_allowed.values()
+ return self.defaults[ListType.ALLOW]["actions"], ", ".join(f"`{ext}`" for ext in not_allowed)
diff --git a/bot/exts/filtering/_filter_lists/filter_list.py b/bot/exts/filtering/_filter_lists/filter_list.py
new file mode 100644
index 000000000..3b5138fe4
--- /dev/null
+++ b/bot/exts/filtering/_filter_lists/filter_list.py
@@ -0,0 +1,106 @@
+from abc import abstractmethod
+from enum import Enum
+from typing import Optional, Type
+
+from discord.ext.commands import BadArgument
+
+from bot.exts.filtering._filter_context import FilterContext
+from bot.exts.filtering._filters.filter import Filter
+from bot.exts.filtering._settings import ActionSettings, ValidationSettings, create_settings
+from bot.exts.filtering._utils import FieldRequiring, past_tense
+from bot.log import get_logger
+
+log = get_logger(__name__)
+
+
+class ListType(Enum):
+ """An enumeration of list types."""
+
+ DENY = 0
+ ALLOW = 1
+
+
+# Alternative names with which each list type can be specified in commands.
+aliases = (
+ (ListType.DENY, {"deny", "blocklist", "blacklist", "denylist", "bl", "dl"}),
+ (ListType.ALLOW, {"allow", "allowlist", "whitelist", "al", "wl"})
+)
+
+
+def list_type_converter(argument: str) -> ListType:
+ """A converter to get the appropriate list type."""
+ argument = argument.lower()
+ for list_type, list_aliases in aliases:
+ if argument in list_aliases or argument in map(past_tense, list_aliases):
+ return list_type
+ raise BadArgument(f"No matching list type found for {argument!r}.")
+
+
+class FilterList(FieldRequiring):
+ """Dispatches events to lists of _filters, and aggregates the responses into a single list of actions to take."""
+
+ # Each subclass must define a name matching the filter_list name we're expecting to receive from the database.
+ # Names must be unique across all filter lists.
+ name = FieldRequiring.MUST_SET_UNIQUE
+
+ def __init__(self, filter_type: Type[Filter]):
+ self.filter_lists: dict[ListType, dict[int, Filter]] = {}
+ self.defaults = {}
+
+ self.filter_type = filter_type
+
+ def add_list(self, list_data: dict) -> None:
+ """Add a new type of list (such as a whitelist or a blacklist) this filter list."""
+ actions, validations = create_settings(list_data["settings"], keep_empty=True)
+ list_type = ListType(list_data["list_type"])
+ self.defaults[list_type] = {"actions": actions, "validations": validations}
+
+ filters = {}
+ for filter_data in list_data["filters"]:
+ try:
+ filters[filter_data["id"]] = self.filter_type(filter_data)
+ except TypeError as e:
+ log.warning(e)
+ self.filter_lists[list_type] = filters
+
+ @property
+ @abstractmethod
+ def filter_types(self) -> set[Type[Filter]]:
+ """Return the types of filters used by this list."""
+
+ @abstractmethod
+ async def actions_for(self, ctx: FilterContext) -> tuple[Optional[ActionSettings], Optional[str]]:
+ """Dispatch the given event to the list's filters, and return actions to take and a message to relay to mods."""
+
+ @staticmethod
+ def filter_list_result(
+ ctx: FilterContext, filters: dict[int, Filter], defaults: ValidationSettings
+ ) -> list[Filter]:
+ """
+ Sift through the list of filters, and return only the ones which apply to the given context.
+
+ The strategy is as follows:
+ 1. The default settings are evaluated on the given context. The default answer for whether the filter is
+ relevant in the given context is whether there aren't any validation settings which returned False.
+ 2. For each filter, its overrides are considered:
+ - If there are no overrides, then the filter is relevant if that is the default answer.
+ - Otherwise it is relevant if there are no failed overrides, and any failing default is overridden by a
+ successful override.
+
+ If the filter is relevant in context, see if it actually triggers.
+ """
+ passed_by_default, failed_by_default = defaults.evaluate(ctx)
+ default_answer = not bool(failed_by_default)
+
+ relevant_filters = []
+ for filter_ in filters.values():
+ if not filter_.validations:
+ if default_answer and filter_.triggered_on(ctx):
+ relevant_filters.append(filter_)
+ else:
+ passed, failed = filter_.validations.evaluate(ctx)
+ if not failed and failed_by_default < passed:
+ if filter_.triggered_on(ctx):
+ relevant_filters.append(filter_)
+
+ return relevant_filters
diff --git a/bot/exts/filtering/_filter_lists/invite.py b/bot/exts/filtering/_filter_lists/invite.py
new file mode 100644
index 000000000..4e8d74d8a
--- /dev/null
+++ b/bot/exts/filtering/_filter_lists/invite.py
@@ -0,0 +1,142 @@
+from __future__ import annotations
+
+import typing
+from functools import reduce
+from operator import or_
+from typing import Optional, Type
+
+from botcore.utils.regex import DISCORD_INVITE
+from discord import Embed, Invite
+from discord.errors import NotFound
+
+import bot
+from bot.exts.filtering._filter_context import Event, FilterContext
+from bot.exts.filtering._filter_lists.filter_list import FilterList, ListType
+from bot.exts.filtering._filters.filter import Filter
+from bot.exts.filtering._filters.invite import InviteFilter
+from bot.exts.filtering._settings import ActionSettings
+from bot.exts.filtering._utils import clean_input
+
+if typing.TYPE_CHECKING:
+ from bot.exts.filtering.filtering import Filtering
+
+
+class InviteList(FilterList):
+ """
+ A list of filters, each looking for guild invites to a specific guild.
+
+ If the invite is not whitelisted, it will be blocked. Partnered and verified servers are allowed unless blacklisted.
+
+ Whitelist defaults dictate what happens when an invite is *not* explicitly allowed,
+ and whitelist filters overrides have no effect.
+
+ Blacklist defaults dictate what happens by default when an explicitly blocked invite is found.
+
+ Items in the list are added through invites for the purpose of fetching the guild info.
+ Items are stored as guild IDs, guild invites are *not* stored.
+ """
+
+ name = "invite"
+
+ def __init__(self, filtering_cog: Filtering):
+ super().__init__(InviteFilter)
+ filtering_cog.subscribe(self, Event.MESSAGE)
+
+ @property
+ def filter_types(self) -> set[Type[Filter]]:
+ """Return the types of filters used by this list."""
+ return {InviteFilter}
+
+ async def actions_for(self, ctx: FilterContext) -> tuple[Optional[ActionSettings], Optional[str]]:
+ """Dispatch the given event to the list's filters, and return actions to take and a message to relay to mods."""
+ _, failed = self.defaults[ListType.ALLOW]["validations"].evaluate(ctx)
+ if failed: # There's no invite filtering in this context.
+ return None, ""
+
+ text = clean_input(ctx.content)
+
+ # Avoid escape characters
+ text = text.replace("\\", "")
+
+ matches = list(DISCORD_INVITE.finditer(text))
+ invite_codes = {m.group("invite") for m in matches}
+ if not invite_codes:
+ return None, ""
+
+ # Sort the invites into three categories:
+ denied_by_default = dict() # Denied unless whitelisted.
+ allowed_by_default = dict() # Allowed unless blacklisted (partnered or verified servers).
+ disallowed_invites = dict() # Always denied (invalid invites).
+ for invite_code in invite_codes:
+ try:
+ invite = await bot.instance.fetch_invite(invite_code)
+ except NotFound:
+ disallowed_invites[invite_code] = None
+ else:
+ if not invite.guild:
+ disallowed_invites[invite_code] = invite
+ else:
+ if "PARTNERED" in invite.guild.features or "VERIFIED" in invite.guild.features:
+ allowed_by_default[invite_code] = invite
+ else:
+ denied_by_default[invite_code] = invite
+
+ # Add the disallowed by default unless they're whitelisted.
+ guilds_for_inspection = {invite.guild.id for invite in denied_by_default.values()}
+ new_ctx = ctx.replace(content=guilds_for_inspection)
+ allowed = {
+ filter_.content for filter_ in self.filter_lists[ListType.ALLOW].values() if filter_.triggered_on(new_ctx)
+ }
+ disallowed_invites.update({
+ invite_code: invite for invite_code, invite in denied_by_default.items() if invite.guild.id not in allowed
+ })
+
+ # Add the allowed by default only if they're blacklisted.
+ guilds_for_inspection = {invite.guild.id for invite in allowed_by_default.values()}
+ new_ctx = ctx.replace(content=guilds_for_inspection)
+ triggered = self.filter_list_result(
+ new_ctx, self.filter_lists[ListType.ALLOW], self.defaults[ListType.DENY]["validations"]
+ )
+ disallowed_invites.update({
+ invite_code: invite for invite_code, invite in allowed_by_default.items()
+ if invite.guild.id in {filter_.content for filter_ in triggered}
+ })
+
+ if not disallowed_invites:
+ return None, ""
+
+ actions = None
+ if len(disallowed_invites) > len(triggered): # There are invites which weren't allowed but aren't blacklisted.
+ deny_defaults = self.defaults[ListType.DENY]["actions"]
+ actions = reduce(
+ or_,
+ (
+ filter_.actions.fallback_to(deny_defaults) if filter_.actions else deny_defaults
+ for filter_ in triggered
+ ),
+ self.defaults[ListType.ALLOW]["actions"]
+ )
+ elif triggered:
+ actions = reduce(or_, (filter_.actions for filter_ in triggered))
+ ctx.matches += {match[0] for match in matches if match.group("invite") in disallowed_invites}
+ ctx.alert_embeds += (self._guild_embed(invite) for invite in disallowed_invites.values() if invite)
+ return actions, ", ".join(f"`{invite}`" for invite in disallowed_invites)
+
+ @staticmethod
+ def _guild_embed(invite: Invite) -> Embed:
+ """Return an embed representing the guild invites to."""
+ embed = Embed()
+ if invite.guild:
+ embed.title = invite.guild.name
+ embed.set_thumbnail(url=invite.guild.icon.url)
+ embed.set_footer(text=f"Guild ID: {invite.guild.id}")
+ else:
+ embed.title = "Group DM"
+
+ embed.description = (
+ f"**Invite Code:** {invite.code}\n"
+ f"**Members:** {invite.approximate_member_count}\n"
+ f"**Active:** {invite.approximate_presence_count}"
+ )
+
+ return embed
diff --git a/bot/exts/filtering/_filter_lists/token.py b/bot/exts/filtering/_filter_lists/token.py
new file mode 100644
index 000000000..c989b06b9
--- /dev/null
+++ b/bot/exts/filtering/_filter_lists/token.py
@@ -0,0 +1,82 @@
+from __future__ import annotations
+
+import re
+import typing
+from functools import reduce
+from operator import or_
+from typing import Optional, Type
+
+from bot.exts.filtering._filter_context import Event, FilterContext
+from bot.exts.filtering._filter_lists.filter_list import FilterList, ListType
+from bot.exts.filtering._filters.filter import Filter
+from bot.exts.filtering._filters.token import TokenFilter
+from bot.exts.filtering._settings import ActionSettings
+from bot.exts.filtering._utils import clean_input
+
+if typing.TYPE_CHECKING:
+ from bot.exts.filtering.filtering import Filtering
+
+SPOILER_RE = re.compile(r"(\|\|.+?\|\|)", re.DOTALL)
+
+
+class TokensList(FilterList):
+ """
+ A list of filters, each looking for a specific token in the given content given as regex.
+
+ The blacklist defaults dictate what happens by default when a filter is matched, and can be overridden by
+ individual filters.
+
+ Usually, if blocking literal strings, the literals themselves can be specified as the filter's value.
+ But since this is a list of regex patterns, be careful of the items added. For example, a dot needs to be escaped
+ to function as a literal dot.
+ """
+
+ name = "token"
+
+ def __init__(self, filtering_cog: Filtering):
+ super().__init__(TokenFilter)
+ filtering_cog.subscribe(self, Event.MESSAGE, Event.MESSAGE_EDIT)
+
+ @property
+ def filter_types(self) -> set[Type[Filter]]:
+ """Return the types of filters used by this list."""
+ return {TokenFilter}
+
+ async def actions_for(self, ctx: FilterContext) -> tuple[Optional[ActionSettings], Optional[str]]:
+ """Dispatch the given event to the list's filters, and return actions to take and a message to relay to mods."""
+ text = ctx.content
+ if not text:
+ return None, ""
+ if SPOILER_RE.search(text):
+ text = self._expand_spoilers(text)
+ text = clean_input(text)
+ ctx = ctx.replace(content=text)
+
+ triggers = self.filter_list_result(
+ ctx, self.filter_lists[ListType.DENY], self.defaults[ListType.DENY]["validations"]
+ )
+ actions = None
+ message = ""
+ if triggers:
+ action_defaults = self.defaults[ListType.DENY]["actions"]
+ actions = reduce(
+ or_,
+ (filter_.actions.fallback_to(action_defaults) if filter_.actions else action_defaults
+ for filter_ in triggers
+ )
+ )
+ if len(triggers) == 1:
+ message = f"#{triggers[0].id} (`{triggers[0].content}`)"
+ if triggers[0].description:
+ message += f" - {triggers[0].description}"
+ else:
+ message = ", ".join(f"#{filter_.id} (`{filter_.content}`)" for filter_ in triggers)
+ return actions, message
+
+ @staticmethod
+ def _expand_spoilers(text: str) -> str:
+ """Return a string containing all interpretations of a spoilered message."""
+ split_text = SPOILER_RE.split(text)
+ return ''.join(
+ split_text[0::2] + split_text[1::2] + split_text
+ )
diff --git a/bot/exts/filtering/_filters/__init__.py b/bot/exts/filtering/_filters/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/bot/exts/filtering/_filters/__init__.py
diff --git a/bot/exts/filtering/_filters/domain.py b/bot/exts/filtering/_filters/domain.py
new file mode 100644
index 000000000..9f5f97413
--- /dev/null
+++ b/bot/exts/filtering/_filters/domain.py
@@ -0,0 +1,45 @@
+from typing import ClassVar, Optional
+
+import tldextract
+from pydantic import BaseModel
+
+from bot.exts.filtering._filter_context import FilterContext
+from bot.exts.filtering._filters.filter import Filter
+
+
class ExtraDomainSettings(BaseModel):
    """Extra settings for how domains should be matched in a message."""

    # Human-readable description of the `exact` field, surfaced in UI/help text.
    exact_description: ClassVar[str] = (
        "A boolean. If True, will match the filter content exactly, and won't trigger for subdomains and subpaths."
    )

    # Whether to match the filter content exactly, or to trigger for subdomains and subpaths as well.
    exact: Optional[bool] = False
+
+
class DomainFilter(Filter):
    """
    A filter which looks for a specific domain given by URL.

    The schema (http, https) does not need to be included in the filter.
    Will also match subdomains unless set otherwise.
    """

    name = "domain"
    extra_fields_type = ExtraDomainSettings

    def triggered_on(self, ctx: FilterContext) -> bool:
        """
        Search for a domain within a given context.

        `ctx.content` is expected to hold the URLs found in the message.
        The context is only mutated when the filter actually triggers.
        """
        domain = tldextract.extract(self.content).registered_domain

        for found_url in ctx.content:
            if self.content not in found_url or tldextract.extract(found_url).registered_domain != domain:
                continue
            # With `exact` set, a subdomain/subpath hit must not trigger the filter.
            # Keep scanning the remaining URLs instead of returning early, and leave the context untouched.
            if self.extra_fields.exact and self.content != found_url:
                continue
            ctx.matches.append(self.content)
            if (
                ("delete_messages" in self.actions and self.actions.get("delete_messages").delete_messages)
                or not ctx.notification_domain
            ):  # Override this field only if this filter causes deletion.
                ctx.notification_domain = self.content
            return True
        return False
diff --git a/bot/exts/filtering/_filters/extension.py b/bot/exts/filtering/_filters/extension.py
new file mode 100644
index 000000000..1a2ab8617
--- /dev/null
+++ b/bot/exts/filtering/_filters/extension.py
@@ -0,0 +1,16 @@
+from bot.exts.filtering._filter_context import FilterContext
+from bot.exts.filtering._filters.filter import Filter
+
+
class ExtensionFilter(Filter):
    """
    A filter which looks for a specific attachment extension in messages.

    The filter stores the extension preceded by a dot.
    """

    name = "extension"

    def triggered_on(self, ctx: FilterContext) -> bool:
        """Check whether this filter's extension is among the attachment extensions in the context content."""
        extensions = ctx.content
        return self.content in extensions
diff --git a/bot/exts/filtering/_filters/filter.py b/bot/exts/filtering/_filters/filter.py
new file mode 100644
index 000000000..da149dce6
--- /dev/null
+++ b/bot/exts/filtering/_filters/filter.py
@@ -0,0 +1,40 @@
+from abc import abstractmethod
+
+from bot.exts.filtering._filter_context import FilterContext
+from bot.exts.filtering._settings import create_settings
+from bot.exts.filtering._utils import FieldRequiring
+
+
class Filter(FieldRequiring):
    """
    A class representing a filter.

    Each filter looks for a specific attribute within an event (such as message sent),
    and defines what action should be performed if it is triggered.
    """

    # Each subclass must define a name which will be used to fetch its description.
    # Names must be unique across all types of filters.
    name = FieldRequiring.MUST_SET_UNIQUE
    # If a subclass uses extra fields, it should assign the pydantic model type to this variable.
    extra_fields_type = None

    def __init__(self, filter_data: dict):
        # Fields delivered by the database record.
        self.id = filter_data["id"]
        self.content = filter_data["content"]
        self.description = filter_data["description"]
        # Per-filter overrides of the containing list's defaults; either may be None.
        self.actions, self.validations = create_settings(filter_data["settings"])
        # `additional_field` is a JSON string; default to an empty JSON object when missing.
        self.extra_fields = filter_data["additional_field"] or "{}"  # noqa: P103
        if self.extra_fields_type:
            self.extra_fields = self.extra_fields_type.parse_raw(self.extra_fields)

    @abstractmethod
    def triggered_on(self, ctx: FilterContext) -> bool:
        """Search for the filter's content within a given context."""

    def __str__(self) -> str:
        """A string representation of the filter."""
        string = f"#{self.id}. `{self.content}`"
        if self.description:
            string += f" - {self.description}"
        return string
diff --git a/bot/exts/filtering/_filters/invite.py b/bot/exts/filtering/_filters/invite.py
new file mode 100644
index 000000000..5a9924833
--- /dev/null
+++ b/bot/exts/filtering/_filters/invite.py
@@ -0,0 +1,20 @@
+from bot.exts.filtering._filter_context import FilterContext
+from bot.exts.filtering._filters.filter import Filter
+
+
class InviteFilter(Filter):
    """
    A filter which looks for invites to a specific guild in messages.

    The filter stores the guild ID which is allowed or denied.
    """

    name = "invite"

    def __init__(self, filter_data: dict):
        super().__init__(filter_data)
        # The content arrives from the database as a string; store it as an int for comparisons.
        self.content = int(self.content)

    def triggered_on(self, ctx: FilterContext) -> bool:
        """Searches for a guild ID in the context content, given as a set of IDs."""
        guild_ids = ctx.content
        return self.content in guild_ids
diff --git a/bot/exts/filtering/_filters/token.py b/bot/exts/filtering/_filters/token.py
new file mode 100644
index 000000000..c955b269b
--- /dev/null
+++ b/bot/exts/filtering/_filters/token.py
@@ -0,0 +1,20 @@
+import re
+
+from bot.exts.filtering._filter_context import FilterContext
+from bot.exts.filtering._filters.filter import Filter
+
+
class TokenFilter(Filter):
    """A filter which looks for a specific token given by regex."""

    name = "token"

    def triggered_on(self, ctx: FilterContext) -> bool:
        """Search the context's content for this filter's regex pattern, case-insensitively."""
        found = re.search(self.content, ctx.content, flags=re.IGNORECASE)
        if not found:
            return False
        # Record the matched text so alerts can show what tripped the filter.
        ctx.matches.append(found[0])
        return True
diff --git a/bot/exts/filtering/_settings.py b/bot/exts/filtering/_settings.py
new file mode 100644
index 000000000..f88b26ee3
--- /dev/null
+++ b/bot/exts/filtering/_settings.py
@@ -0,0 +1,197 @@
+from __future__ import annotations
+
+from abc import abstractmethod
+from typing import Any, Iterator, Mapping, Optional, TypeVar
+
+from bot.exts.filtering._filter_context import FilterContext
+from bot.exts.filtering._settings_types import settings_types
+from bot.exts.filtering._settings_types.settings_entry import ActionEntry, ValidationEntry
+from bot.exts.filtering._utils import FieldRequiring
+from bot.log import get_logger
+
+TSettings = TypeVar("TSettings", bound="Settings")
+
+log = get_logger(__name__)
+
+_already_warned: set[str] = set()
+
+
def create_settings(
    settings_data: dict, *, keep_empty: bool = False
) -> tuple[Optional[ActionSettings], Optional[ValidationSettings]]:
    """
    Create and return instances of the Settings subclasses from the given data.

    Each entry is routed to the action or validation collection by its name.
    Additionally, warn (once per entry name) for data entries with no matching class.
    """
    action_data = {}
    validation_data = {}
    for entry_name, entry_data in settings_data.items():
        if entry_name in settings_types["ActionEntry"]:
            action_data[entry_name] = entry_data
        elif entry_name in settings_types["ValidationEntry"]:
            validation_data[entry_name] = entry_data
        elif entry_name not in _already_warned:
            # Consult the dedup set before warning, consistent with Settings.__init__;
            # previously the set was populated but never checked, so the warning repeated.
            log.warning(
                f"A setting named {entry_name} was loaded from the database, but no matching class was found."
            )
            _already_warned.add(entry_name)
    return (
        ActionSettings.create(action_data, keep_empty=keep_empty),
        ValidationSettings.create(validation_data, keep_empty=keep_empty)
    )
+
+
class Settings(FieldRequiring):
    """
    A collection of settings.

    For processing the settings parts in the database and evaluating them on given contexts.

    Each filter list and filter has its own settings.

    A filter doesn't have to have its own settings. For every undefined setting, it falls back to the value defined in
    the filter list which contains the filter.
    """

    # Subclasses must set this to the SettingsEntry subclass they collect (e.g. ActionEntry, ValidationEntry).
    entry_type = FieldRequiring.MUST_SET

    # Entry names already warned about, so each unknown name is only logged once per process.
    _already_warned: set[str] = set()

    @abstractmethod
    def __init__(self, settings_data: dict, *, keep_empty: bool = False):
        self._entries: dict[str, Settings.entry_type] = {}

        # Look up the name -> class mapping for this collection's entry category.
        entry_classes = settings_types.get(self.entry_type.__name__)
        for entry_name, entry_data in settings_data.items():
            try:
                entry_cls = entry_classes[entry_name]
            except KeyError:
                if entry_name not in self._already_warned:
                    log.warning(
                        f"A setting named {entry_name} was loaded from the database, "
                        f"but no matching {self.entry_type.__name__} class."
                    )
                    self._already_warned.add(entry_name)
            else:
                try:
                    # `create` returns None for empty data (unless keep_empty); those entries are skipped.
                    new_entry = entry_cls.create(entry_data, keep_empty=keep_empty)
                    if new_entry:
                        self._entries[entry_name] = new_entry
                except TypeError as e:
                    raise TypeError(
                        f"Attempted to load a {entry_name} setting, but the response is malformed: {entry_data}"
                    ) from e

    def __contains__(self, item: str) -> bool:
        return item in self._entries

    def __setitem__(self, key: str, value: entry_type) -> None:
        self._entries[key] = value

    def copy(self: TSettings) -> TSettings:
        """Create a shallow copy of the object."""
        copy = self.__class__({})
        copy._entries = self._entries.copy()
        return copy

    def items(self) -> Iterator[tuple[str, entry_type]]:
        """Return an iterator for the items in the entries dictionary."""
        yield from self._entries.items()

    def update(self, mapping: Mapping[str, entry_type], **kwargs: entry_type) -> None:
        """Update the entries with items from `mapping` and the kwargs."""
        self._entries.update(mapping, **kwargs)

    def get(self, key: str, default: Optional[Any] = None) -> entry_type:
        """Get the entry matching the key, or fall back to the default value if the key is missing."""
        return self._entries.get(key, default)

    @classmethod
    def create(cls, settings_data: dict, *, keep_empty: bool = False) -> Optional[Settings]:
        """
        Returns a Settings object from `settings_data` if it holds any value, None otherwise.

        Use this method to create Settings objects instead of the init.
        The None value is significant for how a filter list iterates over its filters.
        """
        settings = cls(settings_data, keep_empty=keep_empty)
        # If an entry doesn't hold any values, its `create` method will return None.
        # If all entries are None, then the settings object holds no values.
        if not keep_empty and not any(settings._entries.values()):
            return None

        return settings
+
+
class ValidationSettings(Settings):
    """
    A collection of validation settings.

    A filter is triggered only if all of its validation settings (e.g whether to invoke in DM) approve
    (the check returns True).
    """

    entry_type = ValidationEntry

    def __init__(self, settings_data: dict, *, keep_empty: bool = False):
        super().__init__(settings_data, keep_empty=keep_empty)

    def evaluate(self, ctx: FilterContext) -> tuple[set[str], set[str]]:
        """Evaluates for each setting whether the context is relevant to the filter."""
        passed_checks = set()
        failed_checks = set()

        self._entries: dict[str, ValidationEntry]
        for entry_name, entry in self._entries.items():
            if not entry:
                continue
            # Sort each entry's name into the bucket matching its verdict.
            bucket = passed_checks if entry.triggers_on(ctx) else failed_checks
            bucket.add(entry_name)

        return passed_checks, failed_checks
+
+
class ActionSettings(Settings):
    """
    A collection of action settings.

    If a filter is triggered, its action settings (e.g how to infract the user) are combined with the action settings of
    other triggered filters in the same event, and action is taken according to the combined action settings.
    """

    entry_type = ActionEntry

    def __init__(self, settings_data: dict, *, keep_empty: bool = False):
        super().__init__(settings_data, keep_empty=keep_empty)

    def __or__(self, other: ActionSettings) -> ActionSettings:
        """Combine the entries of two collections of settings into a new ActionsSettings."""
        # A settings object doesn't necessarily have all types of entries (e.g in the case of filter overrides).
        # Start from the other side's entries; shared names get the combined entry, ours win otherwise.
        combined = dict(other._entries)
        for name, entry in self._entries.items():
            combined[name] = entry | other._entries[name] if name in other._entries else entry

        result = ActionSettings({})
        result.update(combined)
        return result

    async def action(self, ctx: FilterContext) -> None:
        """Execute the action of every action entry stored."""
        for entry in self._entries.values():
            await entry.action(ctx)

    def fallback_to(self, fallback: ActionSettings) -> ActionSettings:
        """Fill in missing entries from `fallback`."""
        new_actions = self.copy()
        for entry_name, entry_value in fallback.items():
            new_actions._entries.setdefault(entry_name, entry_value)
        return new_actions
diff --git a/bot/exts/filtering/_settings_types/__init__.py b/bot/exts/filtering/_settings_types/__init__.py
new file mode 100644
index 000000000..620290cb2
--- /dev/null
+++ b/bot/exts/filtering/_settings_types/__init__.py
@@ -0,0 +1,14 @@
from os.path import dirname

from bot.exts.filtering._settings_types.settings_entry import ActionEntry, ValidationEntry
from bot.exts.filtering._utils import subclasses_in_package

# Discover every concrete entry type defined in this package.
action_types = subclasses_in_package(dirname(__file__), f"{__name__}.", ActionEntry)
validation_types = subclasses_in_package(dirname(__file__), f"{__name__}.", ValidationEntry)

# Map each entry's declared name to its class, keyed by entry category.
settings_types = {
    "ActionEntry": {settings_type.name: settings_type for settings_type in action_types},
    "ValidationEntry": {settings_type.name: settings_type for settings_type in validation_types}
}

# `__all__` must contain the *names* of the public objects as strings, not the objects
# themselves - a non-string item makes `from ... import *` raise a TypeError.
__all__ = ["settings_types"]
diff --git a/bot/exts/filtering/_settings_types/bypass_roles.py b/bot/exts/filtering/_settings_types/bypass_roles.py
new file mode 100644
index 000000000..a5c18cffc
--- /dev/null
+++ b/bot/exts/filtering/_settings_types/bypass_roles.py
@@ -0,0 +1,33 @@
+from typing import ClassVar, Union
+
+from discord import Member
+from pydantic import validator
+
+from bot.exts.filtering._filter_context import FilterContext
+from bot.exts.filtering._settings_types.settings_entry import ValidationEntry
+
+
class RoleBypass(ValidationEntry):
    """A setting entry which tells whether the roles the member has allow them to bypass the filter."""

    name: ClassVar[str] = "bypass_roles"
    description: ClassVar[str] = "A list of role IDs or role names. Users with these roles will not trigger the filter."

    bypass_roles: set[Union[int, str]]

    @validator("bypass_roles", each_item=True)
    @classmethod
    def maybe_cast_to_int(cls, role: str) -> Union[int, str]:
        """Cast the string to an int if it consists only of digits (i.e. it's a role ID)."""
        return int(role) if role.isdigit() else role

    def triggers_on(self, ctx: FilterContext) -> bool:
        """Return whether the filter should be triggered on this user given their roles."""
        # Non-members (e.g. authors messaging in DMs) have no roles, so nothing can exempt them.
        if not isinstance(ctx.author, Member):
            return True
        # Trigger only when none of the member's roles appear in the bypass set, by ID or by name.
        return not any(
            role.id in self.bypass_roles or role.name in self.bypass_roles
            for role in ctx.author.roles
        )
diff --git a/bot/exts/filtering/_settings_types/channel_scope.py b/bot/exts/filtering/_settings_types/channel_scope.py
new file mode 100644
index 000000000..fd5206b81
--- /dev/null
+++ b/bot/exts/filtering/_settings_types/channel_scope.py
@@ -0,0 +1,66 @@
+from typing import ClassVar, Union
+
+from pydantic import validator
+
+from bot.exts.filtering._filter_context import FilterContext
+from bot.exts.filtering._settings_types.settings_entry import ValidationEntry
+
+
class ChannelScope(ValidationEntry):
    """A setting entry which tells whether the filter was invoked in a whitelisted channel or category."""

    name: ClassVar[str] = "channel_scope"
    # Composite entry: one description per underlying DB field.
    description: ClassVar[dict[str, str]] = {
        "disabled_channels": "A list of channel IDs or channel names. The filter will not trigger in these channels.",
        "disabled_categories": (
            "A list of category IDs or category names. The filter will not trigger in these categories."
        ),
        "enabled_channels": (
            "A list of channel IDs or channel names. "
            "The filter can trigger in these channels even if the category is disabled."
        )
    }

    # Each set may mix numeric IDs with plain names; see `maybe_cast_items`.
    disabled_channels: set[Union[str, int]]
    disabled_categories: set[Union[str, int]]
    enabled_channels: set[Union[str, int]]

    @validator("*", pre=True)
    @classmethod
    def init_if_sequence_none(cls, sequence: list[str]) -> list[str]:
        """Initialize an empty sequence if the value is None."""
        if sequence is None:
            return []
        return sequence

    @validator("*", each_item=True)
    @classmethod
    def maybe_cast_items(cls, channel_or_category: str) -> Union[str, int]:
        """Cast each value to an int if it consists only of digits (i.e. it's an ID)."""
        if channel_or_category.isdigit():
            return int(channel_or_category)
        return channel_or_category

    def triggers_on(self, ctx: FilterContext) -> bool:
        """
        Return whether the filter should be triggered in the given channel.

        The filter is invoked by default.
        If the channel is explicitly enabled, it bypasses the set disabled channels and categories.
        """
        channel = ctx.channel
        # The settings may reference a channel by ID or by name; the channel must be allowed
        # under both interpretations for the filter to trigger.
        enabled_id = (
            channel.id in self.enabled_channels
            or (
                channel.id not in self.disabled_channels
                and (not channel.category or channel.category.id not in self.disabled_categories)
            )
        )
        enabled_name = (
            channel.name in self.enabled_channels
            or (
                channel.name not in self.disabled_channels
                and (not channel.category or channel.category.name not in self.disabled_categories)
            )
        )
        return enabled_id and enabled_name
diff --git a/bot/exts/filtering/_settings_types/delete_messages.py b/bot/exts/filtering/_settings_types/delete_messages.py
new file mode 100644
index 000000000..710cb0ed8
--- /dev/null
+++ b/bot/exts/filtering/_settings_types/delete_messages.py
@@ -0,0 +1,35 @@
+from contextlib import suppress
+from typing import ClassVar
+
+from discord.errors import NotFound
+
+from bot.exts.filtering._filter_context import Event, FilterContext
+from bot.exts.filtering._settings_types.settings_entry import ActionEntry
+
+
class DeleteMessages(ActionEntry):
    """A setting entry which tells whether to delete the offending message(s)."""

    name: ClassVar[str] = "delete_messages"
    description: ClassVar[str] = (
        "A boolean field. If True, the filter being triggered will cause the offending message to be deleted."
    )

    delete_messages: bool

    async def action(self, ctx: FilterContext) -> None:
        """Delete the context message(s)."""
        # Only message-type events have something to delete, and only if the setting is on.
        if not self.delete_messages:
            return
        if ctx.event not in (Event.MESSAGE, Event.MESSAGE_EDIT):
            return

        # DM messages are left alone; the message may also already be gone, which is fine.
        if ctx.message.guild:
            with suppress(NotFound):
                await ctx.message.delete()
                ctx.action_descriptions.append("deleted")

    def __or__(self, other: ActionEntry):
        """Combines two actions of the same type. Each type of action is executed once per filter."""
        if not isinstance(other, DeleteMessages):
            return NotImplemented

        return DeleteMessages(delete_messages=self.delete_messages or other.delete_messages)
diff --git a/bot/exts/filtering/_settings_types/enabled.py b/bot/exts/filtering/_settings_types/enabled.py
new file mode 100644
index 000000000..3b5e3e446
--- /dev/null
+++ b/bot/exts/filtering/_settings_types/enabled.py
@@ -0,0 +1,19 @@
+from typing import ClassVar
+
+from bot.exts.filtering._filter_context import FilterContext
+from bot.exts.filtering._settings_types.settings_entry import ValidationEntry
+
+
class Enabled(ValidationEntry):
    """A setting entry which tells whether the filter is enabled."""

    name: ClassVar[str] = "enabled"
    description: ClassVar[str] = (
        "A boolean field. Setting it to False allows disabling the filter without deleting it entirely."
    )

    enabled: bool

    def triggers_on(self, ctx: FilterContext) -> bool:
        """Return whether the filter is switched on, regardless of the context."""
        return self.enabled
diff --git a/bot/exts/filtering/_settings_types/filter_dm.py b/bot/exts/filtering/_settings_types/filter_dm.py
new file mode 100644
index 000000000..93022320f
--- /dev/null
+++ b/bot/exts/filtering/_settings_types/filter_dm.py
@@ -0,0 +1,17 @@
+from typing import ClassVar
+
+from bot.exts.filtering._filter_context import FilterContext
+from bot.exts.filtering._settings_types.settings_entry import ValidationEntry
+
+
class FilterDM(ValidationEntry):
    """A setting entry which tells whether to apply the filter to DMs."""

    name: ClassVar[str] = "filter_dm"
    description: ClassVar[str] = "A boolean field. If True, the filter can trigger for messages sent to the bot in DMs."

    filter_dm: bool

    def triggers_on(self, ctx: FilterContext) -> bool:
        """Return whether to filter in the current channel: always in guild channels, in DMs only if configured."""
        if hasattr(ctx.channel, "guild"):
            return True
        return self.filter_dm
diff --git a/bot/exts/filtering/_settings_types/infraction_and_notification.py b/bot/exts/filtering/_settings_types/infraction_and_notification.py
new file mode 100644
index 000000000..9c7d7b8ff
--- /dev/null
+++ b/bot/exts/filtering/_settings_types/infraction_and_notification.py
@@ -0,0 +1,192 @@
+from collections import namedtuple
+from datetime import timedelta
+from enum import Enum, auto
+from typing import ClassVar, Optional
+
+import arrow
+from discord import Colour, Embed
+from discord.errors import Forbidden
+from pydantic import validator
+
+import bot
+from bot.constants import Channels, Guild
+from bot.exts.filtering._filter_context import FilterContext
+from bot.exts.filtering._settings_types.settings_entry import ActionEntry
+
+
class Infraction(Enum):
    """An enumeration of infraction types. The lower the value, the higher it is on the hierarchy."""

    BAN = auto()
    KICK = auto()
    MUTE = auto()
    VOICE_BAN = auto()
    SUPERSTAR = auto()
    WARNING = auto()
    WATCH = auto()
    NOTE = auto()
    NONE = auto()  # Allows making operations on an entry with no infraction without checking for None.

    def __bool__(self) -> bool:
        """
        Make the NONE value false-y.

        This is useful for Settings.create to evaluate whether the entry contains anything.
        """
        return self is not Infraction.NONE

    def __str__(self) -> str:
        """Return the infraction's name, or an empty string for NONE."""
        return "" if self is Infraction.NONE else self.name
+
+
# Lightweight record describing a superstarify to apply alongside another infraction.
superstar = namedtuple("superstar", "reason duration")
+
+
class InfractionAndNotification(ActionEntry):
    """
    A setting entry which specifies what infraction to issue and the notification to DM the user.

    Since a DM cannot be sent when a user is banned or kicked, these two functions need to be grouped together.
    """

    name: ClassVar[str] = "infraction_and_notification"
    description: ClassVar[dict[str, str]] = {
        "infraction_type": (
            "The type of infraction to issue when the filter triggers, or 'NONE'. "
            "If two infractions are triggered for the same message, "
            "the harsher one will be applied (by type or duration). "
            "Superstars will be triggered even if there is a harsher infraction.\n\n"
            "Valid infraction types in order of harshness: "
        ) + ", ".join(infraction.name for infraction in Infraction),
        "infraction_duration": "How long the infraction should last for in seconds, or 'None' for permanent.",
        "infraction_reason": "The reason delivered with the infraction.",
        "dm_content": "The contents of a message to be DMed to the offending user.",
        "dm_embed": "The contents of the embed to be DMed to the offending user."
    }

    dm_content: str
    dm_embed: str
    infraction_type: Optional[Infraction]
    infraction_reason: Optional[str]
    # Duration in seconds; None means a permanent infraction.
    infraction_duration: Optional[float]
    # A superstarify to apply on top of the main infraction; only ever set by `__or__`.
    superstar: Optional[superstar] = None

    @validator("infraction_type", pre=True)
    @classmethod
    def convert_infraction_name(cls, infr_type: str) -> Infraction:
        """Convert the string to an Infraction by name."""
        return Infraction[infr_type.replace(" ", "_").upper()] if infr_type else Infraction.NONE

    async def action(self, ctx: FilterContext) -> None:
        """Send the notification to the user, and apply any specified infractions."""
        # If there is no infraction to apply, any DM contents already provided in the context take precedence.
        if self.infraction_type == Infraction.NONE and (ctx.dm_content or ctx.dm_embed):
            dm_content = ctx.dm_content
            dm_embed = ctx.dm_embed
        else:
            dm_content = self.dm_content
            dm_embed = self.dm_embed

        if dm_content or dm_embed:
            # The DM templates may reference the offending domain via a `{domain}` placeholder.
            formatting = {"domain": ctx.notification_domain}
            dm_content = f"Hey {ctx.author.mention}!\n{dm_content.format(**formatting)}"
            if dm_embed:
                dm_embed = Embed(description=dm_embed.format(**formatting), colour=Colour.og_blurple())
            else:
                dm_embed = None

            try:
                await ctx.author.send(dm_content, embed=dm_embed)
                ctx.action_descriptions.append("notified")
            except Forbidden:
                # The user may have DMs closed; record the failure instead of aborting.
                ctx.action_descriptions.append("notified (failed)")

        # Infractions are issued by invoking the moderation commands with a synthetic context.
        msg_ctx = await bot.instance.get_context(ctx.message)
        msg_ctx.guild = bot.instance.get_guild(Guild.id)
        msg_ctx.author = ctx.author
        msg_ctx.channel = ctx.channel
        if self.superstar:
            msg_ctx.command = bot.instance.get_command("superstarify")
            await msg_ctx.invoke(
                msg_ctx.command,
                ctx.author,
                arrow.utcnow() + timedelta(seconds=self.superstar.duration)
                if self.superstar.duration is not None else None,
                reason=self.superstar.reason
            )
            ctx.action_descriptions.append("superstar")

        if self.infraction_type != Infraction.NONE:
            # Bans, and infractions for events outside guild channels, are invoked from the mod-alerts channel.
            if self.infraction_type == Infraction.BAN or not hasattr(ctx.channel, "guild"):
                msg_ctx.channel = bot.instance.get_channel(Channels.mod_alerts)
            msg_ctx.command = bot.instance.get_command(self.infraction_type.name)
            await msg_ctx.invoke(
                msg_ctx.command,
                ctx.author,
                arrow.utcnow() + timedelta(seconds=self.infraction_duration)
                if self.infraction_duration is not None else None,
                reason=self.infraction_reason
            )
            ctx.action_descriptions.append(self.infraction_type.name.lower())

    def __or__(self, other: ActionEntry):
        """
        Combines two actions of the same type. Each type of action is executed once per filter.

        If the infractions are different, take the data of the one higher up the hierarchy.

        A special case is made for superstar infractions. Even if we decide to auto-mute a user, if they have a
        particularly problematic username we will still want to superstarify them.

        This is a "best attempt" implementation. Trying to account for any type of combination would create an
        extremely complex ruleset. For example, we could special-case watches as well.

        There is no clear way to properly combine several notification messages, especially when it's in two parts.
        To avoid bombarding the user with several notifications, the message with the more significant infraction
        is used.
        """
        if not isinstance(other, InfractionAndNotification):
            return NotImplemented

        # Lower number -> higher in the hierarchy
        if self.infraction_type.value < other.infraction_type.value and other.infraction_type != Infraction.SUPERSTAR:
            result = self.copy()
            result.superstar = self._merge_superstars(self.superstar, other.superstar)
            return result
        elif self.infraction_type.value > other.infraction_type.value and self.infraction_type != Infraction.SUPERSTAR:
            result = other.copy()
            result.superstar = self._merge_superstars(self.superstar, other.superstar)
            return result

        if self.infraction_type == other.infraction_type:
            # Same type: prefer the longer (None = permanent, i.e. longest) duration.
            if self.infraction_duration is None or (
                other.infraction_duration is not None and self.infraction_duration > other.infraction_duration
            ):
                result = self.copy()
            else:
                result = other.copy()
            result.superstar = self._merge_superstars(self.superstar, other.superstar)
            return result

        # At this stage the infraction types are different, and the lower one is a superstar.
        # Keep the harsher infraction and record the superstar separately so both are applied.
        if self.infraction_type.value < other.infraction_type.value:
            result = self.copy()
            result.superstar = superstar(other.infraction_reason, other.infraction_duration)
        else:
            result = other.copy()
            result.superstar = superstar(self.infraction_reason, self.infraction_duration)
        return result

    @staticmethod
    def _merge_superstars(superstar1: Optional[superstar], superstar2: Optional[superstar]) -> Optional[superstar]:
        """Take the superstar with the greater duration."""
        if not superstar1:
            return superstar2
        if not superstar2:
            return superstar1

        if superstar1.duration is None or superstar1.duration > superstar2.duration:
            return superstar1
        return superstar2
diff --git a/bot/exts/filtering/_settings_types/ping.py b/bot/exts/filtering/_settings_types/ping.py
new file mode 100644
index 000000000..8a3403b59
--- /dev/null
+++ b/bot/exts/filtering/_settings_types/ping.py
@@ -0,0 +1,70 @@
+from functools import cache
+from typing import ClassVar
+
+from discord import Guild
+from pydantic import validator
+
+from bot.exts.filtering._filter_context import FilterContext
+from bot.exts.filtering._settings_types.settings_entry import ActionEntry
+
+
class Ping(ActionEntry):
    """A setting entry which adds the appropriate pings to the alert."""

    name: ClassVar[str] = "mentions"
    description: ClassVar[dict[str, str]] = {
        "guild_pings": (
            "A list of role IDs/role names/user IDs/user names/here/everyone. "
            "If a mod-alert is generated for a filter triggered in a public channel, these will be pinged."
        ),
        "dm_pings": (
            "A list of role IDs/role names/user IDs/user names/here/everyone. "
            "If a mod-alert is generated for a filter triggered in DMs, these will be pinged."
        )
    }

    guild_pings: set[str]
    dm_pings: set[str]

    @validator("*")
    @classmethod
    def init_sequence_if_none(cls, pings: list[str]) -> list[str]:
        """Initialize an empty sequence if the value is None."""
        if pings is None:
            return []
        return pings

    async def action(self, ctx: FilterContext) -> None:
        """Add the stored pings to the alert message content."""
        # NOTE(review): DM channels may not define a `guild` attribute at all - confirm
        # `ctx.channel.guild` cannot raise AttributeError here (other entries use `hasattr`).
        mentions = self.guild_pings if ctx.channel.guild else self.dm_pings
        new_content = " ".join([self._resolve_mention(mention, ctx.channel.guild) for mention in mentions])
        ctx.alert_content = f"{new_content} {ctx.alert_content}"

    def __or__(self, other: ActionEntry):
        """Combines two actions of the same type. Each type of action is executed once per filter."""
        if not isinstance(other, Ping):
            return NotImplemented

        # Bug fix: construct the merged entry with the model's actual field names
        # (`guild_pings`/`dm_pings`); the previous keyword names (`ping_type`/`dm_ping_type`)
        # don't exist on the model, so merging two Ping entries raised a validation error.
        return Ping(guild_pings=self.guild_pings | other.guild_pings, dm_pings=self.dm_pings | other.dm_pings)

    @staticmethod
    @cache
    def _resolve_mention(mention: str, guild: Guild) -> str:
        """Return the mention string for a literal ("here"/"everyone"), a user/role ID, or a user/role name."""
        if mention in ("here", "everyone"):
            return f"@{mention}"
        if mention.isdigit():  # It's an ID.
            mention = int(mention)
            if any(mention == role.id for role in guild.roles):
                return f"<@&{mention}>"
            else:
                return f"<@{mention}>"

        # It's a name
        for role in guild.roles:
            if role.name == mention:
                return role.mention
        for member in guild.members:
            if str(member) == mention:
                return member.mention
        return mention
diff --git a/bot/exts/filtering/_settings_types/send_alert.py b/bot/exts/filtering/_settings_types/send_alert.py
new file mode 100644
index 000000000..04e400764
--- /dev/null
+++ b/bot/exts/filtering/_settings_types/send_alert.py
@@ -0,0 +1,24 @@
+from typing import ClassVar
+
+from bot.exts.filtering._filter_context import FilterContext
+from bot.exts.filtering._settings_types.settings_entry import ActionEntry
+
+
class SendAlert(ActionEntry):
    """A setting entry which tells whether to send an alert message."""

    name: ClassVar[str] = "send_alert"
    description: ClassVar[str] = "A boolean. If all filters triggered set this to False, no mod-alert will be created."

    send_alert: bool

    async def action(self, ctx: FilterContext) -> None:
        """Record on the context whether a mod-alert should be sent."""
        ctx.send_alert = self.send_alert

    def __or__(self, other: ActionEntry):
        """Combines two actions of the same type. Each type of action is executed once per filter."""
        if isinstance(other, SendAlert):
            # An alert is sent if any of the triggered filters asks for one.
            return SendAlert(send_alert=self.send_alert or other.send_alert)
        return NotImplemented
diff --git a/bot/exts/filtering/_settings_types/settings_entry.py b/bot/exts/filtering/_settings_types/settings_entry.py
new file mode 100644
index 000000000..2b3b030a0
--- /dev/null
+++ b/bot/exts/filtering/_settings_types/settings_entry.py
@@ -0,0 +1,68 @@
+from __future__ import annotations
+
+from abc import abstractmethod
+from typing import Any, ClassVar, Optional, Union
+
+from pydantic import BaseModel
+
+from bot.exts.filtering._filter_context import FilterContext
+from bot.exts.filtering._utils import FieldRequiring
+
+
class SettingsEntry(BaseModel, FieldRequiring):
    """
    A basic entry in the settings field appearing in every filter list and filter.

    For a filter list, this is the default setting for it. For a filter, it's an override of the default entry.
    """

    # The entry name expected from the database; every subclass must set one, unique across filter lists.
    name: ClassVar[str] = FieldRequiring.MUST_SET_UNIQUE
    # Human-readable description. A dict of field name -> description when the entry spans several DB fields.
    description: ClassVar[Union[str, dict[str, str]]] = FieldRequiring.MUST_SET

    @classmethod
    def create(cls, entry_data: Optional[dict[str, Any]], *, keep_empty: bool = False) -> Optional[SettingsEntry]:
        """
        Returns a SettingsEntry object from `entry_data` if it holds any value, None otherwise.

        Use this method to create SettingsEntry objects instead of the init.
        The None value is significant for how a filter list iterates over its filters.
        """
        if entry_data is None:
            return None
        # A mapping whose values are all falsy is treated as "no data" unless explicitly kept.
        if not keep_empty and hasattr(entry_data, "values") and not any(entry_data.values()):
            return None

        if isinstance(entry_data, dict):
            return cls(**entry_data)
        # A bare value: wrap it under the entry's own name.
        return cls(**{cls.name: entry_data})
+
+
class ValidationEntry(SettingsEntry):
    """A setting entry to validate whether the filter should be triggered in the given context."""

    @abstractmethod
    def triggers_on(self, ctx: FilterContext) -> bool:
        """Return whether the filter should be triggered with this setting in the given context."""
        ...
+
+
class ActionEntry(SettingsEntry):
    """A setting entry defining what the bot should do if the filter it belongs to is triggered."""

    @abstractmethod
    async def action(self, ctx: FilterContext) -> None:
        """Execute an action that should be taken when the filter this setting belongs to is triggered."""
        ...

    @abstractmethod
    def __or__(self, other: ActionEntry):
        """
        Combine two actions of the same type. Each type of action is executed once per filter.

        The following condition must hold: if self == other, then self | other == self.
        """
        ...
diff --git a/bot/exts/filtering/_ui.py b/bot/exts/filtering/_ui.py
new file mode 100644
index 000000000..efedb2c0c
--- /dev/null
+++ b/bot/exts/filtering/_ui.py
@@ -0,0 +1,68 @@
+from typing import Callable, Optional
+
+import discord
+import discord.ui
+from discord.ext.commands import Context
+
+from bot.log import get_logger
+
+log = get_logger(__name__)
+
+
class ArgumentCompletionSelect(discord.ui.Select):
    """A select detailing the options that can be picked to assign to a missing argument."""

    def __init__(
        self,
        ctx: Context,
        args: list,
        arg_name: str,
        options: list[str],
        position: int,
        converter: Optional[Callable] = None
    ):
        select_options = [discord.SelectOption(label=option) for option in options]
        super().__init__(
            placeholder=f"Select a value for {arg_name!r}",
            options=select_options
        )
        self.ctx = ctx
        self.args = args
        self.position = position
        self.converter = converter

    async def callback(self, interaction: discord.Interaction) -> None:
        """Re-invoke the context command with the completed argument value."""
        await interaction.response.defer()
        chosen = interaction.data["values"][0]
        if self.converter:
            chosen = self.converter(chosen)
        # Work on a copy of the stored args so the view can be used again.
        new_args = self.args.copy()
        new_args.insert(self.position, chosen)
        log.trace(f"Argument filled with the value {chosen}. Re-invoking command")
        await self.ctx.invoke(self.ctx.command, *new_args)
+
+
class ArgumentCompletionView(discord.ui.View):
    """A view used to complete a missing argument in an invoked command."""

    def __init__(
        self,
        ctx: Context,
        args: list,
        arg_name: str,
        options: list[str],
        position: int,
        converter: Optional[Callable] = None
    ):
        super().__init__()
        self.ctx = ctx
        log.trace(f"The {arg_name} argument was designated missing in the invocation {ctx.view.buffer!r}")
        self.add_item(ArgumentCompletionSelect(ctx, args, arg_name, options, position, converter))

    async def interaction_check(self, interaction: discord.Interaction) -> bool:
        """Check to ensure that the interacting user is the user who invoked the command."""
        if interaction.user == self.ctx.author:
            return True
        # Someone else clicked the dropdown: tell them it's not theirs, privately.
        embed = discord.Embed(description="Sorry, but this dropdown menu can only be used by the original author.")
        await interaction.response.send_message(embed=embed, ephemeral=True)
        return False
diff --git a/bot/exts/filtering/_utils.py b/bot/exts/filtering/_utils.py
new file mode 100644
index 000000000..158f1e7bd
--- /dev/null
+++ b/bot/exts/filtering/_utils.py
@@ -0,0 +1,120 @@
+import importlib
+import importlib.util
+import inspect
+import pkgutil
+from abc import ABC, abstractmethod
+from collections import defaultdict
+from typing import Any, Iterable, Union
+
+import regex
+
+VARIATION_SELECTORS = r"\uFE00-\uFE0F\U000E0100-\U000E01EF"
+INVISIBLE_RE = regex.compile(rf"[{VARIATION_SELECTORS}\p{{UNASSIGNED}}\p{{FORMAT}}\p{{CONTROL}}--\s]", regex.V1)
+ZALGO_RE = regex.compile(rf"[\p{{NONSPACING MARK}}\p{{ENCLOSING MARK}}--[{VARIATION_SELECTORS}]]", regex.V1)
+
+
+def subclasses_in_package(package: str, prefix: str, parent: type) -> set[type]:
+ """Return all the subclasses of class `parent`, found in the top-level of `package`, given by absolute path."""
+ subclasses = set()
+
+ # Find all modules in the package.
+ for module_info in pkgutil.iter_modules([package], prefix):
+ if not module_info.ispkg:
+ module = importlib.import_module(module_info.name)
+ # Find all classes in each module...
+ for _, class_ in inspect.getmembers(module, inspect.isclass):
+ # That are a subclass of the given class.
+ if parent in class_.__bases__:
+ subclasses.add(class_)
+
+ return subclasses
+
+
def clean_input(string: str) -> str:
    """Remove zalgo and invisible characters from `string`."""
    # For future consideration: remove characters in the Mc, Sk, and Lm categories too.
    # Can be normalised with form C to merge char + combining char into a single char to avoid
    # removing legit diacritics, but this would open up a way to bypass _filters.
    without_zalgo = ZALGO_RE.sub("", string)
    return INVISIBLE_RE.sub("", without_zalgo)
+
+
def past_tense(word: str) -> str:
    """Return the past tense form of the input word."""
    if not word:
        return word
    # Words ending in "e" just take a "d"; "y" after a consonant becomes "ied"; everything else gets "ed".
    if word.endswith("e"):
        return f"{word}d"
    consonant_y = word.endswith("y") and len(word) > 1 and word[-2] not in "aeiou"
    if consonant_y:
        return f"{word[:-1]}ied"
    return f"{word}ed"
+
+
def to_serializable(item: Any) -> Union[bool, int, float, str, list, dict, None]:
    """Convert the item into an object that can be converted to JSON."""
    json_primitives = (bool, int, float, str, type(None))
    if isinstance(item, json_primitives):
        return item
    if isinstance(item, dict):
        # Recurse into values; non-primitive keys are stringified since JSON keys must be strings.
        return {
            (key if isinstance(key, json_primitives) else str(key)): to_serializable(value)
            for key, value in item.items()
        }
    if isinstance(item, Iterable):
        # Any other iterable (list, set, tuple, ...) becomes a JSON array.
        return [to_serializable(element) for element in item]
    # Last resort: represent the object by its string form.
    return str(item)
+
+
class FieldRequiring(ABC):
    """A mixin class that can force its concrete subclasses to set a value for specific class attributes."""

    # Sentinel value that mustn't remain in a concrete subclass.
    MUST_SET = object()

    # Sentinel value that mustn't remain in a concrete subclass.
    # Overriding value must be unique in the subclasses of the abstract class in which the attribute was set.
    MUST_SET_UNIQUE = object()

    # A mapping of the attributes which must be unique, and their unique values, per FieldRequiring subclass.
    # Name-mangled to _FieldRequiring__unique_attributes, so subclasses can't accidentally shadow it.
    __unique_attributes: defaultdict[type, dict[str, set]] = defaultdict(dict)

    @abstractmethod
    def __init__(self):
        ...

    def __init_subclass__(cls, **kwargs):
        # Runs for every subclass definition: abstract subclasses register which attributes demand
        # unique values; concrete subclasses are validated against the sentinels and the registry.
        def inherited(attr: str) -> bool:
            """True if `attr` was inherited from a parent class."""
            for parent in cls.__mro__[1:-1]:  # The first element is the class itself, last element is object.
                if hasattr(parent, attr):  # The attribute was inherited.
                    return True
            return False

        # If a new attribute with the value MUST_SET_UNIQUE was defined in an abstract class, record it.
        # NOTE(review): `inspect.isabstract` is False for a base class with no abstract methods — such a
        # class would be validated as if concrete. Confirm all intended bases declare abstract methods.
        if inspect.isabstract(cls):
            for attribute in dir(cls):
                if getattr(cls, attribute, None) is FieldRequiring.MUST_SET_UNIQUE:
                    if not inherited(attribute):
                        # A new attribute with the value MUST_SET_UNIQUE.
                        FieldRequiring.__unique_attributes[cls][attribute] = set()
            return

        for attribute in dir(cls):
            if attribute.startswith("__") or attribute in ("MUST_SET", "MUST_SET_UNIQUE"):
                continue
            value = getattr(cls, attribute)
            # A sentinel that is still visible on a concrete class means the subclass never overrode it.
            if value is FieldRequiring.MUST_SET and inherited(attribute):
                raise ValueError(f"You must set attribute {attribute!r} when creating {cls!r}")
            elif value is FieldRequiring.MUST_SET_UNIQUE and inherited(attribute):
                raise ValueError(f"You must set a unique value to attribute {attribute!r} when creating {cls!r}")
            else:
                # Check if the value needs to be unique.
                for parent in cls.__mro__[1:-1]:
                    # Find the parent class the attribute was first defined in.
                    if attribute in FieldRequiring.__unique_attributes[parent]:
                        if value in FieldRequiring.__unique_attributes[parent][attribute]:
                            raise ValueError(f"Value of {attribute!r} in {cls!r} is not unique for parent {parent!r}.")
                        else:
                            # Add to the set of unique values for that field.
                            FieldRequiring.__unique_attributes[parent][attribute].add(value)
diff --git a/bot/exts/filtering/filtering.py b/bot/exts/filtering/filtering.py
new file mode 100644
index 000000000..630474c13
--- /dev/null
+++ b/bot/exts/filtering/filtering.py
@@ -0,0 +1,494 @@
+import operator
+import re
+from collections import defaultdict
+from functools import reduce
+from typing import Optional
+
+from discord import Colour, Embed, HTTPException, Message
+from discord.ext import commands
+from discord.ext.commands import BadArgument, Cog, Context, has_any_role
+from discord.utils import escape_markdown
+
+from bot.bot import Bot
+from bot.constants import Colours, MODERATION_ROLES, Webhooks
+from bot.exts.filtering._filter_context import Event, FilterContext
+from bot.exts.filtering._filter_lists import FilterList, ListType, filter_list_types, list_type_converter
+from bot.exts.filtering._filters.filter import Filter
+from bot.exts.filtering._settings import ActionSettings
+from bot.exts.filtering._ui import ArgumentCompletionView
+from bot.exts.filtering._utils import past_tense, to_serializable
+from bot.log import get_logger
+from bot.pagination import LinePaginator
+from bot.utils.messages import format_channel, format_user
+
+log = get_logger(__name__)
+
+# Max number of characters in a Discord embed field value, minus 6 characters for a placeholder.
+MAX_FIELD_SIZE = 1018
+# Max number of characters for an embed field's value before it should take its own line.
+MAX_INLINE_SIZE = 50
+
+
+class Filtering(Cog):
+ """Filtering and alerting for content posted on the server."""
+
+ # region: init
+
    def __init__(self, bot: Bot):
        self.bot = bot
        # Loaded filter lists, keyed by list name (e.g. "token", "invite").
        self.filter_lists: dict[str, FilterList] = {}
        # Filter lists registered per event; populated via `subscribe`.
        self._subscriptions: defaultdict[Event, list[FilterList]] = defaultdict(list)
        # Alerting webhook; fetched in cog_load, remains None if the fetch fails.
        self.webhook = None

        # Metadata about the types actually used when loading the API response;
        # filled by collect_loaded_types and consumed by the describe commands.
        self.loaded_settings = {}
        self.loaded_filters = {}
        self.loaded_filter_settings = {}
+
+ async def cog_load(self) -> None:
+ """
+ Fetch the filter data from the API, parse it, and load it to the appropriate data structures.
+
+ Additionally, fetch the alerting webhook.
+ """
+ await self.bot.wait_until_guild_available()
+ already_warned = set()
+
+ raw_filter_lists = await self.bot.api_client.get("bot/filter/filter_lists")
+ for raw_filter_list in raw_filter_lists:
+ list_name = raw_filter_list["name"]
+ if list_name not in self.filter_lists:
+ if list_name not in filter_list_types:
+ if list_name not in already_warned:
+ log.warning(
+ f"A filter list named {list_name} was loaded from the database, but no matching class."
+ )
+ already_warned.add(list_name)
+ continue
+ self.filter_lists[list_name] = filter_list_types[list_name](self)
+ self.filter_lists[list_name].add_list(raw_filter_list)
+
+ try:
+ self.webhook = await self.bot.fetch_webhook(Webhooks.filters)
+ except HTTPException:
+ log.error(f"Failed to fetch incidents webhook with ID `{Webhooks.incidents}`.")
+
+ self.collect_loaded_types()
+
    def subscribe(self, filter_list: FilterList, *events: Event) -> None:
        """
        Subscribe a filter list to the given events.

        The filter list is added to a list for each event. When the event is triggered, the filter context will be
        dispatched to the subscribed filter lists.

        While it's possible to just make each filter list check the context's event, these are only the events a filter
        list expects to receive from the filtering cog, there isn't an actual limitation on the kinds of events a filter
        list can handle as long as the filter context is built properly. If for whatever reason we want to invoke a
        filter list outside of the usual procedure with the filtering cog, it will be more problematic if the events are
        hard-coded into each filter list.
        """
        for event in events:
            # Avoid duplicate subscriptions if a list subscribes to the same event twice.
            if filter_list not in self._subscriptions[event]:
                self._subscriptions[event].append(filter_list)
+
    def collect_loaded_types(self) -> None:
        """
        Go over the classes used in initialization and collect them to dictionaries.

        The information that is collected is about the types actually used to load the API response, not all types
        available in the filtering extension.
        """
        # Get the filter types used by each filter list.
        for filter_list in self.filter_lists.values():
            self.loaded_filters.update({filter_type.name: filter_type for filter_type in filter_list.filter_types})

        # Get the setting types used by each filter list.
        if self.filter_lists:
            # Any filter list has the fields for all settings in the DB schema, so picking any one of them is enough.
            list_defaults = list(list(self.filter_lists.values())[0].defaults.values())[0]
            settings_types = set()
            # The settings are split between actions and validations.
            settings_types.update(type(setting) for _, setting in list_defaults["actions"].items())
            settings_types.update(type(setting) for _, setting in list_defaults["validations"].items())
            for setting_type in settings_types:
                # The description should be either a string or a dictionary.
                if isinstance(setting_type.description, str):
                    # If it's a string, then the setting matches a single field in the DB,
                    # and its name is the setting type's name attribute.
                    self.loaded_settings[setting_type.name] = setting_type.description, setting_type
                else:
                    # Otherwise, the setting type works with compound settings.
                    self.loaded_settings.update({
                        subsetting: (description, setting_type)
                        for subsetting, description in setting_type.description.items()
                    })

        # Get the settings per filter as well.
        for filter_name, filter_type in self.loaded_filters.items():
            extra_fields_type = filter_type.extra_fields_type
            if not extra_fields_type:
                continue
            # A class var with a `_description` suffix is expected per field name.
            self.loaded_filter_settings[filter_name] = {
                field_name: (getattr(extra_fields_type, f"{field_name}_description", ""), extra_fields_type)
                for field_name in extra_fields_type.__fields__
            }
+
    async def cog_check(self, ctx: Context) -> bool:
        """Only allow moderators to invoke the commands in this cog."""
        # Evaluate the check's predicate directly instead of decorating each command individually.
        return await has_any_role(*MODERATION_ROLES).predicate(ctx)
+
+ # endregion
+ # region: listeners
+
    @Cog.listener()
    async def on_message(self, msg: Message) -> None:
        """Filter the contents of a sent message."""
        # Ignore bots and webhook messages so the bot doesn't filter itself or moderation relays.
        if msg.author.bot or msg.webhook_id:
            return

        ctx = FilterContext(Event.MESSAGE, msg.author, msg.channel, msg.content, msg, msg.embeds)

        result_actions, list_messages = await self._resolve_action(ctx)
        if result_actions:
            await result_actions.action(ctx)
        # Triggered filters may suppress the mod-alert via the send_alert setting.
        if ctx.send_alert:
            await self._send_alert(ctx, list_messages)
+
+ # endregion
+ # region: blacklist commands
+
    @commands.group(aliases=("bl", "blacklist", "denylist", "dl"))
    async def blocklist(self, ctx: Context) -> None:
        """Group for managing blacklisted items."""
        # Show the group help when invoked without a subcommand.
        if not ctx.invoked_subcommand:
            await ctx.send_help(ctx.command)
+
+ @blocklist.command(name="list", aliases=("get",))
+ async def bl_list(self, ctx: Context, list_name: Optional[str] = None) -> None:
+ """List the contents of a specified blacklist."""
+ result = self._resolve_list_type_and_name(ctx, ListType.DENY, list_name)
+ if not result:
+ return
+ list_type, filter_list = result
+ await self._send_list(ctx, filter_list, list_type)
+
+ # endregion
+ # region: whitelist commands
+
    @commands.group(aliases=("wl", "whitelist", "al"))
    async def allowlist(self, ctx: Context) -> None:
        """Group for managing whitelisted items."""
        # Show the group help when invoked without a subcommand.
        if not ctx.invoked_subcommand:
            await ctx.send_help(ctx.command)
+
+ @allowlist.command(name="list", aliases=("get",))
+ async def al_list(self, ctx: Context, list_name: Optional[str] = None) -> None:
+ """List the contents of a specified whitelist."""
+ result = self._resolve_list_type_and_name(ctx, ListType.ALLOW, list_name)
+ if not result:
+ return
+ list_type, filter_list = result
+ await self._send_list(ctx, filter_list, list_type)
+
+ # endregion
+ # region: filter commands
+
    @commands.group(aliases=("filters", "f"), invoke_without_command=True)
    async def filter(self, ctx: Context, id_: Optional[int] = None) -> None:
        """
        Group for managing filters.

        If a valid filter ID is provided, an embed describing the filter will be posted.
        """
        if not ctx.invoked_subcommand and not id_:
            await ctx.send_help(ctx.command)
            return

        result = self._get_filter_by_id(id_)
        if result is None:
            await ctx.send(f":x: Could not find a filter with ID `{id_}`.")
            return
        filter_, filter_list, list_type = result

        # Get filter list settings
        default_setting_values = {}
        for type_ in ("actions", "validations"):
            for _, setting in filter_list.defaults[list_type][type_].items():
                default_setting_values.update(to_serializable(setting.dict()))

        # Get the filter's overridden settings
        overrides_values = {}
        for settings in (filter_.actions, filter_.validations):
            if settings:
                for _, setting in settings.items():
                    overrides_values.update(to_serializable(setting.dict()))

        # Combine them. It's done in this way to preserve field order, since the filter won't have all settings.
        # An asterisk suffix marks a field whose value overrides the filter list default.
        total_values = {}
        for name, value in default_setting_values.items():
            if name not in overrides_values:
                total_values[name] = value
            else:
                total_values[f"{name}*"] = overrides_values[name]
        # Add the filter-specific settings.
        if hasattr(filter_.extra_fields, "dict"):
            # Fields absent from the exclude_unset dump kept their defaults; the rest are overrides.
            extra_fields_overrides = filter_.extra_fields.dict(exclude_unset=True)
            for name, value in filter_.extra_fields.dict().items():
                if name not in extra_fields_overrides:
                    total_values[f"{filter_.name}/{name}"] = value
                else:
                    total_values[f"{filter_.name}/{name}*"] = value

        embed = self._build_embed_from_dict(total_values)
        embed.description = f"`{filter_.content}`"
        if filter_.description:
            embed.description += f" - {filter_.description}"
        embed.set_author(name=f"Filter #{id_} - " + f"{past_tense(list_type.name.lower())} {filter_list.name}".title())
        embed.set_footer(text=(
            "Field names with an asterisk have values which override the defaults of the containing filter list. "
            f"To view all defaults of the list, run `!filterlist describe {list_type.name} {filter_list.name}`."
        ))
        await ctx.send(embed=embed)
+
    @filter.command(name="list", aliases=("get",))
    async def f_list(
        self, ctx: Context, list_type: Optional[list_type_converter] = None, list_name: Optional[str] = None
    ) -> None:
        """List the contents of a specified list of filters."""
        # Prompts the user for any missing argument; a None result means a prompt was sent instead.
        result = await self._resolve_list_type_and_name(ctx, list_type, list_name)
        if result is None:
            return
        list_type, filter_list = result

        await self._send_list(ctx, filter_list, list_type)
+
+ @filter.command(name="describe", aliases=("explain", "manual"))
+ async def f_describe(self, ctx: Context, filter_name: Optional[str]) -> None:
+ """Show a description of the specified filter, or a list of possible values if no name is specified."""
+ if not filter_name:
+ embed = Embed(description="\n".join(self.loaded_filters))
+ embed.set_author(name="List of filter names")
+ else:
+ filter_type = self.loaded_filters.get(filter_name)
+ if not filter_type:
+ filter_type = self.loaded_filters.get(filter_name[:-1]) # A plural form or a typo.
+ if not filter_type:
+ await ctx.send(f":x: There's no filter type named {filter_name!r}.")
+ return
+ # Use the class's docstring, and ignore single newlines.
+ embed = Embed(description=re.sub(r"(?<!\n)\n(?!\n)", " ", filter_type.__doc__))
+ embed.set_author(name=f"Description of the {filter_name} filter")
+ embed.colour = Colour.blue()
+ await ctx.send(embed=embed)
+
    @filter.group(aliases=("settings",))
    async def setting(self, ctx: Context) -> None:
        """Group for settings-related commands."""
        # Show the group help when invoked without a subcommand.
        if not ctx.invoked_subcommand:
            await ctx.send_help(ctx.command)
+
    @setting.command(name="describe", aliases=("explain", "manual"))
    async def s_describe(self, ctx: Context, setting_name: Optional[str]) -> None:
        """Show a description of the specified setting, or a list of possible settings if no name is specified."""
        if not setting_name:
            # Include both the shared settings and the per-filter ones ("filter_name/setting" form).
            settings_list = list(self.loaded_settings)
            for filter_name, filter_settings in self.loaded_filter_settings.items():
                settings_list.extend(f"{filter_name}/{setting}" for setting in filter_settings)
            embed = Embed(description="\n".join(settings_list))
            embed.set_author(name="List of setting names")
        else:
            # The setting is either in a SettingsEntry subclass, or a pydantic model.
            setting_data = self.loaded_settings.get(setting_name)
            description = None
            if setting_data:
                description = setting_data[0]
            elif "/" in setting_name:  # It's a filter specific setting.
                filter_name, filter_setting_name = setting_name.split("/", maxsplit=1)
                if filter_name in self.loaded_filter_settings:
                    if filter_setting_name in self.loaded_filter_settings[filter_name]:
                        description = self.loaded_filter_settings[filter_name][filter_setting_name][0]
            if description is None:
                await ctx.send(f":x: There's no setting type named {setting_name!r}.")
                return
            embed = Embed(description=description)
            embed.set_author(name=f"Description of the {setting_name} setting")
        embed.colour = Colour.blue()
        await ctx.send(embed=embed)
+
+ # endregion
+ # region: filterlist group
+
    @commands.group(aliases=("fl",))
    async def filterlist(self, ctx: Context) -> None:
        """Group for managing filter lists."""
        # Show the group help when invoked without a subcommand.
        if not ctx.invoked_subcommand:
            await ctx.send_help(ctx.command)
+
    @filterlist.command(name="describe", aliases=("explain", "manual", "id"))
    async def fl_describe(
        self, ctx: Context, list_type: Optional[list_type_converter] = None, list_name: Optional[str] = None
    ) -> None:
        """Show a description of the specified filter list, or a list of possible values if no values are provided."""
        if not list_type and not list_name:
            embed = Embed(description="\n".join(f"\u2003 {fl}" for fl in self.filter_lists), colour=Colour.blue())
            embed.set_author(name="List of filter lists names")
            await ctx.send(embed=embed)
            return

        # Prompts the user for any missing argument; a None result means a prompt was sent instead.
        result = await self._resolve_list_type_and_name(ctx, list_type, list_name)
        if result is None:
            return
        list_type, filter_list = result

        list_defaults = filter_list.defaults[list_type]
        setting_values = {}
        for type_ in ("actions", "validations"):
            for _, setting in list_defaults[type_].items():
                setting_values.update(to_serializable(setting.dict()))

        embed = self._build_embed_from_dict(setting_values)
        # Use the class's docstring, and ignore single newlines.
        embed.description = re.sub(r"(?<!\n)\n(?!\n)", " ", filter_list.__doc__)
        # NOTE(review): this uses the user-supplied `list_name` (possibly a plural form) rather than
        # `filter_list.name` — confirm the header should reflect the user's input.
        embed.set_author(
            name=f"Description of the {past_tense(list_type.name.lower())} {list_name.title()} filter list"
        )
        await ctx.send(embed=embed)
+
+ # endregion
+ # region: helper functions
+
+ async def _resolve_action(self, ctx: FilterContext) -> tuple[Optional[ActionSettings], dict[FilterList, str]]:
+ """
+ Return the actions that should be taken for all filter lists in the given context.
+
+ Additionally, a message is possibly provided from each filter list describing the triggers,
+ which should be relayed to the moderators.
+ """
+ actions = []
+ messages = {}
+ for filter_list in self._subscriptions[ctx.event]:
+ list_actions, list_message = await filter_list.actions_for(ctx)
+ if list_actions:
+ actions.append(list_actions)
+ if list_message:
+ messages[filter_list] = list_message
+
+ result_actions = None
+ if actions:
+ result_actions = reduce(operator.or_, (action for action in actions))
+
+ return result_actions, messages
+
    async def _send_alert(self, ctx: FilterContext, triggered_filters: dict[FilterList, str]) -> None:
        """Build an alert message from the filter context, and send it via the alert webhook."""
        # The webhook may be None if fetching it failed during cog_load.
        if not self.webhook:
            return

        name = f"{ctx.event.name.replace('_', ' ').title()} Filter"

        embed = Embed(color=Colours.soft_orange)
        embed.set_thumbnail(url=ctx.author.display_avatar.url)
        triggered_by = f"**Triggered by:** {format_user(ctx.author)}"
        # NOTE(review): discord.py DM channels have no `guild` attribute — confirm ctx.channel is always
        # guild-backed here, or that FilterContext normalizes it, before this runs on DM events.
        if ctx.channel.guild:
            triggered_in = f"**Triggered in:** {format_channel(ctx.channel)}"
        else:
            triggered_in = "**DM**"

        filters = []
        for filter_list, list_message in triggered_filters.items():
            if list_message:
                filters.append(f"**{filter_list.name.title()} Filters:** {list_message}")
        filters = "\n".join(filters)

        matches = "**Matches:** " + ", ".join(repr(match) for match in ctx.matches)
        actions = "**Actions Taken:** " + (", ".join(ctx.action_descriptions) if ctx.action_descriptions else "-")
        content = f"**[Original Content]({ctx.message.jump_url})**: {escape_markdown(ctx.content)}"

        # Assemble the description from the non-empty parts, truncated to fit the embed limit.
        embed_content = "\n".join(
            part for part in (triggered_by, triggered_in, filters, matches, actions, content) if part
        )
        if len(embed_content) > 4000:
            embed_content = embed_content[:4000] + " [...]"
        embed.description = embed_content

        # A webhook message can carry at most 10 embeds; keep the alert embed first.
        await self.webhook.send(username=name, content=ctx.alert_content, embeds=[embed, *ctx.alert_embeds][:10])
+
    async def _resolve_list_type_and_name(
        self, ctx: Context, list_type: Optional[ListType] = None, list_name: Optional[str] = None
    ) -> Optional[tuple[ListType, FilterList]]:
        """Prompt the user to complete the list type or list name if one of them is missing."""
        if list_name is None:
            # The `1` is the position at which the selected list name is re-inserted into the
            # command's arguments on re-invocation (after list_type).
            await ctx.send(
                "The **list_name** argument is unspecified. Please pick a value from the options below:",
                view=ArgumentCompletionView(ctx, [list_type], "list_name", list(self.filter_lists), 1, None)
            )
            return None

        # May raise BadArgument, which the command error handler surfaces to the user.
        filter_list = self._get_list_by_name(list_name)
        if list_type is None:
            if len(filter_list.filter_lists) > 1:
                # Ambiguous: the list exists for more than one type, so ask the user to pick one.
                # The selected value is converted with list_type_converter and inserted at position 0.
                await ctx.send(
                    "The **list_type** argument is unspecified. Please pick a value from the options below:",
                    view=ArgumentCompletionView(
                        ctx, [list_name], "list_type", [option.name for option in ListType], 0, list_type_converter
                    )
                )
                return None
            # Only one type exists for this list, so it can be inferred.
            list_type = list(filter_list.filter_lists)[0]
        return list_type, filter_list
+
+ def _get_list_by_name(self, list_name: str) -> FilterList:
+ """Get a filter list by its name, or raise an error if there's no such list."""
+ log.trace(f"Getting the filter list matching the name {list_name}")
+ filter_list = self.filter_lists.get(list_name)
+ if not filter_list:
+ if list_name.endswith("s"): # The user may have attempted to use the plural form.
+ filter_list = self.filter_lists.get(list_name[:-1])
+ if not filter_list:
+ raise BadArgument(f"There's no filter list named {list_name!r}.")
+ log.trace(f"Found list named {filter_list.name}")
+ return filter_list
+
    @staticmethod
    async def _send_list(ctx: Context, filter_list: FilterList, list_type: ListType) -> None:
        """Show the list of filters identified by the list name and type."""
        type_filters = filter_list.filter_lists.get(list_type)
        if type_filters is None:
            await ctx.send(f":x: There is no list of {past_tense(list_type.name.lower())} {filter_list.name}s.")
            return

        # One line per filter, using each filter's str() representation.
        lines = list(map(str, type_filters.values()))
        log.trace(f"Sending a list of {len(lines)} filters.")

        embed = Embed(colour=Colour.blue())
        embed.set_author(name=f"List of {past_tense(list_type.name.lower())} {filter_list.name}s ({len(lines)} total)")

        await LinePaginator.paginate(lines, ctx, embed, max_lines=15, empty=False)
+
+ def _get_filter_by_id(self, id_: int) -> Optional[tuple[Filter, FilterList, ListType]]:
+ """Get the filter object corresponding to the provided ID, along with its containing list and list type."""
+ for filter_list in self.filter_lists.values():
+ for list_type, sublist in filter_list.filter_lists.items():
+ if id_ in sublist:
+ return sublist[id_], filter_list, list_type
+
+ @staticmethod
+ def _build_embed_from_dict(data: dict) -> Embed:
+ """Build a Discord embed by populating fields from the given dict."""
+ embed = Embed(description="", colour=Colour.blue())
+ for setting, value in data.items():
+ if setting.startswith("_"):
+ continue
+ value = str(value) if value not in ("", None) else "-"
+ if len(value) > MAX_FIELD_SIZE:
+ value = value[:MAX_FIELD_SIZE] + " [...]"
+ embed.add_field(name=setting, value=value, inline=len(value) < MAX_INLINE_SIZE)
+ return embed
+
+ # endregion
+
+
async def setup(bot: Bot) -> None:
    """Load the Filtering cog."""
    # Extension entry point invoked by discord.py's load_extension.
    await bot.add_cog(Filtering(bot))
diff --git a/bot/exts/filters/antimalware.py b/bot/exts/filters/antimalware.py
deleted file mode 100644
index ff39700a6..000000000
--- a/bot/exts/filters/antimalware.py
+++ /dev/null
@@ -1,106 +0,0 @@
-import typing as t
-from os.path import splitext
-
-from discord import Embed, Message, NotFound
-from discord.ext.commands import Cog
-
-from bot.bot import Bot
-from bot.constants import Channels, Filter, URLs
-from bot.exts.events.code_jams._channels import CATEGORY_NAME as JAM_CATEGORY_NAME
-from bot.log import get_logger
-
-log = get_logger(__name__)
-
-PY_EMBED_DESCRIPTION = (
- "It looks like you tried to attach a Python file - "
- f"please use a code-pasting service such as {URLs.site_schema}{URLs.site_paste}"
-)
-
-TXT_LIKE_FILES = {".txt", ".csv", ".json"}
-TXT_EMBED_DESCRIPTION = (
- "You either uploaded a `{blocked_extension}` file or entered a message that was too long. "
- f"Please use our [paste bin]({URLs.site_schema}{URLs.site_paste}) instead."
-)
-
-DISALLOWED_EMBED_DESCRIPTION = (
- "It looks like you tried to attach file type(s) that we do not allow ({blocked_extensions_str}). "
- "We currently allow the following file types: **{joined_whitelist}**.\n\n"
- "Feel free to ask in {meta_channel_mention} if you think this is a mistake."
-)
-
-
-class AntiMalware(Cog):
- """Delete messages which contain attachments with non-whitelisted file extensions."""
-
- def __init__(self, bot: Bot):
- self.bot = bot
-
- def _get_whitelisted_file_formats(self) -> list:
- """Get the file formats currently on the whitelist."""
- return self.bot.filter_list_cache['FILE_FORMAT.True'].keys()
-
- def _get_disallowed_extensions(self, message: Message) -> t.Iterable[str]:
- """Get an iterable containing all the disallowed extensions of attachments."""
- file_extensions = {splitext(attachment.filename.lower())[1] for attachment in message.attachments}
- extensions_blocked = file_extensions - set(self._get_whitelisted_file_formats())
- return extensions_blocked
-
- @Cog.listener()
- async def on_message(self, message: Message) -> None:
- """Identify messages with prohibited attachments."""
- # Return when message don't have attachment and don't moderate DMs
- if not message.attachments or not message.guild:
- return
-
- # Ignore webhook and bot messages
- if message.webhook_id or message.author.bot:
- return
-
- # Ignore code jam channels
- if getattr(message.channel, "category", None) and message.channel.category.name == JAM_CATEGORY_NAME:
- return
-
- # Check if user is staff, if is, return
- # Since we only care that roles exist to iterate over, check for the attr rather than a User/Member instance
- if hasattr(message.author, "roles") and any(role.id in Filter.role_whitelist for role in message.author.roles):
- return
-
- embed = Embed()
- extensions_blocked = self._get_disallowed_extensions(message)
- blocked_extensions_str = ', '.join(extensions_blocked)
- if ".py" in extensions_blocked:
- # Short-circuit on *.py files to provide a pastebin link
- embed.description = PY_EMBED_DESCRIPTION
- elif extensions := TXT_LIKE_FILES.intersection(extensions_blocked):
- # Work around Discord AutoConversion of messages longer than 2000 chars to .txt
- cmd_channel = self.bot.get_channel(Channels.bot_commands)
- embed.description = TXT_EMBED_DESCRIPTION.format(
- blocked_extension=extensions.pop(),
- cmd_channel_mention=cmd_channel.mention
- )
- elif extensions_blocked:
- meta_channel = self.bot.get_channel(Channels.meta)
- embed.description = DISALLOWED_EMBED_DESCRIPTION.format(
- joined_whitelist=', '.join(self._get_whitelisted_file_formats()),
- blocked_extensions_str=blocked_extensions_str,
- meta_channel_mention=meta_channel.mention,
- )
-
- if embed.description:
- log.info(
- f"User '{message.author}' ({message.author.id}) uploaded blacklisted file(s): {blocked_extensions_str}",
- extra={"attachment_list": [attachment.filename for attachment in message.attachments]}
- )
-
- await message.channel.send(f"Hey {message.author.mention}!", embed=embed)
-
- # Delete the offending message:
- try:
- await message.delete()
- except NotFound:
- log.info(f"Tried to delete message `{message.id}`, but message could not be found.")
-
-
-async def setup(bot: Bot) -> None:
- """Load the AntiMalware cog."""
- await bot.add_cog(AntiMalware(bot))
diff --git a/bot/exts/filters/antispam.py b/bot/exts/filters/antispam.py
deleted file mode 100644
index 842aab384..000000000
--- a/bot/exts/filters/antispam.py
+++ /dev/null
@@ -1,324 +0,0 @@
-import asyncio
-from collections import defaultdict
-from collections.abc import Mapping
-from dataclasses import dataclass, field
-from datetime import timedelta
-from itertools import takewhile
-from operator import attrgetter, itemgetter
-from typing import Dict, Iterable, List, Set
-
-import arrow
-from botcore.utils import scheduling
-from discord import Colour, Member, Message, MessageType, NotFound, Object, TextChannel
-from discord.ext.commands import Cog
-
-from bot import rules
-from bot.bot import Bot
-from bot.constants import (
- AntiSpam as AntiSpamConfig, Channels, Colours, DEBUG_MODE, Event, Filter, Guild as GuildConfig, Icons
-)
-from bot.converters import Duration
-from bot.exts.events.code_jams._channels import CATEGORY_NAME as JAM_CATEGORY_NAME
-from bot.exts.moderation.modlog import ModLog
-from bot.log import get_logger
-from bot.utils import lock
-from bot.utils.message_cache import MessageCache
-from bot.utils.messages import format_user, send_attachments
-
-log = get_logger(__name__)
-
-RULE_FUNCTION_MAPPING = {
- 'attachments': rules.apply_attachments,
- 'burst': rules.apply_burst,
- # burst shared is temporarily disabled due to a bug
- # 'burst_shared': rules.apply_burst_shared,
- 'chars': rules.apply_chars,
- 'discord_emojis': rules.apply_discord_emojis,
- 'duplicates': rules.apply_duplicates,
- 'links': rules.apply_links,
- 'mentions': rules.apply_mentions,
- 'newlines': rules.apply_newlines,
- 'role_mentions': rules.apply_role_mentions,
-}
-
-
-@dataclass
-class DeletionContext:
- """Represents a Deletion Context for a single spam event."""
-
- members: frozenset[Member]
- triggered_in: TextChannel
- channels: set[TextChannel] = field(default_factory=set)
- rules: Set[str] = field(default_factory=set)
- messages: Dict[int, Message] = field(default_factory=dict)
- attachments: List[List[str]] = field(default_factory=list)
-
- async def add(self, rule_name: str, channels: Iterable[TextChannel], messages: Iterable[Message]) -> None:
- """Adds new rule violation events to the deletion context."""
- self.rules.add(rule_name)
-
- self.channels.update(channels)
-
- for message in messages:
- if message.id not in self.messages:
- self.messages[message.id] = message
-
- # Re-upload attachments
- destination = message.guild.get_channel(Channels.attachment_log)
- urls = await send_attachments(message, destination, link_large=False)
- self.attachments.append(urls)
-
- async def upload_messages(self, actor_id: int, modlog: ModLog) -> None:
- """Method that takes care of uploading the queue and posting modlog alert."""
- triggered_by_users = ", ".join(format_user(m) for m in self.members)
- triggered_in_channel = f"**Triggered in:** {self.triggered_in.mention}\n" if len(self.channels) > 1 else ""
- channels_description = ", ".join(channel.mention for channel in self.channels)
-
- mod_alert_message = (
- f"**Triggered by:** {triggered_by_users}\n"
- f"{triggered_in_channel}"
- f"**Channels:** {channels_description}\n"
- f"**Rules:** {', '.join(rule for rule in self.rules)}\n"
- )
-
- messages_as_list = list(self.messages.values())
- first_message = messages_as_list[0]
- # For multiple messages and those with attachments or excessive newlines, use the logs API
- if any((
- len(messages_as_list) > 1,
- len(first_message.attachments) > 0,
- first_message.content.count('\n') > 15
- )):
- url = await modlog.upload_log(self.messages.values(), actor_id, self.attachments)
- mod_alert_message += f"A complete log of the offending messages can be found [here]({url})"
- else:
- mod_alert_message += "Message:\n"
- content = first_message.clean_content
- remaining_chars = 4080 - len(mod_alert_message)
-
- if len(content) > remaining_chars:
- url = await modlog.upload_log([first_message], actor_id, self.attachments)
- log_site_msg = f"The full message can be found [here]({url})"
- content = content[:remaining_chars - (3 + len(log_site_msg))] + "..."
-
- mod_alert_message += content
-
- await modlog.send_log_message(
- content=", ".join(str(m.id) for m in self.members), # quality-of-life improvement for mobile moderators
- icon_url=Icons.filtering,
- colour=Colour(Colours.soft_red),
- title="Spam detected!",
- text=mod_alert_message,
- thumbnail=first_message.author.display_avatar.url,
- channel_id=Channels.mod_alerts,
- ping_everyone=AntiSpamConfig.ping_everyone
- )
-
-
-class AntiSpam(Cog):
- """Cog that controls our anti-spam measures."""
-
- def __init__(self, bot: Bot, validation_errors: Dict[str, str]) -> None:
- self.bot = bot
- self.validation_errors = validation_errors
- role_id = AntiSpamConfig.punishment['role_id']
- self.muted_role = Object(role_id)
- self.expiration_date_converter = Duration()
-
- self.message_deletion_queue = dict()
-
- # Fetch the rule configuration with the highest rule interval.
- max_interval_config = max(
- AntiSpamConfig.rules.values(),
- key=itemgetter('interval')
- )
- self.max_interval = max_interval_config['interval']
- self.cache = MessageCache(AntiSpamConfig.cache_size, newest_first=True)
-
- @property
- def mod_log(self) -> ModLog:
- """Allows for easy access of the ModLog cog."""
- return self.bot.get_cog("ModLog")
-
- async def cog_load(self) -> None:
- """Unloads the cog and alerts admins if configuration validation failed."""
- await self.bot.wait_until_guild_available()
- if self.validation_errors:
- body = "**The following errors were encountered:**\n"
- body += "\n".join(f"- {error}" for error in self.validation_errors.values())
- body += "\n\n**The cog has been unloaded.**"
-
- await self.mod_log.send_log_message(
- title="Error: AntiSpam configuration validation failed!",
- text=body,
- ping_everyone=True,
- icon_url=Icons.token_removed,
- colour=Colour.red()
- )
-
- self.bot.remove_cog(self.__class__.__name__)
- return
-
- @Cog.listener()
- async def on_message(self, message: Message) -> None:
- """Applies the antispam rules to each received message."""
- if (
- not message.guild
- or message.guild.id != GuildConfig.id
- or message.author.bot
- or (getattr(message.channel, "category", None) and message.channel.category.name == JAM_CATEGORY_NAME)
- or (message.channel.id in Filter.channel_whitelist and not DEBUG_MODE)
- or (any(role.id in Filter.role_whitelist for role in message.author.roles) and not DEBUG_MODE)
- or message.type == MessageType.auto_moderation_action
- ):
- return
-
- self.cache.append(message)
-
- earliest_relevant_at = arrow.utcnow() - timedelta(seconds=self.max_interval)
- relevant_messages = list(takewhile(lambda msg: msg.created_at > earliest_relevant_at, self.cache))
-
- for rule_name in AntiSpamConfig.rules:
- rule_config = AntiSpamConfig.rules[rule_name]
- rule_function = RULE_FUNCTION_MAPPING[rule_name]
-
- # Create a list of messages that were sent in the interval that the rule cares about.
- latest_interesting_stamp = arrow.utcnow() - timedelta(seconds=rule_config['interval'])
- messages_for_rule = list(
- takewhile(lambda msg: msg.created_at > latest_interesting_stamp, relevant_messages) # noqa: B023
- )
-
- result = await rule_function(message, messages_for_rule, rule_config)
-
- # If the rule returns `None`, that means the message didn't violate it.
- # If it doesn't, it returns a tuple in the form `(str, Iterable[discord.Member])`
- # which contains the reason for why the message violated the rule and
- # an iterable of all members that violated the rule.
- if result is not None:
- self.bot.stats.incr(f"mod_alerts.{rule_name}")
- reason, members, relevant_messages = result
- full_reason = f"`{rule_name}` rule: {reason}"
-
- # If there's no spam event going on for this channel, start a new Message Deletion Context
- authors_set = frozenset(members)
- if authors_set not in self.message_deletion_queue:
- log.trace(f"Creating queue for members `{authors_set}`")
- self.message_deletion_queue[authors_set] = DeletionContext(authors_set, message.channel)
- scheduling.create_task(
- self._process_deletion_context(authors_set),
- name=f"AntiSpam._process_deletion_context({authors_set})"
- )
-
- # Add the relevant of this trigger to the Deletion Context
- await self.message_deletion_queue[authors_set].add(
- rule_name=rule_name,
- channels=set(message.channel for message in relevant_messages),
- messages=relevant_messages
- )
-
- for member in members:
- scheduling.create_task(
- self.punish(message, member, full_reason),
- name=f"AntiSpam.punish(message={message.id}, member={member.id}, rule={rule_name})"
- )
-
- await self.maybe_delete_messages(relevant_messages)
- break
-
- @lock.lock_arg("antispam.punish", "member", attrgetter("id"))
- async def punish(self, msg: Message, member: Member, reason: str) -> None:
- """Punishes the given member for triggering an antispam rule."""
- if not any(role.id == self.muted_role.id for role in member.roles):
- remove_role_after = AntiSpamConfig.punishment['remove_after']
-
- # Get context and make sure the bot becomes the actor of infraction by patching the `author` attributes
- context = await self.bot.get_context(msg)
- context.author = self.bot.user
-
- # Since we're going to invoke the tempmute command directly, we need to manually call the converter.
- dt_remove_role_after = await self.expiration_date_converter.convert(context, f"{remove_role_after}S")
- await context.invoke(
- self.bot.get_command('tempmute'),
- member,
- dt_remove_role_after,
- reason=reason
- )
-
- async def maybe_delete_messages(self, messages: List[Message]) -> None:
- """Cleans the messages if cleaning is configured."""
- if AntiSpamConfig.clean_offending:
- # If we have more than one message, we can use bulk delete.
- if len(messages) > 1:
- message_ids = [message.id for message in messages]
- self.mod_log.ignore(Event.message_delete, *message_ids)
- channel_messages = defaultdict(list)
- for message in messages:
- channel_messages[message.channel].append(message)
- for channel, messages in channel_messages.items():
- try:
- await channel.delete_messages(messages)
- except NotFound:
- # In the rare case where we found messages matching the
- # spam filter across multiple channels, it is possible
- # that a single channel will only contain a single message
- # to delete. If that should be the case, discord.py will
- # use the "delete single message" endpoint instead of the
- # bulk delete endpoint, and the single message deletion
- # endpoint will complain if you give it that does not exist.
- # As this means that we have no other message to delete in
- # this channel (and message deletes work per-channel),
- # we can just log an exception and carry on with business.
- log.info(f"Tried to delete message `{messages[0].id}`, but message could not be found.")
-
- # Otherwise, the bulk delete endpoint will throw up.
- # Delete the message directly instead.
- else:
- self.mod_log.ignore(Event.message_delete, messages[0].id)
- try:
- await messages[0].delete()
- except NotFound:
- log.info(f"Tried to delete message `{messages[0].id}`, but message could not be found.")
-
- async def _process_deletion_context(self, context_id: frozenset) -> None:
- """Processes the Deletion Context queue."""
- log.trace("Sleeping before processing message deletion queue.")
- await asyncio.sleep(10)
-
- if context_id not in self.message_deletion_queue:
- log.error(f"Started processing deletion queue for context `{context_id}`, but it was not found!")
- return
-
- deletion_context = self.message_deletion_queue.pop(context_id)
- await deletion_context.upload_messages(self.bot.user.id, self.mod_log)
-
- @Cog.listener()
- async def on_message_edit(self, before: Message, after: Message) -> None:
- """Updates the message in the cache, if it's cached."""
- self.cache.update(after)
-
-
-def validate_config(rules_: Mapping = AntiSpamConfig.rules) -> Dict[str, str]:
- """Validates the antispam configs."""
- validation_errors = {}
- for name, config in rules_.items():
- if name not in RULE_FUNCTION_MAPPING:
- log.error(
- f"Unrecognized antispam rule `{name}`. "
- f"Valid rules are: {', '.join(RULE_FUNCTION_MAPPING)}"
- )
- validation_errors[name] = f"`{name}` is not recognized as an antispam rule."
- continue
- for required_key in ('interval', 'max'):
- if required_key not in config:
- log.error(
- f"`{required_key}` is required but was not "
- f"set in rule `{name}`'s configuration."
- )
- validation_errors[name] = f"Key `{required_key}` is required but not set for rule `{name}`"
- return validation_errors
-
-
-async def setup(bot: Bot) -> None:
- """Validate the AntiSpam configs and load the AntiSpam cog."""
- validation_errors = validate_config()
- await bot.add_cog(AntiSpam(bot, validation_errors))
diff --git a/bot/exts/filters/filter_lists.py b/bot/exts/filters/filter_lists.py
deleted file mode 100644
index c643f9a84..000000000
--- a/bot/exts/filters/filter_lists.py
+++ /dev/null
@@ -1,297 +0,0 @@
-import re
-from typing import Optional
-
-from botcore.site_api import ResponseCodeError
-from discord import Colour, Embed
-from discord.ext.commands import BadArgument, Cog, Context, IDConverter, group, has_any_role
-
-from bot import constants
-from bot.bot import Bot
-from bot.constants import Channels
-from bot.converters import ValidDiscordServerInvite, ValidFilterListType
-from bot.log import get_logger
-from bot.pagination import LinePaginator
-
-log = get_logger(__name__)
-
-
-class FilterLists(Cog):
- """Commands for blacklisting and whitelisting things."""
-
- methods_with_filterlist_types = [
- "allow_add",
- "allow_delete",
- "allow_get",
- "deny_add",
- "deny_delete",
- "deny_get",
- ]
-
- def __init__(self, bot: Bot) -> None:
- self.bot = bot
-
- async def cog_load(self) -> None:
- """Add the valid FilterList types to the docstrings, so they'll appear in !help invocations."""
- await self.bot.wait_until_guild_available()
-
- # Add valid filterlist types to the docstrings
- valid_types = await ValidFilterListType.get_valid_types(self.bot)
- valid_types = [f"`{type_.lower()}`" for type_ in valid_types]
-
- for method_name in self.methods_with_filterlist_types:
- command = getattr(self, method_name)
- command.help = (
- f"{command.help}\n\nValid **list_type** values are {', '.join(valid_types)}."
- )
-
- async def _add_data(
- self,
- ctx: Context,
- allowed: bool,
- list_type: ValidFilterListType,
- content: str,
- comment: Optional[str] = None,
- ) -> None:
- """Add an item to a filterlist."""
- allow_type = "whitelist" if allowed else "blacklist"
-
- # If this is a guild invite, we gotta validate it.
- if list_type == "GUILD_INVITE":
- guild_data = await self._validate_guild_invite(ctx, content)
- content = guild_data.get("id")
-
- # Some guild invites are autoban filters, which require the mod
- # to set a comment which includes [autoban].
- # Having the guild name in the comment is still useful when reviewing
- # filter list, so prepend it to the set comment in case some mod forgets.
- guild_name_part = f'Guild "{guild_data["name"]}"' if "name" in guild_data else None
-
- comment = " - ".join(
- comment_part
- for comment_part in (guild_name_part, comment)
- if comment_part
- )
-
- # If it's a file format, let's make sure it has a leading dot.
- elif list_type == "FILE_FORMAT" and not content.startswith("."):
- content = f".{content}"
-
- # If it's a filter token, validate the passed regex
- elif list_type == "FILTER_TOKEN":
- try:
- re.compile(content)
- except re.error as e:
- await ctx.message.add_reaction("❌")
- await ctx.send(
- f"{ctx.author.mention} that's not a valid regex! "
- f"Regex error message: {e.msg}."
- )
- return
-
- # Try to add the item to the database
- log.trace(f"Trying to add the {content} item to the {list_type} {allow_type}")
- payload = {
- "allowed": allowed,
- "type": list_type,
- "content": content,
- "comment": comment,
- }
-
- try:
- item = await self.bot.api_client.post(
- "bot/filter-lists",
- json=payload
- )
- except ResponseCodeError as e:
- if e.status == 400:
- await ctx.message.add_reaction("❌")
- log.debug(
- f"{ctx.author} tried to add data to a {allow_type}, but the API returned 400, "
- "probably because the request violated the UniqueConstraint."
- )
- raise BadArgument(
- f"Unable to add the item to the {allow_type}. "
- "The item probably already exists. Keep in mind that a "
- "blacklist and a whitelist for the same item cannot co-exist, "
- "and we do not permit any duplicates."
- )
- raise
-
- # If it is an autoban trigger we send a warning in #mod-meta
- if comment and "[autoban]" in comment:
- await self.bot.get_channel(Channels.mod_meta).send(
- f":warning: Heads-up! The new `{list_type}` filter "
- f"`{content}` (`{comment}`) will automatically ban users."
- )
-
- # Insert the item into the cache
- self.bot.insert_item_into_filter_list_cache(item)
- await ctx.message.add_reaction("✅")
-
- async def _delete_data(self, ctx: Context, allowed: bool, list_type: ValidFilterListType, content: str) -> None:
- """Remove an item from a filterlist."""
- allow_type = "whitelist" if allowed else "blacklist"
-
- # If this is a server invite, we need to convert it.
- if list_type == "GUILD_INVITE" and not IDConverter()._get_id_match(content):
- guild_data = await self._validate_guild_invite(ctx, content)
- content = guild_data.get("id")
-
- # If it's a file format, let's make sure it has a leading dot.
- elif list_type == "FILE_FORMAT" and not content.startswith("."):
- content = f".{content}"
-
- # Find the content and delete it.
- log.trace(f"Trying to delete the {content} item from the {list_type} {allow_type}")
- item = self.bot.filter_list_cache[f"{list_type}.{allowed}"].get(content)
-
- if item is not None:
- try:
- await self.bot.api_client.delete(
- f"bot/filter-lists/{item['id']}"
- )
- del self.bot.filter_list_cache[f"{list_type}.{allowed}"][content]
- await ctx.message.add_reaction("✅")
- except ResponseCodeError as e:
- log.debug(
- f"{ctx.author} tried to delete an item with the id {item['id']}, but "
- f"the API raised an unexpected error: {e}"
- )
- await ctx.message.add_reaction("❌")
- else:
- await ctx.message.add_reaction("❌")
-
- async def _list_all_data(self, ctx: Context, allowed: bool, list_type: ValidFilterListType) -> None:
- """Paginate and display all items in a filterlist."""
- allow_type = "whitelist" if allowed else "blacklist"
- result = self.bot.filter_list_cache[f"{list_type}.{allowed}"]
-
- # Build a list of lines we want to show in the paginator
- lines = []
- for content, metadata in result.items():
- line = f"• `{content}`"
-
- if comment := metadata.get("comment"):
- line += f" - {comment}"
-
- lines.append(line)
- lines = sorted(lines)
-
- # Build the embed
- list_type_plural = list_type.lower().replace("_", " ").title() + "s"
- embed = Embed(
- title=f"{allow_type.title()}ed {list_type_plural} ({len(result)} total)",
- colour=Colour.blue()
- )
- log.trace(f"Trying to list {len(result)} items from the {list_type.lower()} {allow_type}")
-
- if result:
- await LinePaginator.paginate(lines, ctx, embed, max_lines=15, empty=False)
- else:
- embed.description = "Hmmm, seems like there's nothing here yet."
- await ctx.send(embed=embed)
- await ctx.message.add_reaction("❌")
-
- async def _sync_data(self, ctx: Context) -> None:
- """Syncs the filterlists with the API."""
- try:
- log.trace("Attempting to sync FilterList cache with data from the API.")
- await self.bot.cache_filter_list_data()
- await ctx.message.add_reaction("✅")
- except ResponseCodeError as e:
- log.debug(
- f"{ctx.author} tried to sync FilterList cache data but "
- f"the API raised an unexpected error: {e}"
- )
- await ctx.message.add_reaction("❌")
-
- @staticmethod
- async def _validate_guild_invite(ctx: Context, invite: str) -> dict:
- """
- Validates a guild invite, and returns the guild info as a dict.
-
- Will raise a BadArgument if the guild invite is invalid.
- """
- log.trace(f"Attempting to validate whether or not {invite} is a guild invite.")
- validator = ValidDiscordServerInvite()
- guild_data = await validator.convert(ctx, invite)
-
- # If we make it this far without raising a BadArgument, the invite is
- # valid. Let's return a dict of guild information.
- log.trace(f"{invite} validated as server invite. Converting to ID.")
- return guild_data
-
- @group(aliases=("allowlist", "allow", "al", "wl"))
- async def whitelist(self, ctx: Context) -> None:
- """Group for whitelisting commands."""
- if not ctx.invoked_subcommand:
- await ctx.send_help(ctx.command)
-
- @group(aliases=("denylist", "deny", "bl", "dl"))
- async def blacklist(self, ctx: Context) -> None:
- """Group for blacklisting commands."""
- if not ctx.invoked_subcommand:
- await ctx.send_help(ctx.command)
-
- @whitelist.command(name="add", aliases=("a", "set"))
- async def allow_add(
- self,
- ctx: Context,
- list_type: ValidFilterListType,
- content: str,
- *,
- comment: Optional[str] = None,
- ) -> None:
- """Add an item to the specified allowlist."""
- await self._add_data(ctx, True, list_type, content, comment)
-
- @blacklist.command(name="add", aliases=("a", "set"))
- async def deny_add(
- self,
- ctx: Context,
- list_type: ValidFilterListType,
- content: str,
- *,
- comment: Optional[str] = None,
- ) -> None:
- """Add an item to the specified denylist."""
- await self._add_data(ctx, False, list_type, content, comment)
-
- @whitelist.command(name="remove", aliases=("delete", "rm",))
- async def allow_delete(self, ctx: Context, list_type: ValidFilterListType, content: str) -> None:
- """Remove an item from the specified allowlist."""
- await self._delete_data(ctx, True, list_type, content)
-
- @blacklist.command(name="remove", aliases=("delete", "rm",))
- async def deny_delete(self, ctx: Context, list_type: ValidFilterListType, content: str) -> None:
- """Remove an item from the specified denylist."""
- await self._delete_data(ctx, False, list_type, content)
-
- @whitelist.command(name="get", aliases=("list", "ls", "fetch", "show"))
- async def allow_get(self, ctx: Context, list_type: ValidFilterListType) -> None:
- """Get the contents of a specified allowlist."""
- await self._list_all_data(ctx, True, list_type)
-
- @blacklist.command(name="get", aliases=("list", "ls", "fetch", "show"))
- async def deny_get(self, ctx: Context, list_type: ValidFilterListType) -> None:
- """Get the contents of a specified denylist."""
- await self._list_all_data(ctx, False, list_type)
-
- @whitelist.command(name="sync", aliases=("s",))
- async def allow_sync(self, ctx: Context) -> None:
- """Syncs both allowlists and denylists with the API."""
- await self._sync_data(ctx)
-
- @blacklist.command(name="sync", aliases=("s",))
- async def deny_sync(self, ctx: Context) -> None:
- """Syncs both allowlists and denylists with the API."""
- await self._sync_data(ctx)
-
- async def cog_check(self, ctx: Context) -> bool:
- """Only allow moderators to invoke the commands in this cog."""
- return await has_any_role(*constants.MODERATION_ROLES).predicate(ctx)
-
-
-async def setup(bot: Bot) -> None:
- """Load the FilterLists cog."""
- await bot.add_cog(FilterLists(bot))
diff --git a/bot/exts/filters/filtering.py b/bot/exts/filters/filtering.py
deleted file mode 100644
index e4df0b1fd..000000000
--- a/bot/exts/filters/filtering.py
+++ /dev/null
@@ -1,735 +0,0 @@
-import asyncio
-import re
-import unicodedata
-import urllib.parse
-from datetime import timedelta
-from typing import Any, Dict, List, Mapping, NamedTuple, Optional, Tuple, Union
-
-import arrow
-import dateutil.parser
-import regex
-import tldextract
-from async_rediscache import RedisCache
-from botcore.site_api import ResponseCodeError
-from botcore.utils import scheduling
-from botcore.utils.regex import DISCORD_INVITE
-from dateutil.relativedelta import relativedelta
-from discord import ChannelType, Colour, Embed, Forbidden, HTTPException, Member, Message, NotFound, TextChannel
-from discord.ext.commands import Cog
-from discord.utils import escape_markdown
-
-from bot.bot import Bot
-from bot.constants import Bot as BotConfig, Channels, Colours, Filter, Guild, Icons, URLs
-from bot.exts.events.code_jams._channels import CATEGORY_NAME as JAM_CATEGORY_NAME
-from bot.exts.moderation.modlog import ModLog
-from bot.log import get_logger
-from bot.utils.messages import format_user
-
-log = get_logger(__name__)
-
-# Regular expressions
-CODE_BLOCK_RE = re.compile(
- r"(?P<delim>``?)[^`]+?(?P=delim)(?!`+)" # Inline codeblock
- r"|```(.+?)```", # Multiline codeblock
- re.DOTALL | re.MULTILINE
-)
-EVERYONE_PING_RE = re.compile(rf"@everyone|<@&{Guild.id}>|@here")
-SPOILER_RE = re.compile(r"(\|\|.+?\|\|)", re.DOTALL)
-URL_RE = re.compile(r"(https?://[^\s]+)", flags=re.IGNORECASE)
-
-# Exclude variation selectors from zalgo because they're actually invisible.
-VARIATION_SELECTORS = r"\uFE00-\uFE0F\U000E0100-\U000E01EF"
-INVISIBLE_RE = regex.compile(rf"[{VARIATION_SELECTORS}\p{{UNASSIGNED}}\p{{FORMAT}}\p{{CONTROL}}--\s]", regex.V1)
-ZALGO_RE = regex.compile(rf"[\p{{NONSPACING MARK}}\p{{ENCLOSING MARK}}--[{VARIATION_SELECTORS}]]", regex.V1)
-
-# Other constants.
-DAYS_BETWEEN_ALERTS = 3
-OFFENSIVE_MSG_DELETE_TIME = timedelta(days=Filter.offensive_msg_delete_days)
-
-# Autoban
-LINK_PASSWORD = "https://support.discord.com/hc/en-us/articles/218410947-I-forgot-my-Password-Where-can-I-set-a-new-one"
-LINK_2FA = "https://support.discord.com/hc/en-us/articles/219576828-Setting-up-Two-Factor-Authentication"
-AUTO_BAN_REASON = (
- "Your account has been used to send links to a phishing website. You have been automatically banned. "
- "If you are not aware of sending them, that means your account has been compromised.\n\n"
-
- f"Here is a guide from Discord on [how to change your password]({LINK_PASSWORD}).\n\n"
-
- f"We also highly recommend that you [enable 2 factor authentication on your account]({LINK_2FA}), "
- "for heightened security.\n\n"
-
- "Once you have changed your password, feel free to follow the instructions at the bottom of "
- "this message to appeal your ban."
-)
-AUTO_BAN_DURATION = timedelta(days=4)
-
-FilterMatch = Union[re.Match, dict, bool, List[Embed]]
-
-
-class Stats(NamedTuple):
- """Additional stats on a triggered filter to append to a mod log."""
-
- message_content: str
- additional_embeds: Optional[List[Embed]]
-
-
-class Filtering(Cog):
- """Filtering out invites, blacklisting domains, and warning us of certain regular expressions."""
-
- # Redis cache mapping a user ID to the last timestamp a bad nickname alert was sent
- name_alerts = RedisCache()
-
- def __init__(self, bot: Bot):
- self.bot = bot
- self.scheduler = scheduling.Scheduler(self.__class__.__name__)
- self.name_lock = asyncio.Lock()
-
- staff_mistake_str = "If you believe this was a mistake, please let staff know!"
- self.filters = {
- "filter_zalgo": {
- "enabled": Filter.filter_zalgo,
- "function": self._has_zalgo,
- "type": "filter",
- "content_only": True,
- "user_notification": Filter.notify_user_zalgo,
- "notification_msg": (
- "Your post has been removed for abusing Unicode character rendering (aka Zalgo text). "
- f"{staff_mistake_str}"
- ),
- "schedule_deletion": False
- },
- "filter_invites": {
- "enabled": Filter.filter_invites,
- "function": self._has_invites,
- "type": "filter",
- "content_only": True,
- "user_notification": Filter.notify_user_invites,
- "notification_msg": (
- f"Per Rule 6, your invite link has been removed. {staff_mistake_str}\n\n"
- r"Our server rules can be found here: <https://pythondiscord.com/pages/rules>"
- ),
- "schedule_deletion": False
- },
- "filter_domains": {
- "enabled": Filter.filter_domains,
- "function": self._has_urls,
- "type": "filter",
- "content_only": True,
- "user_notification": Filter.notify_user_domains,
- "notification_msg": (
- f"Your URL has been removed because it matched a blacklisted domain. {staff_mistake_str}"
- ),
- "schedule_deletion": False
- },
- "watch_regex": {
- "enabled": Filter.watch_regex,
- "function": self._has_watch_regex_match,
- "type": "watchlist",
- "content_only": True,
- "schedule_deletion": True
- },
- "watch_rich_embeds": {
- "enabled": Filter.watch_rich_embeds,
- "function": self._has_rich_embed,
- "type": "watchlist",
- "content_only": False,
- "schedule_deletion": False
- },
- "filter_everyone_ping": {
- "enabled": Filter.filter_everyone_ping,
- "function": self._has_everyone_ping,
- "type": "filter",
- "content_only": True,
- "user_notification": Filter.notify_user_everyone_ping,
- "notification_msg": (
- "Please don't try to ping `@everyone` or `@here`. "
- f"Your message has been removed. {staff_mistake_str}"
- ),
- "schedule_deletion": False,
- "ping_everyone": False
- },
- }
-
- async def cog_unload(self) -> None:
- """Cancel scheduled tasks."""
- self.scheduler.cancel_all()
-
- def _get_filterlist_items(self, list_type: str, *, allowed: bool) -> list:
- """Fetch items from the filter_list_cache."""
- return self.bot.filter_list_cache[f"{list_type.upper()}.{allowed}"].keys()
-
- def _get_filterlist_value(self, list_type: str, value: Any, *, allowed: bool) -> dict:
- """Fetch one specific value from filter_list_cache."""
- return self.bot.filter_list_cache[f"{list_type.upper()}.{allowed}"][value]
-
- @staticmethod
- def _expand_spoilers(text: str) -> str:
- """Return a string containing all interpretations of a spoilered message."""
- split_text = SPOILER_RE.split(text)
- return ''.join(
- split_text[0::2] + split_text[1::2] + split_text
- )
-
- @property
- def mod_log(self) -> ModLog:
- """Get currently loaded ModLog cog instance."""
- return self.bot.get_cog("ModLog")
-
- @Cog.listener()
- async def on_message(self, msg: Message) -> None:
- """Invoke message filter for new messages."""
- await self._filter_message(msg)
-
- # Ignore webhook messages.
- if msg.webhook_id is None:
- await self.check_bad_words_in_name(msg.author)
-
- @Cog.listener()
- async def on_message_edit(self, before: Message, after: Message) -> None:
- """
- Invoke message filter for message edits.
-
- Also calculates the time delta from the previous edit or when message was sent if there's no prior edits.
- """
- # We only care about changes to the message contents/attachments and embed additions, not pin status etc.
- if all((
- before.content == after.content, # content hasn't changed
- before.attachments == after.attachments, # attachments haven't changed
- len(before.embeds) >= len(after.embeds) # embeds haven't been added
- )):
- return
-
- if not before.edited_at:
- delta = relativedelta(after.edited_at, before.created_at).microseconds
- else:
- delta = relativedelta(after.edited_at, before.edited_at).microseconds
- await self._filter_message(after, delta)
-
- @Cog.listener()
- async def on_voice_state_update(self, member: Member, *_) -> None:
- """Checks for bad words in usernames when users join, switch or leave a voice channel."""
- await self.check_bad_words_in_name(member)
-
- def get_name_match(self, name: str) -> Optional[re.Match]:
- """Check bad words from passed string (name). Return the first match found."""
- normalised_name = unicodedata.normalize("NFKC", name)
- cleaned_normalised_name = "".join([c for c in normalised_name if not unicodedata.combining(c)])
-
- # Run filters against normalised, cleaned normalised and the original name,
- # in case we have filters for one but not the other.
- names_to_check = (name, normalised_name, cleaned_normalised_name)
-
- watchlist_patterns = self._get_filterlist_items('filter_token', allowed=False)
- for pattern in watchlist_patterns:
- for name in names_to_check:
- if match := re.search(pattern, name, flags=re.IGNORECASE):
- return match
- return None
-
- async def check_send_alert(self, member: Member) -> bool:
- """When there is less than 3 days after last alert, return `False`, otherwise `True`."""
- if last_alert := await self.name_alerts.get(member.id):
- last_alert = arrow.get(last_alert)
- if arrow.utcnow() - timedelta(days=DAYS_BETWEEN_ALERTS) < last_alert:
- log.trace(f"Last alert was too recent for {member}'s nickname.")
- return False
-
- return True
-
- async def check_bad_words_in_name(self, member: Member) -> None:
- """Send a mod alert every 3 days if a username still matches a watchlist pattern."""
- # Use lock to avoid race conditions
- async with self.name_lock:
- # Check if we recently alerted about this user first,
- # to avoid running all the filter tokens against their name again.
- if not await self.check_send_alert(member):
- return
-
- # Check whether the users display name contains any words in our blacklist
- match = self.get_name_match(member.display_name)
- if not match:
- return
-
- log.info(f"Sending bad nickname alert for '{member.display_name}' ({member.id}).")
-
- log_string = (
- f"**User:** {format_user(member)}\n"
- f"**Display Name:** {escape_markdown(member.display_name)}\n"
- f"**Bad Match:** {match.group()}"
- )
-
- await self.mod_log.send_log_message(
- content=str(member.id), # quality-of-life improvement for mobile moderators
- icon_url=Icons.token_removed,
- colour=Colours.soft_red,
- title="Username filtering alert",
- text=log_string,
- channel_id=Channels.mod_alerts,
- thumbnail=member.display_avatar.url,
- ping_everyone=True
- )
-
- # Update time when alert sent
- await self.name_alerts.set(member.id, arrow.utcnow().timestamp())
-
- async def filter_snekbox_output(self, result: str, msg: Message) -> bool:
- """
- Filter the result of a snekbox command to see if it violates any of our rules, and then respond accordingly.
-
- Also requires the original message, to check whether to filter and for mod logs.
- Returns whether a filter was triggered or not.
- """
- filter_triggered = False
- # Should we filter this message?
- if self._check_filter(msg):
- for filter_name, _filter in self.filters.items():
- # Is this specific filter enabled in the config?
- # We also do not need to worry about filters that take the full message,
- # since all we have is an arbitrary string.
- if _filter["enabled"] and _filter["content_only"]:
- filter_result = await _filter["function"](result)
- reason = None
-
- if isinstance(filter_result, tuple):
- match, reason = filter_result
- else:
- match = filter_result
-
- if match:
- # If this is a filter (not a watchlist), we set the variable so we know
- # that it has been triggered
- if _filter["type"] == "filter":
- filter_triggered = True
-
- stats = self._add_stats(filter_name, match, result)
- await self._send_log(filter_name, _filter, msg, stats, reason, is_eval=True)
-
- break # We don't want multiple filters to trigger
-
- return filter_triggered
-
- async def _filter_message(self, msg: Message, delta: Optional[int] = None) -> None:
- """Filter the input message to see if it violates any of our rules, and then respond accordingly."""
- # Should we filter this message?
- if self._check_filter(msg):
- for filter_name, _filter in self.filters.items():
- # Is this specific filter enabled in the config?
- if _filter["enabled"]:
- # Double trigger check for the embeds filter
- if filter_name == "watch_rich_embeds":
- # If the edit delta is less than 0.001 seconds, then we're probably dealing
- # with a double filter trigger.
- if delta is not None and delta < 100:
- continue
-
- if filter_name in ("filter_invites", "filter_everyone_ping"):
- # Disable invites filter in codejam team channels
- category = getattr(msg.channel, "category", None)
- if category and category.name == JAM_CATEGORY_NAME:
- continue
-
- # Does the filter only need the message content or the full message?
- if _filter["content_only"]:
- payload = msg.content
- else:
- payload = msg
-
- result = await _filter["function"](payload)
- reason = None
-
- if isinstance(result, tuple):
- match, reason = result
- else:
- match = result
-
- if match:
- is_private = msg.channel.type is ChannelType.private
-
- # If this is a filter (not a watchlist) and not in a DM, delete the message.
- if _filter["type"] == "filter" and not is_private:
- try:
- # Embeds (can?) trigger both the `on_message` and `on_message_edit`
- # event handlers, triggering filtering twice for the same message.
- #
- # If `on_message`-triggered filtering already deleted the message
- # then `on_message_edit`-triggered filtering will raise exception
- # since the message no longer exists.
- #
- # In addition, to avoid sending two notifications to the user, the
- # logs, and mod_alert, we return if the message no longer exists.
- await msg.delete()
- except NotFound:
- return
-
- # Notify the user if the filter specifies
- if _filter["user_notification"]:
- await self.notify_member(msg.author, _filter["notification_msg"], msg.channel)
-
- # If the message is classed as offensive, we store it in the site db and
- # it will be deleted after one week.
- if _filter["schedule_deletion"] and not is_private:
- delete_date = (msg.created_at + OFFENSIVE_MSG_DELETE_TIME).isoformat()
- data = {
- 'id': msg.id,
- 'channel_id': msg.channel.id,
- 'delete_date': delete_date
- }
-
- try:
- await self.bot.api_client.post('bot/offensive-messages', json=data)
- except ResponseCodeError as e:
- if e.status == 400 and "already exists" in e.response_json.get("id", [""])[0]:
- log.debug(f"Offensive message {msg.id} already exists.")
- else:
- log.error(f"Offensive message {msg.id} failed to post: {e}")
- else:
- self.schedule_msg_delete(data)
- log.trace(f"Offensive message {msg.id} will be deleted on {delete_date}")
-
- stats = self._add_stats(filter_name, match, msg.content)
-
- # If the filter reason contains `[autoban]`, we want to auto-ban the user.
- # Also pass this to _send_log so mods are not pinged filter matches that are auto-actioned
- autoban = reason and "[autoban]" in reason.lower()
- if not autoban and filter_name == "filter_invites" and isinstance(result, dict):
- autoban = any(
- "[autoban]" in invite_info["reason"].lower()
- for invite_info in result.values()
- if invite_info.get("reason")
- )
-
- await self._send_log(filter_name, _filter, msg, stats, reason, autoban=autoban)
-
- if autoban:
- # Create a new context, with the author as is the bot, and the channel as #mod-alerts.
- # This sends the ban confirmation directly under watchlist trigger embed, to inform
- # mods that the user was auto-banned for the message.
- context = await self.bot.get_context(msg)
- context.guild = self.bot.get_guild(Guild.id)
- context.author = context.guild.get_member(self.bot.user.id)
- context.channel = self.bot.get_channel(Channels.mod_alerts)
- context.command = self.bot.get_command("tempban")
-
- await context.invoke(
- context.command,
- msg.author,
- (arrow.utcnow() + AUTO_BAN_DURATION).datetime,
- reason=AUTO_BAN_REASON
- )
-
- break # We don't want multiple filters to trigger
-
- async def _send_log(
- self,
- filter_name: str,
- _filter: Dict[str, Any],
- msg: Message,
- stats: Stats,
- reason: Optional[str] = None,
- *,
- is_eval: bool = False,
- autoban: bool = False,
- ) -> None:
- """Send a mod log for a triggered filter."""
- if msg.channel.type is ChannelType.private:
- channel_str = "via DM"
- ping_everyone = False
- else:
- channel_str = f"in {msg.channel.mention}"
- # Allow specific filters to override ping_everyone
- ping_everyone = Filter.ping_everyone and _filter.get("ping_everyone", True)
-
- content = str(msg.author.id) # quality-of-life improvement for mobile moderators
-
- # If we are going to autoban, we don't want to ping and don't need the user ID
- if autoban:
- ping_everyone = False
- content = None
-
- eval_msg = f"using {BotConfig.prefix}eval " if is_eval else ""
- footer = f"Reason: {reason}" if reason else None
- message = (
- f"The {filter_name} {_filter['type']} was triggered by {format_user(msg.author)} "
- f"{channel_str} {eval_msg}with [the following message]({msg.jump_url}):\n\n"
- f"{stats.message_content}"
- )
-
- log.debug(message)
-
- # Send pretty mod log embed to mod-alerts
- await self.mod_log.send_log_message(
- content=content,
- icon_url=Icons.filtering,
- colour=Colour(Colours.soft_red),
- title=f"{_filter['type'].title()} triggered!",
- text=message,
- thumbnail=msg.author.display_avatar.url,
- channel_id=Channels.mod_alerts,
- ping_everyone=ping_everyone,
- additional_embeds=stats.additional_embeds,
- footer=footer,
- )
-
- def _add_stats(self, name: str, match: FilterMatch, content: str) -> Stats:
- """Adds relevant statistical information to the relevant filter and increments the bot's stats."""
- # Word and match stats for watch_regex
- if name == "watch_regex":
- surroundings = match.string[max(match.start() - 10, 0): match.end() + 10]
- message_content = (
- f"**Match:** '{match[0]}'\n"
- f"**Location:** '...{escape_markdown(surroundings)}...'\n"
- f"\n**Original Message:**\n{escape_markdown(content)}"
- )
- else: # Use original content
- message_content = content
-
- additional_embeds = None
-
- self.bot.stats.incr(f"filters.{name}")
-
- # The function returns True for invalid invites.
- # They have no data so additional embeds can't be created for them.
- if name == "filter_invites" and match is not True:
- additional_embeds = []
- for _, data in match.items():
- reason = f"Reason: {data['reason']} | " if data.get('reason') else ""
- embed = Embed(description=(
- f"**Members:**\n{data['members']}\n"
- f"**Active:**\n{data['active']}"
- ))
- embed.set_author(name=data["name"])
- embed.set_thumbnail(url=data["icon"])
- embed.set_footer(text=f"{reason}Guild ID: {data['id']}")
- additional_embeds.append(embed)
-
- elif name == "watch_rich_embeds":
- additional_embeds = match
-
- return Stats(message_content, additional_embeds)
-
- @staticmethod
- def _check_filter(msg: Message) -> bool:
- """Check whitelists to see if we should filter this message."""
- role_whitelisted = False
-
- if type(msg.author) is Member: # Only Member has roles, not User.
- for role in msg.author.roles:
- if role.id in Filter.role_whitelist:
- role_whitelisted = True
-
- return (
- msg.channel.id not in Filter.channel_whitelist # Channel not in whitelist
- and not role_whitelisted # Role not in whitelist
- and not msg.author.bot # Author not a bot
- )
-
- async def _has_watch_regex_match(self, text: str) -> Tuple[Union[bool, re.Match], Optional[str]]:
- """
- Return True if `text` matches any regex from `word_watchlist` or `token_watchlist` configs.
-
- `word_watchlist`'s patterns are placed between word boundaries while `token_watchlist` is
- matched as-is. Spoilers are expanded, if any, and URLs are ignored.
- Second return value is a reason written to database about blacklist entry (can be None).
- """
- if SPOILER_RE.search(text):
- text = self._expand_spoilers(text)
-
- text = self.clean_input(text)
-
- watchlist_patterns = self._get_filterlist_items('filter_token', allowed=False)
- for pattern in watchlist_patterns:
- match = re.search(pattern, text, flags=re.IGNORECASE)
- if match:
- return match, self._get_filterlist_value('filter_token', pattern, allowed=False)['comment']
-
- return False, None
-
- async def _has_urls(self, text: str) -> Tuple[bool, Optional[str]]:
- """
- Returns True if the text contains one of the blacklisted URLs from the config file.
-
- Second return value is a reason of URL blacklisting (can be None).
- """
- text = self.clean_input(text)
-
- domain_blacklist = self._get_filterlist_items("domain_name", allowed=False)
- for match in URL_RE.finditer(text):
- for url in domain_blacklist:
- if url.lower() in match.group(1).lower():
- blacklisted_parsed = tldextract.extract(url.lower())
- url_parsed = tldextract.extract(match.group(1).lower())
- if blacklisted_parsed.registered_domain == url_parsed.registered_domain:
- return True, self._get_filterlist_value("domain_name", url, allowed=False)["comment"]
- return False, None
-
- @staticmethod
- async def _has_zalgo(text: str) -> bool:
- """
- Returns True if the text contains zalgo characters.
-
- Zalgo range is \u0300 – \u036F and \u0489.
- """
- return bool(ZALGO_RE.search(text))
-
- async def _has_invites(self, text: str) -> Union[dict, bool]:
- """
- Checks if there's any invites in the text content that aren't in the guild whitelist.
-
- If any are detected, a dictionary of invite data is returned, with a key per invite.
- If none are detected, False is returned.
- If we are unable to process an invite, True is returned.
-
- Attempts to catch some of common ways to try to cheat the system.
- """
- text = self.clean_input(text)
-
- # Remove backslashes to prevent escape character aroundfuckery like
- # discord\.gg/gdudes-pony-farm
- text = text.replace("\\", "")
-
- invites = [m.group("invite") for m in DISCORD_INVITE.finditer(text)]
- invite_data = dict()
- for invite in invites:
- invite = urllib.parse.quote_plus(invite.rstrip("/"))
- if invite in invite_data:
- continue
-
- response = await self.bot.http_session.get(
- f"{URLs.discord_invite_api}/{invite}", params={"with_counts": "true"}
- )
- response = await response.json()
- guild = response.get("guild")
- if guild is None:
- # Lack of a "guild" key in the JSON response indicates either an group DM invite, an
- # expired invite, or an invalid invite. The API does not currently differentiate
- # between invalid and expired invites
- return True
-
- guild_id = guild.get("id")
- guild_invite_whitelist = self._get_filterlist_items("guild_invite", allowed=True)
- guild_invite_blacklist = self._get_filterlist_items("guild_invite", allowed=False)
-
- # Is this invite allowed?
- guild_partnered_or_verified = (
- 'PARTNERED' in guild.get("features", [])
- or 'VERIFIED' in guild.get("features", [])
- )
- invite_not_allowed = (
- guild_id in guild_invite_blacklist # Blacklisted guilds are never permitted.
- or guild_id not in guild_invite_whitelist # Whitelisted guilds are always permitted.
- and not guild_partnered_or_verified # Otherwise guilds have to be Verified or Partnered.
- )
-
- if invite_not_allowed:
- reason = None
- if guild_id in guild_invite_blacklist:
- reason = self._get_filterlist_value("guild_invite", guild_id, allowed=False)["comment"]
-
- guild_icon_hash = guild["icon"]
- guild_icon = (
- "https://cdn.discordapp.com/icons/"
- f"{guild_id}/{guild_icon_hash}.png?size=512"
- )
-
- invite_data[invite] = {
- "name": guild["name"],
- "id": guild['id'],
- "icon": guild_icon,
- "members": response["approximate_member_count"],
- "active": response["approximate_presence_count"],
- "reason": reason
- }
-
- return invite_data if invite_data else False
-
- @staticmethod
- async def _has_rich_embed(msg: Message) -> Union[bool, List[Embed]]:
- """Determines if `msg` contains any rich embeds not auto-generated from a URL."""
- if msg.embeds:
- for embed in msg.embeds:
- if embed.type == "rich":
- urls = URL_RE.findall(msg.content)
- if not embed.url or embed.url not in urls:
- # If `embed.url` does not exist or if `embed.url` is not part of the content
- # of the message, it's unlikely to be an auto-generated embed by Discord.
- return msg.embeds
- else:
- log.trace(
- "Found a rich embed sent by a regular user account, "
- "but it was likely just an automatic URL embed."
- )
- return False
- return False
-
- @staticmethod
- async def _has_everyone_ping(text: str) -> bool:
- """Determines if `msg` contains an @everyone or @here ping outside of a codeblock."""
- # First pass to avoid running re.sub on every message
- if not EVERYONE_PING_RE.search(text):
- return False
-
- content_without_codeblocks = CODE_BLOCK_RE.sub("", text)
- return bool(EVERYONE_PING_RE.search(content_without_codeblocks))
-
- async def notify_member(self, filtered_member: Member, reason: str, channel: TextChannel) -> None:
- """
- Notify filtered_member about a moderation action with the reason str.
-
- First attempts to DM the user, fall back to in-channel notification if user has DMs disabled
- """
- try:
- await filtered_member.send(reason)
- except Forbidden:
- await channel.send(f"{filtered_member.mention} {reason}")
-
- def schedule_msg_delete(self, msg: dict) -> None:
- """Delete an offensive message once its deletion date is reached."""
- delete_at = dateutil.parser.isoparse(msg['delete_date'])
- self.scheduler.schedule_at(delete_at, msg['id'], self.delete_offensive_msg(msg))
-
- async def cog_load(self) -> None:
- """Get all the pending message deletion from the API and reschedule them."""
- await self.bot.wait_until_ready()
- response = await self.bot.api_client.get('bot/offensive-messages',)
-
- now = arrow.utcnow()
-
- for msg in response:
- delete_at = dateutil.parser.isoparse(msg['delete_date'])
-
- if delete_at < now:
- await self.delete_offensive_msg(msg)
- else:
- self.schedule_msg_delete(msg)
-
- async def delete_offensive_msg(self, msg: Mapping[str, int]) -> None:
- """Delete an offensive message, and then delete it from the db."""
- try:
- channel = self.bot.get_channel(msg['channel_id'])
- if channel:
- msg_obj = await channel.fetch_message(msg['id'])
- await msg_obj.delete()
- except NotFound:
- log.info(
- f"Tried to delete message {msg['id']}, but the message can't be found "
- f"(it has been probably already deleted)."
- )
- except HTTPException as e:
- log.warning(f"Failed to delete message {msg['id']}: status {e.status}")
-
- await self.bot.api_client.delete(f'bot/offensive-messages/{msg["id"]}')
- log.info(f"Deleted the offensive message with id {msg['id']}.")
-
- @staticmethod
- def clean_input(string: str) -> str:
- """Remove zalgo and invisible characters from `string`."""
- # For future consideration: remove characters in the Mc, Sk, and Lm categories too.
- # Can be normalised with form C to merge char + combining char into a single char to avoid
- # removing legit diacritics, but this would open up a way to bypass filters.
- no_zalgo = ZALGO_RE.sub("", string)
- return INVISIBLE_RE.sub("", no_zalgo)
-
-
-async def setup(bot: Bot) -> None:
- """Load the Filtering cog."""
- await bot.add_cog(Filtering(bot))
diff --git a/bot/exts/filters/security.py b/bot/exts/filters/security.py
deleted file mode 100644
index 27e4d9752..000000000
--- a/bot/exts/filters/security.py
+++ /dev/null
@@ -1,30 +0,0 @@
-from discord.ext.commands import Cog, Context, NoPrivateMessage
-
-from bot.bot import Bot
-from bot.log import get_logger
-
-log = get_logger(__name__)
-
-
-class Security(Cog):
- """Security-related helpers."""
-
- def __init__(self, bot: Bot):
- self.bot = bot
- self.bot.check(self.check_not_bot) # Global commands check - no bots can run any commands at all
- self.bot.check(self.check_on_guild) # Global commands check - commands can't be run in a DM
-
- def check_not_bot(self, ctx: Context) -> bool:
- """Check if the context is a bot user."""
- return not ctx.author.bot
-
- def check_on_guild(self, ctx: Context) -> bool:
- """Check if the context is in a guild."""
- if ctx.guild is None:
- raise NoPrivateMessage("This command cannot be used in private messages.")
- return True
-
-
-async def setup(bot: Bot) -> None:
- """Load the Security cog."""
- await bot.add_cog(Security(bot))
diff --git a/bot/exts/filters/token_remover.py b/bot/exts/filters/token_remover.py
deleted file mode 100644
index a0d5aa7b6..000000000
--- a/bot/exts/filters/token_remover.py
+++ /dev/null
@@ -1,233 +0,0 @@
-import base64
-import re
-import typing as t
-
-from discord import Colour, Message, NotFound
-from discord.ext.commands import Cog
-
-from bot import utils
-from bot.bot import Bot
-from bot.constants import Channels, Colours, Event, Icons
-from bot.exts.moderation.modlog import ModLog
-from bot.log import get_logger
-from bot.utils.members import get_or_fetch_member
-from bot.utils.messages import format_user
-
-log = get_logger(__name__)
-
-LOG_MESSAGE = (
- "Censored a seemingly valid token sent by {author} in {channel}, "
- "token was `{user_id}.{timestamp}.{hmac}`"
-)
-UNKNOWN_USER_LOG_MESSAGE = "Decoded user ID: `{user_id}` (Not present in server)."
-KNOWN_USER_LOG_MESSAGE = (
- "Decoded user ID: `{user_id}` **(Present in server)**.\n"
- "This matches `{user_name}` and means this is likely a valid **{kind}** token."
-)
-DELETION_MESSAGE_TEMPLATE = (
- "Hey {mention}! I noticed you posted a seemingly valid Discord API "
- "token in your message and have removed your message. "
- "This means that your token has been **compromised**. "
- "Please change your token **immediately** at: "
- "<https://discordapp.com/developers/applications/me>\n\n"
- "Feel free to re-post it with the token removed. "
- "If you believe this was a mistake, please let us know!"
-)
-DISCORD_EPOCH = 1_420_070_400
-TOKEN_EPOCH = 1_293_840_000
-
-# Three parts delimited by dots: user ID, creation timestamp, HMAC.
-# The HMAC isn't parsed further, but it's in the regex to ensure it at least exists in the string.
-# Each part only matches base64 URL-safe characters.
-# Padding has never been observed, but the padding character '=' is matched just in case.
-TOKEN_RE = re.compile(r"([\w\-=]+)\.([\w\-=]+)\.([\w\-=]+)", re.ASCII)
-
-
-class Token(t.NamedTuple):
- """A Discord Bot token."""
-
- user_id: str
- timestamp: str
- hmac: str
-
-
-class TokenRemover(Cog):
- """Scans messages for potential discord.py bot tokens and removes them."""
-
- def __init__(self, bot: Bot):
- self.bot = bot
-
- @property
- def mod_log(self) -> ModLog:
- """Get currently loaded ModLog cog instance."""
- return self.bot.get_cog("ModLog")
-
- @Cog.listener()
- async def on_message(self, msg: Message) -> None:
- """
- Check each message for a string that matches Discord's token pattern.
-
- See: https://discordapp.com/developers/docs/reference#snowflakes
- """
- # Ignore DMs; can't delete messages in there anyway.
- if not msg.guild or msg.author.bot:
- return
-
- found_token = self.find_token_in_message(msg)
- if found_token:
- await self.take_action(msg, found_token)
-
- @Cog.listener()
- async def on_message_edit(self, before: Message, after: Message) -> None:
- """
- Check each edit for a string that matches Discord's token pattern.
-
- See: https://discordapp.com/developers/docs/reference#snowflakes
- """
- await self.on_message(after)
-
- async def take_action(self, msg: Message, found_token: Token) -> None:
- """Remove the `msg` containing the `found_token` and send a mod log message."""
- self.mod_log.ignore(Event.message_delete, msg.id)
-
- try:
- await msg.delete()
- except NotFound:
- log.debug(f"Failed to remove token in message {msg.id}: message already deleted.")
- return
-
- await msg.channel.send(DELETION_MESSAGE_TEMPLATE.format(mention=msg.author.mention))
-
- log_message = self.format_log_message(msg, found_token)
- userid_message, mention_everyone = await self.format_userid_log_message(msg, found_token)
- log.debug(log_message)
-
- # Send pretty mod log embed to mod-alerts
- await self.mod_log.send_log_message(
- icon_url=Icons.token_removed,
- colour=Colour(Colours.soft_red),
- title="Token removed!",
- text=log_message + "\n" + userid_message,
- thumbnail=msg.author.display_avatar.url,
- channel_id=Channels.mod_alerts,
- ping_everyone=mention_everyone,
- )
-
- self.bot.stats.incr("tokens.removed_tokens")
-
- @classmethod
- async def format_userid_log_message(cls, msg: Message, token: Token) -> t.Tuple[str, bool]:
- """
- Format the portion of the log message that includes details about the detected user ID.
-
- If the user is resolved to a member, the format includes the user ID, name, and the
- kind of user detected.
-
- If we resolve to a member and it is not a bot, we also return True to ping everyone.
-
- Returns a tuple of (log_message, mention_everyone)
- """
- user_id = cls.extract_user_id(token.user_id)
- user = await get_or_fetch_member(msg.guild, user_id)
-
- if user:
- return KNOWN_USER_LOG_MESSAGE.format(
- user_id=user_id,
- user_name=str(user),
- kind="BOT" if user.bot else "USER",
- ), True
- else:
- return UNKNOWN_USER_LOG_MESSAGE.format(user_id=user_id), False
-
- @staticmethod
- def format_log_message(msg: Message, token: Token) -> str:
- """Return the generic portion of the log message to send for `token` being censored in `msg`."""
- return LOG_MESSAGE.format(
- author=format_user(msg.author),
- channel=msg.channel.mention,
- user_id=token.user_id,
- timestamp=token.timestamp,
- hmac='x' * (len(token.hmac) - 3) + token.hmac[-3:],
- )
-
- @classmethod
- def find_token_in_message(cls, msg: Message) -> t.Optional[Token]:
- """Return a seemingly valid token found in `msg` or `None` if no token is found."""
- # Use finditer rather than search to guard against method calls prematurely returning the
- # token check (e.g. `message.channel.send` also matches our token pattern)
- for match in TOKEN_RE.finditer(msg.content):
- token = Token(*match.groups())
- if (
- (cls.extract_user_id(token.user_id) is not None)
- and cls.is_valid_timestamp(token.timestamp)
- and cls.is_maybe_valid_hmac(token.hmac)
- ):
- # Short-circuit on first match
- return token
-
- # No matching substring
- return
-
- @staticmethod
- def extract_user_id(b64_content: str) -> t.Optional[int]:
- """Return a user ID integer from part of a potential token, or None if it couldn't be decoded."""
- b64_content = utils.pad_base64(b64_content)
-
- try:
- decoded_bytes = base64.urlsafe_b64decode(b64_content)
- string = decoded_bytes.decode('utf-8')
- if not (string.isascii() and string.isdigit()):
- # This case triggers if there are fancy unicode digits in the base64 encoding,
- # that means it's not a valid user id.
- return None
- return int(string)
- except ValueError:
- return None
-
- @staticmethod
- def is_valid_timestamp(b64_content: str) -> bool:
- """
- Return True if `b64_content` decodes to a valid timestamp.
-
- If the timestamp is greater than the Discord epoch, it's probably valid.
- See: https://i.imgur.com/7WdehGn.png
- """
- b64_content = utils.pad_base64(b64_content)
-
- try:
- decoded_bytes = base64.urlsafe_b64decode(b64_content)
- timestamp = int.from_bytes(decoded_bytes, byteorder="big")
- except ValueError as e:
- log.debug(f"Failed to decode token timestamp '{b64_content}': {e}")
- return False
-
- # Seems like newer tokens don't need the epoch added, but add anyway since an upper bound
- # is not checked.
- if timestamp + TOKEN_EPOCH >= DISCORD_EPOCH:
- return True
- else:
- log.debug(f"Invalid token timestamp '{b64_content}': smaller than Discord epoch")
- return False
-
- @staticmethod
- def is_maybe_valid_hmac(b64_content: str) -> bool:
- """
- Determine if a given HMAC portion of a token is potentially valid.
-
- If the HMAC has 3 or less characters, it's probably a dummy value like "xxxxxxxxxx",
- and thus the token can probably be skipped.
- """
- unique = len(set(b64_content.lower()))
- if unique <= 3:
- log.debug(
- f"Considering the HMAC {b64_content} a dummy because it has {unique}"
- " case-insensitively unique characters"
- )
- return False
- else:
- return True
-
-
-async def setup(bot: Bot) -> None:
- """Load the TokenRemover cog."""
- await bot.add_cog(TokenRemover(bot))
diff --git a/bot/exts/filters/webhook_remover.py b/bot/exts/filters/webhook_remover.py
deleted file mode 100644
index b42613804..000000000
--- a/bot/exts/filters/webhook_remover.py
+++ /dev/null
@@ -1,94 +0,0 @@
-import re
-
-from discord import Colour, Message, NotFound
-from discord.ext.commands import Cog
-
-from bot.bot import Bot
-from bot.constants import Channels, Colours, Event, Icons
-from bot.exts.moderation.modlog import ModLog
-from bot.log import get_logger
-from bot.utils.messages import format_user
-
-WEBHOOK_URL_RE = re.compile(
- r"((?:https?:\/\/)?(?:ptb\.|canary\.)?discord(?:app)?\.com\/api\/webhooks\/\d+\/)\S+\/?",
- re.IGNORECASE
-)
-
-ALERT_MESSAGE_TEMPLATE = (
- "{user}, looks like you posted a Discord webhook URL. Therefore, your "
- "message has been removed, and your webhook has been deleted. "
- "You can re-create it if you wish to. If you believe this was a "
- "mistake, please let us know."
-)
-
-log = get_logger(__name__)
-
-
-class WebhookRemover(Cog):
- """Scan messages to detect Discord webhooks links."""
-
- def __init__(self, bot: Bot):
- self.bot = bot
-
- @property
- def mod_log(self) -> ModLog:
- """Get current instance of `ModLog`."""
- return self.bot.get_cog("ModLog")
-
- async def delete_and_respond(self, msg: Message, redacted_url: str, *, webhook_deleted: bool) -> None:
- """Delete `msg` and send a warning that it contained the Discord webhook `redacted_url`."""
- # Don't log this, due internal delete, not by user. Will make different entry.
- self.mod_log.ignore(Event.message_delete, msg.id)
-
- try:
- await msg.delete()
- except NotFound:
- log.debug(f"Failed to remove webhook in message {msg.id}: message already deleted.")
- return
-
- await msg.channel.send(ALERT_MESSAGE_TEMPLATE.format(user=msg.author.mention))
- if webhook_deleted:
- delete_state = "The webhook was successfully deleted."
- else:
- delete_state = "There was an error when deleting the webhook, it might have already been removed."
- message = (
- f"{format_user(msg.author)} posted a Discord webhook URL to {msg.channel.mention}. {delete_state} "
- f"Webhook URL was `{redacted_url}`"
- )
- log.debug(message)
-
- # Send entry to moderation alerts.
- await self.mod_log.send_log_message(
- icon_url=Icons.token_removed,
- colour=Colour(Colours.soft_red),
- title="Discord webhook URL removed!",
- text=message,
- thumbnail=msg.author.display_avatar.url,
- channel_id=Channels.mod_alerts
- )
-
- self.bot.stats.incr("tokens.removed_webhooks")
-
- @Cog.listener()
- async def on_message(self, msg: Message) -> None:
- """Check if a Discord webhook URL is in `message`."""
- # Ignore DMs; can't delete messages in there anyway.
- if not msg.guild or msg.author.bot:
- return
-
- matches = WEBHOOK_URL_RE.search(msg.content)
- if matches:
- async with self.bot.http_session.delete(matches[0]) as resp:
- # The Discord API Returns a 204 NO CONTENT response on success.
- deleted_successfully = resp.status == 204
- await self.delete_and_respond(msg, matches[1] + "xxx", webhook_deleted=deleted_successfully)
-
- @Cog.listener()
- async def on_message_edit(self, before: Message, after: Message) -> None:
- """Check if a Discord webhook URL is in the edited message `after`."""
- await self.on_message(after)
-
-
-async def setup(bot: Bot) -> None:
- """Load `WebhookRemover` cog."""
- await bot.add_cog(WebhookRemover(bot))
diff --git a/bot/exts/info/codeblock/_cog.py b/bot/exts/info/codeblock/_cog.py
index 9027105d9..cc5862131 100644
--- a/bot/exts/info/codeblock/_cog.py
+++ b/bot/exts/info/codeblock/_cog.py
@@ -8,8 +8,6 @@ from discord.ext.commands import Cog
from bot import constants
from bot.bot import Bot
-from bot.exts.filters.token_remover import TokenRemover
-from bot.exts.filters.webhook_remover import WEBHOOK_URL_RE
from bot.exts.info.codeblock._instructions import get_instructions
from bot.log import get_logger
from bot.utils import has_lines
@@ -135,8 +133,6 @@ class CodeBlockCog(Cog, name="Code Block"):
not message.author.bot
and self.is_valid_channel(message.channel)
and has_lines(message.content, constants.CodeBlock.minimum_lines)
- and not TokenRemover.find_token_in_message(message)
- and not WEBHOOK_URL_RE.search(message.content)
)
@Cog.listener()
diff --git a/bot/exts/moderation/infraction/infractions.py b/bot/exts/moderation/infraction/infractions.py
index 60b4428b7..999f9ba7f 100644
--- a/bot/exts/moderation/infraction/infractions.py
+++ b/bot/exts/moderation/infraction/infractions.py
@@ -1,5 +1,6 @@
import textwrap
import typing as t
+from datetime import timedelta
import arrow
import discord
@@ -12,7 +13,6 @@ from bot.bot import Bot
from bot.constants import Event
from bot.converters import Age, Duration, DurationOrExpiry, MemberOrUser, UnambiguousMemberOrUser
from bot.decorators import ensure_future_timestamp, respect_role_hierarchy
-from bot.exts.filters.filtering import AUTO_BAN_DURATION, AUTO_BAN_REASON
from bot.exts.moderation.infraction import _utils
from bot.exts.moderation.infraction._scheduler import InfractionScheduler
from bot.log import get_logger
@@ -27,6 +27,24 @@ if t.TYPE_CHECKING:
from bot.exts.moderation.watchchannels.bigbrother import BigBrother
+# Comp ban
+LINK_PASSWORD = "https://support.discord.com/hc/en-us/articles/218410947-I-forgot-my-Password-Where-can-I-set-a-new-one"
+LINK_2FA = "https://support.discord.com/hc/en-us/articles/219576828-Setting-up-Two-Factor-Authentication"
+COMP_BAN_REASON = (
+ "Your account has been used to send links to a phishing website. You have been automatically banned. "
+ "If you are not aware of sending them, that means your account has been compromised.\n\n"
+
+ f"Here is a guide from Discord on [how to change your password]({LINK_PASSWORD}).\n\n"
+
+ f"We also highly recommend that you [enable 2 factor authentication on your account]({LINK_2FA}), "
+ "for heightened security.\n\n"
+
+ "Once you have changed your password, feel free to follow the instructions at the bottom of "
+ "this message to appeal your ban."
+)
+COMP_BAN_DURATION = timedelta(days=4)
+
+
class Infractions(InfractionScheduler, commands.Cog):
"""Apply and pardon infractions on users for moderation purposes."""
@@ -157,7 +175,7 @@ class Infractions(InfractionScheduler, commands.Cog):
@command()
async def compban(self, ctx: Context, user: UnambiguousMemberOrUser) -> None:
"""Same as cleanban, but specifically with the ban reason and duration used for compromised accounts."""
- await self.cleanban(ctx, user, duration=(arrow.utcnow() + AUTO_BAN_DURATION).datetime, reason=AUTO_BAN_REASON)
+ await self.cleanban(ctx, user, duration=(arrow.utcnow() + COMP_BAN_DURATION).datetime, reason=COMP_BAN_REASON)
@command(aliases=("vban",))
async def voiceban(self, ctx: Context) -> None:
diff --git a/bot/exts/moderation/watchchannels/_watchchannel.py b/bot/exts/moderation/watchchannels/_watchchannel.py
index 46f9c296e..30b1f342b 100644
--- a/bot/exts/moderation/watchchannels/_watchchannel.py
+++ b/bot/exts/moderation/watchchannels/_watchchannel.py
@@ -14,8 +14,6 @@ from discord.ext.commands import Cog, Context
from bot.bot import Bot
from bot.constants import BigBrother as BigBrotherConfig, Guild as GuildConfig, Icons
-from bot.exts.filters.token_remover import TokenRemover
-from bot.exts.filters.webhook_remover import WEBHOOK_URL_RE
from bot.exts.moderation.modlog import ModLog
from bot.log import CustomLogger, get_logger
from bot.pagination import LinePaginator
@@ -235,9 +233,7 @@ class WatchChannel(metaclass=CogABCMeta):
await self.send_header(msg)
- if TokenRemover.find_token_in_message(msg) or WEBHOOK_URL_RE.search(msg.content):
- cleaned_content = "Content is censored because it contains a bot or webhook token."
- elif cleaned_content := msg.clean_content:
+ if cleaned_content := msg.clean_content:
# Put all non-media URLs in a code block to prevent embeds
media_urls = {embed.url for embed in msg.embeds if embed.type in ("image", "video")}
for url in URL_RE.findall(cleaned_content):
diff --git a/bot/rules/__init__.py b/bot/rules/__init__.py
deleted file mode 100644
index a01ceae73..000000000
--- a/bot/rules/__init__.py
+++ /dev/null
@@ -1,12 +0,0 @@
-# flake8: noqa
-
-from .attachments import apply as apply_attachments
-from .burst import apply as apply_burst
-from .burst_shared import apply as apply_burst_shared
-from .chars import apply as apply_chars
-from .discord_emojis import apply as apply_discord_emojis
-from .duplicates import apply as apply_duplicates
-from .links import apply as apply_links
-from .mentions import apply as apply_mentions
-from .newlines import apply as apply_newlines
-from .role_mentions import apply as apply_role_mentions
diff --git a/bot/rules/attachments.py b/bot/rules/attachments.py
deleted file mode 100644
index 8903c385c..000000000
--- a/bot/rules/attachments.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from typing import Dict, Iterable, List, Optional, Tuple
-
-from discord import Member, Message
-
-
-async def apply(
- last_message: Message, recent_messages: List[Message], config: Dict[str, int]
-) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]:
- """Detects total attachments exceeding the limit sent by a single user."""
- relevant_messages = tuple(
- msg
- for msg in recent_messages
- if (
- msg.author == last_message.author
- and len(msg.attachments) > 0
- )
- )
- total_recent_attachments = sum(len(msg.attachments) for msg in relevant_messages)
-
- if total_recent_attachments > config['max']:
- return (
- f"sent {total_recent_attachments} attachments in {config['interval']}s",
- (last_message.author,),
- relevant_messages
- )
- return None
diff --git a/bot/rules/burst.py b/bot/rules/burst.py
deleted file mode 100644
index 25c5a2f33..000000000
--- a/bot/rules/burst.py
+++ /dev/null
@@ -1,23 +0,0 @@
-from typing import Dict, Iterable, List, Optional, Tuple
-
-from discord import Member, Message
-
-
-async def apply(
- last_message: Message, recent_messages: List[Message], config: Dict[str, int]
-) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]:
- """Detects repeated messages sent by a single user."""
- relevant_messages = tuple(
- msg
- for msg in recent_messages
- if msg.author == last_message.author
- )
- total_relevant = len(relevant_messages)
-
- if total_relevant > config['max']:
- return (
- f"sent {total_relevant} messages in {config['interval']}s",
- (last_message.author,),
- relevant_messages
- )
- return None
diff --git a/bot/rules/burst_shared.py b/bot/rules/burst_shared.py
deleted file mode 100644
index bbe9271b3..000000000
--- a/bot/rules/burst_shared.py
+++ /dev/null
@@ -1,18 +0,0 @@
-from typing import Dict, Iterable, List, Optional, Tuple
-
-from discord import Member, Message
-
-
-async def apply(
- last_message: Message, recent_messages: List[Message], config: Dict[str, int]
-) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]:
- """Detects repeated messages sent by multiple users."""
- total_recent = len(recent_messages)
-
- if total_recent > config['max']:
- return (
- f"sent {total_recent} messages in {config['interval']}s",
- set(msg.author for msg in recent_messages),
- recent_messages
- )
- return None
diff --git a/bot/rules/chars.py b/bot/rules/chars.py
deleted file mode 100644
index 1f587422c..000000000
--- a/bot/rules/chars.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from typing import Dict, Iterable, List, Optional, Tuple
-
-from discord import Member, Message
-
-
-async def apply(
- last_message: Message, recent_messages: List[Message], config: Dict[str, int]
-) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]:
- """Detects total message char count exceeding the limit sent by a single user."""
- relevant_messages = tuple(
- msg
- for msg in recent_messages
- if msg.author == last_message.author
- )
-
- total_recent_chars = sum(len(msg.content) for msg in relevant_messages)
-
- if total_recent_chars > config['max']:
- return (
- f"sent {total_recent_chars} characters in {config['interval']}s",
- (last_message.author,),
- relevant_messages
- )
- return None
diff --git a/bot/rules/discord_emojis.py b/bot/rules/discord_emojis.py
deleted file mode 100644
index d979ac5e7..000000000
--- a/bot/rules/discord_emojis.py
+++ /dev/null
@@ -1,34 +0,0 @@
-import re
-from typing import Dict, Iterable, List, Optional, Tuple
-
-from discord import Member, Message
-from emoji import demojize
-
-DISCORD_EMOJI_RE = re.compile(r"<:\w+:\d+>|:\w+:")
-CODE_BLOCK_RE = re.compile(r"```.*?```", flags=re.DOTALL)
-
-
-async def apply(
- last_message: Message, recent_messages: List[Message], config: Dict[str, int]
-) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]:
- """Detects total Discord emojis exceeding the limit sent by a single user."""
- relevant_messages = tuple(
- msg
- for msg in recent_messages
- if msg.author == last_message.author
- )
-
- # Get rid of code blocks in the message before searching for emojis.
- # Convert Unicode emojis to :emoji: format to get their count.
- total_emojis = sum(
- len(DISCORD_EMOJI_RE.findall(demojize(CODE_BLOCK_RE.sub("", msg.content))))
- for msg in relevant_messages
- )
-
- if total_emojis > config['max']:
- return (
- f"sent {total_emojis} emojis in {config['interval']}s",
- (last_message.author,),
- relevant_messages
- )
- return None
diff --git a/bot/rules/duplicates.py b/bot/rules/duplicates.py
deleted file mode 100644
index 8e4fbc12d..000000000
--- a/bot/rules/duplicates.py
+++ /dev/null
@@ -1,28 +0,0 @@
-from typing import Dict, Iterable, List, Optional, Tuple
-
-from discord import Member, Message
-
-
-async def apply(
- last_message: Message, recent_messages: List[Message], config: Dict[str, int]
-) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]:
- """Detects duplicated messages sent by a single user."""
- relevant_messages = tuple(
- msg
- for msg in recent_messages
- if (
- msg.author == last_message.author
- and msg.content == last_message.content
- and msg.content
- )
- )
-
- total_duplicated = len(relevant_messages)
-
- if total_duplicated > config['max']:
- return (
- f"sent {total_duplicated} duplicated messages in {config['interval']}s",
- (last_message.author,),
- relevant_messages
- )
- return None
diff --git a/bot/rules/links.py b/bot/rules/links.py
deleted file mode 100644
index c46b783c5..000000000
--- a/bot/rules/links.py
+++ /dev/null
@@ -1,36 +0,0 @@
-import re
-from typing import Dict, Iterable, List, Optional, Tuple
-
-from discord import Member, Message
-
-LINK_RE = re.compile(r"(https?://[^\s]+)")
-
-
-async def apply(
- last_message: Message, recent_messages: List[Message], config: Dict[str, int]
-) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]:
- """Detects total links exceeding the limit sent by a single user."""
- relevant_messages = tuple(
- msg
- for msg in recent_messages
- if msg.author == last_message.author
- )
- total_links = 0
- messages_with_links = 0
-
- for msg in relevant_messages:
- total_matches = len(LINK_RE.findall(msg.content))
- if total_matches:
- messages_with_links += 1
- total_links += total_matches
-
- # Only apply the filter if we found more than one message with
- # links to prevent wrongfully firing the rule on users posting
- # e.g. an installation log of pip packages from GitHub.
- if total_links > config['max'] and messages_with_links > 1:
- return (
- f"sent {total_links} links in {config['interval']}s",
- (last_message.author,),
- relevant_messages
- )
- return None
diff --git a/bot/rules/newlines.py b/bot/rules/newlines.py
deleted file mode 100644
index 4e66e1359..000000000
--- a/bot/rules/newlines.py
+++ /dev/null
@@ -1,45 +0,0 @@
-import re
-from typing import Dict, Iterable, List, Optional, Tuple
-
-from discord import Member, Message
-
-
-async def apply(
- last_message: Message, recent_messages: List[Message], config: Dict[str, int]
-) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]:
- """Detects total newlines exceeding the set limit sent by a single user."""
- relevant_messages = tuple(
- msg
- for msg in recent_messages
- if msg.author == last_message.author
- )
-
- # Identify groups of newline characters and get group & total counts
- exp = r"(\n+)"
- newline_counts = []
- for msg in relevant_messages:
- newline_counts += [len(group) for group in re.findall(exp, msg.content)]
- total_recent_newlines = sum(newline_counts)
-
- # Get maximum newline group size
- if newline_counts:
- max_newline_group = max(newline_counts)
- else:
- # If no newlines are found, newline_counts will be an empty list, which will error out max()
- max_newline_group = 0
-
- # Check first for total newlines, if this passes then check for large groupings
- if total_recent_newlines > config['max']:
- return (
- f"sent {total_recent_newlines} newlines in {config['interval']}s",
- (last_message.author,),
- relevant_messages
- )
- elif max_newline_group > config['max_consecutive']:
- return (
- f"sent {max_newline_group} consecutive newlines in {config['interval']}s",
- (last_message.author,),
- relevant_messages
- )
-
- return None
diff --git a/bot/rules/role_mentions.py b/bot/rules/role_mentions.py
deleted file mode 100644
index 0649540b6..000000000
--- a/bot/rules/role_mentions.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from typing import Dict, Iterable, List, Optional, Tuple
-
-from discord import Member, Message
-
-
-async def apply(
- last_message: Message, recent_messages: List[Message], config: Dict[str, int]
-) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]:
- """Detects total role mentions exceeding the limit sent by a single user."""
- relevant_messages = tuple(
- msg
- for msg in recent_messages
- if msg.author == last_message.author
- )
-
- total_recent_mentions = sum(len(msg.role_mentions) for msg in relevant_messages)
-
- if total_recent_mentions > config['max']:
- return (
- f"sent {total_recent_mentions} role mentions in {config['interval']}s",
- (last_message.author,),
- relevant_messages
- )
- return None
diff --git a/bot/utils/messages.py b/bot/utils/messages.py
index a5ed84351..63929cd0b 100644
--- a/bot/utils/messages.py
+++ b/bot/utils/messages.py
@@ -238,3 +238,12 @@ async def send_denial(ctx: Context, reason: str) -> discord.Message:
def format_user(user: discord.abc.User) -> str:
"""Return a string for `user` which has their mention and ID."""
return f"{user.mention} (`{user.id}`)"
+
+
+def format_channel(channel: discord.abc.Messageable) -> str:
+ """Return a string for `channel` with its mention, ID, and the parent channel if it is a thread."""
+ formatted = f"{channel.mention} ({channel.category}/#{channel}"
+ if hasattr(channel, "parent"):
+ formatted += f"/{channel.parent}"
+ formatted += ")"
+ return formatted
diff --git a/config-default.yml b/config-default.yml
index a12b680e1..fc334fa7a 100644
--- a/config-default.yml
+++ b/config-default.yml
@@ -317,6 +317,7 @@ guild:
incidents: 816650601844572212
incidents_archive: 720671599790915702
python_news: &PYNEWS_WEBHOOK 704381182279942324
+ filters: 926442964463521843
filter:
diff --git a/poetry.lock b/poetry.lock
index 1191549fc..ca8c88575 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -133,7 +133,7 @@ type = "url"
url = "https://github.com/python-discord/bot-core/archive/refs/tags/v8.0.0.zip"
[[package]]
name = "certifi"
-version = "2022.6.15"
+version = "2022.9.14"
description = "Python package for providing Mozilla's CA Bundle."
category = "main"
optional = false
@@ -160,7 +160,7 @@ python-versions = ">=3.6.1"
[[package]]
name = "charset-normalizer"
-version = "2.1.0"
+version = "2.1.1"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
category = "main"
optional = false
@@ -256,7 +256,7 @@ url = "https://github.com/Rapptz/discord.py/archive/0eb3d26343969a25ffc43ba72eca
[[package]]
name = "distlib"
-version = "0.3.5"
+version = "0.3.6"
description = "Distribution utilities"
category = "dev"
optional = false
@@ -315,15 +315,15 @@ sgmllib3k = "*"
[[package]]
name = "filelock"
-version = "3.7.1"
+version = "3.8.0"
description = "A platform independent file lock."
category = "main"
optional = false
python-versions = ">=3.7"
[package.extras]
-docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"]
-testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"]
+docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"]
+testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"]
[[package]]
name = "flake8"
@@ -446,7 +446,7 @@ pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_ve
[[package]]
name = "identify"
-version = "2.5.3"
+version = "2.5.5"
description = "File identification library for Python"
category = "dev"
optional = false
@@ -457,7 +457,7 @@ license = ["ukkonen"]
[[package]]
name = "idna"
-version = "3.3"
+version = "3.4"
description = "Internationalized Domain Names in Applications (IDNA)"
category = "main"
optional = false
@@ -487,7 +487,7 @@ plugins = ["setuptools"]
[[package]]
name = "jarowinkler"
-version = "1.2.0"
+version = "1.2.1"
description = "library for fast approximate string matching using Jaro and Jaro-Winkler similarity"
category = "main"
optional = false
@@ -635,8 +635,8 @@ optional = false
python-versions = ">=3.6"
[package.extras]
-testing = ["pytest-benchmark", "pytest"]
-dev = ["tox", "pre-commit"]
+dev = ["pre-commit", "tox"]
+testing = ["pytest", "pytest-benchmark"]
[[package]]
name = "pre-commit"
@@ -656,7 +656,7 @@ virtualenv = ">=20.0.8"
[[package]]
name = "psutil"
-version = "5.9.1"
+version = "5.9.2"
description = "Cross-platform lib for process and system monitoring in Python."
category = "dev"
optional = false
@@ -683,7 +683,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "pycares"
-version = "4.2.1"
+version = "4.2.2"
description = "Python interface for c-ares"
category = "main"
optional = false
@@ -712,6 +712,21 @@ optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
+name = "pydantic"
+version = "1.10.2"
+description = "Data validation and settings management using python type hints"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+typing-extensions = ">=4.1.0"
+
+[package.extras]
+dotenv = ["python-dotenv (>=0.10.4)"]
+email = ["email-validator (>=1.0.3)"]
+
+[[package]]
name = "pydocstyle"
version = "6.1.1"
description = "Python docstring style checker"
@@ -786,7 +801,7 @@ coverage = {version = ">=5.2.1", extras = ["toml"]}
pytest = ">=4.6"
[package.extras]
-testing = ["virtualenv", "pytest-xdist", "six", "process-tests", "hunter", "fields"]
+testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"]
[[package]]
name = "pytest-forked"
@@ -852,8 +867,8 @@ python-versions = "*"
PyYAML = "*"
[package.extras]
-test = ["pyaml", "toml", "pytest"]
docs = ["sphinx"]
+test = ["pytest", "toml", "pyaml"]
[[package]]
name = "pyyaml"
@@ -1057,8 +1072,16 @@ optional = false
python-versions = ">=3.7"
[[package]]
+name = "typing-extensions"
+version = "4.3.0"
+description = "Backported and Experimental Type Hints for Python 3.7+"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[[package]]
name = "urllib3"
-version = "1.26.11"
+version = "1.26.12"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "main"
optional = false
@@ -1066,25 +1089,25 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*,
[package.extras]
brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"]
-secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"]
+secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[[package]]
name = "virtualenv"
-version = "20.16.2"
+version = "20.16.5"
description = "Virtual Python Environment builder"
category = "dev"
optional = false
python-versions = ">=3.6"
[package.dependencies]
-distlib = ">=0.3.1,<1"
-filelock = ">=3.2,<4"
-platformdirs = ">=2,<3"
+distlib = ">=0.3.5,<1"
+filelock = ">=3.4.1,<4"
+platformdirs = ">=2.4,<3"
[package.extras]
-docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"]
-testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "packaging (>=20.0)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)"]
+docs = ["proselint (>=0.13)", "sphinx (>=5.1.1)", "sphinx-argparse (>=0.3.1)", "sphinx-rtd-theme (>=1)", "towncrier (>=21.9)"]
+testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"]
[[package]]
name = "wrapt"
@@ -1109,100 +1132,1051 @@ multidict = ">=4.0"
[metadata]
lock-version = "1.1"
python-versions = "3.10.*"
-content-hash = "b0dc5e1339805bf94be5f1b6a8454f8722d4eae645b8188ff62cd7b3c925f7e6"
+content-hash = "61b75a77cd170c395d78f13d0906a5b88d760097bf6fd6e238c506efd5cc99bb"
[metadata.files]
-aiodns = []
-aiohttp = []
-aiosignal = []
-arrow = []
-async-rediscache = []
-async-timeout = []
+aiodns = [
+ {file = "aiodns-3.0.0-py3-none-any.whl", hash = "sha256:2b19bc5f97e5c936638d28e665923c093d8af2bf3aa88d35c43417fa25d136a2"},
+ {file = "aiodns-3.0.0.tar.gz", hash = "sha256:946bdfabe743fceeeb093c8a010f5d1645f708a241be849e17edfb0e49e08cd6"},
+]
+aiohttp = [
+ {file = "aiohttp-3.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1ed0b6477896559f17b9eaeb6d38e07f7f9ffe40b9f0f9627ae8b9926ae260a8"},
+ {file = "aiohttp-3.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7dadf3c307b31e0e61689cbf9e06be7a867c563d5a63ce9dca578f956609abf8"},
+ {file = "aiohttp-3.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a79004bb58748f31ae1cbe9fa891054baaa46fb106c2dc7af9f8e3304dc30316"},
+ {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12de6add4038df8f72fac606dff775791a60f113a725c960f2bab01d8b8e6b15"},
+ {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f0d5f33feb5f69ddd57a4a4bd3d56c719a141080b445cbf18f238973c5c9923"},
+ {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eaba923151d9deea315be1f3e2b31cc39a6d1d2f682f942905951f4e40200922"},
+ {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:099ebd2c37ac74cce10a3527d2b49af80243e2a4fa39e7bce41617fbc35fa3c1"},
+ {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2e5d962cf7e1d426aa0e528a7e198658cdc8aa4fe87f781d039ad75dcd52c516"},
+ {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fa0ffcace9b3aa34d205d8130f7873fcfefcb6a4dd3dd705b0dab69af6712642"},
+ {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:61bfc23df345d8c9716d03717c2ed5e27374e0fe6f659ea64edcd27b4b044cf7"},
+ {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:31560d268ff62143e92423ef183680b9829b1b482c011713ae941997921eebc8"},
+ {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:01d7bdb774a9acc838e6b8f1d114f45303841b89b95984cbb7d80ea41172a9e3"},
+ {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:97ef77eb6b044134c0b3a96e16abcb05ecce892965a2124c566af0fd60f717e2"},
+ {file = "aiohttp-3.8.1-cp310-cp310-win32.whl", hash = "sha256:c2aef4703f1f2ddc6df17519885dbfa3514929149d3ff900b73f45998f2532fa"},
+ {file = "aiohttp-3.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:713ac174a629d39b7c6a3aa757b337599798da4c1157114a314e4e391cd28e32"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:473d93d4450880fe278696549f2e7aed8cd23708c3c1997981464475f32137db"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b5eeae8e019e7aad8af8bb314fb908dd2e028b3cdaad87ec05095394cce632"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3af642b43ce56c24d063325dd2cf20ee012d2b9ba4c3c008755a301aaea720ad"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3630c3ef435c0a7c549ba170a0633a56e92629aeed0e707fec832dee313fb7a"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4a4a4e30bf1edcad13fb0804300557aedd07a92cabc74382fdd0ba6ca2661091"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6f8b01295e26c68b3a1b90efb7a89029110d3a4139270b24fda961893216c440"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a25fa703a527158aaf10dafd956f7d42ac6d30ec80e9a70846253dd13e2f067b"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5bfde62d1d2641a1f5173b8c8c2d96ceb4854f54a44c23102e2ccc7e02f003ec"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:51467000f3647d519272392f484126aa716f747859794ac9924a7aafa86cd411"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:03a6d5349c9ee8f79ab3ff3694d6ce1cfc3ced1c9d36200cb8f08ba06bd3b782"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:102e487eeb82afac440581e5d7f8f44560b36cf0bdd11abc51a46c1cd88914d4"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-win32.whl", hash = "sha256:4aed991a28ea3ce320dc8ce655875e1e00a11bdd29fe9444dd4f88c30d558602"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b0e20cddbd676ab8a64c774fefa0ad787cc506afd844de95da56060348021e96"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:37951ad2f4a6df6506750a23f7cbabad24c73c65f23f72e95897bb2cecbae676"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c23b1ad869653bc818e972b7a3a79852d0e494e9ab7e1a701a3decc49c20d51"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15b09b06dae900777833fe7fc4b4aa426556ce95847a3e8d7548e2d19e34edb8"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:477c3ea0ba410b2b56b7efb072c36fa91b1e6fc331761798fa3f28bb224830dd"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2f2f69dca064926e79997f45b2f34e202b320fd3782f17a91941f7eb85502ee2"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ef9612483cb35171d51d9173647eed5d0069eaa2ee812793a75373447d487aa4"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6d69f36d445c45cda7b3b26afef2fc34ef5ac0cdc75584a87ef307ee3c8c6d00"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:55c3d1072704d27401c92339144d199d9de7b52627f724a949fc7d5fc56d8b93"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b9d00268fcb9f66fbcc7cd9fe423741d90c75ee029a1d15c09b22d23253c0a44"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:07b05cd3305e8a73112103c834e91cd27ce5b4bd07850c4b4dbd1877d3f45be7"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c34dc4958b232ef6188c4318cb7b2c2d80521c9a56c52449f8f93ab7bc2a8a1c"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-win32.whl", hash = "sha256:d2f9b69293c33aaa53d923032fe227feac867f81682f002ce33ffae978f0a9a9"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6ae828d3a003f03ae31915c31fa684b9890ea44c9c989056fea96e3d12a9fa17"},
+ {file = "aiohttp-3.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0c7ebbbde809ff4e970824b2b6cb7e4222be6b95a296e46c03cf050878fc1785"},
+ {file = "aiohttp-3.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8b7ef7cbd4fec9a1e811a5de813311ed4f7ac7d93e0fda233c9b3e1428f7dd7b"},
+ {file = "aiohttp-3.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c3d6a4d0619e09dcd61021debf7059955c2004fa29f48788a3dfaf9c9901a7cd"},
+ {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:718626a174e7e467f0558954f94af117b7d4695d48eb980146016afa4b580b2e"},
+ {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:589c72667a5febd36f1315aa6e5f56dd4aa4862df295cb51c769d16142ddd7cd"},
+ {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ed076098b171573161eb146afcb9129b5ff63308960aeca4b676d9d3c35e700"},
+ {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:086f92daf51a032d062ec5f58af5ca6a44d082c35299c96376a41cbb33034675"},
+ {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:11691cf4dc5b94236ccc609b70fec991234e7ef8d4c02dd0c9668d1e486f5abf"},
+ {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:31d1e1c0dbf19ebccbfd62eff461518dcb1e307b195e93bba60c965a4dcf1ba0"},
+ {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:11a67c0d562e07067c4e86bffc1553f2cf5b664d6111c894671b2b8712f3aba5"},
+ {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:bb01ba6b0d3f6c68b89fce7305080145d4877ad3acaed424bae4d4ee75faa950"},
+ {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:44db35a9e15d6fe5c40d74952e803b1d96e964f683b5a78c3cc64eb177878155"},
+ {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:844a9b460871ee0a0b0b68a64890dae9c415e513db0f4a7e3cab41a0f2fedf33"},
+ {file = "aiohttp-3.8.1-cp38-cp38-win32.whl", hash = "sha256:7d08744e9bae2ca9c382581f7dce1273fe3c9bae94ff572c3626e8da5b193c6a"},
+ {file = "aiohttp-3.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:04d48b8ce6ab3cf2097b1855e1505181bdd05586ca275f2505514a6e274e8e75"},
+ {file = "aiohttp-3.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f5315a2eb0239185af1bddb1abf472d877fede3cc8d143c6cddad37678293237"},
+ {file = "aiohttp-3.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a996d01ca39b8dfe77440f3cd600825d05841088fd6bc0144cc6c2ec14cc5f74"},
+ {file = "aiohttp-3.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:13487abd2f761d4be7c8ff9080de2671e53fff69711d46de703c310c4c9317ca"},
+ {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea302f34477fda3f85560a06d9ebdc7fa41e82420e892fc50b577e35fc6a50b2"},
+ {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2f635ce61a89c5732537a7896b6319a8fcfa23ba09bec36e1b1ac0ab31270d2"},
+ {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e999f2d0e12eea01caeecb17b653f3713d758f6dcc770417cf29ef08d3931421"},
+ {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0770e2806a30e744b4e21c9d73b7bee18a1cfa3c47991ee2e5a65b887c49d5cf"},
+ {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d15367ce87c8e9e09b0f989bfd72dc641bcd04ba091c68cd305312d00962addd"},
+ {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6c7cefb4b0640703eb1069835c02486669312bf2f12b48a748e0a7756d0de33d"},
+ {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:71927042ed6365a09a98a6377501af5c9f0a4d38083652bcd2281a06a5976724"},
+ {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:28d490af82bc6b7ce53ff31337a18a10498303fe66f701ab65ef27e143c3b0ef"},
+ {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:b6613280ccedf24354406caf785db748bebbddcf31408b20c0b48cb86af76866"},
+ {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:81e3d8c34c623ca4e36c46524a3530e99c0bc95ed068fd6e9b55cb721d408fb2"},
+ {file = "aiohttp-3.8.1-cp39-cp39-win32.whl", hash = "sha256:7187a76598bdb895af0adbd2fb7474d7f6025d170bc0a1130242da817ce9e7d1"},
+ {file = "aiohttp-3.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:1c182cb873bc91b411e184dab7a2b664d4fea2743df0e4d57402f7f3fa644bac"},
+ {file = "aiohttp-3.8.1.tar.gz", hash = "sha256:fc5471e1a54de15ef71c1bc6ebe80d4dc681ea600e68bfd1cbce40427f0b7578"},
+]
+aiosignal = [
+ {file = "aiosignal-1.2.0-py3-none-any.whl", hash = "sha256:26e62109036cd181df6e6ad646f91f0dcfd05fe16d0cb924138ff2ab75d64e3a"},
+ {file = "aiosignal-1.2.0.tar.gz", hash = "sha256:78ed67db6c7b7ced4f98e495e572106d5c432a93e1ddd1bf475e1dc05f5b7df2"},
+]
+arrow = [
+ {file = "arrow-1.2.2-py3-none-any.whl", hash = "sha256:d622c46ca681b5b3e3574fcb60a04e5cc81b9625112d5fb2b44220c36c892177"},
+ {file = "arrow-1.2.2.tar.gz", hash = "sha256:05caf1fd3d9a11a1135b2b6f09887421153b94558e5ef4d090b567b47173ac2b"},
+]
+async-rediscache = [
+ {file = "async-rediscache-1.0.0rc2.tar.gz", hash = "sha256:65b1f67df0bd92defe37a3e645ea4c868da29eb41bfa493643a3b4ae7c0e109c"},
+ {file = "async_rediscache-1.0.0rc2-py3-none-any.whl", hash = "sha256:b156cc42b3285e1bd620487c594d7238552f95e48dc07b4e5d0b1c095c3acc86"},
+]
+async-timeout = [
+ {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"},
+ {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"},
+]
atomicwrites = []
-attrs = []
-beautifulsoup4 = []
+attrs = [
+ {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"},
+ {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
+]
+beautifulsoup4 = [
+ {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"},
+ {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"},
+]
bot-core = []
-certifi = []
+certifi = [
+ {file = "certifi-2022.9.14-py3-none-any.whl", hash = "sha256:e232343de1ab72c2aa521b625c80f699e356830fd0e2c620b465b304b17b0516"},
+ {file = "certifi-2022.9.14.tar.gz", hash = "sha256:36973885b9542e6bd01dea287b2b4b3b21236307c56324fcc3f1160f2d655ed5"},
+]
cffi = []
-cfgv = []
-charset-normalizer = []
-colorama = []
-coloredlogs = []
+cfgv = [
+ {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"},
+ {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"},
+]
+charset-normalizer = [
+ {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"},
+ {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"},
+]
+colorama = [
+ {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"},
+ {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"},
+]
+coloredlogs = [
+ {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"},
+ {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"},
+]
coverage = []
-deepdiff = []
-deprecated = []
+deepdiff = [
+ {file = "deepdiff-5.8.1-py3-none-any.whl", hash = "sha256:e9aea49733f34fab9a0897038d8f26f9d94a97db1790f1b814cced89e9e0d2b7"},
+ {file = "deepdiff-5.8.1.tar.gz", hash = "sha256:8d4eb2c4e6cbc80b811266419cb71dd95a157094a3947ccf937a94d44943c7b8"},
+]
+deprecated = [
+ {file = "Deprecated-1.2.13-py2.py3-none-any.whl", hash = "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"},
+ {file = "Deprecated-1.2.13.tar.gz", hash = "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"},
+]
"discord.py" = []
-distlib = []
-emoji = []
-execnet = []
-fakeredis = []
-feedparser = []
-filelock = []
-flake8 = []
-flake8-annotations = []
-flake8-bugbear = []
-flake8-docstrings = []
-flake8-isort = []
-flake8-string-format = []
-flake8-tidy-imports = []
-flake8-todo = []
-frozenlist = []
-humanfriendly = []
-identify = []
-idna = []
-iniconfig = []
-isort = []
-jarowinkler = []
-lupa = []
+distlib = [
+ {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"},
+ {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"},
+]
+emoji = [
+ {file = "emoji-2.0.0.tar.gz", hash = "sha256:297fac7ec9e86f7b602792c28eb6f04819ba67ab88a34c56afcde52243a9a105"},
+]
+execnet = [
+ {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"},
+ {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"},
+]
+fakeredis = [
+ {file = "fakeredis-1.8.2-py3-none-any.whl", hash = "sha256:5e85a480c41b2a46edd6ba67f44197acc6603c59427fdf4456ebb89e56e77fa5"},
+ {file = "fakeredis-1.8.2.tar.gz", hash = "sha256:3564fbaed1eaec890eff96ee9088c1d30ee5fba2b81c97c72143f809d9c60c74"},
+]
+feedparser = [
+ {file = "feedparser-6.0.10-py3-none-any.whl", hash = "sha256:79c257d526d13b944e965f6095700587f27388e50ea16fd245babe4dfae7024f"},
+ {file = "feedparser-6.0.10.tar.gz", hash = "sha256:27da485f4637ce7163cdeab13a80312b93b7d0c1b775bef4a47629a3110bca51"},
+]
+filelock = [
+ {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"},
+ {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"},
+]
+flake8 = [
+ {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"},
+ {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"},
+]
+flake8-annotations = [
+ {file = "flake8-annotations-2.9.0.tar.gz", hash = "sha256:63fb3f538970b6a8dfd84125cf5af16f7b22e52d5032acb3b7eb23645ecbda9b"},
+ {file = "flake8_annotations-2.9.0-py3-none-any.whl", hash = "sha256:84f46de2964cb18fccea968d9eafce7cf857e34d913d515120795b9af6498d56"},
+]
+flake8-bugbear = [
+ {file = "flake8-bugbear-22.7.1.tar.gz", hash = "sha256:e450976a07e4f9d6c043d4f72b17ec1baf717fe37f7997009c8ae58064f88305"},
+ {file = "flake8_bugbear-22.7.1-py3-none-any.whl", hash = "sha256:db5d7a831ef4412a224b26c708967ff816818cabae415e76b8c58df156c4b8e5"},
+]
+flake8-docstrings = [
+ {file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"},
+ {file = "flake8_docstrings-1.6.0-py2.py3-none-any.whl", hash = "sha256:99cac583d6c7e32dd28bbfbef120a7c0d1b6dde4adb5a9fd441c4227a6534bde"},
+]
+flake8-isort = [
+ {file = "flake8-isort-4.1.2.post0.tar.gz", hash = "sha256:dee69bc3c09f0832df88acf795845db8a6673b79237371a05fa927ce095248e5"},
+ {file = "flake8_isort-4.1.2.post0-py3-none-any.whl", hash = "sha256:4f95b40706dbb507cff872b34683283662e945d6028d3c8257e69de5fc6b7446"},
+]
+flake8-string-format = [
+ {file = "flake8-string-format-0.3.0.tar.gz", hash = "sha256:65f3da786a1461ef77fca3780b314edb2853c377f2e35069723348c8917deaa2"},
+ {file = "flake8_string_format-0.3.0-py2.py3-none-any.whl", hash = "sha256:812ff431f10576a74c89be4e85b8e075a705be39bc40c4b4278b5b13e2afa9af"},
+]
+flake8-tidy-imports = [
+ {file = "flake8-tidy-imports-4.8.0.tar.gz", hash = "sha256:df44f9c841b5dfb3a7a1f0da8546b319d772c2a816a1afefcce43e167a593d83"},
+ {file = "flake8_tidy_imports-4.8.0-py3-none-any.whl", hash = "sha256:25bd9799358edefa0e010ce2c587b093c3aba942e96aeaa99b6d0500ae1bf09c"},
+]
+flake8-todo = [
+ {file = "flake8-todo-0.7.tar.gz", hash = "sha256:6e4c5491ff838c06fe5a771b0e95ee15fc005ca57196011011280fc834a85915"},
+]
+frozenlist = [
+ {file = "frozenlist-1.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5f271c93f001748fc26ddea409241312a75e13466b06c94798d1a341cf0e6989"},
+ {file = "frozenlist-1.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9c6ef8014b842f01f5d2b55315f1af5cbfde284eb184075c189fd657c2fd8204"},
+ {file = "frozenlist-1.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:219a9676e2eae91cb5cc695a78b4cb43d8123e4160441d2b6ce8d2c70c60e2f3"},
+ {file = "frozenlist-1.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b47d64cdd973aede3dd71a9364742c542587db214e63b7529fbb487ed67cddd9"},
+ {file = "frozenlist-1.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2af6f7a4e93f5d08ee3f9152bce41a6015b5cf87546cb63872cc19b45476e98a"},
+ {file = "frozenlist-1.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a718b427ff781c4f4e975525edb092ee2cdef6a9e7bc49e15063b088961806f8"},
+ {file = "frozenlist-1.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c56c299602c70bc1bb5d1e75f7d8c007ca40c9d7aebaf6e4ba52925d88ef826d"},
+ {file = "frozenlist-1.3.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:717470bfafbb9d9be624da7780c4296aa7935294bd43a075139c3d55659038ca"},
+ {file = "frozenlist-1.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:31b44f1feb3630146cffe56344704b730c33e042ffc78d21f2125a6a91168131"},
+ {file = "frozenlist-1.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c3b31180b82c519b8926e629bf9f19952c743e089c41380ddca5db556817b221"},
+ {file = "frozenlist-1.3.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:d82bed73544e91fb081ab93e3725e45dd8515c675c0e9926b4e1f420a93a6ab9"},
+ {file = "frozenlist-1.3.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49459f193324fbd6413e8e03bd65789e5198a9fa3095e03f3620dee2f2dabff2"},
+ {file = "frozenlist-1.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:94e680aeedc7fd3b892b6fa8395b7b7cc4b344046c065ed4e7a1e390084e8cb5"},
+ {file = "frozenlist-1.3.1-cp310-cp310-win32.whl", hash = "sha256:fabb953ab913dadc1ff9dcc3a7a7d3dc6a92efab3a0373989b8063347f8705be"},
+ {file = "frozenlist-1.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:eee0c5ecb58296580fc495ac99b003f64f82a74f9576a244d04978a7e97166db"},
+ {file = "frozenlist-1.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0bc75692fb3770cf2b5856a6c2c9de967ca744863c5e89595df64e252e4b3944"},
+ {file = "frozenlist-1.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086ca1ac0a40e722d6833d4ce74f5bf1aba2c77cbfdc0cd83722ffea6da52a04"},
+ {file = "frozenlist-1.3.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b51eb355e7f813bcda00276b0114c4172872dc5fb30e3fea059b9367c18fbcb"},
+ {file = "frozenlist-1.3.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:74140933d45271c1a1283f708c35187f94e1256079b3c43f0c2267f9db5845ff"},
+ {file = "frozenlist-1.3.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee4c5120ddf7d4dd1eaf079af3af7102b56d919fa13ad55600a4e0ebe532779b"},
+ {file = "frozenlist-1.3.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97d9e00f3ac7c18e685320601f91468ec06c58acc185d18bb8e511f196c8d4b2"},
+ {file = "frozenlist-1.3.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6e19add867cebfb249b4e7beac382d33215d6d54476bb6be46b01f8cafb4878b"},
+ {file = "frozenlist-1.3.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a027f8f723d07c3f21963caa7d585dcc9b089335565dabe9c814b5f70c52705a"},
+ {file = "frozenlist-1.3.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:61d7857950a3139bce035ad0b0945f839532987dfb4c06cfe160254f4d19df03"},
+ {file = "frozenlist-1.3.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:53b2b45052e7149ee8b96067793db8ecc1ae1111f2f96fe1f88ea5ad5fd92d10"},
+ {file = "frozenlist-1.3.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bbb1a71b1784e68870800b1bc9f3313918edc63dbb8f29fbd2e767ce5821696c"},
+ {file = "frozenlist-1.3.1-cp37-cp37m-win32.whl", hash = "sha256:ab6fa8c7871877810e1b4e9392c187a60611fbf0226a9e0b11b7b92f5ac72792"},
+ {file = "frozenlist-1.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f89139662cc4e65a4813f4babb9ca9544e42bddb823d2ec434e18dad582543bc"},
+ {file = "frozenlist-1.3.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4c0c99e31491a1d92cde8648f2e7ccad0e9abb181f6ac3ddb9fc48b63301808e"},
+ {file = "frozenlist-1.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:61e8cb51fba9f1f33887e22488bad1e28dd8325b72425f04517a4d285a04c519"},
+ {file = "frozenlist-1.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc2f3e368ee5242a2cbe28323a866656006382872c40869b49b265add546703f"},
+ {file = "frozenlist-1.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58fb94a01414cddcdc6839807db77ae8057d02ddafc94a42faee6004e46c9ba8"},
+ {file = "frozenlist-1.3.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:022178b277cb9277d7d3b3f2762d294f15e85cd2534047e68a118c2bb0058f3e"},
+ {file = "frozenlist-1.3.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:572ce381e9fe027ad5e055f143763637dcbac2542cfe27f1d688846baeef5170"},
+ {file = "frozenlist-1.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19127f8dcbc157ccb14c30e6f00392f372ddb64a6ffa7106b26ff2196477ee9f"},
+ {file = "frozenlist-1.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42719a8bd3792744c9b523674b752091a7962d0d2d117f0b417a3eba97d1164b"},
+ {file = "frozenlist-1.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2743bb63095ef306041c8f8ea22bd6e4d91adabf41887b1ad7886c4c1eb43d5f"},
+ {file = "frozenlist-1.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:fa47319a10e0a076709644a0efbcaab9e91902c8bd8ef74c6adb19d320f69b83"},
+ {file = "frozenlist-1.3.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52137f0aea43e1993264a5180c467a08a3e372ca9d378244c2d86133f948b26b"},
+ {file = "frozenlist-1.3.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:f5abc8b4d0c5b556ed8cd41490b606fe99293175a82b98e652c3f2711b452988"},
+ {file = "frozenlist-1.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1e1cf7bc8cbbe6ce3881863671bac258b7d6bfc3706c600008925fb799a256e2"},
+ {file = "frozenlist-1.3.1-cp38-cp38-win32.whl", hash = "sha256:0dde791b9b97f189874d654c55c24bf7b6782343e14909c84beebd28b7217845"},
+ {file = "frozenlist-1.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:9494122bf39da6422b0972c4579e248867b6b1b50c9b05df7e04a3f30b9a413d"},
+ {file = "frozenlist-1.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:31bf9539284f39ff9398deabf5561c2b0da5bb475590b4e13dd8b268d7a3c5c1"},
+ {file = "frozenlist-1.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e0c8c803f2f8db7217898d11657cb6042b9b0553a997c4a0601f48a691480fab"},
+ {file = "frozenlist-1.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da5ba7b59d954f1f214d352308d1d86994d713b13edd4b24a556bcc43d2ddbc3"},
+ {file = "frozenlist-1.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74e6b2b456f21fc93ce1aff2b9728049f1464428ee2c9752a4b4f61e98c4db96"},
+ {file = "frozenlist-1.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:526d5f20e954d103b1d47232e3839f3453c02077b74203e43407b962ab131e7b"},
+ {file = "frozenlist-1.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b499c6abe62a7a8d023e2c4b2834fce78a6115856ae95522f2f974139814538c"},
+ {file = "frozenlist-1.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab386503f53bbbc64d1ad4b6865bf001414930841a870fc97f1546d4d133f141"},
+ {file = "frozenlist-1.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f63c308f82a7954bf8263a6e6de0adc67c48a8b484fab18ff87f349af356efd"},
+ {file = "frozenlist-1.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:12607804084d2244a7bd4685c9d0dca5df17a6a926d4f1967aa7978b1028f89f"},
+ {file = "frozenlist-1.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:da1cdfa96425cbe51f8afa43e392366ed0b36ce398f08b60de6b97e3ed4affef"},
+ {file = "frozenlist-1.3.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f810e764617b0748b49a731ffaa525d9bb36ff38332411704c2400125af859a6"},
+ {file = "frozenlist-1.3.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:35c3d79b81908579beb1fb4e7fcd802b7b4921f1b66055af2578ff7734711cfa"},
+ {file = "frozenlist-1.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c92deb5d9acce226a501b77307b3b60b264ca21862bd7d3e0c1f3594022f01bc"},
+ {file = "frozenlist-1.3.1-cp39-cp39-win32.whl", hash = "sha256:5e77a8bd41e54b05e4fb2708dc6ce28ee70325f8c6f50f3df86a44ecb1d7a19b"},
+ {file = "frozenlist-1.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:625d8472c67f2d96f9a4302a947f92a7adbc1e20bedb6aff8dbc8ff039ca6189"},
+ {file = "frozenlist-1.3.1.tar.gz", hash = "sha256:3a735e4211a04ccfa3f4833547acdf5d2f863bfeb01cfd3edaffbc251f15cec8"},
+]
+humanfriendly = [
+ {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"},
+ {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"},
+]
+identify = [
+ {file = "identify-2.5.5-py2.py3-none-any.whl", hash = "sha256:ef78c0d96098a3b5fe7720be4a97e73f439af7cf088ebf47b620aeaa10fadf97"},
+ {file = "identify-2.5.5.tar.gz", hash = "sha256:322a5699daecf7c6fd60e68852f36f2ecbb6a36ff6e6e973e0d2bb6fca203ee6"},
+]
+idna = [
+ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
+ {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
+]
+iniconfig = [
+ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
+ {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
+]
+isort = [
+ {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"},
+ {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"},
+]
+jarowinkler = [
+ {file = "jarowinkler-1.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e006017c2ab73533f96202bdeed7e510738a0ef7585f18cb2ab4122b15e8467e"},
+ {file = "jarowinkler-1.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d32c821e3aef70686c0726fad936adedf39f5f2985a2b9f525fe2da784f39490"},
+ {file = "jarowinkler-1.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:94fd891230a08f8b285c78c1799b1e4c44587746cb761e1914873fc2922c8e42"},
+ {file = "jarowinkler-1.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b8c9eae4662fc96c71d9192ba6d30f43433ff4fd20398920c276245fe414ce"},
+ {file = "jarowinkler-1.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7315dcbfeb73130b9343513cf147c1fc50bfae0988c2b03128fb424b84d3866"},
+ {file = "jarowinkler-1.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b440840e67574b4b069c0f298ebd02c1a4fe703b90863e044688d0cad01be636"},
+ {file = "jarowinkler-1.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3873cae784f744dab3c592aecacab8c584c3a5156dc402528b07372449559ae5"},
+ {file = "jarowinkler-1.2.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1859480ae1140e06e3c99886623711b0bbede53754201cad92e6c087e6f42123"},
+ {file = "jarowinkler-1.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:88e74b1fdbb4251d1ce42b0fd411f540d3da51a04d0b0440f77ddc6b613e4042"},
+ {file = "jarowinkler-1.2.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df33e200e5ea7ce1d2c2253582f5aaccd5fcbb18b9f8c9e67bce000277cceb5f"},
+ {file = "jarowinkler-1.2.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:dfe72578d3f1c8e0da15685d0e3b75361866dda77fcee411b8402ec794a82cbf"},
+ {file = "jarowinkler-1.2.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4a40c702154034815cd4b74f5c22b90c9741e562afb24592e1f3674b8cd661d1"},
+ {file = "jarowinkler-1.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8761c045f1310e682d2f32e17f5054ed7663289c90d63d9fb82af8c233ed1c95"},
+ {file = "jarowinkler-1.2.1-cp310-cp310-win32.whl", hash = "sha256:a06e89556baecd2fdcd77ea2f044acca4ffbfcf7f28d42dc9bffd1dc96fca8a8"},
+ {file = "jarowinkler-1.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:193bc642d46cfade301f5c7684b4254035f3a870e8f608f7a46a4c4d66cc625a"},
+ {file = "jarowinkler-1.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2e265c6a3c87872a4badd72d7e2beb4ef968b9d04f024f4ce3e6e4e048f767e1"},
+ {file = "jarowinkler-1.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d48b85ccdb4885b1f7c5ff14ac0a179e1e8d13fa7e2380557bc8cfa570f94750"},
+ {file = "jarowinkler-1.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:687d4af42b2fb0450ab0e4cc094ef33e5c42b5f77596b1bd40d4ecb2a18cf99f"},
+ {file = "jarowinkler-1.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a79bed41cdc671881d04ce1250498a8c5b1444761920a2cb569a09c07d39403d"},
+ {file = "jarowinkler-1.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:25a7fd1c00beb08f023c28fa8e6c27939daac4ae5d700b875dd0f2c6e352fdb7"},
+ {file = "jarowinkler-1.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e242e7c2d65865cf81e98d35ce0d51f165e1739bcd1fffa450beb55ddea2aaa"},
+ {file = "jarowinkler-1.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0895225e4b3fcc1786c7298616f368b0e479cc9f7087ced7a9b2042808fe855"},
+ {file = "jarowinkler-1.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a84aac689fd95b45865122a36843f18aa0a58664c44558ea809060b200b1cf1"},
+ {file = "jarowinkler-1.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:de0b323a34205326351e5ab9ec103294365285b587e3dafe7361f78f735eaf02"},
+ {file = "jarowinkler-1.2.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5037674a5b41aa22d7a9e1ad53e4560f2441c5584b514e333476db457e5644dc"},
+ {file = "jarowinkler-1.2.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:60f6c9889432382fc6ecaae3f29e59ec560152bd1d1246eaca6b8d63c5029cd5"},
+ {file = "jarowinkler-1.2.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:afbe29453712afe00bee3bb21f36fef7852129584c05039df40086cdfed9b95e"},
+ {file = "jarowinkler-1.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fefb9f8a51bbd3704a9b0fa9ca3a01a66b322e82ac75093bda56a7e108a97801"},
+ {file = "jarowinkler-1.2.1-cp311-cp311-win32.whl", hash = "sha256:892e7825acae0d4cd21b811e1996d976574e5e209785f644e5830ac4f86b12d7"},
+ {file = "jarowinkler-1.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:6cc53c9c2bda481d2184221d13121847b26271a90e2522c96169de7e3b00d704"},
+ {file = "jarowinkler-1.2.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:068e7785ad2bbca5092c13cdf2720a8a968ae46f0e2972f76faa5de5185d911b"},
+ {file = "jarowinkler-1.2.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fb7e227c991e0129daf3ca3794325c37aa490333ac6aa223eebb6dbe6b2c059"},
+ {file = "jarowinkler-1.2.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b30901cf2012c3aa01f5a9779cd6c7314167cf00750b76f3eba4872f61f830ba"},
+ {file = "jarowinkler-1.2.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cfa6ff2260707e522a814a93c3074337e016ce60493b6b2cb6aa1350aae6e375"},
+ {file = "jarowinkler-1.2.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ea7ae636db15ca1143382681b41ca77cc6743aa48772455de0d5a172159ea13"},
+ {file = "jarowinkler-1.2.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc61a8667cb4601801a472732f43319feb16eea7145755415e64462ab660cdc5"},
+ {file = "jarowinkler-1.2.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:d0a3271494a2298c72372ee064f83445636c257b3c77e4bbe2045ad974c4982f"},
+ {file = "jarowinkler-1.2.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:f4ac432ffff5c2b119c1d231a37e27ca6b0f5f1fcc5f27f715127df265ba4706"},
+ {file = "jarowinkler-1.2.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:ea3d4521d90b0d8bf3174e97b039b5706935ea7830d5ad672363a85a57c30d9e"},
+ {file = "jarowinkler-1.2.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:40d17decb4a34be5f086cf3d492264003cd9412e85e4ad182a43b885ae89fa60"},
+ {file = "jarowinkler-1.2.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d2ce7fe2a324f425d8639fbdd2b393432e70ff9cae5ddc16e8059b47d16be613"},
+ {file = "jarowinkler-1.2.1-cp36-cp36m-win32.whl", hash = "sha256:51f3d4bae8feac02776561e7d3bc8f449d41cde07203ef36fba49c60fc84921e"},
+ {file = "jarowinkler-1.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:0b8b7c178ff0b8089815938d9c9b3f2bc17ab785dc5590a3ee5413a980c4872c"},
+ {file = "jarowinkler-1.2.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2760ba9c3c702c68c1d6b04da50bfa2595de87cb2f57ca65d14abc9f5fa587bb"},
+ {file = "jarowinkler-1.2.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dc8d23c9abc5e61b00917b1bc040a165096fe9d0a1b44aae92199d0c605cc3c"},
+ {file = "jarowinkler-1.2.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b1dcfcb35081a08f127ab843bc1a6fb540c7cc5d6670f0126e546e6060243cdd"},
+ {file = "jarowinkler-1.2.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:774b5f0e6daa07e1dae7539d4c691812f0f903fcdcc48d17ee225901ceec5767"},
+ {file = "jarowinkler-1.2.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:940ccc7d3f7fbd4cf220aab2ee106719976a408c13a56bf35d48db0d73d9467b"},
+ {file = "jarowinkler-1.2.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7c34879601112fe728f167f099aea227a5a9790e85be09f079f85d94ece4af3"},
+ {file = "jarowinkler-1.2.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3b6fce44c7bcbbaa38678cebc9fb0af0604aff234644976c6b7816257ce42420"},
+ {file = "jarowinkler-1.2.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:793f4fd23c7e361197f8a07a035c059b6254ff4998b1a54d5484e558b78f143a"},
+ {file = "jarowinkler-1.2.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:615c09d4ebe5417c201c4a00cff703d7d5c335350f36b8ae856d1644fe1a38e9"},
+ {file = "jarowinkler-1.2.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:90c624f1b660884415e0141a2c4fc8446fed00d3d795a9c8afafb6b6e304e8fd"},
+ {file = "jarowinkler-1.2.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ff3de9898a05d7902ed129714f32a66ac79af175a918663831f430da16578573"},
+ {file = "jarowinkler-1.2.1-cp37-cp37m-win32.whl", hash = "sha256:9e4af981c50ee02eabe26b91091c51750dfef3a9ca4ae8fd9d5bde83ae802d27"},
+ {file = "jarowinkler-1.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:09730ced0acb262396465086174229ac40328fdee57a698d0c033cf1c7447039"},
+ {file = "jarowinkler-1.2.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a805a091d65f083da44f1bfdeef81818331ab76ae1475959077f41a94ee944ff"},
+ {file = "jarowinkler-1.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:641ef0750468eb17bfd3a4e4c25ae14d37ecbcc9d7b10abfcf86b3881b1aca3a"},
+ {file = "jarowinkler-1.2.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9d476d34dbcb4ed8cbfa1e5e7d865440f2c9781cf023d1c64e4d8ec16167b52a"},
+ {file = "jarowinkler-1.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41fad5783c09352a0bc1a0290fdbfd08d7301a6cf7c933c9b8b6fc0d07332aae"},
+ {file = "jarowinkler-1.2.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:245225d69ea54a4f3875c243a4448da235f31de2c8280fad597732815d22cc4b"},
+ {file = "jarowinkler-1.2.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:46b2106b56329699c2f775b65747940aaafd077dac124aae7864de4dcd7a79dd"},
+ {file = "jarowinkler-1.2.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ab4e24cbb27f4602e4b809397ee12138583132a0d3e80a92de1a5f150c9ca58"},
+ {file = "jarowinkler-1.2.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:088d1da357dc9781e85385e6437275e676333a30416d2b1cdde3936bb9abc80f"},
+ {file = "jarowinkler-1.2.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5b9747dab72c7c6a25e98a68ca7e3ba3a9f3a3de574dc635c92fa4551bd58505"},
+ {file = "jarowinkler-1.2.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:fce17899e967489ba4b932dc53b7c408454c3e3873eab92c126a98ddb20cc246"},
+ {file = "jarowinkler-1.2.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:e0b822891ef854cf00c4df6269ddf04928d8f65c8741a8766839964cd8732c48"},
+ {file = "jarowinkler-1.2.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9f76759af8d383b18466b90c0661e54eed41ff5d405ce437000147ba7e4e4d07"},
+ {file = "jarowinkler-1.2.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f3213a9878c7f3c0e2a807275d38a9d9848a193dcb5ada748863a7a18a335f9"},
+ {file = "jarowinkler-1.2.1-cp38-cp38-win32.whl", hash = "sha256:dcfbdb72dcfe2a7a8439243f2c5dccf85be7bd81c65ba9f7ecd73a78aef4ad28"},
+ {file = "jarowinkler-1.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:9243e428655b956fa564708f037dffb28bc97dd699001c794344281774f3dfda"},
+ {file = "jarowinkler-1.2.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:78f946d11aeb92aea59e2dea380fc59d54dd22339f3fa79093b71c466057ecca"},
+ {file = "jarowinkler-1.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d259319d4e37539166a4080ce8ca64b2d9c5c54cd5c2d8136fcaf777186c5c71"},
+ {file = "jarowinkler-1.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bc9b4a802cc69f8aeceed871e4caf1ae305ff0f2d55825d47117f367271d9111"},
+ {file = "jarowinkler-1.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04a451348e72b1703902faae9456e53aff25d9428a674d61d9d42a12780da7ae"},
+ {file = "jarowinkler-1.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:27068fb2073a5dcb265411dd7c9beb2bf7fca5c5e1fb4de35eb2562bd89a6462"},
+ {file = "jarowinkler-1.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84d5672a235c9b0fcdaf4236db78a7ec76384dee43e875016eb60d6365de72f4"},
+ {file = "jarowinkler-1.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:667351b515d4678ceb0e5f596febd38c446c6bc64de1545b2868cd739f4389da"},
+ {file = "jarowinkler-1.2.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32eefc177aa6ca6f3aeb660b1923dd82527686f7a522e0267c877a511361bb25"},
+ {file = "jarowinkler-1.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:68efe1acfc3e920fc055c15dc139a8bb69ae0c292541a0985ab6df819133e80a"},
+ {file = "jarowinkler-1.2.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:116a5e8d8113b026f5c79a446c996282c70e6b1d2a9cc6fa335193a0a5d4fb11"},
+ {file = "jarowinkler-1.2.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:bf7c3f413c03320d452fb5c00d26177a1c3472c489e4c28f6d10c031a307f325"},
+ {file = "jarowinkler-1.2.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:46307dd8b0026f8062ff31fccd676dafd63f75f2bca4a29110b7b17bdf12d2c6"},
+ {file = "jarowinkler-1.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a0936d5d98ed56a157b81d36209c562a0e6d3995244ee3117859b5f64be9c653"},
+ {file = "jarowinkler-1.2.1-cp39-cp39-win32.whl", hash = "sha256:e61bf2306727e5152f9c67a62c02ef09039181dc0b9611c81266ae46ba695d63"},
+ {file = "jarowinkler-1.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:438ff34e4cf801562e87fdf0319d6deeb839aaaf4e4e1a7e300e28f365f52cd1"},
+ {file = "jarowinkler-1.2.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d61b40b466c258386949329b40d97b2554f3cd4934756bf3931104da1d888236"},
+ {file = "jarowinkler-1.2.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:787a2c382e774c0d97a5ebc84c8051e72d82208ae124c8fc07dec09e3e69e885"},
+ {file = "jarowinkler-1.2.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f95cc000493db9aacdad56b7890964517a60c8cb275d546524b9669e32922c49"},
+ {file = "jarowinkler-1.2.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e90dabfc948838412082e308c37408d36948bb51844f97d3fc2698eaa0d7441e"},
+ {file = "jarowinkler-1.2.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:dbd784c599e6cb36b0f072ad5502ee98d256cee73d23adc6fa7c5c30d2c614f4"},
+ {file = "jarowinkler-1.2.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56f24f5778b2f5837459739573c3294b79f274f526ff1406fdb2160e81371db2"},
+ {file = "jarowinkler-1.2.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7049ef0ab6676837b200c7170492bae58d2780647c847f840615c72ae6ae8f8b"},
+ {file = "jarowinkler-1.2.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92196b5238ac9063c91d8221dc38cb3a1c22c68b11e547b52ddaa120cd9a93d9"},
+ {file = "jarowinkler-1.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:926d9174c651b552193239557ff1da0044720d12dc2ad6fb754be29a46926ebb"},
+ {file = "jarowinkler-1.2.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b106de1e1122ebb1663720474be6fcdceffbdf6c2509e3c50d19401f73fe7d3"},
+ {file = "jarowinkler-1.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f571a597ce417cb0fd3647f1adbb69b5793d449b098410eb249de7994c107f88"},
+ {file = "jarowinkler-1.2.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9511fbbd8a3f6cb979cb35c637e43759607c7102f836aae755e4c2f29c033bac"},
+ {file = "jarowinkler-1.2.1.tar.gz", hash = "sha256:206364a885ce296f7f79c669734317f2741f6bbd964907e49e3b9ea0e9de5029"},
+]
+lupa = [
+ {file = "lupa-1.13-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:da1885faca29091f9e408c0cc6b43a0b29a2128acf8d08c188febc5d9f99129d"},
+ {file = "lupa-1.13-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4525e954e951562eb5609eca6ac694d0158a5351649656e50d524f87f71e2a35"},
+ {file = "lupa-1.13-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:5a04febcd3016cb992e6c5b2f97834ad53a2fd4b37767d9afdce116021c2463a"},
+ {file = "lupa-1.13-cp27-cp27m-win32.whl", hash = "sha256:98f6d3debc4d3668e5e19d70e288dbdbbedef021a75ac2e42c450c7679b4bf52"},
+ {file = "lupa-1.13-cp27-cp27m-win_amd64.whl", hash = "sha256:7009719bf65549c018a2f925ff06b9d862a5a1e22f8a7aeeef807eb1e99b56bc"},
+ {file = "lupa-1.13-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bde9e73b06d147d31b970123a013cc6d28a4bea7b3d6b64fe115650cbc62b1a3"},
+ {file = "lupa-1.13-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a122baad6c6f9aaae496a59318217c068ae73654f618526e404a28775b46da38"},
+ {file = "lupa-1.13-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:4d1588486ed16d6b53f41b080047d44db3aa9991cf8a30da844cb97486a63c8b"},
+ {file = "lupa-1.13-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:a79be3ca652c8392d612bdc2234074325a68ec572c4175a35347cd650ef4a4b9"},
+ {file = "lupa-1.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:d9105f3b098cd4c276d6258f8254224243066f51c5d3c923b8f460efac9de37b"},
+ {file = "lupa-1.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:2d1fbddfa2914c405004f805afb13f5fc385793f3ba28e86a6f0c85b4059b86c"},
+ {file = "lupa-1.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5a3c84994399887a8befc82aef4d837582db45a301413025c510e20fef9e9148"},
+ {file = "lupa-1.13-cp310-cp310-win32.whl", hash = "sha256:c665af2a92e79106045f973174e0849f92b44395f5247505d321bc1173d9f3fd"},
+ {file = "lupa-1.13-cp310-cp310-win_amd64.whl", hash = "sha256:c9b47a9e93cb8e8f342343f4e0963eb1966d36baeced482575141925eafc17dc"},
+ {file = "lupa-1.13-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:b3003d723faabb9502259662722462cbff368f26ed83a6311f65949d298593bf"},
+ {file = "lupa-1.13-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b341b8a4711558af771bd4a954a6ffe531bfe097c1f1cdce84b9ad56070dfe90"},
+ {file = "lupa-1.13-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ea049ee507a549eec553a9d27e3e6c034eae8c145e7bad5947e85c4b9e23757b"},
+ {file = "lupa-1.13-cp35-cp35m-win32.whl", hash = "sha256:ba6c49646ad42c836f18ff8f1b6b8db4ca32fc02e786e1bf401b0fa34fe82cca"},
+ {file = "lupa-1.13-cp35-cp35m-win_amd64.whl", hash = "sha256:de51177d1374fd9cce27b9cdb20771142d91a509e42337b3e7c6cffbba818d6f"},
+ {file = "lupa-1.13-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:dddfeb031ab67c8bdbeefd2de237a98bee58e2166d5ed629c3a0c3842bb91738"},
+ {file = "lupa-1.13-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57f00004c185bd60459586a9d08961541f5da1cfec5925a3fc1ab68deaa2e038"},
+ {file = "lupa-1.13-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a940be5b38b68b344691558ffde1b44377ad66c105661f6f58c7d4c0c227d8ea"},
+ {file = "lupa-1.13-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:807b27c13f7598af9343455204a6a23b6b919180f01668c9b8fa4f9b0d75dedb"},
+ {file = "lupa-1.13-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a52d5a8305f4854f91ee39f5ee6f175f4d38f362c6b00483fe618ae6f9dff5b"},
+ {file = "lupa-1.13-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0ad47549359df03b3e59796ba09df548e1fd046f9245391dae79699c9ffec0f6"},
+ {file = "lupa-1.13-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:fbf99cea003b38a146dff5333ba58edb8165e01c42f15d7f76fdb72e761b5827"},
+ {file = "lupa-1.13-cp36-cp36m-win32.whl", hash = "sha256:a101c84097fdfa7b1a38f9d5a3055759da4e222c255ab8e5ac5b683704e62c97"},
+ {file = "lupa-1.13-cp36-cp36m-win_amd64.whl", hash = "sha256:00376b3bcb00bb57e067740ea9ff00f610a44aff5338ea93d3198a035f8965c6"},
+ {file = "lupa-1.13-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:91001c9667d60b69c3ad623dc315d7b59712e1617fe6204e5852c31cda778678"},
+ {file = "lupa-1.13-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:65c9d034d7215e8929a4ab48c9d9d372786ef47c8e61c294851bf0b8f5b4fbf4"},
+ {file = "lupa-1.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:928527222b2a15bd3dcea646f7585852097302c078c338fb0f184ce560d48c6c"},
+ {file = "lupa-1.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:5e157d97e379931a7fa90d9afa66600f796960bc062e04a9bb37f24fa7c5c967"},
+ {file = "lupa-1.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a67336d542d71e095c07dacc72c16158745ae4ef08e8a7bfe75827da604b4979"},
+ {file = "lupa-1.13-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0c5cd027c998db5b29ca8dd956c255d50914aed614d1c9edb68bc3315f916f59"},
+ {file = "lupa-1.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:76b06355f0b3d3aece5c38d20a66ab7d3046add95b8d04b677ade162fce2ffd0"},
+ {file = "lupa-1.13-cp37-cp37m-win32.whl", hash = "sha256:2a6b0a7e45390de36d11dd8705b2a0a10739ba8ed2e99c130e983ad72d56ddc9"},
+ {file = "lupa-1.13-cp37-cp37m-win_amd64.whl", hash = "sha256:42ffbe43119225cc58c7ebd2210123b9367b098ac25a7f0ef5d473e2f65fc0d9"},
+ {file = "lupa-1.13-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:7ff445a5d8ab25e623f871c600af58f1cd6207f6873a42c3b8c1683f13a22db0"},
+ {file = "lupa-1.13-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:dd0404f11b9473372fe2a8bdf0d64b361852ae08699d6dcde1215db3bd6c7b9c"},
+ {file = "lupa-1.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:14419b29152667fb2d78c6d5176f9a704c765aeecb80fe6c079a8dba9f864529"},
+ {file = "lupa-1.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:9e644032b40b59420ffa0d58ca1705351785ce8e39b77d9f1a8c4cf78e371adb"},
+ {file = "lupa-1.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c090991e2b701ded6c9e330ea582a74dd9cb09069b3de9ae897b938bd97dc98f"},
+ {file = "lupa-1.13-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6812f16530a1dc88f66c76a002e1c16039d3d98e1ff283a2efd5a492342ba00c"},
+ {file = "lupa-1.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ff3989ab562fb62e9df2290739c7f82e05d5ba7d2fa2ea319991885dfc818c81"},
+ {file = "lupa-1.13-cp38-cp38-win32.whl", hash = "sha256:48fa15cf24d297c50f21bff1fe1883f7a6a15b34b70db5a6c18d2dfbed6b6e16"},
+ {file = "lupa-1.13-cp38-cp38-win_amd64.whl", hash = "sha256:ea32a62d404c3d9e119e83b653aa56c034cae63a4e830aefa15bf3a25299b29e"},
+ {file = "lupa-1.13-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:80d36fbdc6218332232b4c214a2f9c36b13136b546dca0b3d19aca12d77e1f8e"},
+ {file = "lupa-1.13-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:db4745132f8abe0c9daac155af9d196926c9e10662d999edd805756d91502a01"},
+ {file = "lupa-1.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:938fb12c556737f9e4ffb7912540e35423d1be3166c6d4099ca4f3e177fe619e"},
+ {file = "lupa-1.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:de913a471ee6dc86435b647dda3cdb787990b164d8c8c63ca03d6e934f305a55"},
+ {file = "lupa-1.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:488d1bd773f10331ca67b0914c880900316634fd14538f76c3c2fbc7e6b56043"},
+ {file = "lupa-1.13-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:dc101e6d82ffa1b3fcfc77f2430a10c02def972cf0f8c7a229e272697e22e35c"},
+ {file = "lupa-1.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:361a55883b692d25478a69104d8ecce4cad058ba39ec1b7378b1209f86867687"},
+ {file = "lupa-1.13-cp39-cp39-win32.whl", hash = "sha256:9a6cd192e789fbc7f6a777a17b5b517c447a6dc6049e60c1becb300f86205345"},
+ {file = "lupa-1.13-cp39-cp39-win_amd64.whl", hash = "sha256:9fe47cda7cc81bd9b111f1317ed60e3da2620f4fef5360b690dcf62f88bbc668"},
+ {file = "lupa-1.13-pp37-pypy37_pp73-macosx_10_14_x86_64.whl", hash = "sha256:7d860dc0062b3001993355b12b939f68e0e2871a19a81427d2a9ced893574b58"},
+ {file = "lupa-1.13-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6c0358386f16afb50145b143774791c942c93a9721078a17983486a2d9f8f45b"},
+ {file = "lupa-1.13-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:a46962ebdc6278e82520c66d5dd1eed50099aa2f56b6827b7a4f001664d9ad1d"},
+ {file = "lupa-1.13-pp37-pypy37_pp73-win32.whl", hash = "sha256:436daf32385bcb9b6b9f922cbc0b64d133db141f0f7d8946a3a653e83b478713"},
+ {file = "lupa-1.13-pp38-pypy38_pp73-macosx_10_14_x86_64.whl", hash = "sha256:f1165e89aa8d2a0644619517e04410b9f5e3da2c9b3d105bf53f70e786f91f79"},
+ {file = "lupa-1.13-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:325069e4f3cf4b1232d03fb330ba1449867fc7dd727ecebaf0e602ddcacaf9d4"},
+ {file = "lupa-1.13-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:ce59c335b80ec4f9e98181970c18552f51adba5c3380ef5d46bdb3246b87963d"},
+ {file = "lupa-1.13-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:ad263ba6e54a13ac036364ae43ba7613c869c5ee6ff7dbb86791685a6cba13c5"},
+ {file = "lupa-1.13-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:86f4f46ee854e36cf5b6cf2317075023f395eede53efec0a694bc4a01fc03ab7"},
+ {file = "lupa-1.13-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:59799f40774dd5b8cfb99b11d6ce3a3f3a141e112472874389d47c81a7377ef9"},
+ {file = "lupa-1.13.tar.gz", hash = "sha256:e1d94ac2a630d271027dac2c21d1428771d9ea9d4d88f15f20a7781340f02a4e"},
+]
lxml = []
-markdownify = []
-mccabe = []
-more-itertools = []
-mslex = []
-multidict = []
+markdownify = [
+ {file = "markdownify-0.6.1-py3-none-any.whl", hash = "sha256:7489fd5c601536996a376c4afbcd1dd034db7690af807120681461e82fbc0acc"},
+ {file = "markdownify-0.6.1.tar.gz", hash = "sha256:31d7c13ac2ada8bfc7535a25fee6622ca720e1b5f2d4a9cbc429d167c21f886d"},
+]
+mccabe = [
+ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
+ {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
+]
+more-itertools = [
+ {file = "more-itertools-8.13.0.tar.gz", hash = "sha256:a42901a0a5b169d925f6f217cd5a190e32ef54360905b9c39ee7db5313bfec0f"},
+ {file = "more_itertools-8.13.0-py3-none-any.whl", hash = "sha256:c5122bffc5f104d37c1626b8615b511f3427aa5389b94d61e5ef8236bfbc3ddb"},
+]
+mslex = [
+ {file = "mslex-0.3.0-py2.py3-none-any.whl", hash = "sha256:380cb14abf8fabf40e56df5c8b21a6d533dc5cbdcfe42406bbf08dda8f42e42a"},
+ {file = "mslex-0.3.0.tar.gz", hash = "sha256:4a1ac3f25025cad78ad2fe499dd16d42759f7a3801645399cce5c404415daa97"},
+]
+multidict = [
+ {file = "multidict-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b9e95a740109c6047602f4db4da9949e6c5945cefbad34a1299775ddc9a62e2"},
+ {file = "multidict-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac0e27844758d7177989ce406acc6a83c16ed4524ebc363c1f748cba184d89d3"},
+ {file = "multidict-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:041b81a5f6b38244b34dc18c7b6aba91f9cdaf854d9a39e5ff0b58e2b5773b9c"},
+ {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fdda29a3c7e76a064f2477c9aab1ba96fd94e02e386f1e665bca1807fc5386f"},
+ {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3368bf2398b0e0fcbf46d85795adc4c259299fec50c1416d0f77c0a843a3eed9"},
+ {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4f052ee022928d34fe1f4d2bc743f32609fb79ed9c49a1710a5ad6b2198db20"},
+ {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:225383a6603c086e6cef0f2f05564acb4f4d5f019a4e3e983f572b8530f70c88"},
+ {file = "multidict-6.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50bd442726e288e884f7be9071016c15a8742eb689a593a0cac49ea093eef0a7"},
+ {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:47e6a7e923e9cada7c139531feac59448f1f47727a79076c0b1ee80274cd8eee"},
+ {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0556a1d4ea2d949efe5fd76a09b4a82e3a4a30700553a6725535098d8d9fb672"},
+ {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:626fe10ac87851f4cffecee161fc6f8f9853f0f6f1035b59337a51d29ff3b4f9"},
+ {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:8064b7c6f0af936a741ea1efd18690bacfbae4078c0c385d7c3f611d11f0cf87"},
+ {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2d36e929d7f6a16d4eb11b250719c39560dd70545356365b494249e2186bc389"},
+ {file = "multidict-6.0.2-cp310-cp310-win32.whl", hash = "sha256:fcb91630817aa8b9bc4a74023e4198480587269c272c58b3279875ed7235c293"},
+ {file = "multidict-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:8cbf0132f3de7cc6c6ce00147cc78e6439ea736cee6bca4f068bcf892b0fd658"},
+ {file = "multidict-6.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:05f6949d6169878a03e607a21e3b862eaf8e356590e8bdae4227eedadacf6e51"},
+ {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2c2e459f7050aeb7c1b1276763364884595d47000c1cddb51764c0d8976e608"},
+ {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0509e469d48940147e1235d994cd849a8f8195e0bca65f8f5439c56e17872a3"},
+ {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:514fe2b8d750d6cdb4712346a2c5084a80220821a3e91f3f71eec11cf8d28fd4"},
+ {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19adcfc2a7197cdc3987044e3f415168fc5dc1f720c932eb1ef4f71a2067e08b"},
+ {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9d153e7f1f9ba0b23ad1568b3b9e17301e23b042c23870f9ee0522dc5cc79e8"},
+ {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:aef9cc3d9c7d63d924adac329c33835e0243b5052a6dfcbf7732a921c6e918ba"},
+ {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4571f1beddff25f3e925eea34268422622963cd8dc395bb8778eb28418248e43"},
+ {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:d48b8ee1d4068561ce8033d2c344cf5232cb29ee1a0206a7b828c79cbc5982b8"},
+ {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:45183c96ddf61bf96d2684d9fbaf6f3564d86b34cb125761f9a0ef9e36c1d55b"},
+ {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:75bdf08716edde767b09e76829db8c1e5ca9d8bb0a8d4bd94ae1eafe3dac5e15"},
+ {file = "multidict-6.0.2-cp37-cp37m-win32.whl", hash = "sha256:a45e1135cb07086833ce969555df39149680e5471c04dfd6a915abd2fc3f6dbc"},
+ {file = "multidict-6.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6f3cdef8a247d1eafa649085812f8a310e728bdf3900ff6c434eafb2d443b23a"},
+ {file = "multidict-6.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0327292e745a880459ef71be14e709aaea2f783f3537588fb4ed09b6c01bca60"},
+ {file = "multidict-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e875b6086e325bab7e680e4316d667fc0e5e174bb5611eb16b3ea121c8951b86"},
+ {file = "multidict-6.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feea820722e69451743a3d56ad74948b68bf456984d63c1a92e8347b7b88452d"},
+ {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cc57c68cb9139c7cd6fc39f211b02198e69fb90ce4bc4a094cf5fe0d20fd8b0"},
+ {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:497988d6b6ec6ed6f87030ec03280b696ca47dbf0648045e4e1d28b80346560d"},
+ {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:89171b2c769e03a953d5969b2f272efa931426355b6c0cb508022976a17fd376"},
+ {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:684133b1e1fe91eda8fa7447f137c9490a064c6b7f392aa857bba83a28cfb693"},
+ {file = "multidict-6.0.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd9fc9c4849a07f3635ccffa895d57abce554b467d611a5009ba4f39b78a8849"},
+ {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e07c8e79d6e6fd37b42f3250dba122053fddb319e84b55dd3a8d6446e1a7ee49"},
+ {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4070613ea2227da2bfb2c35a6041e4371b0af6b0be57f424fe2318b42a748516"},
+ {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:47fbeedbf94bed6547d3aa632075d804867a352d86688c04e606971595460227"},
+ {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5774d9218d77befa7b70d836004a768fb9aa4fdb53c97498f4d8d3f67bb9cfa9"},
+ {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2957489cba47c2539a8eb7ab32ff49101439ccf78eab724c828c1a54ff3ff98d"},
+ {file = "multidict-6.0.2-cp38-cp38-win32.whl", hash = "sha256:e5b20e9599ba74391ca0cfbd7b328fcc20976823ba19bc573983a25b32e92b57"},
+ {file = "multidict-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:8004dca28e15b86d1b1372515f32eb6f814bdf6f00952699bdeb541691091f96"},
+ {file = "multidict-6.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2e4a0785b84fb59e43c18a015ffc575ba93f7d1dbd272b4cdad9f5134b8a006c"},
+ {file = "multidict-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6701bf8a5d03a43375909ac91b6980aea74b0f5402fbe9428fc3f6edf5d9677e"},
+ {file = "multidict-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a007b1638e148c3cfb6bf0bdc4f82776cef0ac487191d093cdc316905e504071"},
+ {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07a017cfa00c9890011628eab2503bee5872f27144936a52eaab449be5eaf032"},
+ {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c207fff63adcdf5a485969131dc70e4b194327666b7e8a87a97fbc4fd80a53b2"},
+ {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:373ba9d1d061c76462d74e7de1c0c8e267e9791ee8cfefcf6b0b2495762c370c"},
+ {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfba7c6d5d7c9099ba21f84662b037a0ffd4a5e6b26ac07d19e423e6fdf965a9"},
+ {file = "multidict-6.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19d9bad105dfb34eb539c97b132057a4e709919ec4dd883ece5838bcbf262b80"},
+ {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:de989b195c3d636ba000ee4281cd03bb1234635b124bf4cd89eeee9ca8fcb09d"},
+ {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7c40b7bbece294ae3a87c1bc2abff0ff9beef41d14188cda94ada7bcea99b0fb"},
+ {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:d16cce709ebfadc91278a1c005e3c17dd5f71f5098bfae1035149785ea6e9c68"},
+ {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:a2c34a93e1d2aa35fbf1485e5010337c72c6791407d03aa5f4eed920343dd360"},
+ {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:feba80698173761cddd814fa22e88b0661e98cb810f9f986c54aa34d281e4937"},
+ {file = "multidict-6.0.2-cp39-cp39-win32.whl", hash = "sha256:23b616fdc3c74c9fe01d76ce0d1ce872d2d396d8fa8e4899398ad64fb5aa214a"},
+ {file = "multidict-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:4bae31803d708f6f15fd98be6a6ac0b6958fcf68fda3c77a048a4f9073704aae"},
+ {file = "multidict-6.0.2.tar.gz", hash = "sha256:5ff3bd75f38e4c43f1f470f2df7a4d430b821c4ce22be384e1459cb57d6bb013"},
+]
nodeenv = []
-ordered-set = []
-packaging = []
-pep8-naming = []
-pip-licenses = []
-platformdirs = []
-pluggy = []
-pre-commit = []
-psutil = []
-ptable = []
-py = []
-pycares = []
-pycodestyle = []
-pycparser = []
-pydocstyle = []
-pyflakes = []
+ordered-set = [
+ {file = "ordered-set-4.1.0.tar.gz", hash = "sha256:694a8e44c87657c59292ede72891eb91d34131f6531463aab3009191c77364a8"},
+ {file = "ordered_set-4.1.0-py3-none-any.whl", hash = "sha256:046e1132c71fcf3330438a539928932caf51ddbc582496833e23de611de14562"},
+]
+packaging = [
+ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
+ {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
+]
+pep8-naming = [
+ {file = "pep8-naming-0.13.1.tar.gz", hash = "sha256:3af77cdaa9c7965f7c85a56cd579354553c9bbd3fdf3078a776f12db54dd6944"},
+ {file = "pep8_naming-0.13.1-py3-none-any.whl", hash = "sha256:f7867c1a464fe769be4f972ef7b79d6df1d9aff1b1f04ecf738d471963d3ab9c"},
+]
+pip-licenses = [
+ {file = "pip-licenses-3.5.4.tar.gz", hash = "sha256:a8b4dabe2b83901f9ac876afc47b57cff9a5ebe19a6d90c0b2579fa8cf2db176"},
+ {file = "pip_licenses-3.5.4-py3-none-any.whl", hash = "sha256:5e23593c670b8db616b627c68729482a65bb88498eefd8df337762fdaf7936a8"},
+]
+platformdirs = [
+ {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
+ {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"},
+]
+pluggy = [
+ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
+ {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
+]
+pre-commit = [
+ {file = "pre_commit-2.20.0-py2.py3-none-any.whl", hash = "sha256:51a5ba7c480ae8072ecdb6933df22d2f812dc897d5fe848778116129a681aac7"},
+ {file = "pre_commit-2.20.0.tar.gz", hash = "sha256:a978dac7bc9ec0bcee55c18a277d553b0f419d259dadb4b9418ff2d00eb43959"},
+]
+psutil = [
+ {file = "psutil-5.9.2-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:8f024fbb26c8daf5d70287bb3edfafa22283c255287cf523c5d81721e8e5d82c"},
+ {file = "psutil-5.9.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:b2f248ffc346f4f4f0d747ee1947963613216b06688be0be2e393986fe20dbbb"},
+ {file = "psutil-5.9.2-cp27-cp27m-win32.whl", hash = "sha256:b1928b9bf478d31fdffdb57101d18f9b70ed4e9b0e41af751851813547b2a9ab"},
+ {file = "psutil-5.9.2-cp27-cp27m-win_amd64.whl", hash = "sha256:404f4816c16a2fcc4eaa36d7eb49a66df2d083e829d3e39ee8759a411dbc9ecf"},
+ {file = "psutil-5.9.2-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:94e621c6a4ddb2573d4d30cba074f6d1aa0186645917df42c811c473dd22b339"},
+ {file = "psutil-5.9.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:256098b4f6ffea6441eb54ab3eb64db9ecef18f6a80d7ba91549195d55420f84"},
+ {file = "psutil-5.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:614337922702e9be37a39954d67fdb9e855981624d8011a9927b8f2d3c9625d9"},
+ {file = "psutil-5.9.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:39ec06dc6c934fb53df10c1672e299145ce609ff0611b569e75a88f313634969"},
+ {file = "psutil-5.9.2-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3ac2c0375ef498e74b9b4ec56df3c88be43fe56cac465627572dbfb21c4be34"},
+ {file = "psutil-5.9.2-cp310-cp310-win32.whl", hash = "sha256:e4c4a7636ffc47b7141864f1c5e7d649f42c54e49da2dd3cceb1c5f5d29bfc85"},
+ {file = "psutil-5.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:f4cb67215c10d4657e320037109939b1c1d2fd70ca3d76301992f89fe2edb1f1"},
+ {file = "psutil-5.9.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:dc9bda7d5ced744622f157cc8d8bdd51735dafcecff807e928ff26bdb0ff097d"},
+ {file = "psutil-5.9.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75291912b945a7351d45df682f9644540d564d62115d4a20d45fa17dc2d48f8"},
+ {file = "psutil-5.9.2-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4018d5f9b6651f9896c7a7c2c9f4652e4eea53f10751c4e7d08a9093ab587ec"},
+ {file = "psutil-5.9.2-cp36-cp36m-win32.whl", hash = "sha256:f40ba362fefc11d6bea4403f070078d60053ed422255bd838cd86a40674364c9"},
+ {file = "psutil-5.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9770c1d25aee91417eba7869139d629d6328a9422ce1cdd112bd56377ca98444"},
+ {file = "psutil-5.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:42638876b7f5ef43cef8dcf640d3401b27a51ee3fa137cb2aa2e72e188414c32"},
+ {file = "psutil-5.9.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91aa0dac0c64688667b4285fa29354acfb3e834e1fd98b535b9986c883c2ce1d"},
+ {file = "psutil-5.9.2-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fb54941aac044a61db9d8eb56fc5bee207db3bc58645d657249030e15ba3727"},
+ {file = "psutil-5.9.2-cp37-cp37m-win32.whl", hash = "sha256:7cbb795dcd8ed8fd238bc9e9f64ab188f3f4096d2e811b5a82da53d164b84c3f"},
+ {file = "psutil-5.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:5d39e3a2d5c40efa977c9a8dd4f679763c43c6c255b1340a56489955dbca767c"},
+ {file = "psutil-5.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fd331866628d18223a4265371fd255774affd86244fc307ef66eaf00de0633d5"},
+ {file = "psutil-5.9.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b315febaebae813326296872fdb4be92ad3ce10d1d742a6b0c49fb619481ed0b"},
+ {file = "psutil-5.9.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7929a516125f62399d6e8e026129c8835f6c5a3aab88c3fff1a05ee8feb840d"},
+ {file = "psutil-5.9.2-cp38-cp38-win32.whl", hash = "sha256:561dec454853846d1dd0247b44c2e66a0a0c490f937086930ec4b8f83bf44f06"},
+ {file = "psutil-5.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:67b33f27fc0427483b61563a16c90d9f3b547eeb7af0ef1b9fe024cdc9b3a6ea"},
+ {file = "psutil-5.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b3591616fa07b15050b2f87e1cdefd06a554382e72866fcc0ab2be9d116486c8"},
+ {file = "psutil-5.9.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b29f581b5edab1f133563272a6011925401804d52d603c5c606936b49c8b97"},
+ {file = "psutil-5.9.2-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4642fd93785a29353d6917a23e2ac6177308ef5e8be5cc17008d885cb9f70f12"},
+ {file = "psutil-5.9.2-cp39-cp39-win32.whl", hash = "sha256:ed29ea0b9a372c5188cdb2ad39f937900a10fb5478dc077283bf86eeac678ef1"},
+ {file = "psutil-5.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:68b35cbff92d1f7103d8f1db77c977e72f49fcefae3d3d2b91c76b0e7aef48b8"},
+ {file = "psutil-5.9.2.tar.gz", hash = "sha256:feb861a10b6c3bb00701063b37e4afc754f8217f0f09c42280586bd6ac712b5c"},
+]
+ptable = [
+ {file = "PTable-0.9.2.tar.gz", hash = "sha256:aa7fc151cb40f2dabcd2275ba6f7fd0ff8577a86be3365cd3fb297cbe09cc292"},
+]
+py = [
+ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
+ {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
+]
+pycares = [
+ {file = "pycares-4.2.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5dc6418e87729105d93162155793002b3fa95490e2f2df33afec08b0b0d44989"},
+ {file = "pycares-4.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9481ee42df7e34c9ef7b2f045e534062b980b2c971677868df9f17730b147ceb"},
+ {file = "pycares-4.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05e029e594c27a0066cdb89dfc5bba28ba94e2b27b0ca7aceb94f9aea06812cd"},
+ {file = "pycares-4.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0eb203ceedcf7f9865ed3abb6128dfbb3498c5e76342e3c820c4274cc0c8e873"},
+ {file = "pycares-4.2.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4a01ba75e8a2947fc0b954850f8db9d52166634a206056febef2f833c8cfa1e"},
+ {file = "pycares-4.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:064543e222e3587a92bccae704fcc5f4ce1ba1ce66aac96483c9cf504d554a67"},
+ {file = "pycares-4.2.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a5a28f1d041aa2102bd2512e7361671e4ef46bc927e95b6932ed95cc45273480"},
+ {file = "pycares-4.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:650b16f025bd3dad6a331e63bb8c9d55994c1b5d0d289ebe03c0bc16edad114f"},
+ {file = "pycares-4.2.2-cp310-cp310-win32.whl", hash = "sha256:f8b76c13275b319b850e28bb9b3f5815de7521b1e0a581453d1acf10011bafef"},
+ {file = "pycares-4.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:bcfcafbb376376c9cca6d37a8497dfd6dbd82333bf37627067b34dcaf5039612"},
+ {file = "pycares-4.2.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ae5accd693c6910bbd1a99d1f4551a9e99decd65d792a80f10c27b8fcc32b497"},
+ {file = "pycares-4.2.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f1901b309cb5cf7ade5822d74b904f55c49369e4ff9328818e554d4c34b4714"},
+ {file = "pycares-4.2.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bc61edb98aff9cb4b2e07c25383100b81459a676ca0b0bd5fe77226eb1f850e"},
+ {file = "pycares-4.2.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:241155687db7b45cb4ef84a18755ebc78c3ad624fd2578b48ea52ac16a4c8d9f"},
+ {file = "pycares-4.2.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:27a21184ba35fff12eec36375d5b064516a0c3401dbf66a7eded7da34c5ca282"},
+ {file = "pycares-4.2.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:8a376e637ecd79db62761ca40cda080b9383a07d6dedbc799dd1a31e053862d9"},
+ {file = "pycares-4.2.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:6c411610be8de17cd5257845ebba5104b8e6356c62e66768728985a2ac0e9d1c"},
+ {file = "pycares-4.2.2-cp36-cp36m-win32.whl", hash = "sha256:6a5af6443a1cefb36ddca47af37e29cae94a734c6c7cea3eb94e5de5cc2a4f1a"},
+ {file = "pycares-4.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:a01ab41405dd4dd8449f9323b2dac25e1d856ef02d85c8aedea0130b65275b2a"},
+ {file = "pycares-4.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9a2053b34163d13d6d135248c65e71cefce3f25b3611677a1428ec7a57bae856"},
+ {file = "pycares-4.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8064eaae5084e5155008b8f9d089055a432ff2115960273fc570f55dccedf666"},
+ {file = "pycares-4.2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc045040c094068d5de28e61a6fd0babe8522e8f61829839b893f7aff928173b"},
+ {file = "pycares-4.2.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:135a356d52773f02d7babd2b38ad64493418363274972cc786fdce847875ca03"},
+ {file = "pycares-4.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:512fb2c04c28e0e5a7de0b61624ab9c15d2df52db113f63a0aba6c6f1174b92f"},
+ {file = "pycares-4.2.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0eb374525c6231920509612f197ca47bdaa6ec9a0728aa199ba536dc0c25bb55"},
+ {file = "pycares-4.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:47c6e18bbe6f2f4ce42fbdfa4ab2602268590f76110f06af60d02f964b72fada"},
+ {file = "pycares-4.2.2-cp37-cp37m-win32.whl", hash = "sha256:a2c7fb5d3cb633e3f23344194da9b5caa54eb40da34dbe4465f0ebcede2e1e1a"},
+ {file = "pycares-4.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:90f374fae2af5eb728841b4c2a0c8038a6889ba2a5a421e4c4e4e0f15bcc5912"},
+ {file = "pycares-4.2.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c0a7e0f9371c47cf028e2389f11385e906ba2797900419509adfa86587a2ac"},
+ {file = "pycares-4.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0fb3944af9492cfde6e1167780c9b8a701a56cf7d3fb29086cfb906b8261648f"},
+ {file = "pycares-4.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7466315e76ab0ca4dc1354f3d7cb53f6d99d365b3778d9849e52643270beb6f2"},
+ {file = "pycares-4.2.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46f58398bd9fa99cc2dd79f7fecddc85837ccb452d673168037ea603b15aa11b"},
+ {file = "pycares-4.2.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47eae9809826cea5c0eb08eec9da584dd6330e51c075c2f6963ca2067555cd07"},
+ {file = "pycares-4.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6cbd4df536d2c32d2d74b854db25f1d15cc61cdd182b03206afbd7ccbe7b8f11"},
+ {file = "pycares-4.2.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3e4519bc51b744331c968eef0bd0071ca9c3e5863b8b8c1d99540ab8bfb04235"},
+ {file = "pycares-4.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5e2af8ca3bc49894a87d2b5629b993c22b0e602ecb7fd2fad660ebb9be584829"},
+ {file = "pycares-4.2.2-cp38-cp38-win32.whl", hash = "sha256:f6b5360e2278fae1e79479a4b56198fc7faf46ab350da18756c4de789835dbcd"},
+ {file = "pycares-4.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:4304e5f0c10281abcee3c2547140a6b280c70866f2828956c9bcb2de6cffa211"},
+ {file = "pycares-4.2.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9155e95cbe26b4b57ca691e9d8bfb5a002c7ce14ac02ddfcfe7849d4d349badb"},
+ {file = "pycares-4.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:612a20514685a3d999dd0a99eede9da851be11171d599b211fac287eee452ff1"},
+ {file = "pycares-4.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:075d4bdde10590a2d0456eab20028aab997207e45469d30dd01a4a65caa7f8da"},
+ {file = "pycares-4.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f6eebdf34477c9bfb00497f8e58a674fd22b348bd928d19d29c84e8923554e1"},
+ {file = "pycares-4.2.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:55d39f2c38d1285d1ae248b9d2d965b161dcf51a4b6eacf97ff056da6f09dd30"},
+ {file = "pycares-4.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:64261640fd52910e7960f30888abeca4e6a7a91371d351ccebc70ac1625ca74e"},
+ {file = "pycares-4.2.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:72184b1510866c9bc97a6daca7d8218a6954c4a78640197f0840e604ba1182f9"},
+ {file = "pycares-4.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:02fdf5ce48b21da6eafc5cb4508d344a0d48ac1a31e8df178f7c2fb548fcbc14"},
+ {file = "pycares-4.2.2-cp39-cp39-win32.whl", hash = "sha256:fe8e0f8ed7fd795868bfc2211e345963174a9f4d1e2125753e1715a60441c8a0"},
+ {file = "pycares-4.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:bb09c084de909206e6db1f014d4c6d662c7df04194df31f4831088d426afe8f1"},
+ {file = "pycares-4.2.2.tar.gz", hash = "sha256:e1f57a8004370080694bd6fb969a1ffc9171a59c6824d54f791c1b2e4d298385"},
+]
+pycodestyle = [
+ {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"},
+ {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"},
+]
+pycparser = [
+ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
+ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
+]
+pydantic = [
+ {file = "pydantic-1.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb6ad4489af1bac6955d38ebcb95079a836af31e4c4f74aba1ca05bb9f6027bd"},
+ {file = "pydantic-1.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1f5a63a6dfe19d719b1b6e6106561869d2efaca6167f84f5ab9347887d78b98"},
+ {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:352aedb1d71b8b0736c6d56ad2bd34c6982720644b0624462059ab29bd6e5912"},
+ {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19b3b9ccf97af2b7519c42032441a891a5e05c68368f40865a90eb88833c2559"},
+ {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9069e1b01525a96e6ff49e25876d90d5a563bc31c658289a8772ae186552236"},
+ {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:355639d9afc76bcb9b0c3000ddcd08472ae75318a6eb67a15866b87e2efa168c"},
+ {file = "pydantic-1.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:ae544c47bec47a86bc7d350f965d8b15540e27e5aa4f55170ac6a75e5f73b644"},
+ {file = "pydantic-1.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a4c805731c33a8db4b6ace45ce440c4ef5336e712508b4d9e1aafa617dc9907f"},
+ {file = "pydantic-1.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d49f3db871575e0426b12e2f32fdb25e579dea16486a26e5a0474af87cb1ab0a"},
+ {file = "pydantic-1.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37c90345ec7dd2f1bcef82ce49b6235b40f282b94d3eec47e801baf864d15525"},
+ {file = "pydantic-1.10.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b5ba54d026c2bd2cb769d3468885f23f43710f651688e91f5fb1edcf0ee9283"},
+ {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05e00dbebbe810b33c7a7362f231893183bcc4251f3f2ff991c31d5c08240c42"},
+ {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2d0567e60eb01bccda3a4df01df677adf6b437958d35c12a3ac3e0f078b0ee52"},
+ {file = "pydantic-1.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:c6f981882aea41e021f72779ce2a4e87267458cc4d39ea990729e21ef18f0f8c"},
+ {file = "pydantic-1.10.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4aac8e7103bf598373208f6299fa9a5cfd1fc571f2d40bf1dd1955a63d6eeb5"},
+ {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a7b66c3f499108b448f3f004801fcd7d7165fb4200acb03f1c2402da73ce4c"},
+ {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bedf309630209e78582ffacda64a21f96f3ed2e51fbf3962d4d488e503420254"},
+ {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9300fcbebf85f6339a02c6994b2eb3ff1b9c8c14f502058b5bf349d42447dcf5"},
+ {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:216f3bcbf19c726b1cc22b099dd409aa371f55c08800bcea4c44c8f74b73478d"},
+ {file = "pydantic-1.10.2-cp37-cp37m-win_amd64.whl", hash = "sha256:dd3f9a40c16daf323cf913593083698caee97df2804aa36c4b3175d5ac1b92a2"},
+ {file = "pydantic-1.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b97890e56a694486f772d36efd2ba31612739bc6f3caeee50e9e7e3ebd2fdd13"},
+ {file = "pydantic-1.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9cabf4a7f05a776e7793e72793cd92cc865ea0e83a819f9ae4ecccb1b8aa6116"},
+ {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06094d18dd5e6f2bbf93efa54991c3240964bb663b87729ac340eb5014310624"},
+ {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc78cc83110d2f275ec1970e7a831f4e371ee92405332ebfe9860a715f8336e1"},
+ {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ee433e274268a4b0c8fde7ad9d58ecba12b069a033ecc4645bb6303c062d2e9"},
+ {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7c2abc4393dea97a4ccbb4ec7d8658d4e22c4765b7b9b9445588f16c71ad9965"},
+ {file = "pydantic-1.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:0b959f4d8211fc964772b595ebb25f7652da3f22322c007b6fed26846a40685e"},
+ {file = "pydantic-1.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c33602f93bfb67779f9c507e4d69451664524389546bacfe1bee13cae6dc7488"},
+ {file = "pydantic-1.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5760e164b807a48a8f25f8aa1a6d857e6ce62e7ec83ea5d5c5a802eac81bad41"},
+ {file = "pydantic-1.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6eb843dcc411b6a2237a694f5e1d649fc66c6064d02b204a7e9d194dff81eb4b"},
+ {file = "pydantic-1.10.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b8795290deaae348c4eba0cebb196e1c6b98bdbe7f50b2d0d9a4a99716342fe"},
+ {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e0bedafe4bc165ad0a56ac0bd7695df25c50f76961da29c050712596cf092d6d"},
+ {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e05aed07fa02231dbf03d0adb1be1d79cabb09025dd45aa094aa8b4e7b9dcda"},
+ {file = "pydantic-1.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:c1ba1afb396148bbc70e9eaa8c06c1716fdddabaf86e7027c5988bae2a829ab6"},
+ {file = "pydantic-1.10.2-py3-none-any.whl", hash = "sha256:1b6ee725bd6e83ec78b1aa32c5b1fa67a3a65badddde3976bca5fe4568f27709"},
+ {file = "pydantic-1.10.2.tar.gz", hash = "sha256:91b8e218852ef6007c2b98cd861601c6a09f1aa32bbbb74fab5b1c33d4a1e410"},
+]
+pydocstyle = [
+ {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"},
+ {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"},
+]
+pyflakes = [
+ {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"},
+ {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"},
+]
pyparsing = []
-pyreadline3 = []
-pytest = []
-pytest-cov = []
-pytest-forked = []
-pytest-xdist = []
-python-dateutil = []
-python-dotenv = []
-python-frontmatter = []
-pyyaml = []
-rapidfuzz = []
-redis = []
-regex = []
+pyreadline3 = [
+ {file = "pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb"},
+ {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"},
+]
+pytest = [
+ {file = "pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"},
+ {file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"},
+]
+pytest-cov = [
+ {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"},
+ {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"},
+]
+pytest-forked = [
+ {file = "pytest-forked-1.4.0.tar.gz", hash = "sha256:8b67587c8f98cbbadfdd804539ed5455b6ed03802203485dd2f53c1422d7440e"},
+ {file = "pytest_forked-1.4.0-py3-none-any.whl", hash = "sha256:bbbb6717efc886b9d64537b41fb1497cfaf3c9601276be8da2cccfea5a3c8ad8"},
+]
+pytest-xdist = [
+ {file = "pytest-xdist-2.5.0.tar.gz", hash = "sha256:4580deca3ff04ddb2ac53eba39d76cb5dd5edeac050cb6fbc768b0dd712b4edf"},
+ {file = "pytest_xdist-2.5.0-py3-none-any.whl", hash = "sha256:6fe5c74fec98906deb8f2d2b616b5c782022744978e7bd4695d39c8f42d0ce65"},
+]
+python-dateutil = [
+ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
+ {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
+]
+python-dotenv = [
+ {file = "python-dotenv-0.20.0.tar.gz", hash = "sha256:b7e3b04a59693c42c36f9ab1cc2acc46fa5df8c78e178fc33a8d4cd05c8d498f"},
+ {file = "python_dotenv-0.20.0-py3-none-any.whl", hash = "sha256:d92a187be61fe482e4fd675b6d52200e7be63a12b724abbf931a40ce4fa92938"},
+]
+python-frontmatter = [
+ {file = "python-frontmatter-1.0.0.tar.gz", hash = "sha256:e98152e977225ddafea6f01f40b4b0f1de175766322004c826ca99842d19a7cd"},
+ {file = "python_frontmatter-1.0.0-py3-none-any.whl", hash = "sha256:766ae75f1b301ffc5fe3494339147e0fd80bc3deff3d7590a93991978b579b08"},
+]
+pyyaml = [
+ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"},
+ {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"},
+ {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"},
+ {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"},
+ {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"},
+ {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"},
+ {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"},
+ {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"},
+ {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"},
+ {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"},
+ {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"},
+ {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"},
+ {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"},
+ {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"},
+ {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"},
+ {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"},
+ {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"},
+ {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"},
+ {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"},
+ {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"},
+ {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"},
+ {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"},
+ {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"},
+ {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"},
+ {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"},
+ {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"},
+ {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"},
+ {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"},
+ {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"},
+ {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"},
+ {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"},
+ {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"},
+ {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
+]
+rapidfuzz = [
+ {file = "rapidfuzz-2.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:40b70a4277c73401bf06cfe833317c4c6616943e74497689412634721601a3c6"},
+ {file = "rapidfuzz-2.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd956594eedcc8ee9d10c6907f89b22e09cdd1b707a66ed77ecfdb31c707addb"},
+ {file = "rapidfuzz-2.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:06121384ceca291c1b5215d44cdbbf3d87db8880c3eab2297f933ca1ce5128ad"},
+ {file = "rapidfuzz-2.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c265ba6648f20cbb03f146bf75426b162814ac55bd466db3d87aba16a275c5b"},
+ {file = "rapidfuzz-2.3.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a061cab4f8ed57644deeaaebeddf4da8ba4a3fcd77d6106e0e91d77b8ef09c4"},
+ {file = "rapidfuzz-2.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a88097cb0c15f48548a38263d8ef4878ebc7581209cd5e7f34e98208fdc7fc3d"},
+ {file = "rapidfuzz-2.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c268c6cf8fadb50bc710da7d8194f49c221d2e1cf4da229639dd0ba7cc75bddb"},
+ {file = "rapidfuzz-2.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a1f64e599eb3c42371d6d0d5dd5d057991739ca510b8bd6a5c0aafce4ac0c7"},
+ {file = "rapidfuzz-2.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:11808afdfd60b7549d31da6320f812eb8b9e7ea6785878486dfd182a53833b6e"},
+ {file = "rapidfuzz-2.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9e5a6c2366d732aab1d820c1f373a05460c94242f62c3eec8d69885d9c81bf3e"},
+ {file = "rapidfuzz-2.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:260c230ce47cbed24bf222e05ec0a5f9a28293c2b566a97f142a6465a53d3350"},
+ {file = "rapidfuzz-2.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:1ea250cd8417ccc7b8de62fdd12428ef2ec70f61a5233643e49f8a9b89fa1ed9"},
+ {file = "rapidfuzz-2.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df947d7a09efd33528a05acfb9676c039b28c964a083018a8004569564daa8ba"},
+ {file = "rapidfuzz-2.3.0-cp310-cp310-win32.whl", hash = "sha256:e925443a16e8c4481be9f7394e0e6894937462c193ebfc922669358dad26041d"},
+ {file = "rapidfuzz-2.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:3f8f70e19a25afe921551d27ab732237a7e824eaa7987610d766f0d810036b63"},
+ {file = "rapidfuzz-2.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:dbe2352103bd2e860f40b5b62830b93663889260c65349bae52e493835af9d20"},
+ {file = "rapidfuzz-2.3.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26ed50072d9c2fc85b75d06dfd5da939c9b19b1240560d0860aa2f4f2d689e56"},
+ {file = "rapidfuzz-2.3.0-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8221952fa345e9a4e3716e73d7ea773a96b4b22d7e86a0cfc659d5597ce7025d"},
+ {file = "rapidfuzz-2.3.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30169918f58d4695efed40d0cf4752f9a8700fbc78c370f019de228d4aa2cd13"},
+ {file = "rapidfuzz-2.3.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc10f600b7cf15ff5158509287b0095b954a1f5498c5ea23f102ef06c1b0cd3a"},
+ {file = "rapidfuzz-2.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51606a7dd226cece687730566bcbab69db215739c0bfdc0ebf3c1f61841862bc"},
+ {file = "rapidfuzz-2.3.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:bdae4684f90cbe97a04c06fea2809842dd443ddad822da3d4b1c1ceaf8e023ec"},
+ {file = "rapidfuzz-2.3.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:62204b908ebadb8056851f3ea2ba8eace16b84a8e9d65350ce7087b95ba45dbe"},
+ {file = "rapidfuzz-2.3.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:2d40f26f9782403335574d07062e0ebf64937f829b29992a2df2dc9d7aff626d"},
+ {file = "rapidfuzz-2.3.0-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:35ff00448a75649ab181e03c264bdf0c3fc46eae29382f6ec7a635fdcb49346c"},
+ {file = "rapidfuzz-2.3.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:5302ab15055150e232bf9506ad4d33170fb81337943c49e5ad1f98907ab0e015"},
+ {file = "rapidfuzz-2.3.0-cp36-cp36m-win32.whl", hash = "sha256:23c1a16e0930b74d9766b80fb6fea3d78536cccf127c7e0d693f5c9a8655460a"},
+ {file = "rapidfuzz-2.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2fc9bf751408396260fe2b6cd0e2ba5361229e22f26e3b59b08c08815830968b"},
+ {file = "rapidfuzz-2.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9f57ab7f66b5e8f8ad66b7115b2f04316fce6b6beb19b89f151b590a53c7c7a6"},
+ {file = "rapidfuzz-2.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99657acaeeace8add81da35b8625783e06f251685ff2b582fc748f501a82826c"},
+ {file = "rapidfuzz-2.3.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3d830db60d293ff0602ba7798e792bcbc0ec9eda6bb80bac6df2850056dc0705"},
+ {file = "rapidfuzz-2.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76cb19a9454dd1773a81f0c65cc33eadc8dceea892f5be86646150dbbc4fef70"},
+ {file = "rapidfuzz-2.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:229daf8051cac2f60c380e8385fd91d8c461201957b547867b5f9d50c0099c17"},
+ {file = "rapidfuzz-2.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95b28d765d53111f932f5a57925b8191783015156e386533b51fc3a4c442d828"},
+ {file = "rapidfuzz-2.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3fe0a4d0f545efab85232a2a67628fd371fd4430cf832f5617f7cbf09b2f0ea2"},
+ {file = "rapidfuzz-2.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:66e70f89e80640f1351d116806e4ce57b3581725b47ca4a7ebca193fab6033a5"},
+ {file = "rapidfuzz-2.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:8b4618e4879924726c9dcde0da82ca20b6115dfd65fadc959732fe65019a4ffd"},
+ {file = "rapidfuzz-2.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:b933103b3264927fc9c561709277aa6de199371560a09b5db4177bca7bc1ac82"},
+ {file = "rapidfuzz-2.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:31fc180736bc871718d9d992c36241818e1d280091bb57059f90275cdf398fdd"},
+ {file = "rapidfuzz-2.3.0-cp37-cp37m-win32.whl", hash = "sha256:480eba6f533e53d516f0e3477a67324b2b9a2f67f57a169784b8e4a62ad788c8"},
+ {file = "rapidfuzz-2.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d649358105683c0bb4422b934a2fa9aedd25854648ba5041826a99607e274f40"},
+ {file = "rapidfuzz-2.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4307eeea7e37097ab4d9e22a6553a753cd6beaf7727422a92d18b156d48f7145"},
+ {file = "rapidfuzz-2.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:08093590a70d8bd2a66fc26b29635017779a5c6380e795f6050e79d6edc78911"},
+ {file = "rapidfuzz-2.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2cf4ab0bd45892e9a9ed2e6d7d41d1cf571e479ff0f3358deac029086fd8c920"},
+ {file = "rapidfuzz-2.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81f01fce0a5fad68c6108143911abae0d765acb85b1c1379341b26590154056f"},
+ {file = "rapidfuzz-2.3.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54ce466224791171ef0ece47bdfe5fdbda459a644c0090af804a7384bba9fc96"},
+ {file = "rapidfuzz-2.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10e67a6c2131661ef5633c6e36d77dc65e3436088f77a44e802f6410bc292af6"},
+ {file = "rapidfuzz-2.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:975ed43da9f3f4e98c47fbc503859438c734ff283fa485a532c81f5b844d7781"},
+ {file = "rapidfuzz-2.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee7a3745a0e76565de1036af4591a64489319834e3abd8d61565e1c86ae35e8"},
+ {file = "rapidfuzz-2.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cb086c659a913fc8a1fb0746557abaeb1d0c2ac4dc60905c46478d80898e3448"},
+ {file = "rapidfuzz-2.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a7aa7ab003f4ec26389521d280ce8d43c471db7ca30eda0c22a9279bed58c839"},
+ {file = "rapidfuzz-2.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3b96fbf1c1ff5fc8c07a77bb2ed20dcb95608963cb08cb8fe3f6265c90f7cba2"},
+ {file = "rapidfuzz-2.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:287a0e70ffe26275325fae08739a5034e013434e3af163e8ccadb6131b292d64"},
+ {file = "rapidfuzz-2.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e55fc8ade3768ea4738ee89b725a290f689cd2c3ee57f61383be04782f2bbe18"},
+ {file = "rapidfuzz-2.3.0-cp38-cp38-win32.whl", hash = "sha256:6c67598d1d074d5f81d36ca651558d252b0cc7fbd5302389a9c1206e021dcf55"},
+ {file = "rapidfuzz-2.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:699be4042f420c9469be7297c2f465aac1ece48d8ab5efcb1b071d54224b946e"},
+ {file = "rapidfuzz-2.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:44aa685ff81fea37487e2255205b5db5d448e7f07cb483ca0896350aa38f920a"},
+ {file = "rapidfuzz-2.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:af2aec580734e04933533d94ba76f7c6d7b590aff7246d960e30049ef412bc0e"},
+ {file = "rapidfuzz-2.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a3dcc7744a3b5f488b48c39d9e7a7e4e059a5c419bf2fdcc373af873fb565eb3"},
+ {file = "rapidfuzz-2.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e44809e3200d9cd78bf5093fd149857de61be5ab7aeb633ba1bea9e84719a0e2"},
+ {file = "rapidfuzz-2.3.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e985a889e23770061b7c221170a41d65a3df53a3488bfd422f2a315ae1299fb"},
+ {file = "rapidfuzz-2.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f51eb2effe7c3061840821f8c9102aab58a2d46a3a4dd6a2df99e08067e6e3f"},
+ {file = "rapidfuzz-2.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:11c40d1094aa638b18968ede4661e2562753c04ab2d8a04c6184f845c0ea8d1b"},
+ {file = "rapidfuzz-2.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c00cfa91551c58aa3f6646ae0f06d4a28391f3b0fc78975999c2275272872b8"},
+ {file = "rapidfuzz-2.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:58ef0edebb11c8f076759b0b54c34ae9a05209e2a4725bd5308db08606526f65"},
+ {file = "rapidfuzz-2.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:34dc7096e76ffa0eab7d4071d0020c3c7e2588ba70287156d3694a5e4a202451"},
+ {file = "rapidfuzz-2.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c5bba51120e00320e7e2f6866a0cd6757848b73eb11795cc4247a3cea8555f29"},
+ {file = "rapidfuzz-2.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f7a90ac59dda626f981f958b42158956b57e8c61c2577c91d2459a0595c9a79"},
+ {file = "rapidfuzz-2.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1d40f779edfe9abf56bd10e6cbfddb156b471e63a9853802a897ec54762383a3"},
+ {file = "rapidfuzz-2.3.0-cp39-cp39-win32.whl", hash = "sha256:70339dbb6b2111d3f2f570c172ba53664b3775218ea59183e4c7f9246c2aa06d"},
+ {file = "rapidfuzz-2.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:abc22ee309d4c41c014b1c3f697072a5c1dff2ec72b2fa32dcdfb6adc224ce14"},
+ {file = "rapidfuzz-2.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:605a8f1a29500cd37ba80e9152565424dfc8d9a5c4de2cefca6317535464839a"},
+ {file = "rapidfuzz-2.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36ecb511f6521f892fd69833765422d708a09a8f486b4d1cdf4921095a2c1ca3"},
+ {file = "rapidfuzz-2.3.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9fd10265f4c8ff480529e615b4ad115a63ee8c346c0752118f25381ece22a253"},
+ {file = "rapidfuzz-2.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b4fa5ddb212baa5b73cf9cc124532d9fd641812afb301d04b16472844330ee9"},
+ {file = "rapidfuzz-2.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:612ef2909fa89b01eff9a549b814390a28a1a293eae78ccd9d1d1353ae6f5c41"},
+ {file = "rapidfuzz-2.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:137f0711db4908b68cbedadf1e1a60fa112e7f5c3b90bba561aad32d740cb84d"},
+ {file = "rapidfuzz-2.3.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d95b5550dcd195b10fee90686d853b5b86fc408e441b5d5e2f0f0d62ee6c7c9"},
+ {file = "rapidfuzz-2.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca9e9472957d8bd84f1cda25b1da6b815220c7a0127f7a1c4b34ecce6875f858"},
+ {file = "rapidfuzz-2.3.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:96c16c807803bbebf59e6bf54a7b970791040d5728f05c5022dcdcab3af8c885"},
+ {file = "rapidfuzz-2.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d339bb04dbb1f07fc07b8ed51211cee579a2cc72127095a04b2b4f1693efc0c"},
+ {file = "rapidfuzz-2.3.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb670143463cca6fb3346e7d61eebd53041547d91729df08f6f4827bab83e8d7"},
+ {file = "rapidfuzz-2.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10b9f5c1681ec4bfcfcf2975453892346ec1ddf00a70d489c0cc67dfd8cc8e23"},
+ {file = "rapidfuzz-2.3.0.tar.gz", hash = "sha256:426d176c7d17f3ae0954d2bb30a3352cc9fa9f819daf458b5af7980e8e4dcd93"},
+]
+redis = [
+ {file = "redis-4.3.4-py3-none-any.whl", hash = "sha256:a52d5694c9eb4292770084fa8c863f79367ca19884b329ab574d5cb2036b3e54"},
+ {file = "redis-4.3.4.tar.gz", hash = "sha256:ddf27071df4adf3821c4f2ca59d67525c3a82e5f268bed97b813cb4fabf87880"},
+]
+regex = [
+ {file = "regex-2022.7.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:55911aba9bae9ad826971d2c80428425625a3dd0c00b94e9bb19361888b983a6"},
+ {file = "regex-2022.7.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1dee18c683a0603445ff9e77ffc39f1a3997f43ee07ae04ac80228fc5565fc4d"},
+ {file = "regex-2022.7.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42702dba0281bcafbcf194770ecb987d60854946071c622777e6d207b3c169bc"},
+ {file = "regex-2022.7.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff0e0c3a48c635529a1723d2fea9326da1dacdba5db20be1a4eeaf56580e3949"},
+ {file = "regex-2022.7.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b5f1e598b9b823fb37f2f1baf930bb5f30ae4a3d9b67dfdc63f8f2374f336679"},
+ {file = "regex-2022.7.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e19695f7b8de8a3b7d940288abedf48dfcfc0cd8d36f360e5b1bc5e1c3f02a72"},
+ {file = "regex-2022.7.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd0b115c4fab388b1131c89518cdd98db38d88c55cedfffc71de33c92eeee9c6"},
+ {file = "regex-2022.7.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8e324436b7f8bbb8e7b3c4593b01d1dce7215befc83a60569ff34a38d6c250ae"},
+ {file = "regex-2022.7.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:39ed69803697f1e1e9f1fb1e0b5a8116c55c130745ecd39485cc6255d3b9f046"},
+ {file = "regex-2022.7.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:513be18bcf5f27076990dd111f72270d33188653e772023985be92a2c5438382"},
+ {file = "regex-2022.7.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e4a72f70ad7aa3df8244da55cf21e28b6f0640a8d8e0065dfa7ec477dd2b4ea4"},
+ {file = "regex-2022.7.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:3ef5a4ced251a501962d1c8797d15978dd97661721e337cbe88d8bcdb9cd0d56"},
+ {file = "regex-2022.7.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f86be4e30cf2ffcd67845251c8549d70740cd6eec77bd38d977c4c0640eefc24"},
+ {file = "regex-2022.7.25-cp310-cp310-win32.whl", hash = "sha256:4d4640ab9fd3659378eab2ee6f47c3e04b4a269bf206475652c6d8520a9301cc"},
+ {file = "regex-2022.7.25-cp310-cp310-win_amd64.whl", hash = "sha256:af3d5c74af5ae5d04d597ea61e5e9e0b84e84509e58d1e52aaefbae81cb697bb"},
+ {file = "regex-2022.7.25-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a23653a18c1d69760a2d8b6793478815cf5dc8c12f3b6e608e50aed49829f0ef"},
+ {file = "regex-2022.7.25-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccf10d7d0f25a3c5e123c97ffbab8d4b1429a3c25fbd50812010075bd5d844fd"},
+ {file = "regex-2022.7.25-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:933752abc9931cb53eccbd4ab3aedbcd0f1797c0a1b19ed385952e265636b2b6"},
+ {file = "regex-2022.7.25-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:750b5de7982e568c1bb60388dea1c3abd674d1d579b87ef1b945ba4da53eb5e2"},
+ {file = "regex-2022.7.25-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fac0dd2f11a165a79e271a04226378a008c83368031c6a9294a6df9cd1c13c05"},
+ {file = "regex-2022.7.25-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48018c71ce7b2fe80c1eb16b9104d7d04d07567e9333159810a4ae5ef8cdf01f"},
+ {file = "regex-2022.7.25-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:15bc8cddffe3a9181572c6bcdf45b145691fff1b5712767e7d7a6ef5d32f424f"},
+ {file = "regex-2022.7.25-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:50dd20fd10dafd9b697f1c0629285790d86e66946caa2c6a1135f67846d9b495"},
+ {file = "regex-2022.7.25-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:438b36fbf9446b94325eaeeb1336e2291cd81daeef91b9c728c0946ffbc42ba4"},
+ {file = "regex-2022.7.25-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:7378a6fba8a043b3c5fb8cf915044c814ebb2463b0a7137ec09ae0b1b10f5484"},
+ {file = "regex-2022.7.25-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:609a97626bf310e8cd7c79173e6ed8acab7f01ed4519b7936e998b54b3eb8d31"},
+ {file = "regex-2022.7.25-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:9b8d411a547b47852020242f9c384da35d4c65ccf159ae55a3ba0e50b6220932"},
+ {file = "regex-2022.7.25-cp36-cp36m-win32.whl", hash = "sha256:fbbf9858a3043f632c9da2a82e4ce895016dfb401f59ab110900121121ee73b7"},
+ {file = "regex-2022.7.25-cp36-cp36m-win_amd64.whl", hash = "sha256:1903a2a6c4463488452e953a49f7e6663cfea9ff5e75b09333cbcc840e727a5b"},
+ {file = "regex-2022.7.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:76696de39cbbbf976aa85cbd7b1f3ea2d98b3bc9889f6739fdb6cda85a7f05aa"},
+ {file = "regex-2022.7.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0c12e5c14eeb5e484c688f2db57ca4a8182d09b40ab69f73147dc32bcdf849d"},
+ {file = "regex-2022.7.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbc0c5b350036ce49a8fd6015a29e4621de725fa99d9e985d3d76b820d44e5a9"},
+ {file = "regex-2022.7.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c942696b541ce6be4e3cc2c963b48671277b38ebd4a28af803b511b2885759b7"},
+ {file = "regex-2022.7.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fddd2ef742f05a18fde1d1c74df12fa6f426945cfb6fefba3fa1c5380e2dd2bf"},
+ {file = "regex-2022.7.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1b83baa19355c8dd0ec23e725f18450be01bc464ba1f1865cfada03594fa629"},
+ {file = "regex-2022.7.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3ef700d411b900fcff91f1ef16771bf085a9f9a376d16d8a643e8a20ff6dcb7b"},
+ {file = "regex-2022.7.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b24133df3d3c57a901f6a63ba3783d6eed1d0561ed1cafd027f0789e76a10615"},
+ {file = "regex-2022.7.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1228f5a6be5b45ce7b66a69a77682632f0ce64cea1d7da505f33972e01f1f3fe"},
+ {file = "regex-2022.7.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:9eec276e6419de4f93824f9373b28a2a8eaed04f28514000cc6a41b64703d804"},
+ {file = "regex-2022.7.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:ab950bbafafe9bf2e0a75b9f17291500fa7794f398834f1f4a71c18dddece130"},
+ {file = "regex-2022.7.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a60840ebd37fe0152b5be50b56e8a958e1430837475311986f867dabad1c7474"},
+ {file = "regex-2022.7.25-cp37-cp37m-win32.whl", hash = "sha256:a0c38edcc78556625cbadf48eb87decd5d3c5e82fc4810dd22c19a5498d2329d"},
+ {file = "regex-2022.7.25-cp37-cp37m-win_amd64.whl", hash = "sha256:f755fba215ddafa26211e33ac91b48dcebf84ff28590790e5b7711b46fa4095d"},
+ {file = "regex-2022.7.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8d928237cf78cfe3b46b608f87e255c45a1e11d04e7dd2c49cb60200cbd6f987"},
+ {file = "regex-2022.7.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ea9f01224c25101c5f2c6dceebd29d1431525637d596241935640e4de0fbb822"},
+ {file = "regex-2022.7.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d2a85a4a134011eb517f2a752f4e488b0a4f6b6ad00ef247f9fac57f9ff4f0"},
+ {file = "regex-2022.7.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9163ef45bfebc39838848330cb94f79b563f738c60fc0a20a7f0a30f13ec1573"},
+ {file = "regex-2022.7.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0798f6b97c3f8139c95af7b128a60909f5305b2e431a012083063298b2481e5d"},
+ {file = "regex-2022.7.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03cdd06061426378a83e8a5bdec9cc71b964c35e329f68fb7058d08791780c83"},
+ {file = "regex-2022.7.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f898bf0a9613cc8b7f7af6fdcd80cc8e7659787908834c63391f22271fdb1c14"},
+ {file = "regex-2022.7.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b131c7c94da56f8f1c59b4540c37c20973119608ec8cf42b3ebb40a94f3afc2c"},
+ {file = "regex-2022.7.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a2afa24d06301f4ffcb00244d30df1c12e65cabf30dcb0ba8b871d6b0c54d19e"},
+ {file = "regex-2022.7.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d3ce546e54cfafa9dee60b11b7f99b87058d81ab62bd05e366fc5bf6b2c1383a"},
+ {file = "regex-2022.7.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f7329e66c6bd9950eb428f225db3982e5f54e53d3d95951da424dce9aa621eae"},
+ {file = "regex-2022.7.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ae6cd6ce16681d345592d74a0a92b25a9530d4055be460af425e654d891cdee4"},
+ {file = "regex-2022.7.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fddd7ddd520661085ffd91f1db74b18e4cf5ed9b6e939aa7d31ca1ea67bc7621"},
+ {file = "regex-2022.7.25-cp38-cp38-win32.whl", hash = "sha256:f049a9fdacdbc4e84afcec7a3b14a8309699a7347c95a525d49c4b9a9c353cee"},
+ {file = "regex-2022.7.25-cp38-cp38-win_amd64.whl", hash = "sha256:50497f3d8a1e8d8055c6da1768c98f5b618039e572aacdcccd642704db6077eb"},
+ {file = "regex-2022.7.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:89f4c531409ef01aa12b7c15bb489415e219c186725d44bc12a8f279afde3fe2"},
+ {file = "regex-2022.7.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:535a2392a0f11f7df80f43e63a5b69c51bb29a10a690e4ae5ad721b9fe50684d"},
+ {file = "regex-2022.7.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f3de4baf25e960a3048a6ecd0246cedcdfeb462a741d55e9a42e91add5a4a99"},
+ {file = "regex-2022.7.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2c8f542c5afd36e60237dbbabc95722135047d4c2844b9c4bff74c7177a50a1"},
+ {file = "regex-2022.7.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc49d9c6289df4c7895c85094872ef98ce7f609ba0ecbeb77acdd7f8362cda7d"},
+ {file = "regex-2022.7.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:730cc311757153d59bf2bcf06d4026e3c998c1919c06557ad0e382235049b376"},
+ {file = "regex-2022.7.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14882770017436aabe4cfa2651a9777f9faa2625bc0f6cdaec362697a8a964c3"},
+ {file = "regex-2022.7.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1991348464df42a6bc04601e1241dfa4a9ec4d599338dc64760f2c299e1cb996"},
+ {file = "regex-2022.7.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:03d7ff80e3a276ef460baaa745d425162c19d8ea093d60ecf47f52ffee37aea5"},
+ {file = "regex-2022.7.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ed42feff196aaf262db1878d5ac553a3bcef147caf1362e7095f1115b71ae0e1"},
+ {file = "regex-2022.7.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4433690ff474fd95a3058085aed5fe12ac4e09d4f4b2b983de35e3a6c899afa0"},
+ {file = "regex-2022.7.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:454c2c81d34eb4e1d015acbca0488789c17fc84188e336365eaa31a16c964c04"},
+ {file = "regex-2022.7.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a06d6ada6bef79aaa550ef37c7d529da60b81c02838d9dd9c5ab788becfc57d4"},
+ {file = "regex-2022.7.25-cp39-cp39-win32.whl", hash = "sha256:cc018ce0f1b62df155a5b9c9a81464040a87e97fd9bd05e0febe92568c63e678"},
+ {file = "regex-2022.7.25-cp39-cp39-win_amd64.whl", hash = "sha256:26d6e9a6431626c20821d0165a4c4508acb20a57e4c04ee77c96f01b7fe4c09c"},
+ {file = "regex-2022.7.25.tar.gz", hash = "sha256:bd0883e86964cd61360ffc36dbebbc49b928e92a306f886eab02c11dfde5b7aa"},
+]
requests = []
-requests-file = []
-sentry-sdk = []
-sgmllib3k = []
-six = []
-snowballstemmer = []
-sortedcontainers = []
-soupsieve = []
-statsd = []
-taskipy = []
-tldextract = []
-toml = []
-tomli = []
-urllib3 = []
-virtualenv = []
+requests-file = [
+ {file = "requests-file-1.5.1.tar.gz", hash = "sha256:07d74208d3389d01c38ab89ef403af0cfec63957d53a0081d8eca738d0247d8e"},
+ {file = "requests_file-1.5.1-py2.py3-none-any.whl", hash = "sha256:dfe5dae75c12481f68ba353183c53a65e6044c923e64c24b2209f6c7570ca953"},
+]
+sentry-sdk = [
+ {file = "sentry-sdk-1.8.0.tar.gz", hash = "sha256:9c68e82f7b1ad78aee6cdef57c2c0f6781ddd9ffa8848f4503c5a8e02b360eea"},
+ {file = "sentry_sdk-1.8.0-py2.py3-none-any.whl", hash = "sha256:5daae00f91dd72d9bb1a65307221fe291417a7b9c30527de3a6f0d9be4ddf08d"},
+]
+sgmllib3k = [
+ {file = "sgmllib3k-1.0.0.tar.gz", hash = "sha256:7868fb1c8bfa764c1ac563d3cf369c381d1325d36124933a726f29fcdaa812e9"},
+]
+six = [
+ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
+ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
+]
+snowballstemmer = [
+ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"},
+ {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"},
+]
+sortedcontainers = [
+ {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"},
+ {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"},
+]
+soupsieve = [
+ {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"},
+ {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"},
+]
+statsd = [
+ {file = "statsd-3.3.0-py2.py3-none-any.whl", hash = "sha256:c610fb80347fca0ef62666d241bce64184bd7cc1efe582f9690e045c25535eaa"},
+ {file = "statsd-3.3.0.tar.gz", hash = "sha256:e3e6db4c246f7c59003e51c9720a51a7f39a396541cb9b147ff4b14d15b5dd1f"},
+]
+taskipy = [
+ {file = "taskipy-1.10.2-py3-none-any.whl", hash = "sha256:58d5382d90d5dd94ca8c612855377e5a98b9cb669c208ebb55d6a45946de3f9b"},
+ {file = "taskipy-1.10.2.tar.gz", hash = "sha256:eae4feb74909da3ad0ca0275802e1c2f56048612529bd763feb922d284d8a253"},
+]
+tldextract = [
+ {file = "tldextract-3.3.1-py3-none-any.whl", hash = "sha256:35a0260570e214d8d3cfeeb403992fe9e2b686925f63c9b03c5933408ac2aa5a"},
+ {file = "tldextract-3.3.1.tar.gz", hash = "sha256:fe15ac3205e5a25b61689369f98cb45c7778a8f2af113d7c11559ece5195f2d6"},
+]
+toml = [
+ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
+ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
+]
+tomli = [
+ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
+ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
+]
+typing-extensions = []
+urllib3 = [
+ {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"},
+ {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"},
+]
+virtualenv = [
+ {file = "virtualenv-20.16.5-py3-none-any.whl", hash = "sha256:d07dfc5df5e4e0dbc92862350ad87a36ed505b978f6c39609dc489eadd5b0d27"},
+ {file = "virtualenv-20.16.5.tar.gz", hash = "sha256:227ea1b9994fdc5ea31977ba3383ef296d7472ea85be9d6732e42a91c04e80da"},
+]
wrapt = []
-yarl = []
+yarl = [
+ {file = "yarl-1.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:abc06b97407868ef38f3d172762f4069323de52f2b70d133d096a48d72215d28"},
+ {file = "yarl-1.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:07b21e274de4c637f3e3b7104694e53260b5fc10d51fb3ec5fed1da8e0f754e3"},
+ {file = "yarl-1.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9de955d98e02fab288c7718662afb33aab64212ecb368c5dc866d9a57bf48880"},
+ {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ec362167e2c9fd178f82f252b6d97669d7245695dc057ee182118042026da40"},
+ {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:20df6ff4089bc86e4a66e3b1380460f864df3dd9dccaf88d6b3385d24405893b"},
+ {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5999c4662631cb798496535afbd837a102859568adc67d75d2045e31ec3ac497"},
+ {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed19b74e81b10b592084a5ad1e70f845f0aacb57577018d31de064e71ffa267a"},
+ {file = "yarl-1.8.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e4808f996ca39a6463f45182e2af2fae55e2560be586d447ce8016f389f626f"},
+ {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2d800b9c2eaf0684c08be5f50e52bfa2aa920e7163c2ea43f4f431e829b4f0fd"},
+ {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6628d750041550c5d9da50bb40b5cf28a2e63b9388bac10fedd4f19236ef4957"},
+ {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f5af52738e225fcc526ae64071b7e5342abe03f42e0e8918227b38c9aa711e28"},
+ {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:76577f13333b4fe345c3704811ac7509b31499132ff0181f25ee26619de2c843"},
+ {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c03f456522d1ec815893d85fccb5def01ffaa74c1b16ff30f8aaa03eb21e453"},
+ {file = "yarl-1.8.1-cp310-cp310-win32.whl", hash = "sha256:ea30a42dc94d42f2ba4d0f7c0ffb4f4f9baa1b23045910c0c32df9c9902cb272"},
+ {file = "yarl-1.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:9130ddf1ae9978abe63808b6b60a897e41fccb834408cde79522feb37fb72fb0"},
+ {file = "yarl-1.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0ab5a138211c1c366404d912824bdcf5545ccba5b3ff52c42c4af4cbdc2c5035"},
+ {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0fb2cb4204ddb456a8e32381f9a90000429489a25f64e817e6ff94879d432fc"},
+ {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85cba594433915d5c9a0d14b24cfba0339f57a2fff203a5d4fd070e593307d0b"},
+ {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ca7e596c55bd675432b11320b4eacc62310c2145d6801a1f8e9ad160685a231"},
+ {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0f77539733e0ec2475ddcd4e26777d08996f8cd55d2aef82ec4d3896687abda"},
+ {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29e256649f42771829974e742061c3501cc50cf16e63f91ed8d1bf98242e5507"},
+ {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7fce6cbc6c170ede0221cc8c91b285f7f3c8b9fe28283b51885ff621bbe0f8ee"},
+ {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:59ddd85a1214862ce7c7c66457f05543b6a275b70a65de366030d56159a979f0"},
+ {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:12768232751689c1a89b0376a96a32bc7633c08da45ad985d0c49ede691f5c0d"},
+ {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:b19255dde4b4f4c32e012038f2c169bb72e7f081552bea4641cab4d88bc409dd"},
+ {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6c8148e0b52bf9535c40c48faebb00cb294ee577ca069d21bd5c48d302a83780"},
+ {file = "yarl-1.8.1-cp37-cp37m-win32.whl", hash = "sha256:de839c3a1826a909fdbfe05f6fe2167c4ab033f1133757b5936efe2f84904c07"},
+ {file = "yarl-1.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:dd032e8422a52e5a4860e062eb84ac94ea08861d334a4bcaf142a63ce8ad4802"},
+ {file = "yarl-1.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:19cd801d6f983918a3f3a39f3a45b553c015c5aac92ccd1fac619bd74beece4a"},
+ {file = "yarl-1.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6347f1a58e658b97b0a0d1ff7658a03cb79bdbda0331603bed24dd7054a6dea1"},
+ {file = "yarl-1.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c0da7e44d0c9108d8b98469338705e07f4bb7dab96dbd8fa4e91b337db42548"},
+ {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5587bba41399854703212b87071c6d8638fa6e61656385875f8c6dff92b2e461"},
+ {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31a9a04ecccd6b03e2b0e12e82131f1488dea5555a13a4d32f064e22a6003cfe"},
+ {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:205904cffd69ae972a1707a1bd3ea7cded594b1d773a0ce66714edf17833cdae"},
+ {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea513a25976d21733bff523e0ca836ef1679630ef4ad22d46987d04b372d57fc"},
+ {file = "yarl-1.8.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0b51530877d3ad7a8d47b2fff0c8df3b8f3b8deddf057379ba50b13df2a5eae"},
+ {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d2b8f245dad9e331540c350285910b20dd913dc86d4ee410c11d48523c4fd546"},
+ {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ab2a60d57ca88e1d4ca34a10e9fb4ab2ac5ad315543351de3a612bbb0560bead"},
+ {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:449c957ffc6bc2309e1fbe67ab7d2c1efca89d3f4912baeb8ead207bb3cc1cd4"},
+ {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a165442348c211b5dea67c0206fc61366212d7082ba8118c8c5c1c853ea4d82e"},
+ {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b3ded839a5c5608eec8b6f9ae9a62cb22cd037ea97c627f38ae0841a48f09eae"},
+ {file = "yarl-1.8.1-cp38-cp38-win32.whl", hash = "sha256:c1445a0c562ed561d06d8cbc5c8916c6008a31c60bc3655cdd2de1d3bf5174a0"},
+ {file = "yarl-1.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:56c11efb0a89700987d05597b08a1efcd78d74c52febe530126785e1b1a285f4"},
+ {file = "yarl-1.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e80ed5a9939ceb6fda42811542f31c8602be336b1fb977bccb012e83da7e4936"},
+ {file = "yarl-1.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6afb336e23a793cd3b6476c30f030a0d4c7539cd81649683b5e0c1b0ab0bf350"},
+ {file = "yarl-1.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4c322cbaa4ed78a8aac89b2174a6df398faf50e5fc12c4c191c40c59d5e28357"},
+ {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fae37373155f5ef9b403ab48af5136ae9851151f7aacd9926251ab26b953118b"},
+ {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5395da939ffa959974577eff2cbfc24b004a2fb6c346918f39966a5786874e54"},
+ {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:076eede537ab978b605f41db79a56cad2e7efeea2aa6e0fa8f05a26c24a034fb"},
+ {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d1a50e461615747dd93c099f297c1994d472b0f4d2db8a64e55b1edf704ec1c"},
+ {file = "yarl-1.8.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7de89c8456525650ffa2bb56a3eee6af891e98f498babd43ae307bd42dca98f6"},
+ {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4a88510731cd8d4befaba5fbd734a7dd914de5ab8132a5b3dde0bbd6c9476c64"},
+ {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2d93a049d29df172f48bcb09acf9226318e712ce67374f893b460b42cc1380ae"},
+ {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:21ac44b763e0eec15746a3d440f5e09ad2ecc8b5f6dcd3ea8cb4773d6d4703e3"},
+ {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d0272228fabe78ce00a3365ffffd6f643f57a91043e119c289aaba202f4095b0"},
+ {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:99449cd5366fe4608e7226c6cae80873296dfa0cde45d9b498fefa1de315a09e"},
+ {file = "yarl-1.8.1-cp39-cp39-win32.whl", hash = "sha256:8b0af1cf36b93cee99a31a545fe91d08223e64390c5ecc5e94c39511832a4bb6"},
+ {file = "yarl-1.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:de49d77e968de6626ba7ef4472323f9d2e5a56c1d85b7c0e2a190b2173d3b9be"},
+ {file = "yarl-1.8.1.tar.gz", hash = "sha256:af887845b8c2e060eb5605ff72b6f2dd2aab7a761379373fd89d314f4752abbf"},
+]
diff --git a/pyproject.toml b/pyproject.toml
index 36c3b5392..66fdc3a0c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -27,8 +27,8 @@ lxml = "4.9.1"
# Must be kept on this version unless doc command output is fixed
# See https://github.com/python-discord/bot/pull/2156
markdownify = "0.6.1"
-
more_itertools = "8.13.0"
+pydantic = "1.10.2"
python-dateutil = "2.8.2"
python-frontmatter = "1.0.0"
pyyaml = "6.0"
diff --git a/tests/bot/exts/filtering/__init__.py b/tests/bot/exts/filtering/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/tests/bot/exts/filtering/__init__.py
diff --git a/tests/bot/exts/filtering/test_filters.py b/tests/bot/exts/filtering/test_filters.py
new file mode 100644
index 000000000..214637b52
--- /dev/null
+++ b/tests/bot/exts/filtering/test_filters.py
@@ -0,0 +1,41 @@
+import unittest
+
+from bot.exts.filtering._filter_context import Event, FilterContext
+from bot.exts.filtering._filters.token import TokenFilter
+from tests.helpers import MockMember, MockMessage, MockTextChannel
+
+
+class FilterTests(unittest.TestCase):
+ """Test functionality of the token filter."""
+
+ def setUp(self) -> None:
+ member = MockMember(id=123)
+ channel = MockTextChannel(id=345)
+ message = MockMessage(author=member, channel=channel)
+ self.ctx = FilterContext(Event.MESSAGE, member, channel, "", message)
+
+ def test_token_filter_triggers(self):
+ """The filter should evaluate to True only if its token is found in the context content."""
+ test_cases = (
+ (r"hi", "oh hi there", True),
+ (r"hi", "goodbye", False),
+ (r"bla\d{2,4}", "bla18", True),
+ (r"bla\d{2,4}", "bla1", False)
+ )
+
+ for pattern, content, expected in test_cases:
+ with self.subTest(
+ pattern=pattern,
+ content=content,
+ expected=expected,
+ ):
+ filter_ = TokenFilter({
+ "id": 1,
+ "content": pattern,
+ "description": None,
+ "settings": {},
+ "additional_field": "{}" # noqa: P103
+ })
+ self.ctx.content = content
+ result = filter_.triggered_on(self.ctx)
+ self.assertEqual(result, expected)
diff --git a/tests/bot/exts/filtering/test_settings.py b/tests/bot/exts/filtering/test_settings.py
new file mode 100644
index 000000000..ac21a5d47
--- /dev/null
+++ b/tests/bot/exts/filtering/test_settings.py
@@ -0,0 +1,20 @@
+import unittest
+
+import bot.exts.filtering._settings
+from bot.exts.filtering._settings import create_settings
+
+
+class FilterTests(unittest.TestCase):
+ """Test functionality of the Settings class and its subclasses."""
+
+ def test_create_settings_returns_none_for_empty_data(self):
+ """`create_settings` should return a tuple of two Nones when passed an empty dict."""
+ result = create_settings({})
+
+        self.assertEqual(result, (None, None))
+
+ def test_unrecognized_entry_makes_a_warning(self):
+ """When an unrecognized entry name is passed to `create_settings`, it should be added to `_already_warned`."""
+ create_settings({"abcd": {}})
+
+ self.assertIn("abcd", bot.exts.filtering._settings._already_warned)
diff --git a/tests/bot/exts/filtering/test_settings_entries.py b/tests/bot/exts/filtering/test_settings_entries.py
new file mode 100644
index 000000000..d18861bd6
--- /dev/null
+++ b/tests/bot/exts/filtering/test_settings_entries.py
@@ -0,0 +1,272 @@
+import unittest
+
+from bot.exts.filtering._filter_context import Event, FilterContext
+from bot.exts.filtering._settings_types.bypass_roles import RoleBypass
+from bot.exts.filtering._settings_types.channel_scope import ChannelScope
+from bot.exts.filtering._settings_types.filter_dm import FilterDM
+from bot.exts.filtering._settings_types.infraction_and_notification import (
+ Infraction, InfractionAndNotification, superstar
+)
+from tests.helpers import MockCategoryChannel, MockDMChannel, MockMember, MockMessage, MockRole, MockTextChannel
+
+
+class FilterTests(unittest.TestCase):
+    """Test functionality of the individual settings entry types."""
+
+ def setUp(self) -> None:
+ member = MockMember(id=123)
+ channel = MockTextChannel(id=345)
+ message = MockMessage(author=member, channel=channel)
+ self.ctx = FilterContext(Event.MESSAGE, member, channel, "", message)
+
+ def test_role_bypass_is_off_for_user_without_roles(self):
+ """The role bypass should trigger when a user has no roles."""
+ member = MockMember()
+ self.ctx.author = member
+ bypass_entry = RoleBypass(["123"])
+
+ result = bypass_entry.triggers_on(self.ctx)
+
+ self.assertTrue(result)
+
+ def test_role_bypass_is_on_for_a_user_with_the_right_role(self):
+ """The role bypass should not trigger when the user has one of its roles."""
+ cases = (
+ ([123], ["123"]),
+ ([123, 234], ["123"]),
+ ([123], ["123", "234"]),
+ ([123, 234], ["123", "234"])
+ )
+
+ for user_role_ids, bypasses in cases:
+ with self.subTest(user_role_ids=user_role_ids, bypasses=bypasses):
+ user_roles = [MockRole(id=role_id) for role_id in user_role_ids]
+ member = MockMember(roles=user_roles)
+ self.ctx.author = member
+ bypass_entry = RoleBypass(bypasses)
+
+ result = bypass_entry.triggers_on(self.ctx)
+
+ self.assertFalse(result)
+
+ def test_context_doesnt_trigger_for_empty_channel_scope(self):
+ """A filter is enabled for all channels by default."""
+ channel = MockTextChannel()
+ scope = ChannelScope({"disabled_channels": None, "disabled_categories": None, "enabled_channels": None})
+ self.ctx.channel = channel
+
+ result = scope.triggers_on(self.ctx)
+
+ self.assertTrue(result)
+
+ def test_context_doesnt_trigger_for_disabled_channel(self):
+ """A filter shouldn't trigger if it's been disabled in the channel."""
+ channel = MockTextChannel(id=123)
+ scope = ChannelScope({"disabled_channels": ["123"], "disabled_categories": None, "enabled_channels": None})
+ self.ctx.channel = channel
+
+ result = scope.triggers_on(self.ctx)
+
+ self.assertFalse(result)
+
+ def test_context_doesnt_trigger_in_disabled_category(self):
+ """A filter shouldn't trigger if it's been disabled in the category."""
+ channel = MockTextChannel(category=MockCategoryChannel(id=456))
+ scope = ChannelScope({
+ "disabled_channels": None, "disabled_categories": ["456"], "enabled_channels": None
+ })
+ self.ctx.channel = channel
+
+ result = scope.triggers_on(self.ctx)
+
+ self.assertFalse(result)
+
+ def test_context_triggers_in_enabled_channel_in_disabled_category(self):
+ """A filter should trigger in an enabled channel even if it's been disabled in the category."""
+ channel = MockTextChannel(id=123, category=MockCategoryChannel(id=234))
+ scope = ChannelScope({"disabled_channels": None, "disabled_categories": ["234"], "enabled_channels": ["123"]})
+ self.ctx.channel = channel
+
+ result = scope.triggers_on(self.ctx)
+
+ self.assertTrue(result)
+
+ def test_filtering_dms_when_necessary(self):
+ """A filter correctly ignores or triggers in a channel depending on the value of FilterDM."""
+ cases = (
+ (True, MockDMChannel(), True),
+ (False, MockDMChannel(), False),
+ (True, MockTextChannel(), True),
+ (False, MockTextChannel(), True)
+ )
+
+ for apply_in_dms, channel, expected in cases:
+ with self.subTest(apply_in_dms=apply_in_dms, channel=channel):
+ filter_dms = FilterDM(apply_in_dms)
+ self.ctx.channel = channel
+
+ result = filter_dms.triggers_on(self.ctx)
+
+ self.assertEqual(expected, result)
+
+ def test_infraction_merge_of_same_infraction_type(self):
+ """When both infractions are of the same type, the one with the longer duration wins."""
+ infraction1 = InfractionAndNotification({
+ "infraction_type": "mute",
+ "infraction_reason": "hi",
+ "infraction_duration": 10,
+ "dm_content": "how",
+ "dm_embed": "what is"
+ })
+ infraction2 = InfractionAndNotification({
+ "infraction_type": "mute",
+ "infraction_reason": "there",
+ "infraction_duration": 20,
+ "dm_content": "are you",
+ "dm_embed": "your name"
+ })
+
+ result = infraction1 | infraction2
+
+ self.assertDictEqual(
+ result.to_dict(),
+ {
+ "infraction_type": Infraction.MUTE,
+ "infraction_reason": "there",
+ "infraction_duration": 20.0,
+ "dm_content": "are you",
+ "dm_embed": "your name",
+ "_superstar": None
+ }
+ )
+
+ def test_infraction_merge_of_different_infraction_types(self):
+ """If there are two different infraction types, the one higher up the hierarchy should be picked."""
+ infraction1 = InfractionAndNotification({
+ "infraction_type": "mute",
+ "infraction_reason": "hi",
+ "infraction_duration": 20,
+ "dm_content": "",
+ "dm_embed": ""
+ })
+ infraction2 = InfractionAndNotification({
+ "infraction_type": "ban",
+ "infraction_reason": "",
+ "infraction_duration": 10,
+ "dm_content": "there",
+ "dm_embed": ""
+ })
+
+ result = infraction1 | infraction2
+
+ self.assertDictEqual(
+ result.to_dict(),
+ {
+ "infraction_type": Infraction.BAN,
+ "infraction_reason": "",
+ "infraction_duration": 10.0,
+ "dm_content": "there",
+ "dm_embed": "",
+ "_superstar": None
+ }
+ )
+
+ def test_infraction_merge_with_a_superstar(self):
+ """If there is a superstar infraction, it should be added to a separate field."""
+ infraction1 = InfractionAndNotification({
+ "infraction_type": "mute",
+ "infraction_reason": "hi",
+ "infraction_duration": 20,
+ "dm_content": "there",
+ "dm_embed": ""
+ })
+ infraction2 = InfractionAndNotification({
+ "infraction_type": "superstar",
+ "infraction_reason": "hello",
+ "infraction_duration": 10,
+ "dm_content": "you",
+ "dm_embed": ""
+ })
+
+ result = infraction1 | infraction2
+
+ self.assertDictEqual(
+ result.to_dict(),
+ {
+ "infraction_type": Infraction.MUTE,
+ "infraction_reason": "hi",
+ "infraction_duration": 20.0,
+ "dm_content": "there",
+ "dm_embed": "",
+ "_superstar": superstar("hello", 10.0)
+ }
+ )
+
+ def test_merge_two_superstar_infractions(self):
+ """When two superstar infractions are merged, the infraction type remains a superstar."""
+ infraction1 = InfractionAndNotification({
+ "infraction_type": "superstar",
+ "infraction_reason": "hi",
+ "infraction_duration": 20,
+ "dm_content": "",
+ "dm_embed": ""
+ })
+ infraction2 = InfractionAndNotification({
+ "infraction_type": "superstar",
+ "infraction_reason": "",
+ "infraction_duration": 10,
+ "dm_content": "there",
+ "dm_embed": ""
+ })
+
+ result = infraction1 | infraction2
+
+ self.assertDictEqual(
+ result.to_dict(),
+ {
+ "infraction_type": Infraction.SUPERSTAR,
+ "infraction_reason": "hi",
+ "infraction_duration": 20.0,
+ "dm_content": "",
+ "dm_embed": "",
+ "_superstar": None
+ }
+ )
+
+ def test_merge_a_voiceban_and_a_superstar_with_another_superstar(self):
+        """A voice ban merged with two superstars should keep the longer-lasting superstar under `_superstar`."""
+ infraction1 = InfractionAndNotification({
+ "infraction_type": "voice ban",
+ "infraction_reason": "hi",
+ "infraction_duration": 20,
+ "dm_content": "hello",
+ "dm_embed": ""
+ })
+ infraction2 = InfractionAndNotification({
+ "infraction_type": "superstar",
+ "infraction_reason": "bla",
+ "infraction_duration": 10,
+ "dm_content": "there",
+ "dm_embed": ""
+ })
+ infraction3 = InfractionAndNotification({
+ "infraction_type": "superstar",
+ "infraction_reason": "blabla",
+ "infraction_duration": 20,
+ "dm_content": "there",
+ "dm_embed": ""
+ })
+
+ result = infraction1 | infraction2 | infraction3
+
+ self.assertDictEqual(
+ result.to_dict(),
+ {
+ "infraction_type": Infraction.VOICE_BAN,
+ "infraction_reason": "hi",
+ "infraction_duration": 20,
+ "dm_content": "hello",
+ "dm_embed": "",
+ "_superstar": superstar("blabla", 20)
+ }
+ )
diff --git a/tests/bot/exts/filters/test_antimalware.py b/tests/bot/exts/filters/test_antimalware.py
deleted file mode 100644
index 7282334e2..000000000
--- a/tests/bot/exts/filters/test_antimalware.py
+++ /dev/null
@@ -1,202 +0,0 @@
-import unittest
-from unittest.mock import AsyncMock, Mock
-
-from discord import NotFound
-
-from bot.constants import Channels, STAFF_ROLES
-from bot.exts.filters import antimalware
-from tests.helpers import MockAttachment, MockBot, MockMessage, MockRole
-
-
-class AntiMalwareCogTests(unittest.IsolatedAsyncioTestCase):
- """Test the AntiMalware cog."""
-
- def setUp(self):
- """Sets up fresh objects for each test."""
- self.bot = MockBot()
- self.bot.filter_list_cache = {
- "FILE_FORMAT.True": {
- ".first": {},
- ".second": {},
- ".third": {},
- }
- }
- self.cog = antimalware.AntiMalware(self.bot)
- self.message = MockMessage()
- self.message.webhook_id = None
- self.message.author.bot = None
- self.whitelist = [".first", ".second", ".third"]
-
- async def test_message_with_allowed_attachment(self):
- """Messages with allowed extensions should not be deleted"""
- attachment = MockAttachment(filename="python.first")
- self.message.attachments = [attachment]
-
- await self.cog.on_message(self.message)
- self.message.delete.assert_not_called()
-
- async def test_message_without_attachment(self):
- """Messages without attachments should result in no action."""
- await self.cog.on_message(self.message)
- self.message.delete.assert_not_called()
-
- async def test_direct_message_with_attachment(self):
- """Direct messages should have no action taken."""
- attachment = MockAttachment(filename="python.disallowed")
- self.message.attachments = [attachment]
- self.message.guild = None
-
- await self.cog.on_message(self.message)
-
- self.message.delete.assert_not_called()
-
- async def test_webhook_message_with_illegal_extension(self):
- """A webhook message containing an illegal extension should be ignored."""
- attachment = MockAttachment(filename="python.disallowed")
- self.message.webhook_id = 697140105563078727
- self.message.attachments = [attachment]
-
- await self.cog.on_message(self.message)
-
- self.message.delete.assert_not_called()
-
- async def test_bot_message_with_illegal_extension(self):
- """A bot message containing an illegal extension should be ignored."""
- attachment = MockAttachment(filename="python.disallowed")
- self.message.author.bot = 409107086526644234
- self.message.attachments = [attachment]
-
- await self.cog.on_message(self.message)
-
- self.message.delete.assert_not_called()
-
- async def test_message_with_illegal_extension_gets_deleted(self):
- """A message containing an illegal extension should send an embed."""
- attachment = MockAttachment(filename="python.disallowed")
- self.message.attachments = [attachment]
-
- await self.cog.on_message(self.message)
-
- self.message.delete.assert_called_once()
-
- async def test_message_send_by_staff(self):
- """A message send by a member of staff should be ignored."""
- staff_role = MockRole(id=STAFF_ROLES[0])
- self.message.author.roles.append(staff_role)
- attachment = MockAttachment(filename="python.disallowed")
- self.message.attachments = [attachment]
-
- await self.cog.on_message(self.message)
-
- self.message.delete.assert_not_called()
-
- async def test_python_file_redirect_embed_description(self):
- """A message containing a .py file should result in an embed redirecting the user to our paste site"""
- attachment = MockAttachment(filename="python.py")
- self.message.attachments = [attachment]
- self.message.channel.send = AsyncMock()
-
- await self.cog.on_message(self.message)
- self.message.channel.send.assert_called_once()
- args, kwargs = self.message.channel.send.call_args
- embed = kwargs.pop("embed")
-
- self.assertEqual(embed.description, antimalware.PY_EMBED_DESCRIPTION)
-
- async def test_txt_file_redirect_embed_description(self):
- """A message containing a .txt/.json/.csv file should result in the correct embed."""
- test_values = (
- ("text", ".txt"),
- ("json", ".json"),
- ("csv", ".csv"),
- )
-
- for file_name, disallowed_extension in test_values:
- with self.subTest(file_name=file_name, disallowed_extension=disallowed_extension):
-
- attachment = MockAttachment(filename=f"{file_name}{disallowed_extension}")
- self.message.attachments = [attachment]
- self.message.channel.send = AsyncMock()
- antimalware.TXT_EMBED_DESCRIPTION = Mock()
- antimalware.TXT_EMBED_DESCRIPTION.format.return_value = "test"
-
- await self.cog.on_message(self.message)
- self.message.channel.send.assert_called_once()
- args, kwargs = self.message.channel.send.call_args
- embed = kwargs.pop("embed")
- cmd_channel = self.bot.get_channel(Channels.bot_commands)
-
- self.assertEqual(
- embed.description,
- antimalware.TXT_EMBED_DESCRIPTION.format.return_value
- )
- antimalware.TXT_EMBED_DESCRIPTION.format.assert_called_with(
- blocked_extension=disallowed_extension,
- cmd_channel_mention=cmd_channel.mention
- )
-
- async def test_other_disallowed_extension_embed_description(self):
- """Test the description for a non .py/.txt/.json/.csv disallowed extension."""
- attachment = MockAttachment(filename="python.disallowed")
- self.message.attachments = [attachment]
- self.message.channel.send = AsyncMock()
- antimalware.DISALLOWED_EMBED_DESCRIPTION = Mock()
- antimalware.DISALLOWED_EMBED_DESCRIPTION.format.return_value = "test"
-
- await self.cog.on_message(self.message)
- self.message.channel.send.assert_called_once()
- args, kwargs = self.message.channel.send.call_args
- embed = kwargs.pop("embed")
- meta_channel = self.bot.get_channel(Channels.meta)
-
- self.assertEqual(embed.description, antimalware.DISALLOWED_EMBED_DESCRIPTION.format.return_value)
- antimalware.DISALLOWED_EMBED_DESCRIPTION.format.assert_called_with(
- joined_whitelist=", ".join(self.whitelist),
- blocked_extensions_str=".disallowed",
- meta_channel_mention=meta_channel.mention
- )
-
- async def test_removing_deleted_message_logs(self):
- """Removing an already deleted message logs the correct message"""
- attachment = MockAttachment(filename="python.disallowed")
- self.message.attachments = [attachment]
- self.message.delete = AsyncMock(side_effect=NotFound(response=Mock(status=""), message=""))
-
- with self.assertLogs(logger=antimalware.log, level="INFO"):
- await self.cog.on_message(self.message)
- self.message.delete.assert_called_once()
-
- async def test_message_with_illegal_attachment_logs(self):
- """Deleting a message with an illegal attachment should result in a log."""
- attachment = MockAttachment(filename="python.disallowed")
- self.message.attachments = [attachment]
-
- with self.assertLogs(logger=antimalware.log, level="INFO"):
- await self.cog.on_message(self.message)
-
- async def test_get_disallowed_extensions(self):
- """The return value should include all non-whitelisted extensions."""
- test_values = (
- ([], []),
- (self.whitelist, []),
- ([".first"], []),
- ([".first", ".disallowed"], [".disallowed"]),
- ([".disallowed"], [".disallowed"]),
- ([".disallowed", ".illegal"], [".disallowed", ".illegal"]),
- )
-
- for extensions, expected_disallowed_extensions in test_values:
- with self.subTest(extensions=extensions, expected_disallowed_extensions=expected_disallowed_extensions):
- self.message.attachments = [MockAttachment(filename=f"filename{extension}") for extension in extensions]
- disallowed_extensions = self.cog._get_disallowed_extensions(self.message)
- self.assertCountEqual(disallowed_extensions, expected_disallowed_extensions)
-
-
-class AntiMalwareSetupTests(unittest.IsolatedAsyncioTestCase):
- """Tests setup of the `AntiMalware` cog."""
-
- async def test_setup(self):
- """Setup of the extension should call add_cog."""
- bot = MockBot()
- await antimalware.setup(bot)
- bot.add_cog.assert_awaited_once()
diff --git a/tests/bot/exts/filters/test_antispam.py b/tests/bot/exts/filters/test_antispam.py
deleted file mode 100644
index 6a0e4fded..000000000
--- a/tests/bot/exts/filters/test_antispam.py
+++ /dev/null
@@ -1,35 +0,0 @@
-import unittest
-
-from bot.exts.filters import antispam
-
-
-class AntispamConfigurationValidationTests(unittest.TestCase):
- """Tests validation of the antispam cog configuration."""
-
- def test_default_antispam_config_is_valid(self):
- """The default antispam configuration is valid."""
- validation_errors = antispam.validate_config()
- self.assertEqual(validation_errors, {})
-
- def test_unknown_rule_returns_error(self):
- """Configuring an unknown rule returns an error."""
- self.assertEqual(
- antispam.validate_config({'invalid-rule': {}}),
- {'invalid-rule': "`invalid-rule` is not recognized as an antispam rule."}
- )
-
- def test_missing_keys_returns_error(self):
- """Not configuring required keys returns an error."""
- keys = (('interval', 'max'), ('max', 'interval'))
- for configured_key, unconfigured_key in keys:
- with self.subTest(
- configured_key=configured_key,
- unconfigured_key=unconfigured_key
- ):
- config = {'burst': {configured_key: 10}}
- error = f"Key `{unconfigured_key}` is required but not set for rule `burst`"
-
- self.assertEqual(
- antispam.validate_config(config),
- {'burst': error}
- )
diff --git a/tests/bot/exts/filters/test_filtering.py b/tests/bot/exts/filters/test_filtering.py
deleted file mode 100644
index bd26532f1..000000000
--- a/tests/bot/exts/filters/test_filtering.py
+++ /dev/null
@@ -1,40 +0,0 @@
-import unittest
-from unittest.mock import patch
-
-from bot.exts.filters import filtering
-from tests.helpers import MockBot, autospec
-
-
-class FilteringCogTests(unittest.IsolatedAsyncioTestCase):
- """Tests the `Filtering` cog."""
-
- def setUp(self):
- """Instantiate the bot and cog."""
- self.bot = MockBot()
- with patch("botcore.utils.scheduling.create_task", new=lambda task, **_: task.close()):
- self.cog = filtering.Filtering(self.bot)
-
- @autospec(filtering.Filtering, "_get_filterlist_items", pass_mocks=False, return_value=["TOKEN"])
- async def test_token_filter(self):
- """Ensure that a filter token is correctly detected in a message."""
- messages = {
- "": False,
- "no matches": False,
- "TOKEN": True,
-
- # See advisory https://github.com/python-discord/bot/security/advisories/GHSA-j8c3-8x46-8pp6
- "https://google.com TOKEN": True,
- "https://google.com something else": False,
- }
-
- for message, match in messages.items():
- with self.subTest(input=message, match=match):
- result, _ = await self.cog._has_watch_regex_match(message)
-
- self.assertEqual(
- match,
- bool(result),
- msg=f"Hit was {'expected' if match else 'not expected'} for this input."
- )
- if result:
- self.assertEqual("TOKEN", result.group())
diff --git a/tests/bot/exts/filters/test_security.py b/tests/bot/exts/filters/test_security.py
deleted file mode 100644
index 007b7b1eb..000000000
--- a/tests/bot/exts/filters/test_security.py
+++ /dev/null
@@ -1,53 +0,0 @@
-import unittest
-
-from discord.ext.commands import NoPrivateMessage
-
-from bot.exts.filters import security
-from tests.helpers import MockBot, MockContext
-
-
-class SecurityCogTests(unittest.TestCase):
- """Tests the `Security` cog."""
-
- def setUp(self):
- """Attach an instance of the cog to the class for tests."""
- self.bot = MockBot()
- self.cog = security.Security(self.bot)
- self.ctx = MockContext()
-
- def test_check_additions(self):
- """The cog should add its checks after initialization."""
- self.bot.check.assert_any_call(self.cog.check_on_guild)
- self.bot.check.assert_any_call(self.cog.check_not_bot)
-
- def test_check_not_bot_returns_false_for_humans(self):
- """The bot check should return `True` when invoked with human authors."""
- self.ctx.author.bot = False
- self.assertTrue(self.cog.check_not_bot(self.ctx))
-
- def test_check_not_bot_returns_true_for_robots(self):
- """The bot check should return `False` when invoked with robotic authors."""
- self.ctx.author.bot = True
- self.assertFalse(self.cog.check_not_bot(self.ctx))
-
- def test_check_on_guild_raises_when_outside_of_guild(self):
- """When invoked outside of a guild, `check_on_guild` should cause an error."""
- self.ctx.guild = None
-
- with self.assertRaises(NoPrivateMessage, msg="This command cannot be used in private messages."):
- self.cog.check_on_guild(self.ctx)
-
- def test_check_on_guild_returns_true_inside_of_guild(self):
- """When invoked inside of a guild, `check_on_guild` should return `True`."""
- self.ctx.guild = "lemon's lemonade stand"
- self.assertTrue(self.cog.check_on_guild(self.ctx))
-
-
-class SecurityCogLoadTests(unittest.IsolatedAsyncioTestCase):
- """Tests loading the `Security` cog."""
-
- async def test_security_cog_load(self):
- """Setup of the extension should call add_cog."""
- bot = MockBot()
- await security.setup(bot)
- bot.add_cog.assert_awaited_once()
diff --git a/tests/bot/exts/filters/test_token_remover.py b/tests/bot/exts/filters/test_token_remover.py
deleted file mode 100644
index c1f3762ac..000000000
--- a/tests/bot/exts/filters/test_token_remover.py
+++ /dev/null
@@ -1,409 +0,0 @@
-import unittest
-from re import Match
-from unittest import mock
-from unittest.mock import MagicMock
-
-from discord import Colour, NotFound
-
-from bot import constants
-from bot.exts.filters import token_remover
-from bot.exts.filters.token_remover import Token, TokenRemover
-from bot.exts.moderation.modlog import ModLog
-from bot.utils.messages import format_user
-from tests.helpers import MockBot, MockMessage, autospec
-
-
-class TokenRemoverTests(unittest.IsolatedAsyncioTestCase):
- """Tests the `TokenRemover` cog."""
-
- def setUp(self):
- """Adds the cog, a bot, and a message to the instance for usage in tests."""
- self.bot = MockBot()
- self.cog = TokenRemover(bot=self.bot)
-
- self.msg = MockMessage(id=555, content="hello world")
- self.msg.channel.mention = "#lemonade-stand"
- self.msg.guild.get_member.return_value.bot = False
- self.msg.guild.get_member.return_value.__str__.return_value = "Woody"
- self.msg.author.__str__ = MagicMock(return_value=self.msg.author.name)
- self.msg.author.display_avatar.url = "picture-lemon.png"
-
- def test_extract_user_id_valid(self):
- """Should consider user IDs valid if they decode into an integer ID."""
- id_pairs = (
- ("NDcyMjY1OTQzMDYyNDEzMzMy", 472265943062413332),
- ("NDc1MDczNjI5Mzk5NTQ3OTA0", 475073629399547904),
- ("NDY3MjIzMjMwNjUwNzc3NjQx", 467223230650777641),
- )
-
- for token_id, user_id in id_pairs:
- with self.subTest(token_id=token_id):
- result = TokenRemover.extract_user_id(token_id)
- self.assertEqual(result, user_id)
-
- def test_extract_user_id_invalid(self):
- """Should consider non-digit and non-ASCII IDs invalid."""
- ids = (
- ("SGVsbG8gd29ybGQ", "non-digit ASCII"),
- ("0J_RgNC40LLQtdGCINC80LjRgA", "cyrillic text"),
- ("4pO14p6L4p6C4pG34p264pGl8J-EiOKSj-KCieKBsA", "Unicode digits"),
- ("4oaA4oaB4oWh4oWi4Lyz4Lyq4Lyr4LG9", "Unicode numerals"),
- ("8J2fjvCdn5nwnZ-k8J2fr_Cdn7rgravvvJngr6c", "Unicode decimals"),
- ("{hello}[world]&(bye!)", "ASCII invalid Base64"),
- ("Þíß-ï§-ňøẗ-våłìÐ", "Unicode invalid Base64"),
- )
-
- for user_id, msg in ids:
- with self.subTest(msg=msg):
- result = TokenRemover.extract_user_id(user_id)
- self.assertIsNone(result)
-
- def test_is_valid_timestamp_valid(self):
- """Should consider timestamps valid if they're greater than the Discord epoch."""
- timestamps = (
- "XsyRkw",
- "Xrim9Q",
- "XsyR-w",
- "XsySD_",
- "Dn9r_A",
- )
-
- for timestamp in timestamps:
- with self.subTest(timestamp=timestamp):
- result = TokenRemover.is_valid_timestamp(timestamp)
- self.assertTrue(result)
-
- def test_is_valid_timestamp_invalid(self):
- """Should consider timestamps invalid if they're before Discord epoch or can't be parsed."""
- timestamps = (
- ("B4Yffw", "DISCORD_EPOCH - TOKEN_EPOCH - 1"),
- ("ew", "123"),
- ("AoIKgA", "42076800"),
- ("{hello}[world]&(bye!)", "ASCII invalid Base64"),
- ("Þíß-ï§-ňøẗ-våłìÐ", "Unicode invalid Base64"),
- )
-
- for timestamp, msg in timestamps:
- with self.subTest(msg=msg):
- result = TokenRemover.is_valid_timestamp(timestamp)
- self.assertFalse(result)
-
- def test_is_valid_hmac_valid(self):
- """Should consider an HMAC valid if it has at least 3 unique characters."""
- valid_hmacs = (
- "VXmErH7j511turNpfURmb0rVNm8",
- "Ysnu2wacjaKs7qnoo46S8Dm2us8",
- "sJf6omBPORBPju3WJEIAcwW9Zds",
- "s45jqDV_Iisn-symw0yDRrk_jf4",
- )
-
- for hmac in valid_hmacs:
- with self.subTest(msg=hmac):
- result = TokenRemover.is_maybe_valid_hmac(hmac)
- self.assertTrue(result)
-
- def test_is_invalid_hmac_invalid(self):
- """Should consider an HMAC invalid if has fewer than 3 unique characters."""
- invalid_hmacs = (
- ("xxxxxxxxxxxxxxxxxx", "Single character"),
- ("XxXxXxXxXxXxXxXxXx", "Single character alternating case"),
- ("ASFasfASFasfASFASsf", "Three characters alternating-case"),
- ("asdasdasdasdasdasdasd", "Three characters one case"),
- )
-
- for hmac, msg in invalid_hmacs:
- with self.subTest(msg=msg):
- result = TokenRemover.is_maybe_valid_hmac(hmac)
- self.assertFalse(result)
-
- def test_mod_log_property(self):
- """The `mod_log` property should ask the bot to return the `ModLog` cog."""
- self.bot.get_cog.return_value = 'lemon'
- self.assertEqual(self.cog.mod_log, self.bot.get_cog.return_value)
- self.bot.get_cog.assert_called_once_with('ModLog')
-
- async def test_on_message_edit_uses_on_message(self):
- """The edit listener should delegate handling of the message to the normal listener."""
- self.cog.on_message = mock.create_autospec(self.cog.on_message, spec_set=True)
-
- await self.cog.on_message_edit(MockMessage(), self.msg)
- self.cog.on_message.assert_awaited_once_with(self.msg)
-
- @autospec(TokenRemover, "find_token_in_message", "take_action")
- async def test_on_message_takes_action(self, find_token_in_message, take_action):
- """Should take action if a valid token is found when a message is sent."""
- cog = TokenRemover(self.bot)
- found_token = "foobar"
- find_token_in_message.return_value = found_token
-
- await cog.on_message(self.msg)
-
- find_token_in_message.assert_called_once_with(self.msg)
- take_action.assert_awaited_once_with(cog, self.msg, found_token)
-
- @autospec(TokenRemover, "find_token_in_message", "take_action")
- async def test_on_message_skips_missing_token(self, find_token_in_message, take_action):
- """Shouldn't take action if a valid token isn't found when a message is sent."""
- cog = TokenRemover(self.bot)
- find_token_in_message.return_value = False
-
- await cog.on_message(self.msg)
-
- find_token_in_message.assert_called_once_with(self.msg)
- take_action.assert_not_awaited()
-
- @autospec(TokenRemover, "find_token_in_message")
- async def test_on_message_ignores_dms_bots(self, find_token_in_message):
- """Shouldn't parse a message if it is a DM or authored by a bot."""
- cog = TokenRemover(self.bot)
- dm_msg = MockMessage(guild=None)
- bot_msg = MockMessage(author=MagicMock(bot=True))
-
- for msg in (dm_msg, bot_msg):
- await cog.on_message(msg)
- find_token_in_message.assert_not_called()
-
- @autospec("bot.exts.filters.token_remover", "TOKEN_RE")
- def test_find_token_no_matches(self, token_re):
- """None should be returned if the regex matches no tokens in a message."""
- token_re.finditer.return_value = ()
-
- return_value = TokenRemover.find_token_in_message(self.msg)
-
- self.assertIsNone(return_value)
- token_re.finditer.assert_called_once_with(self.msg.content)
-
- @autospec(TokenRemover, "extract_user_id", "is_valid_timestamp", "is_maybe_valid_hmac")
- @autospec("bot.exts.filters.token_remover", "Token")
- @autospec("bot.exts.filters.token_remover", "TOKEN_RE")
- def test_find_token_valid_match(
- self,
- token_re,
- token_cls,
- extract_user_id,
- is_valid_timestamp,
- is_maybe_valid_hmac,
- ):
- """The first match with a valid user ID, timestamp, and HMAC should be returned as a `Token`."""
- matches = [
- mock.create_autospec(Match, spec_set=True, instance=True),
- mock.create_autospec(Match, spec_set=True, instance=True),
- ]
- tokens = [
- mock.create_autospec(Token, spec_set=True, instance=True),
- mock.create_autospec(Token, spec_set=True, instance=True),
- ]
-
- token_re.finditer.return_value = matches
- token_cls.side_effect = tokens
- extract_user_id.side_effect = (None, True) # The 1st match will be invalid, 2nd one valid.
- is_valid_timestamp.return_value = True
- is_maybe_valid_hmac.return_value = True
-
- return_value = TokenRemover.find_token_in_message(self.msg)
-
- self.assertEqual(tokens[1], return_value)
- token_re.finditer.assert_called_once_with(self.msg.content)
-
- @autospec(TokenRemover, "extract_user_id", "is_valid_timestamp", "is_maybe_valid_hmac")
- @autospec("bot.exts.filters.token_remover", "Token")
- @autospec("bot.exts.filters.token_remover", "TOKEN_RE")
- def test_find_token_invalid_matches(
- self,
- token_re,
- token_cls,
- extract_user_id,
- is_valid_timestamp,
- is_maybe_valid_hmac,
- ):
- """None should be returned if no matches have valid user IDs, HMACs, and timestamps."""
- token_re.finditer.return_value = [mock.create_autospec(Match, spec_set=True, instance=True)]
- token_cls.return_value = mock.create_autospec(Token, spec_set=True, instance=True)
- extract_user_id.return_value = None
- is_valid_timestamp.return_value = False
- is_maybe_valid_hmac.return_value = False
-
- return_value = TokenRemover.find_token_in_message(self.msg)
-
- self.assertIsNone(return_value)
- token_re.finditer.assert_called_once_with(self.msg.content)
-
- def test_regex_invalid_tokens(self):
- """Messages without anything looking like a token are not matched."""
- tokens = (
- "",
- "lemon wins",
- "..",
- "x.y",
- "x.y.",
- ".y.z",
- ".y.",
- "..z",
- "x..z",
- " . . ",
- "\n.\n.\n",
- "hellö.world.bye",
- "base64.nötbåse64.morebase64",
- "19jd3J.dfkm3d.€víł§tüff",
- )
-
- for token in tokens:
- with self.subTest(token=token):
- results = token_remover.TOKEN_RE.findall(token)
- self.assertEqual(len(results), 0)
-
- def test_regex_valid_tokens(self):
- """Messages that look like tokens should be matched."""
- # Don't worry, these tokens have been invalidated.
- tokens = (
- "NDcyMjY1OTQzMDYy_DEzMz-y.XsyRkw.VXmErH7j511turNpfURmb0rVNm8",
- "NDcyMjY1OTQzMDYyNDEzMzMy.Xrim9Q.Ysnu2wacjaKs7qnoo46S8Dm2us8",
- "NDc1MDczNjI5Mzk5NTQ3OTA0.XsyR-w.sJf6omBPORBPju3WJEIAcwW9Zds",
- "NDY3MjIzMjMwNjUwNzc3NjQx.XsySD_.s45jqDV_Iisn-symw0yDRrk_jf4",
- )
-
- for token in tokens:
- with self.subTest(token=token):
- results = token_remover.TOKEN_RE.fullmatch(token)
- self.assertIsNotNone(results, f"{token} was not matched by the regex")
-
- def test_regex_matches_multiple_valid(self):
- """Should support multiple matches in the middle of a string."""
- token_1 = "NDY3MjIzMjMwNjUwNzc3NjQx.XsyWGg.uFNEQPCc4ePwGh7egG8UicQssz8"
- token_2 = "NDcyMjY1OTQzMDYyNDEzMzMy.XsyWMw.l8XPnDqb0lp-EiQ2g_0xVFT1pyc"
- message = f"garbage {token_1} hello {token_2} world"
-
- results = token_remover.TOKEN_RE.finditer(message)
- results = [match[0] for match in results]
- self.assertCountEqual((token_1, token_2), results)
-
- @autospec("bot.exts.filters.token_remover", "LOG_MESSAGE")
- def test_format_log_message(self, log_message):
- """Should correctly format the log message with info from the message and token."""
- token = Token("NDcyMjY1OTQzMDYyNDEzMzMy", "XsySD_", "s45jqDV_Iisn-symw0yDRrk_jf4")
- log_message.format.return_value = "Howdy"
-
- return_value = TokenRemover.format_log_message(self.msg, token)
-
- self.assertEqual(return_value, log_message.format.return_value)
- log_message.format.assert_called_once_with(
- author=format_user(self.msg.author),
- channel=self.msg.channel.mention,
- user_id=token.user_id,
- timestamp=token.timestamp,
- hmac="xxxxxxxxxxxxxxxxxxxxxxxxjf4",
- )
-
- @autospec("bot.exts.filters.token_remover", "UNKNOWN_USER_LOG_MESSAGE")
- async def test_format_userid_log_message_unknown(self, unknown_user_log_message,):
- """Should correctly format the user ID portion when the actual user it belongs to is unknown."""
- token = Token("NDcyMjY1OTQzMDYyNDEzMzMy", "XsySD_", "s45jqDV_Iisn-symw0yDRrk_jf4")
- unknown_user_log_message.format.return_value = " Partner"
- msg = MockMessage(id=555, content="hello world")
- msg.guild.get_member.return_value = None
- msg.guild.fetch_member.side_effect = NotFound(mock.Mock(status=404), "Not found")
-
- return_value = await TokenRemover.format_userid_log_message(msg, token)
-
- self.assertEqual(return_value, (unknown_user_log_message.format.return_value, False))
- unknown_user_log_message.format.assert_called_once_with(user_id=472265943062413332)
-
- @autospec("bot.exts.filters.token_remover", "KNOWN_USER_LOG_MESSAGE")
- async def test_format_userid_log_message_bot(self, known_user_log_message):
- """Should correctly format the user ID portion when the ID belongs to a known bot."""
- token = Token("NDcyMjY1OTQzMDYyNDEzMzMy", "XsySD_", "s45jqDV_Iisn-symw0yDRrk_jf4")
- known_user_log_message.format.return_value = " Partner"
- msg = MockMessage(id=555, content="hello world")
- msg.guild.get_member.return_value.__str__.return_value = "Sam"
- msg.guild.get_member.return_value.bot = True
-
- return_value = await TokenRemover.format_userid_log_message(msg, token)
-
- self.assertEqual(return_value, (known_user_log_message.format.return_value, True))
-
- known_user_log_message.format.assert_called_once_with(
- user_id=472265943062413332,
- user_name="Sam",
- kind="BOT",
- )
-
- @autospec("bot.exts.filters.token_remover", "KNOWN_USER_LOG_MESSAGE")
- async def test_format_log_message_user_token_user(self, user_token_message):
- """Should correctly format the user ID portion when the ID belongs to a known user."""
- token = Token("NDY3MjIzMjMwNjUwNzc3NjQx", "XsySD_", "s45jqDV_Iisn-symw0yDRrk_jf4")
- user_token_message.format.return_value = "Partner"
-
- return_value = await TokenRemover.format_userid_log_message(self.msg, token)
-
- self.assertEqual(return_value, (user_token_message.format.return_value, True))
- user_token_message.format.assert_called_once_with(
- user_id=467223230650777641,
- user_name="Woody",
- kind="USER",
- )
-
- @mock.patch.object(TokenRemover, "mod_log", new_callable=mock.PropertyMock)
- @autospec("bot.exts.filters.token_remover", "log")
- @autospec(TokenRemover, "format_log_message", "format_userid_log_message")
- async def test_take_action(self, format_log_message, format_userid_log_message, logger, mod_log_property):
- """Should delete the message and send a mod log."""
- cog = TokenRemover(self.bot)
- mod_log = mock.create_autospec(ModLog, spec_set=True, instance=True)
- token = mock.create_autospec(Token, spec_set=True, instance=True)
- token.user_id = "no-id"
- log_msg = "testing123"
- userid_log_message = "userid-log-message"
-
- mod_log_property.return_value = mod_log
- format_log_message.return_value = log_msg
- format_userid_log_message.return_value = (userid_log_message, True)
-
- await cog.take_action(self.msg, token)
-
- self.msg.delete.assert_called_once_with()
- self.msg.channel.send.assert_called_once_with(
- token_remover.DELETION_MESSAGE_TEMPLATE.format(mention=self.msg.author.mention)
- )
-
- format_log_message.assert_called_once_with(self.msg, token)
- format_userid_log_message.assert_called_once_with(self.msg, token)
- logger.debug.assert_called_with(log_msg)
- self.bot.stats.incr.assert_called_once_with("tokens.removed_tokens")
-
- mod_log.ignore.assert_called_once_with(constants.Event.message_delete, self.msg.id)
- mod_log.send_log_message.assert_called_once_with(
- icon_url=constants.Icons.token_removed,
- colour=Colour(constants.Colours.soft_red),
- title="Token removed!",
- text=log_msg + "\n" + userid_log_message,
- thumbnail=self.msg.author.display_avatar.url,
- channel_id=constants.Channels.mod_alerts,
- ping_everyone=True,
- )
-
- @mock.patch.object(TokenRemover, "mod_log", new_callable=mock.PropertyMock)
- async def test_take_action_delete_failure(self, mod_log_property):
- """Shouldn't send any messages if the token message can't be deleted."""
- cog = TokenRemover(self.bot)
- mod_log_property.return_value = mock.create_autospec(ModLog, spec_set=True, instance=True)
- self.msg.delete.side_effect = NotFound(MagicMock(), MagicMock())
-
- token = mock.create_autospec(Token, spec_set=True, instance=True)
- await cog.take_action(self.msg, token)
-
- self.msg.delete.assert_called_once_with()
- self.msg.channel.send.assert_not_awaited()
-
-
-class TokenRemoverExtensionTests(unittest.IsolatedAsyncioTestCase):
- """Tests for the token_remover extension."""
-
- @autospec("bot.exts.filters.token_remover", "TokenRemover")
- async def test_extension_setup(self, cog):
- """The TokenRemover cog should be added."""
- bot = MockBot()
- await token_remover.setup(bot)
-
- cog.assert_called_once_with(bot)
- bot.add_cog.assert_awaited_once()
- self.assertTrue(isinstance(bot.add_cog.call_args.args[0], TokenRemover))
diff --git a/tests/bot/rules/__init__.py b/tests/bot/rules/__init__.py
deleted file mode 100644
index 0d570f5a3..000000000
--- a/tests/bot/rules/__init__.py
+++ /dev/null
@@ -1,76 +0,0 @@
-import unittest
-from abc import ABCMeta, abstractmethod
-from typing import Callable, Dict, Iterable, List, NamedTuple, Tuple
-
-from tests.helpers import MockMessage
-
-
-class DisallowedCase(NamedTuple):
- """Encapsulation for test cases expected to fail."""
- recent_messages: List[MockMessage]
- culprits: Iterable[str]
- n_violations: int
-
-
-class RuleTest(unittest.IsolatedAsyncioTestCase, metaclass=ABCMeta):
- """
- Abstract class for antispam rule test cases.
-
- Tests for specific rules should inherit from `RuleTest` and implement
- `relevant_messages` and `get_report`. Each instance should also set the
- `apply` and `config` attributes as necessary.
-
- The execution of test cases can then be delegated to the `run_allowed`
- and `run_disallowed` methods.
- """
-
- apply: Callable # The tested rule's apply function
- config: Dict[str, int]
-
- async def run_allowed(self, cases: Tuple[List[MockMessage], ...]) -> None:
- """Run all `cases` against `self.apply` expecting them to pass."""
- for recent_messages in cases:
- last_message = recent_messages[0]
-
- with self.subTest(
- last_message=last_message,
- recent_messages=recent_messages,
- config=self.config,
- ):
- self.assertIsNone(
- await self.apply(last_message, recent_messages, self.config)
- )
-
- async def run_disallowed(self, cases: Tuple[DisallowedCase, ...]) -> None:
- """Run all `cases` against `self.apply` expecting them to fail."""
- for case in cases:
- recent_messages, culprits, n_violations = case
- last_message = recent_messages[0]
- relevant_messages = self.relevant_messages(case)
- desired_output = (
- self.get_report(case),
- culprits,
- relevant_messages,
- )
-
- with self.subTest(
- last_message=last_message,
- recent_messages=recent_messages,
- relevant_messages=relevant_messages,
- n_violations=n_violations,
- config=self.config,
- ):
- self.assertTupleEqual(
- await self.apply(last_message, recent_messages, self.config),
- desired_output,
- )
-
- @abstractmethod
- def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]:
- """Give expected relevant messages for `case`."""
- raise NotImplementedError # pragma: no cover
-
- @abstractmethod
- def get_report(self, case: DisallowedCase) -> str:
- """Give expected error report for `case`."""
- raise NotImplementedError # pragma: no cover
diff --git a/tests/bot/rules/test_attachments.py b/tests/bot/rules/test_attachments.py
deleted file mode 100644
index d7e779221..000000000
--- a/tests/bot/rules/test_attachments.py
+++ /dev/null
@@ -1,69 +0,0 @@
-from typing import Iterable
-
-from bot.rules import attachments
-from tests.bot.rules import DisallowedCase, RuleTest
-from tests.helpers import MockMessage
-
-
-def make_msg(author: str, total_attachments: int) -> MockMessage:
- """Builds a message with `total_attachments` attachments."""
- return MockMessage(author=author, attachments=list(range(total_attachments)))
-
-
-class AttachmentRuleTests(RuleTest):
- """Tests applying the `attachments` antispam rule."""
-
- def setUp(self):
- self.apply = attachments.apply
- self.config = {"max": 5, "interval": 10}
-
- async def test_allows_messages_without_too_many_attachments(self):
- """Messages without too many attachments are allowed as-is."""
- cases = (
- [make_msg("bob", 0), make_msg("bob", 0), make_msg("bob", 0)],
- [make_msg("bob", 2), make_msg("bob", 2)],
- [make_msg("bob", 2), make_msg("alice", 2), make_msg("bob", 2)],
- )
-
- await self.run_allowed(cases)
-
- async def test_disallows_messages_with_too_many_attachments(self):
- """Messages with too many attachments trigger the rule."""
- cases = (
- DisallowedCase(
- [make_msg("bob", 4), make_msg("bob", 0), make_msg("bob", 6)],
- ("bob",),
- 10,
- ),
- DisallowedCase(
- [make_msg("bob", 4), make_msg("alice", 6), make_msg("bob", 2)],
- ("bob",),
- 6,
- ),
- DisallowedCase(
- [make_msg("alice", 6)],
- ("alice",),
- 6,
- ),
- DisallowedCase(
- [make_msg("alice", 1) for _ in range(6)],
- ("alice",),
- 6,
- ),
- )
-
- await self.run_disallowed(cases)
-
- def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]:
- last_message = case.recent_messages[0]
- return tuple(
- msg
- for msg in case.recent_messages
- if (
- msg.author == last_message.author
- and len(msg.attachments) > 0
- )
- )
-
- def get_report(self, case: DisallowedCase) -> str:
- return f"sent {case.n_violations} attachments in {self.config['interval']}s"
diff --git a/tests/bot/rules/test_burst.py b/tests/bot/rules/test_burst.py
deleted file mode 100644
index 03682966b..000000000
--- a/tests/bot/rules/test_burst.py
+++ /dev/null
@@ -1,54 +0,0 @@
-from typing import Iterable
-
-from bot.rules import burst
-from tests.bot.rules import DisallowedCase, RuleTest
-from tests.helpers import MockMessage
-
-
-def make_msg(author: str) -> MockMessage:
- """
- Init a MockMessage instance with author set to `author`.
-
- This serves as a shorthand / alias to keep the test cases visually clean.
- """
- return MockMessage(author=author)
-
-
-class BurstRuleTests(RuleTest):
- """Tests the `burst` antispam rule."""
-
- def setUp(self):
- self.apply = burst.apply
- self.config = {"max": 2, "interval": 10}
-
- async def test_allows_messages_within_limit(self):
- """Cases which do not violate the rule."""
- cases = (
- [make_msg("bob"), make_msg("bob")],
- [make_msg("bob"), make_msg("alice"), make_msg("bob")],
- )
-
- await self.run_allowed(cases)
-
- async def test_disallows_messages_beyond_limit(self):
- """Cases where the amount of messages exceeds the limit, triggering the rule."""
- cases = (
- DisallowedCase(
- [make_msg("bob"), make_msg("bob"), make_msg("bob")],
- ("bob",),
- 3,
- ),
- DisallowedCase(
- [make_msg("bob"), make_msg("bob"), make_msg("alice"), make_msg("bob")],
- ("bob",),
- 3,
- ),
- )
-
- await self.run_disallowed(cases)
-
- def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]:
- return tuple(msg for msg in case.recent_messages if msg.author in case.culprits)
-
- def get_report(self, case: DisallowedCase) -> str:
- return f"sent {case.n_violations} messages in {self.config['interval']}s"
diff --git a/tests/bot/rules/test_burst_shared.py b/tests/bot/rules/test_burst_shared.py
deleted file mode 100644
index 3275143d5..000000000
--- a/tests/bot/rules/test_burst_shared.py
+++ /dev/null
@@ -1,57 +0,0 @@
-from typing import Iterable
-
-from bot.rules import burst_shared
-from tests.bot.rules import DisallowedCase, RuleTest
-from tests.helpers import MockMessage
-
-
-def make_msg(author: str) -> MockMessage:
- """
- Init a MockMessage instance with the passed arg.
-
- This serves as a shorthand / alias to keep the test cases visually clean.
- """
- return MockMessage(author=author)
-
-
-class BurstSharedRuleTests(RuleTest):
- """Tests the `burst_shared` antispam rule."""
-
- def setUp(self):
- self.apply = burst_shared.apply
- self.config = {"max": 2, "interval": 10}
-
- async def test_allows_messages_within_limit(self):
- """
- Cases that do not violate the rule.
-
- There really isn't more to test here than a single case.
- """
- cases = (
- [make_msg("spongebob"), make_msg("patrick")],
- )
-
- await self.run_allowed(cases)
-
- async def test_disallows_messages_beyond_limit(self):
- """Cases where the amount of messages exceeds the limit, triggering the rule."""
- cases = (
- DisallowedCase(
- [make_msg("bob"), make_msg("bob"), make_msg("bob")],
- {"bob"},
- 3,
- ),
- DisallowedCase(
- [make_msg("bob"), make_msg("bob"), make_msg("alice"), make_msg("bob")],
- {"bob", "alice"},
- 4,
- ),
- )
-
- await self.run_disallowed(cases)
-
- def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]:
- return case.recent_messages
-
- def get_report(self, case: DisallowedCase) -> str:
- return f"sent {case.n_violations} messages in {self.config['interval']}s"
diff --git a/tests/bot/rules/test_chars.py b/tests/bot/rules/test_chars.py
deleted file mode 100644
index f1e3c76a7..000000000
--- a/tests/bot/rules/test_chars.py
+++ /dev/null
@@ -1,64 +0,0 @@
-from typing import Iterable
-
-from bot.rules import chars
-from tests.bot.rules import DisallowedCase, RuleTest
-from tests.helpers import MockMessage
-
-
-def make_msg(author: str, n_chars: int) -> MockMessage:
- """Build a message with arbitrary content of `n_chars` length."""
- return MockMessage(author=author, content="A" * n_chars)
-
-
-class CharsRuleTests(RuleTest):
- """Tests the `chars` antispam rule."""
-
- def setUp(self):
- self.apply = chars.apply
- self.config = {
- "max": 20, # Max allowed sum of chars per user
- "interval": 10,
- }
-
- async def test_allows_messages_within_limit(self):
- """Cases with a total amount of chars within limit."""
- cases = (
- [make_msg("bob", 0)],
- [make_msg("bob", 20)],
- [make_msg("bob", 15), make_msg("alice", 15)],
- )
-
- await self.run_allowed(cases)
-
- async def test_disallows_messages_beyond_limit(self):
- """Cases where the total amount of chars exceeds the limit, triggering the rule."""
- cases = (
- DisallowedCase(
- [make_msg("bob", 21)],
- ("bob",),
- 21,
- ),
- DisallowedCase(
- [make_msg("bob", 15), make_msg("bob", 15)],
- ("bob",),
- 30,
- ),
- DisallowedCase(
- [make_msg("alice", 15), make_msg("bob", 20), make_msg("alice", 15)],
- ("alice",),
- 30,
- ),
- )
-
- await self.run_disallowed(cases)
-
- def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]:
- last_message = case.recent_messages[0]
- return tuple(
- msg
- for msg in case.recent_messages
- if msg.author == last_message.author
- )
-
- def get_report(self, case: DisallowedCase) -> str:
- return f"sent {case.n_violations} characters in {self.config['interval']}s"
diff --git a/tests/bot/rules/test_discord_emojis.py b/tests/bot/rules/test_discord_emojis.py
deleted file mode 100644
index 66c2d9f92..000000000
--- a/tests/bot/rules/test_discord_emojis.py
+++ /dev/null
@@ -1,73 +0,0 @@
-from typing import Iterable
-
-from bot.rules import discord_emojis
-from tests.bot.rules import DisallowedCase, RuleTest
-from tests.helpers import MockMessage
-
-discord_emoji = "<:abcd:1234>" # Discord emojis follow the format <:name:id>
-unicode_emoji = "🧪"
-
-
-def make_msg(author: str, n_emojis: int, emoji: str = discord_emoji) -> MockMessage:
- """Build a MockMessage instance with content containing `n_emojis` arbitrary emojis."""
- return MockMessage(author=author, content=emoji * n_emojis)
-
-
-class DiscordEmojisRuleTests(RuleTest):
- """Tests for the `discord_emojis` antispam rule."""
-
- def setUp(self):
- self.apply = discord_emojis.apply
- self.config = {"max": 2, "interval": 10}
-
- async def test_allows_messages_within_limit(self):
- """Cases with a total amount of discord and unicode emojis within limit."""
- cases = (
- [make_msg("bob", 2)],
- [make_msg("alice", 1), make_msg("bob", 2), make_msg("alice", 1)],
- [make_msg("bob", 2, unicode_emoji)],
- [
- make_msg("alice", 1, unicode_emoji),
- make_msg("bob", 2, unicode_emoji),
- make_msg("alice", 1, unicode_emoji)
- ],
- )
-
- await self.run_allowed(cases)
-
- async def test_disallows_messages_beyond_limit(self):
- """Cases with more than the allowed amount of discord and unicode emojis."""
- cases = (
- DisallowedCase(
- [make_msg("bob", 3)],
- ("bob",),
- 3,
- ),
- DisallowedCase(
- [make_msg("alice", 2), make_msg("bob", 2), make_msg("alice", 2)],
- ("alice",),
- 4,
- ),
- DisallowedCase(
- [make_msg("bob", 3, unicode_emoji)],
- ("bob",),
- 3,
- ),
- DisallowedCase(
- [
- make_msg("alice", 2, unicode_emoji),
- make_msg("bob", 2, unicode_emoji),
- make_msg("alice", 2, unicode_emoji)
- ],
- ("alice",),
- 4
- )
- )
-
- await self.run_disallowed(cases)
-
- def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]:
- return tuple(msg for msg in case.recent_messages if msg.author in case.culprits)
-
- def get_report(self, case: DisallowedCase) -> str:
- return f"sent {case.n_violations} emojis in {self.config['interval']}s"
diff --git a/tests/bot/rules/test_duplicates.py b/tests/bot/rules/test_duplicates.py
deleted file mode 100644
index 9bd886a77..000000000
--- a/tests/bot/rules/test_duplicates.py
+++ /dev/null
@@ -1,64 +0,0 @@
-from typing import Iterable
-
-from bot.rules import duplicates
-from tests.bot.rules import DisallowedCase, RuleTest
-from tests.helpers import MockMessage
-
-
-def make_msg(author: str, content: str) -> MockMessage:
- """Give a MockMessage instance with `author` and `content` attrs."""
- return MockMessage(author=author, content=content)
-
-
-class DuplicatesRuleTests(RuleTest):
- """Tests the `duplicates` antispam rule."""
-
- def setUp(self):
- self.apply = duplicates.apply
- self.config = {"max": 2, "interval": 10}
-
- async def test_allows_messages_within_limit(self):
- """Cases which do not violate the rule."""
- cases = (
- [make_msg("alice", "A"), make_msg("alice", "A")],
- [make_msg("alice", "A"), make_msg("alice", "B"), make_msg("alice", "C")], # Non-duplicate
- [make_msg("alice", "A"), make_msg("bob", "A"), make_msg("alice", "A")], # Different author
- )
-
- await self.run_allowed(cases)
-
- async def test_disallows_messages_beyond_limit(self):
- """Cases with too many duplicate messages from the same author."""
- cases = (
- DisallowedCase(
- [make_msg("alice", "A"), make_msg("alice", "A"), make_msg("alice", "A")],
- ("alice",),
- 3,
- ),
- DisallowedCase(
- [make_msg("bob", "A"), make_msg("alice", "A"), make_msg("bob", "A"), make_msg("bob", "A")],
- ("bob",),
- 3, # 4 duplicate messages, but only 3 from bob
- ),
- DisallowedCase(
- [make_msg("bob", "A"), make_msg("bob", "B"), make_msg("bob", "A"), make_msg("bob", "A")],
- ("bob",),
- 3, # 4 message from bob, but only 3 duplicates
- ),
- )
-
- await self.run_disallowed(cases)
-
- def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]:
- last_message = case.recent_messages[0]
- return tuple(
- msg
- for msg in case.recent_messages
- if (
- msg.author == last_message.author
- and msg.content == last_message.content
- )
- )
-
- def get_report(self, case: DisallowedCase) -> str:
- return f"sent {case.n_violations} duplicated messages in {self.config['interval']}s"
diff --git a/tests/bot/rules/test_links.py b/tests/bot/rules/test_links.py
deleted file mode 100644
index b091bd9d7..000000000
--- a/tests/bot/rules/test_links.py
+++ /dev/null
@@ -1,67 +0,0 @@
-from typing import Iterable
-
-from bot.rules import links
-from tests.bot.rules import DisallowedCase, RuleTest
-from tests.helpers import MockMessage
-
-
-def make_msg(author: str, total_links: int) -> MockMessage:
- """Makes a message with `total_links` links."""
- content = " ".join(["https://pydis.com"] * total_links)
- return MockMessage(author=author, content=content)
-
-
-class LinksTests(RuleTest):
- """Tests applying the `links` rule."""
-
- def setUp(self):
- self.apply = links.apply
- self.config = {
- "max": 2,
- "interval": 10
- }
-
- async def test_links_within_limit(self):
- """Messages with an allowed amount of links."""
- cases = (
- [make_msg("bob", 0)],
- [make_msg("bob", 2)],
- [make_msg("bob", 3)], # Filter only applies if len(messages_with_links) > 1
- [make_msg("bob", 1), make_msg("bob", 1)],
- [make_msg("bob", 2), make_msg("alice", 2)] # Only messages from latest author count
- )
-
- await self.run_allowed(cases)
-
- async def test_links_exceeding_limit(self):
- """Messages with a a higher than allowed amount of links."""
- cases = (
- DisallowedCase(
- [make_msg("bob", 1), make_msg("bob", 2)],
- ("bob",),
- 3
- ),
- DisallowedCase(
- [make_msg("alice", 1), make_msg("alice", 1), make_msg("alice", 1)],
- ("alice",),
- 3
- ),
- DisallowedCase(
- [make_msg("alice", 2), make_msg("bob", 3), make_msg("alice", 1)],
- ("alice",),
- 3
- )
- )
-
- await self.run_disallowed(cases)
-
- def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]:
- last_message = case.recent_messages[0]
- return tuple(
- msg
- for msg in case.recent_messages
- if msg.author == last_message.author
- )
-
- def get_report(self, case: DisallowedCase) -> str:
- return f"sent {case.n_violations} links in {self.config['interval']}s"
diff --git a/tests/bot/rules/test_newlines.py b/tests/bot/rules/test_newlines.py
deleted file mode 100644
index e35377773..000000000
--- a/tests/bot/rules/test_newlines.py
+++ /dev/null
@@ -1,102 +0,0 @@
-from typing import Iterable, List
-
-from bot.rules import newlines
-from tests.bot.rules import DisallowedCase, RuleTest
-from tests.helpers import MockMessage
-
-
-def make_msg(author: str, newline_groups: List[int]) -> MockMessage:
- """Init a MockMessage instance with `author` and content configured by `newline_groups".
-
- Configure content by passing a list of ints, where each int `n` will generate
- a separate group of `n` newlines.
-
- Example:
- newline_groups=[3, 1, 2] -> content="\n\n\n \n \n\n"
- """
- content = " ".join("\n" * n for n in newline_groups)
- return MockMessage(author=author, content=content)
-
-
-class TotalNewlinesRuleTests(RuleTest):
- """Tests the `newlines` antispam rule against allowed cases and total newline count violations."""
-
- def setUp(self):
- self.apply = newlines.apply
- self.config = {
- "max": 5, # Max sum of newlines in relevant messages
- "max_consecutive": 3, # Max newlines in one group, in one message
- "interval": 10,
- }
-
- async def test_allows_messages_within_limit(self):
- """Cases which do not violate the rule."""
- cases = (
- [make_msg("alice", [])], # Single message with no newlines
- [make_msg("alice", [1, 2]), make_msg("alice", [1, 1])], # 5 newlines in 2 messages
- [make_msg("alice", [2, 2, 1]), make_msg("bob", [2, 3])], # 5 newlines from each author
- [make_msg("bob", [1]), make_msg("alice", [5])], # Alice breaks the rule, but only bob is relevant
- )
-
- await self.run_allowed(cases)
-
- async def test_disallows_messages_total(self):
- """Cases which violate the rule by having too many newlines in total."""
- cases = (
- DisallowedCase( # Alice sends a total of 6 newlines (disallowed)
- [make_msg("alice", [2, 2]), make_msg("alice", [2])],
- ("alice",),
- 6,
- ),
- DisallowedCase( # Here we test that only alice's newlines count in the sum
- [make_msg("alice", [2, 2]), make_msg("bob", [3]), make_msg("alice", [3])],
- ("alice",),
- 7,
- ),
- )
-
- await self.run_disallowed(cases)
-
- def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]:
- last_author = case.recent_messages[0].author
- return tuple(msg for msg in case.recent_messages if msg.author == last_author)
-
- def get_report(self, case: DisallowedCase) -> str:
- return f"sent {case.n_violations} newlines in {self.config['interval']}s"
-
-
-class GroupNewlinesRuleTests(RuleTest):
- """
- Tests the `newlines` antispam rule against max consecutive newline violations.
-
- As these violations yield a different error report, they require a different
- `get_report` implementation.
- """
-
- def setUp(self):
- self.apply = newlines.apply
- self.config = {"max": 5, "max_consecutive": 3, "interval": 10}
-
- async def test_disallows_messages_consecutive(self):
- """Cases which violate the rule due to having too many consecutive newlines."""
- cases = (
- DisallowedCase( # Bob sends a group of newlines too large
- [make_msg("bob", [4])],
- ("bob",),
- 4,
- ),
- DisallowedCase( # Alice sends 5 in total (allowed), but 4 in one group (disallowed)
- [make_msg("alice", [1]), make_msg("alice", [4])],
- ("alice",),
- 4,
- ),
- )
-
- await self.run_disallowed(cases)
-
- def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]:
- last_author = case.recent_messages[0].author
- return tuple(msg for msg in case.recent_messages if msg.author == last_author)
-
- def get_report(self, case: DisallowedCase) -> str:
- return f"sent {case.n_violations} consecutive newlines in {self.config['interval']}s"
diff --git a/tests/bot/rules/test_role_mentions.py b/tests/bot/rules/test_role_mentions.py
deleted file mode 100644
index 26c05d527..000000000
--- a/tests/bot/rules/test_role_mentions.py
+++ /dev/null
@@ -1,55 +0,0 @@
-from typing import Iterable
-
-from bot.rules import role_mentions
-from tests.bot.rules import DisallowedCase, RuleTest
-from tests.helpers import MockMessage
-
-
-def make_msg(author: str, n_mentions: int) -> MockMessage:
- """Build a MockMessage instance with `n_mentions` role mentions."""
- return MockMessage(author=author, role_mentions=[None] * n_mentions)
-
-
-class RoleMentionsRuleTests(RuleTest):
- """Tests for the `role_mentions` antispam rule."""
-
- def setUp(self):
- self.apply = role_mentions.apply
- self.config = {"max": 2, "interval": 10}
-
- async def test_allows_messages_within_limit(self):
- """Cases with a total amount of role mentions within limit."""
- cases = (
- [make_msg("bob", 2)],
- [make_msg("bob", 1), make_msg("alice", 1), make_msg("bob", 1)],
- )
-
- await self.run_allowed(cases)
-
- async def test_disallows_messages_beyond_limit(self):
- """Cases with more than the allowed amount of role mentions."""
- cases = (
- DisallowedCase(
- [make_msg("bob", 3)],
- ("bob",),
- 3,
- ),
- DisallowedCase(
- [make_msg("alice", 2), make_msg("bob", 2), make_msg("alice", 2)],
- ("alice",),
- 4,
- ),
- )
-
- await self.run_disallowed(cases)
-
- def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]:
- last_message = case.recent_messages[0]
- return tuple(
- msg
- for msg in case.recent_messages
- if msg.author == last_message.author
- )
-
- def get_report(self, case: DisallowedCase) -> str:
- return f"sent {case.n_violations} role mentions in {self.config['interval']}s"
diff --git a/tests/helpers.py b/tests/helpers.py
index a4b919dcb..28a8e40a7 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -423,7 +423,7 @@ category_channel_instance = discord.CategoryChannel(
class MockCategoryChannel(CustomMockMixin, unittest.mock.Mock, HashableMixin):
def __init__(self, **kwargs) -> None:
default_kwargs = {'id': next(self.discord_id)}
- super().__init__(**collections.ChainMap(default_kwargs, kwargs))
+ super().__init__(**collections.ChainMap(kwargs, default_kwargs))
# Create a Message instance to get a realistic MagicMock of `discord.Message`