105 files changed, 8184 insertions, 5055 deletions
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 7cd00a0d6..816bdf290 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,6 +1,5 @@ # Extensions **/bot/exts/backend/sync/** @MarkKoz -**/bot/exts/filters/*token_remover.py @MarkKoz **/bot/exts/moderation/*silence.py @MarkKoz bot/exts/info/codeblock/** @MarkKoz bot/exts/utils/extensions.py @MarkKoz @@ -8,14 +7,11 @@ bot/exts/utils/snekbox.py @MarkKoz @jb3 bot/exts/moderation/** @mbaruh @Den4200 @ks129 @jb3 bot/exts/info/** @Den4200 @jb3 bot/exts/info/information.py @mbaruh @jb3 -bot/exts/filters/** @mbaruh @jb3 +bot/exts/filtering/** @mbaruh bot/exts/fun/** @ks129 bot/exts/utils/** @ks129 @jb3 bot/exts/recruitment/** @wookie184 -# Rules -bot/rules/** @mbaruh - # Utils bot/utils/function.py @MarkKoz bot/utils/lock.py @MarkKoz diff --git a/bot/bot.py b/bot/bot.py index 6164ba9fd..f56aec38e 100644 --- a/bot/bot.py +++ b/bot/bot.py @@ -1,5 +1,4 @@ import asyncio -from collections import defaultdict import aiohttp from pydis_core import BotBase @@ -27,8 +26,6 @@ class Bot(BotBase): super().__init__(*args, **kwargs) - self.filter_list_cache = defaultdict(dict) - async def ping_services(self) -> None: """A helper to make sure all the services the bot relies on are available on startup.""" # Connect Site/API @@ -45,33 +42,10 @@ class Bot(BotBase): raise await asyncio.sleep(constants.URLs.connect_cooldown) - def insert_item_into_filter_list_cache(self, item: dict[str, str]) -> None: - """Add an item to the bots filter_list_cache.""" - type_ = item["type"] - allowed = item["allowed"] - content = item["content"] - - self.filter_list_cache[f"{type_}.{allowed}"][content] = { - "id": item["id"], - "comment": item["comment"], - "created_at": item["created_at"], - "updated_at": item["updated_at"], - } - - async def cache_filter_list_data(self) -> None: - """Cache all the data in the FilterList on the site.""" - full_cache = await self.api_client.get('bot/filter-lists') - - for item in full_cache: - self.insert_item_into_filter_list_cache(item) - async def setup_hook(self) -> None: """Default async initialisation method for discord.py.""" await super().setup_hook() - # Build the FilterList cache - await self.cache_filter_list_data() - # This is not awaited to avoid a deadlock with any cogs that have # wait_until_guild_available in their cog_load method. 
scheduling.create_task(self.load_extensions(exts)) diff --git a/bot/constants.py b/bot/constants.py index f1fb5471f..17a1b90f8 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -213,30 +213,6 @@ class Redis(metaclass=YAMLGetter): use_fakeredis: bool # If this is True, Bot will use fakeredis.aioredis -class Filter(metaclass=YAMLGetter): - section = "filter" - - filter_domains: bool - filter_everyone_ping: bool - filter_invites: bool - filter_zalgo: bool - watch_regex: bool - watch_rich_embeds: bool - - # Notifications are not expected for "watchlist" type filters - - notify_user_domains: bool - notify_user_everyone_ping: bool - notify_user_invites: bool - notify_user_zalgo: bool - - offensive_msg_delete_days: int - ping_everyone: bool - - channel_whitelist: List[int] - role_whitelist: List[int] - - class Cooldowns(metaclass=YAMLGetter): section = "bot" subsection = "cooldowns" @@ -578,18 +554,6 @@ class Metabase(metaclass=YAMLGetter): max_session_age: int -class AntiSpam(metaclass=YAMLGetter): - section = 'anti_spam' - - cache_size: int - - clean_offending: bool - ping_everyone: bool - - punishment: Dict[str, Dict[str, int]] - rules: Dict[str, Dict[str, int]] - - class BigBrother(metaclass=YAMLGetter): section = 'big_brother' diff --git a/bot/converters.py b/bot/converters.py index 544513c90..21623b597 100644 --- a/bot/converters.py +++ b/bot/converters.py @@ -9,7 +9,7 @@ import dateutil.parser import discord from aiohttp import ClientConnectorError from dateutil.relativedelta import relativedelta -from discord.ext.commands import BadArgument, Bot, Context, Converter, IDConverter, MemberConverter, UserConverter +from discord.ext.commands import BadArgument, Context, Converter, IDConverter, MemberConverter, UserConverter from discord.utils import escape_markdown, snowflake_time from pydis_core.site_api import ResponseCodeError from pydis_core.utils import unqualify @@ -68,54 +68,6 @@ class ValidDiscordServerInvite(Converter): raise BadArgument("This does not appear to be a valid Discord server invite.") -class ValidFilterListType(Converter): - """ - A converter that checks whether the given string is a valid FilterList type. - - Raises `BadArgument` if the argument is not a valid FilterList type, and simply - passes through the given argument otherwise. - """ - - @staticmethod - async def get_valid_types(bot: Bot) -> list: - """ - Try to get a list of valid filter list types. - - Raise a BadArgument if the API can't respond. - """ - try: - valid_types = await bot.api_client.get('bot/filter-lists/get-types') - except ResponseCodeError: - raise BadArgument("Cannot validate list_type: Unable to fetch valid types from API.") - - return [enum for enum, classname in valid_types] - - async def convert(self, ctx: Context, list_type: str) -> str: - """Checks whether the given string is a valid FilterList type.""" - valid_types = await self.get_valid_types(ctx.bot) - list_type = list_type.upper() - - if list_type not in valid_types: - - # Maybe the user is using the plural form of this type, - # e.g. "guild_invites" instead of "guild_invite". - # - # This code will support the simple plural form (a single 's' at the end), - # which works for all current list types, but if a list type is added in the future - # which has an irregular plural form (like 'ies'), this code will need to be - # refactored to support this. 
-            if list_type.endswith("S") and list_type[:-1] in valid_types:
-                list_type = list_type[:-1]
-
-            else:
-                valid_types_list = '\n'.join([f"• {type_.lower()}" for type_ in valid_types])
-                raise BadArgument(
-                    f"You have provided an invalid list type!\n\n"
-                    f"Please provide one of the following: \n{valid_types_list}"
-                )
-        return list_type
-
-
 class Extension(Converter):
     """
     Fully qualify the name of an extension and ensure it exists.
diff --git a/bot/exts/filtering/FILTERS-DEVELOPMENT.md b/bot/exts/filtering/FILTERS-DEVELOPMENT.md
new file mode 100644
index 000000000..d5896d556
--- /dev/null
+++ b/bot/exts/filtering/FILTERS-DEVELOPMENT.md
@@ -0,0 +1,63 @@
+# Filters Development
+This file gives a short overview of the extension, and shows how to perform some basic changes/additions to it.
+
+## Overview
+The main idea is that there is a list of filters, each deciding whether it applies to the given content.
+For example, there can be a filter that decides it will trigger when the content contains the string "lemon".
+
+There are several types of filters, and two filters of the same type differ by their content.
+For example, filters of type "token" search for a specific token inside the provided string.
+One token filter might look for the string "lemon", while another will look for the string "joe".
+
+Each filter has a set of settings that decide when it triggers (e.g. in which channels), and what happens if it does (e.g. delete the message).
+Filters of a specific type can have additional settings that are special to them.
+
+A list of filters is contained within a filter list.
+The filter list gets content to filter, and dispatches it to each of its filters.
+It takes the answers from its filters and returns a unified response (e.g. if at least one of the filters says the message should be deleted, then the filter list's response will include that).
+
+A filter list has the same set of possible settings, which act as defaults.
+If a filter in the list doesn't define a value for a setting (meaning it has a value of None), it will use the value of the containing filter list.
+
+The cog receives "filtering events". For example, a new message is sent.
+It creates a "filtering context" with everything a filter list needs to know to provide an answer for what should be done.
+For example, if the event is a new message, then the content to filter is the content of the message, embeds if any exist, etc.
+
+The cog dispatches the event to each filter list, gets the result from each, compiles them, and takes any action dictated by them.
+For example, if any of the filter lists want the message to be deleted, then the cog will delete it.
+
+## Example Changes
+### Creating a new type of filter list
+1. Head over to `bot.exts.filtering._filter_lists` and create a new Python file.
+2. Subclass the FilterList class in `bot.exts.filtering._filter_lists.filter_list` and implement its abstract methods. Make sure to set the `name` class attribute.
+
+You can now add filter lists to the database with the same name defined in the new FilterList subclass.
+
+### Creating a new type of filter
+1. Head over to `bot.exts.filtering._filters` and create a new Python file.
+2. Subclass the Filter class in `bot.exts.filtering._filters.filter` and implement its abstract methods.
+3. Make sure to set the `name` class attribute, and have one of the FilterList subclasses return this new Filter subclass in `get_filter_type`.
+
+### Creating a new type of setting
+1. Head over to `bot.exts.filtering._settings_types`, and open a new Python file in either `actions` or `validations`, depending on whether you want to subclass `ActionEntry` or `ValidationEntry`.
+2. Subclass one of the aforementioned classes, and implement its abstract methods. Make sure to set the `name` and `description` class attributes.
+
+You can now make the appropriate changes to the site repo:
+1. Add a new field in the `Filter` and `FilterList` models. Make sure that on `Filter` it's nullable, and on `FilterList` it isn't.
+2. In `serializers.py`, add the new field to `SETTINGS_FIELDS`, and to `ALLOW_BLANK_SETTINGS` or `ALLOW_EMPTY_SETTINGS` if appropriate. If it's not a part of any group of settings, add it to `BASE_SETTINGS_FIELDS`, otherwise add it to the appropriate group or create a new one.
+3. If you created a new group, make sure it's used in `to_representation`.
+4. Update the docs in the filter viewsets.
+
+You can merge the changes to the bot first - if no such field is loaded from the database it'll just be ignored.
+
+You can define entries that are a group of fields in the database.
+In that case the created subclass should have fields whose names are the names of the fields in the database.
+Then, the description will be a dictionary, whose keys are the names of the fields, and values are the descriptions for each field.
+
+### Creating a new type of filtering event
+1. Head over to `bot.exts.filtering._filter_context` and add a new value to the `Event` enum.
+2. Implement the dispatching and actioning of the new event in the cog, by either adding it to an existing event listener, or creating a new one.
+3. Have the appropriate filter lists subscribe to the event, so they receive it.
+4. Have the appropriate unique filters (currently under `unique` and `antispam` in `bot.exts.filtering._filters`) subscribe to the event, so they receive it.
+
+It should be noted that the filtering events don't need to correspond to Discord events. For example, `nickname` isn't a Discord event and is dispatched when a message is sent.
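To make the steps above concrete, here is a minimal sketch of a new filter type. `LemonFilter` and its substring check are hypothetical, and a real implementation must cover all of `Filter`'s abstract methods; only the base class, the `name` attribute, and the `triggered_on` signature are taken from the filters added later in this diff.

```python
from bot.exts.filtering._filter_context import FilterContext
from bot.exts.filtering._filters.filter import Filter


class LemonFilter(Filter):
    """Triggers when the content contains the filter's content, e.g. "lemon" (hypothetical example)."""

    name = "lemon"  # Must match the filter's name as stored in the database.

    async def triggered_on(self, ctx: FilterContext) -> bool:
        """Search for the filter's content within a given context."""
        # For MESSAGE events, ctx.content is the message's text.
        return self.content in ctx.content
```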
diff --git a/bot/exts/filters/__init__.py b/bot/exts/filtering/README.md
index e69de29bb..e69de29bb 100644
--- a/bot/exts/filters/__init__.py
+++ b/bot/exts/filtering/README.md
diff --git a/tests/bot/exts/filters/__init__.py b/bot/exts/filtering/__init__.py
index e69de29bb..e69de29bb 100644
--- a/tests/bot/exts/filters/__init__.py
+++ b/bot/exts/filtering/__init__.py
diff --git a/bot/exts/filtering/_filter_context.py b/bot/exts/filtering/_filter_context.py
new file mode 100644
index 000000000..f5f635f9c
--- /dev/null
+++ b/bot/exts/filtering/_filter_context.py
@@ -0,0 +1,64 @@
+from __future__ import annotations
+
+import typing
+from collections.abc import Callable, Coroutine, Iterable
+from dataclasses import dataclass, field, replace
+from enum import Enum, auto
+
+from discord import DMChannel, Embed, Member, Message, TextChannel, Thread, User
+
+from bot.utils.message_cache import MessageCache
+
+if typing.TYPE_CHECKING:
+    from bot.exts.filtering._filters.filter import Filter
+
+
+class Event(Enum):
+    """
+    Types of events that can trigger filtering.
+
+    Note this does not have to align with gateway event types.
+    """
+
+    MESSAGE = auto()
+    MESSAGE_EDIT = auto()
+    NICKNAME = auto()
+
+
+@dataclass
+class FilterContext:
+    """A dataclass containing the information that should be filtered, and output information of the filtering."""
+
+    # Input context
+    event: Event  # The type of event
+    author: User | Member | None  # Who triggered the event
+    channel: TextChannel | Thread | DMChannel | None  # The channel involved
+    content: str | Iterable  # What actually needs filtering
+    message: Message | None  # The message involved
+    embeds: list[Embed] = field(default_factory=list)  # Any embeds involved
+    before_message: Message | None = None
+    message_cache: MessageCache | None = None
+    # Output context
+    dm_content: str = ""  # The content to DM the invoker
+    dm_embed: str = ""  # The embed description to DM the invoker
+    send_alert: bool = False  # Whether to send an alert for the moderators
+    alert_content: str = ""  # The content of the alert
+    alert_embeds: list[Embed] = field(default_factory=list)  # Any embeds to add to the alert
+    action_descriptions: list = field(default_factory=list)  # What actions were taken
+    matches: list[str] = field(default_factory=list)  # What exactly was found
+    notification_domain: str = ""  # A domain to send the user for context
+    filter_info: dict['Filter', str] = field(default_factory=dict)  # Additional info from a filter.
+    messages_deletion: bool = False  # Whether the messages were deleted. Can't upload deletion log otherwise.
+    # Additional actions to perform
+    additional_actions: list[Callable[[FilterContext], Coroutine]] = field(default_factory=list)
+    related_messages: set[Message] = field(default_factory=set)  # Deletion will include these.
+    related_channels: set[TextChannel | Thread | DMChannel] = field(default_factory=set)
+    attachments: dict[int, list[str]] = field(default_factory=dict)  # Message ID to attachment URLs.
+    upload_deletion_logs: bool = True  # Whether it's allowed to upload deletion logs.
+
+    @classmethod
+    def from_message(
+        cls, event: Event, message: Message, before: Message | None = None, cache: MessageCache | None = None
+    ) -> FilterContext:
+        """Create a filtering context from the attributes of a message."""
+        return cls(event, message.author, message.channel, message.content, message, message.embeds, before, cache)
+
+    def replace(self, **changes) -> FilterContext:
+        """Return a new context object assigning new values to the specified fields."""
+        return replace(self, **changes)
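These two constructors are how the rest of the extension obtains contexts. A hypothetical usage sketch, where `some_filter_list` stands in for any of the `FilterList` subclasses added below, and `actions.action` is the `ActionSettings` entry point used by the antispam list later in this diff:

```python
from bot.exts.filtering._filter_context import Event, FilterContext


async def on_message(message) -> None:  # Sketch of the cog's listener.
    ctx = FilterContext.from_message(Event.MESSAGE, message)
    actions, messages, triggers = await some_filter_list.actions_for(ctx)
    if actions is not None:
        await actions.action(ctx)  # E.g. delete the message, DM the author, alert mods.
```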
diff --git a/bot/exts/filtering/_filter_lists/__init__.py b/bot/exts/filtering/_filter_lists/__init__.py
new file mode 100644
index 000000000..82e0452f9
--- /dev/null
+++ b/bot/exts/filtering/_filter_lists/__init__.py
@@ -0,0 +1,9 @@
+from os.path import dirname
+
+from bot.exts.filtering._filter_lists.filter_list import FilterList, ListType, list_type_converter
+from bot.exts.filtering._utils import subclasses_in_package
+
+filter_list_types = subclasses_in_package(dirname(__file__), f"{__name__}.", FilterList)
+filter_list_types = {filter_list.name: filter_list for filter_list in filter_list_types}
+
+__all__ = [filter_list_types, FilterList, ListType, list_type_converter]
diff --git a/bot/exts/filtering/_filter_lists/antispam.py b/bot/exts/filtering/_filter_lists/antispam.py
new file mode 100644
index 000000000..0e7ab2bdc
--- /dev/null
+++ b/bot/exts/filtering/_filter_lists/antispam.py
@@ -0,0 +1,191 @@
+import asyncio
+import typing
+from collections.abc import Callable, Coroutine
+from dataclasses import dataclass, field
+from datetime import timedelta
+from functools import reduce
+from itertools import takewhile
+from operator import add, or_
+
+import arrow
+from discord import Member
+from pydis_core.utils import scheduling
+from pydis_core.utils.logging import get_logger
+
+from bot.exts.filtering._filter_context import FilterContext
+from bot.exts.filtering._filter_lists.filter_list import ListType, SubscribingAtomicList, UniquesListBase
+from bot.exts.filtering._filters.antispam import antispam_filter_types
+from bot.exts.filtering._filters.filter import Filter, UniqueFilter
+from bot.exts.filtering._settings import ActionSettings
+from bot.exts.filtering._settings_types.actions.infraction_and_notification import Infraction
+from bot.exts.filtering._ui.ui import AlertView, build_mod_alert
+
+if typing.TYPE_CHECKING:
+    from bot.exts.filtering.filtering import Filtering
+
+log = get_logger(__name__)
+
+ALERT_DELAY = 6
+
+
+class AntispamList(UniquesListBase):
+    """
+    A list of anti-spam rules.
+
+    Messages from the last X seconds are passed to each rule, which decides whether it triggers across those messages.
+
+    The infraction reason is set dynamically.
+ """ + + name = "antispam" + + def __init__(self, filtering_cog: 'Filtering'): + super().__init__(filtering_cog) + self.message_deletion_queue: dict[Member, DeletionContext] = dict() + + def get_filter_type(self, content: str) -> type[UniqueFilter] | None: + """Get a subclass of filter matching the filter list and the filter's content.""" + try: + return antispam_filter_types[content] + except KeyError: + if content not in self._already_warned: + log.warning(f"An antispam filter named {content} was supplied, but no matching implementation found.") + self._already_warned.add(content) + return None + + async def actions_for( + self, ctx: FilterContext + ) -> tuple[ActionSettings | None, list[str], dict[ListType, list[Filter]]]: + """Dispatch the given event to the list's filters, and return actions to take and messages to relay to mods.""" + if not ctx.message or not ctx.message_cache: + return None, [], {} + + sublist: SubscribingAtomicList = self[ListType.DENY] + potential_filters = [sublist.filters[id_] for id_ in sublist.subscriptions[ctx.event]] + max_interval = max(filter_.extra_fields.interval for filter_ in potential_filters) + + earliest_relevant_at = arrow.utcnow() - timedelta(seconds=max_interval) + relevant_messages = list( + takewhile(lambda msg: msg.created_at > earliest_relevant_at, ctx.message_cache) + ) + new_ctx = ctx.replace(content=relevant_messages) + triggers = await sublist.filter_list_result(new_ctx) + if not triggers: + return None, [], {} + + if ctx.author not in self.message_deletion_queue: + self.message_deletion_queue[ctx.author] = DeletionContext() + ctx.additional_actions.append(self._create_deletion_context_handler(ctx.author)) + ctx.related_channels |= {msg.channel for msg in ctx.related_messages} + else: # The additional messages found are already part of a deletion context + ctx.related_messages = set() + current_infraction = self.message_deletion_queue[ctx.author].current_infraction + # In case another filter wants an alert, prevent deleted messages from being uploaded now and also for + # the spam alert (upload happens during alerting). + # Deleted messages API doesn't accept duplicates and will error. + # Additional messages are necessarily part of the deletion. + ctx.upload_deletion_logs = False + self.message_deletion_queue[ctx.author].add(ctx, triggers) + + current_actions = sublist.merge_actions(triggers) + # Don't alert yet. + current_actions.pop("ping", None) + current_actions.pop("send_alert", None) + + new_infraction = current_actions["infraction_and_notification"].copy() + # Smaller infraction value => higher in hierarchy. + if not current_infraction or new_infraction.infraction_type.value < current_infraction.value: + # Pick the first triggered filter for the reason, there's no good way to decide between them. + new_infraction.infraction_reason = ( + f"{triggers[0].name.replace('_', ' ')} spam – {ctx.filter_info[triggers[0]]}" + ) + current_actions["infraction_and_notification"] = new_infraction + self.message_deletion_queue[ctx.author].current_infraction = new_infraction.infraction_type + else: + current_actions.pop("infraction_and_notification", None) + + # Provide some message in case another filter list wants there to be an alert. + return current_actions, ["Handling spam event..."], {ListType.DENY: triggers} + + def _create_deletion_context_handler(self, context_id: Member) -> Callable[[FilterContext], Coroutine]: + async def schedule_processing(ctx: FilterContext) -> None: + """ + Schedule a coroutine to process the deletion context. 
+
+            It cannot be awaited directly, as it waits ALERT_DELAY seconds, and actioning a filtering context
+            depends on all actions finishing.
+
+            This is async and takes a context to adhere to the type of ctx.additional_actions.
+            """
+            async def process_deletion_context() -> None:
+                """Processes the Deletion Context queue."""
+                log.trace("Sleeping before processing message deletion queue.")
+                await asyncio.sleep(ALERT_DELAY)
+
+                if context_id not in self.message_deletion_queue:
+                    log.error(f"Started processing deletion queue for context `{context_id}`, but it was not found!")
+                    return
+
+                deletion_context = self.message_deletion_queue.pop(context_id)
+                await deletion_context.send_alert(self)
+
+            scheduling.create_task(process_deletion_context())
+
+        return schedule_processing
+
+
+@dataclass
+class DeletionContext:
+    """Represents a Deletion Context for a single spam event."""
+
+    contexts: list[FilterContext] = field(default_factory=list)
+    rules: set[UniqueFilter] = field(default_factory=set)
+    current_infraction: Infraction | None = None
+
+    def add(self, ctx: FilterContext, rules: list[UniqueFilter]) -> None:
+        """Adds new rule violation events to the deletion context."""
+        self.contexts.append(ctx)
+        self.rules.update(rules)
+
+    async def send_alert(self, antispam_list: AntispamList) -> None:
+        """Post the mod alert."""
+        if not self.contexts or not self.rules:
+            return
+
+        webhook = antispam_list.filtering_cog.webhook
+        if not webhook:
+            return
+
+        ctx, *other_contexts = self.contexts
+        new_ctx = FilterContext(ctx.event, ctx.author, ctx.channel, ctx.content, ctx.message)
+        new_ctx.action_descriptions = reduce(
+            add, (other_ctx.action_descriptions for other_ctx in other_contexts), ctx.action_descriptions
+        )
+        # It shouldn't ever come to this, but just in case.
+        if (descriptions_num := len(new_ctx.action_descriptions)) > 20:
+            new_ctx.action_descriptions = new_ctx.action_descriptions[:20]
+            new_ctx.action_descriptions[-1] += f" (+{descriptions_num - 20} other actions)"
+        new_ctx.related_messages = reduce(
+            or_, (other_ctx.related_messages for other_ctx in other_contexts), ctx.related_messages
+        ) | {ctx.message for ctx in other_contexts}
+        new_ctx.related_channels = reduce(
+            or_, (other_ctx.related_channels for other_ctx in other_contexts), ctx.related_channels
+        ) | {ctx.channel for ctx in other_contexts}
+        new_ctx.attachments = reduce(or_, (other_ctx.attachments for other_ctx in other_contexts), ctx.attachments)
+        new_ctx.upload_deletion_logs = True
+        new_ctx.messages_deletion = all(ctx.messages_deletion for ctx in self.contexts)
+
+        rules = list(self.rules)
+        actions = antispam_list[ListType.DENY].merge_actions(rules)
+        for action in list(actions):
+            if action not in ("ping", "send_alert"):
+                actions.pop(action, None)
+        await actions.action(new_ctx)
+
+        messages = antispam_list[ListType.DENY].format_messages(rules)
+        embed = await build_mod_alert(new_ctx, {antispam_list: messages})
+        if other_contexts:
+            embed.set_footer(
+                text="The list of actions taken includes actions from additional contexts after deletion began."
+            )
+        await webhook.send(username="Anti-Spam", content=ctx.alert_content, embeds=[embed], view=AlertView(new_ctx))
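`AntispamList.actions_for` above and each rule in `_filters/antispam` (below) share the same windowing idiom: the message cache is ordered newest first, so `takewhile` stops at the first message older than the rule's interval. As a standalone sketch (the `recent_messages` helper is illustrative, not part of the codebase):

```python
from datetime import timedelta
from itertools import takewhile

import arrow


def recent_messages(cache, interval: int) -> list:
    """Return the cached messages sent within the last `interval` seconds (cache is newest-first)."""
    earliest_relevant_at = arrow.utcnow() - timedelta(seconds=interval)
    # Stop at the first message that's too old; everything after it is older still.
    return list(takewhile(lambda msg: msg.created_at > earliest_relevant_at, cache))
```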
diff --git a/bot/exts/filtering/_filter_lists/domain.py b/bot/exts/filtering/_filter_lists/domain.py
new file mode 100644
index 000000000..f4062edfe
--- /dev/null
+++ b/bot/exts/filtering/_filter_lists/domain.py
@@ -0,0 +1,64 @@
+from __future__ import annotations
+
+import re
+import typing
+
+from bot.exts.filtering._filter_context import Event, FilterContext
+from bot.exts.filtering._filter_lists.filter_list import FilterList, ListType
+from bot.exts.filtering._filters.domain import DomainFilter
+from bot.exts.filtering._filters.filter import Filter
+from bot.exts.filtering._settings import ActionSettings
+from bot.exts.filtering._utils import clean_input
+
+if typing.TYPE_CHECKING:
+    from bot.exts.filtering.filtering import Filtering
+
+URL_RE = re.compile(r"https?://(\S+)", flags=re.IGNORECASE)
+
+
+class DomainsList(FilterList[DomainFilter]):
+    """
+    A list of filters, each looking for a specific domain given by URL.
+
+    The blacklist defaults dictate what happens by default when a filter is matched, and can be overridden by
+    individual filters.
+
+    Domains are found by looking for a URL schema (http or https).
+    Filters will also trigger for subdomains.
+    """
+
+    name = "domain"
+
+    def __init__(self, filtering_cog: Filtering):
+        super().__init__()
+        filtering_cog.subscribe(self, Event.MESSAGE, Event.MESSAGE_EDIT)
+
+    def get_filter_type(self, content: str) -> type[Filter]:
+        """Get a subclass of filter matching the filter list and the filter's content."""
+        return DomainFilter
+
+    @property
+    def filter_types(self) -> set[type[Filter]]:
+        """Return the types of filters used by this list."""
+        return {DomainFilter}
+
+    async def actions_for(
+        self, ctx: FilterContext
+    ) -> tuple[ActionSettings | None, list[str], dict[ListType, list[Filter]]]:
+        """Dispatch the given event to the list's filters, and return actions to take and messages to relay to mods."""
+        text = ctx.content
+        if not text:
+            return None, [], {}
+
+        text = clean_input(text)
+        urls = {match.group(1).lower().rstrip("/") for match in URL_RE.finditer(text)}
+        new_ctx = ctx.replace(content=urls)
+
+        triggers = await self[ListType.DENY].filter_list_result(new_ctx)
+        ctx.notification_domain = new_ctx.notification_domain
+        actions = None
+        messages = []
+        if triggers:
+            actions = self[ListType.DENY].merge_actions(triggers)
+            messages = self[ListType.DENY].format_messages(triggers)
+        return actions, messages, {ListType.DENY: triggers}
diff --git a/bot/exts/filtering/_filter_lists/extension.py b/bot/exts/filtering/_filter_lists/extension.py
new file mode 100644
index 000000000..a739d7191
--- /dev/null
+++ b/bot/exts/filtering/_filter_lists/extension.py
@@ -0,0 +1,116 @@
+from __future__ import annotations
+
+import typing
+from os.path import splitext
+
+import bot
+from bot.constants import Channels, URLs
+from bot.exts.filtering._filter_context import Event, FilterContext
+from bot.exts.filtering._filter_lists.filter_list import FilterList, ListType
+from bot.exts.filtering._filters.extension import ExtensionFilter
+from bot.exts.filtering._filters.filter import Filter
+from bot.exts.filtering._settings import ActionSettings
+
+if typing.TYPE_CHECKING:
+    from bot.exts.filtering.filtering import Filtering
+
+
+PY_EMBED_DESCRIPTION = (
+    "It looks like you tried to attach a Python file - "
+    f"please use a code-pasting service such as {URLs.site_schema}{URLs.site_paste}"
+)
+
+TXT_LIKE_FILES = {".txt", ".csv",
".json"} +TXT_EMBED_DESCRIPTION = ( + "You either uploaded a `{blocked_extension}` file or entered a message that was too long. " + f"Please use our [paste bin]({URLs.site_schema}{URLs.site_paste}) instead." +) + +DISALLOWED_EMBED_DESCRIPTION = ( + "It looks like you tried to attach file type(s) that we do not allow ({blocked_extensions_str}). " + "We currently allow the following file types: **{joined_whitelist}**.\n\n" + "Feel free to ask in {meta_channel_mention} if you think this is a mistake." +) + + +class ExtensionsList(FilterList[ExtensionFilter]): + """ + A list of filters, each looking for a file attachment with a specific extension. + + If an extension is not explicitly allowed, it will be blocked. + + Whitelist defaults dictate what happens when an extension is *not* explicitly allowed, + and whitelist filters overrides have no effect. + + Items should be added as file extensions preceded by a dot. + """ + + name = "extension" + + def __init__(self, filtering_cog: Filtering): + super().__init__() + filtering_cog.subscribe(self, Event.MESSAGE) + self._whitelisted_description = None + + def get_filter_type(self, content: str) -> type[Filter]: + """Get a subclass of filter matching the filter list and the filter's content.""" + return ExtensionFilter + + @property + def filter_types(self) -> set[type[Filter]]: + """Return the types of filters used by this list.""" + return {ExtensionFilter} + + async def actions_for( + self, ctx: FilterContext + ) -> tuple[ActionSettings | None, list[str], dict[ListType, list[Filter]]]: + """Dispatch the given event to the list's filters, and return actions to take and messages to relay to mods.""" + # Return early if the message doesn't have attachments. + if not ctx.message or not ctx.message.attachments: + return None, [], {} + + _, failed = self[ListType.ALLOW].defaults.validations.evaluate(ctx) + if failed: # There's no extension filtering in this context. + return None, [], {} + + # Find all extensions in the message. + all_ext = { + (splitext(attachment.filename.lower())[1], attachment.filename) for attachment in ctx.message.attachments + } + new_ctx = ctx.replace(content={ext for ext, _ in all_ext}) # And prepare the context for the filters to read. + triggered = [ + filter_ for filter_ in self[ListType.ALLOW].filters.values() if await filter_.triggered_on(new_ctx) + ] + allowed_ext = {filter_.content for filter_ in triggered} # Get the extensions in the message that are allowed. + + # See if there are any extensions left which aren't allowed. + not_allowed = {ext: filename for ext, filename in all_ext if ext not in allowed_ext} + + if not not_allowed: # Yes, it's a double negative. Meaning all attachments are allowed :) + return None, [], {ListType.ALLOW: triggered} + + # Something is disallowed. + if ".py" in not_allowed: + # Provide a pastebin link for .py files. 
+ ctx.dm_embed = PY_EMBED_DESCRIPTION + elif txt_extensions := {ext for ext in TXT_LIKE_FILES if ext in not_allowed}: + # Work around Discord auto-conversion of messages longer than 2000 chars to .txt + cmd_channel = bot.instance.get_channel(Channels.bot_commands) + ctx.dm_embed = TXT_EMBED_DESCRIPTION.format( + blocked_extension=txt_extensions.pop(), + cmd_channel_mention=cmd_channel.mention + ) + else: + meta_channel = bot.instance.get_channel(Channels.meta) + if not self._whitelisted_description: + self._whitelisted_description = ', '.join( + filter_.content for filter_ in self[ListType.ALLOW].filters.values() + ) + ctx.dm_embed = DISALLOWED_EMBED_DESCRIPTION.format( + joined_whitelist=self._whitelisted_description, + blocked_extensions_str=", ".join(not_allowed), + meta_channel_mention=meta_channel.mention, + ) + + ctx.matches += not_allowed.values() + return self[ListType.ALLOW].defaults.actions, [f"`{ext}`" for ext in not_allowed], {ListType.ALLOW: triggered} diff --git a/bot/exts/filtering/_filter_lists/filter_list.py b/bot/exts/filtering/_filter_lists/filter_list.py new file mode 100644 index 000000000..bf02071cf --- /dev/null +++ b/bot/exts/filtering/_filter_lists/filter_list.py @@ -0,0 +1,308 @@ +import dataclasses +import typing +from abc import abstractmethod +from collections import defaultdict +from collections.abc import Iterable +from dataclasses import dataclass +from enum import Enum +from functools import reduce +from typing import Any + +import arrow +from discord.ext.commands import BadArgument + +from bot.exts.filtering._filter_context import Event, FilterContext +from bot.exts.filtering._filters.filter import Filter, UniqueFilter +from bot.exts.filtering._settings import ActionSettings, Defaults, create_settings +from bot.exts.filtering._utils import FieldRequiring, past_tense +from bot.log import get_logger + +if typing.TYPE_CHECKING: + from bot.exts.filtering.filtering import Filtering + +log = get_logger(__name__) + + +class ListType(Enum): + """An enumeration of list types.""" + + DENY = 0 + ALLOW = 1 + + +# Alternative names with which each list type can be specified in commands. +aliases = ( + (ListType.DENY, {"deny", "blocklist", "blacklist", "denylist", "bl", "dl"}), + (ListType.ALLOW, {"allow", "allowlist", "whitelist", "al", "wl"}) +) + + +def list_type_converter(argument: str) -> ListType: + """A converter to get the appropriate list type.""" + argument = argument.lower() + for list_type, list_aliases in aliases: + if argument in list_aliases or argument in map(past_tense, list_aliases): + return list_type + raise BadArgument(f"No matching list type found for {argument!r}.") + + +# AtomicList and its subclasses must have eq=False, otherwise the dataclass deco will replace the hash function. +@dataclass(frozen=True, eq=False) +class AtomicList: + """ + Represents the atomic structure of a single filter list as it appears in the database. + + This is as opposed to the FilterList class which is a combination of several list types. + """ + + id: int + created_at: arrow.Arrow + updated_at: arrow.Arrow + name: str + list_type: ListType + defaults: Defaults + filters: dict[int, Filter] + + @property + def label(self) -> str: + """Provide a short description identifying the list with its name and type.""" + return f"{past_tense(self.list_type.name.lower())} {self.name.lower()}" + + async def filter_list_result(self, ctx: FilterContext) -> list[Filter]: + """ + Sift through the list of filters, and return only the ones which apply to the given context. 
+
+        The strategy is as follows:
+        1. The default settings are evaluated on the given context. The default answer for whether the filter is
+        relevant in the given context is whether there aren't any validation settings which returned False.
+        2. For each filter, its overrides are considered:
+            - If there are no overrides, then the filter is relevant if that is the default answer.
+            - Otherwise it is relevant if there are no failed overrides, and any failing default is overridden by a
+            successful override.
+
+        If the filter is relevant in context, see if it actually triggers.
+        """
+        return await self._create_filter_list_result(ctx, self.defaults, self.filters.values())
+
+    async def _create_filter_list_result(
+        self, ctx: FilterContext, defaults: Defaults, filters: Iterable[Filter]
+    ) -> list[Filter]:
+        """A helper function to evaluate the result of `filter_list_result`."""
+        passed_by_default, failed_by_default = defaults.validations.evaluate(ctx)
+        default_answer = not bool(failed_by_default)
+
+        relevant_filters = []
+        for filter_ in filters:
+            if not filter_.validations:
+                if default_answer and await filter_.triggered_on(ctx):
+                    relevant_filters.append(filter_)
+            else:
+                passed, failed = filter_.validations.evaluate(ctx)
+                if not failed and failed_by_default < passed:
+                    if await filter_.triggered_on(ctx):
+                        relevant_filters.append(filter_)
+
+        if ctx.event == Event.MESSAGE_EDIT and ctx.message and self.list_type == ListType.DENY:
+            previously_triggered = ctx.message_cache.get_message_metadata(ctx.message.id)
+            # The message might not be cached.
+            if previously_triggered:
+                ignore_filters = previously_triggered[self]
+                # This updates the cache. Some filters are ignored, but they're necessary if there's another edit.
+                previously_triggered[self] = relevant_filters
+                relevant_filters = [filter_ for filter_ in relevant_filters if filter_ not in ignore_filters]
+        return relevant_filters
+
+    def default(self, setting_name: str) -> Any:
+        """Get the default value of a specific setting."""
+        missing = object()
+        value = self.defaults.actions.get_setting(setting_name, missing)
+        if value is missing:
+            value = self.defaults.validations.get_setting(setting_name, missing)
+        if value is missing:
+            raise ValueError(f"Couldn't find a setting named {setting_name!r}.")
+        return value
+
+    def merge_actions(self, filters: list[Filter]) -> ActionSettings | None:
+        """Merge the settings of the given filters, with the list's defaults as a fallback for unset values."""
+        if not filters:  # Nothing to action.
+            return None
+        try:
+            return reduce(
+                ActionSettings.union, (filter_.actions or self.defaults.actions for filter_ in filters)
+            ).fallback_to(self.defaults.actions)
+        except TypeError:
+            # The sequence fed to reduce is empty, meaning none of the filters have actions,
+            # meaning they all use the defaults.
+            return self.defaults.actions
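In other words, each triggered filter contributes its own actions when it has any and the list defaults otherwise, and `fallback_to` then fills any fields that are still unset from the defaults. A toy trace with hypothetical filters (assuming `ActionSettings.union` combines per-setting overrides; the actual resolution rules live in `_settings.py`, outside this diff):

```python
# filter_a overrides the infraction action; filter_b has no actions of its
# own, so it contributes the list defaults.
merged = reduce(
    ActionSettings.union,
    (f.actions or defaults.actions for f in (filter_a, filter_b)),
).fallback_to(defaults.actions)
# merged carries filter_a's infraction override; every other setting comes
# from the list defaults.
```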
+
+    @staticmethod
+    def format_messages(triggers: list[Filter], *, expand_single_filter: bool = True) -> list[str]:
+        """Convert the filters into strings that can be added to the alert embed."""
+        if len(triggers) == 1 and expand_single_filter:
+            message = f"#{triggers[0].id} (`{triggers[0].content}`)"
+            if triggers[0].description:
+                message += f" - {triggers[0].description}"
+            messages = [message]
+        else:
+            messages = [f"{filter_.id} (`{filter_.content}`)" for filter_ in triggers]
+        return messages
+
+    def __hash__(self):
+        return hash(id(self))
+
+
+T = typing.TypeVar("T", bound=Filter)
+
+
+class FilterList(dict[ListType, AtomicList], typing.Generic[T], FieldRequiring):
+    """Dispatches events to lists of _filters, and aggregates the responses into a single list of actions to take."""
+
+    # Each subclass must define a name matching the filter_list name we're expecting to receive from the database.
+    # Names must be unique across all filter lists.
+    name = FieldRequiring.MUST_SET_UNIQUE
+
+    _already_warned = set()
+
+    def add_list(self, list_data: dict) -> AtomicList:
+        """Add a new type of list (such as a whitelist or a blacklist) to this filter list."""
+        actions, validations = create_settings(list_data["settings"], keep_empty=True)
+        list_type = ListType(list_data["list_type"])
+        defaults = Defaults(actions, validations)
+
+        filters = {}
+        for filter_data in list_data["filters"]:
+            new_filter = self._create_filter(filter_data, defaults)
+            if new_filter:
+                filters[filter_data["id"]] = new_filter
+
+        self[list_type] = AtomicList(
+            list_data["id"],
+            arrow.get(list_data["created_at"]),
+            arrow.get(list_data["updated_at"]),
+            self.name,
+            list_type,
+            defaults,
+            filters
+        )
+        return self[list_type]
+
+    def add_filter(self, list_type: ListType, filter_data: dict) -> T | None:
+        """Add a filter to the list of the specified type."""
+        new_filter = self._create_filter(filter_data, self[list_type].defaults)
+        if new_filter:
+            self[list_type].filters[filter_data["id"]] = new_filter
+        return new_filter
+
+    @abstractmethod
+    def get_filter_type(self, content: str) -> type[T]:
+        """Get a subclass of filter matching the filter list and the filter's content."""
+
+    @property
+    @abstractmethod
+    def filter_types(self) -> set[type[T]]:
+        """Return the types of filters used by this list."""
+
+    @abstractmethod
+    async def actions_for(
+        self, ctx: FilterContext
+    ) -> tuple[ActionSettings | None, list[str], dict[ListType, list[Filter]]]:
+        """Dispatch the given event to the list's filters, and return actions to take and messages to relay to mods."""
+
+    def _create_filter(self, filter_data: dict, defaults: Defaults) -> T | None:
+        """Create a filter from the given data."""
+        try:
+            content = filter_data["content"]
+            filter_type = self.get_filter_type(content)
+            if filter_type:
+                return filter_type(filter_data, defaults)
+            elif content not in self._already_warned:
+                log.warning(f"A filter named {content} was supplied, but no matching implementation found.")
+                self._already_warned.add(content)
+            return None
+        except TypeError as e:
+            log.warning(e)
+
+    def __hash__(self):
+        return hash(id(self))
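`add_list`, `add_filter`, and `_create_filter` consume the JSON payloads the site API serves for each filter list. Pieced together from the keys accessed above, a payload looks roughly like this; the values are illustrative, and any per-filter fields beyond `id` and `content` are parsed by `Filter.__init__`, which is outside this diff:

```python
list_data = {
    "id": 1,
    "list_type": 0,  # ListType.DENY
    "created_at": "2022-01-01T00:00:00Z",
    "updated_at": "2022-01-01T00:00:00Z",
    "settings": {},  # The list's default settings, parsed by create_settings().
    "filters": [
        # One dict per filter; extra fields are handled by Filter.__init__.
        {"id": 42, "content": "lemon"},
    ],
}
```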
+ """ + + subscriptions: defaultdict[Event, list[int]] = dataclasses.field(default_factory=lambda: defaultdict(list)) + + def subscribe(self, filter_: UniqueFilter, *events: Event) -> None: + """ + Subscribe a unique filter to the given events. + + The filter is added to a list for each event. When the event is triggered, the filter context will be + dispatched to the subscribed filters. + """ + for event in events: + if filter_ not in self.subscriptions[event]: + self.subscriptions[event].append(filter_.id) + + async def filter_list_result(self, ctx: FilterContext) -> list[Filter]: + """Sift through the list of filters, and return only the ones which apply to the given context.""" + event_filters = [self.filters[id_] for id_ in self.subscriptions[ctx.event]] + return await self._create_filter_list_result(ctx, self.defaults, event_filters) + + +class UniquesListBase(FilterList[UniqueFilter]): + """ + A list of unique filters. + + Unique filters are ones that should only be run once in a given context. + Each unique filter subscribes to a subset of events to respond to. + """ + + def __init__(self, filtering_cog: 'Filtering'): + super().__init__() + self.filtering_cog = filtering_cog + self.loaded_types: dict[str, type[UniqueFilter]] = {} + + def add_list(self, list_data: dict) -> SubscribingAtomicList: + """Add a new type of list (such as a whitelist or a blacklist) this filter list.""" + actions, validations = create_settings(list_data["settings"], keep_empty=True) + list_type = ListType(list_data["list_type"]) + defaults = Defaults(actions, validations) + new_list = SubscribingAtomicList( + list_data["id"], + arrow.get(list_data["created_at"]), + arrow.get(list_data["updated_at"]), + self.name, + list_type, + defaults, + {} + ) + self[list_type] = new_list + + filters = {} + events = set() + for filter_data in list_data["filters"]: + new_filter = self._create_filter(filter_data, defaults) + if new_filter: + new_list.subscribe(new_filter, *new_filter.events) + filters[filter_data["id"]] = new_filter + self.loaded_types[new_filter.name] = type(new_filter) + events.update(new_filter.events) + + new_list.filters.update(filters) + if hasattr(self.filtering_cog, "subscribe"): # Subscribe the filter list to any new events found. + self.filtering_cog.subscribe(self, *events) + return new_list + + @property + def filter_types(self) -> set[type[UniqueFilter]]: + """Return the types of filters used by this list.""" + return set(self.loaded_types.values()) diff --git a/bot/exts/filtering/_filter_lists/invite.py b/bot/exts/filtering/_filter_lists/invite.py new file mode 100644 index 000000000..bd0eaa122 --- /dev/null +++ b/bot/exts/filtering/_filter_lists/invite.py @@ -0,0 +1,154 @@ +from __future__ import annotations + +import typing + +from discord import Embed, Invite +from discord.errors import NotFound +from pydis_core.utils.regex import DISCORD_INVITE + +import bot +from bot.exts.filtering._filter_context import Event, FilterContext +from bot.exts.filtering._filter_lists.filter_list import FilterList, ListType +from bot.exts.filtering._filters.filter import Filter +from bot.exts.filtering._filters.invite import InviteFilter +from bot.exts.filtering._settings import ActionSettings +from bot.exts.filtering._utils import clean_input + +if typing.TYPE_CHECKING: + from bot.exts.filtering.filtering import Filtering + + +class InviteList(FilterList[InviteFilter]): + """ + A list of filters, each looking for guild invites to a specific guild. + + If the invite is not whitelisted, it will be blocked. 
Partnered and verified servers are allowed unless blacklisted. + + Whitelist defaults dictate what happens when an invite is *not* explicitly allowed, + and whitelist filters overrides have no effect. + + Blacklist defaults dictate what happens by default when an explicitly blocked invite is found. + + Items in the list are added through invites for the purpose of fetching the guild info. + Items are stored as guild IDs, guild invites are *not* stored. + """ + + name = "invite" + + def __init__(self, filtering_cog: Filtering): + super().__init__() + filtering_cog.subscribe(self, Event.MESSAGE, Event.MESSAGE_EDIT) + + def get_filter_type(self, content: str) -> type[Filter]: + """Get a subclass of filter matching the filter list and the filter's content.""" + return InviteFilter + + @property + def filter_types(self) -> set[type[Filter]]: + """Return the types of filters used by this list.""" + return {InviteFilter} + + async def actions_for( + self, ctx: FilterContext + ) -> tuple[ActionSettings | None, list[str], dict[ListType, list[Filter]]]: + """Dispatch the given event to the list's filters, and return actions to take and messages to relay to mods.""" + text = clean_input(ctx.content) + + # Avoid escape characters + text = text.replace("\\", "") + + matches = list(DISCORD_INVITE.finditer(text)) + invite_codes = {m.group("invite") for m in matches} + if not invite_codes: + return None, [], {} + all_triggers = {} + + _, failed = self[ListType.ALLOW].defaults.validations.evaluate(ctx) + # If the allowed list doesn't operate in the context, unknown invites are allowed. + check_if_allowed = not failed + + # Sort the invites into two categories: + invites_for_inspection = dict() # Found guild invites requiring further inspection. + unknown_invites = dict() # Either don't resolve or group DMs. + for invite_code in invite_codes: + try: + invite = await bot.instance.fetch_invite(invite_code) + except NotFound: + if check_if_allowed: + unknown_invites[invite_code] = None + else: + if invite.guild: + invites_for_inspection[invite_code] = invite + elif check_if_allowed: # Group DM + unknown_invites[invite_code] = invite + + # Find any blocked invites + new_ctx = ctx.replace(content={invite.guild.id for invite in invites_for_inspection.values()}) + triggered = await self[ListType.DENY].filter_list_result(new_ctx) + blocked_guilds = {filter_.content for filter_ in triggered} + blocked_invites = { + code: invite for code, invite in invites_for_inspection.items() if invite.guild.id in blocked_guilds + } + + # Remove the ones which are already confirmed as blocked, or otherwise ones which are partnered or verified. + invites_for_inspection = { + code: invite for code, invite in invites_for_inspection.items() + if invite.guild.id not in blocked_guilds + and "PARTNERED" not in invite.guild.features and "VERIFIED" not in invite.guild.features + } + + # Remove any remaining invites which are allowed + guilds_for_inspection = {invite.guild.id for invite in invites_for_inspection.values()} + + if check_if_allowed: # Whether unknown invites need to be checked. 
+ new_ctx = ctx.replace(content=guilds_for_inspection) + all_triggers[ListType.ALLOW] = [ + filter_ for filter_ in self[ListType.ALLOW].filters.values() + if await filter_.triggered_on(new_ctx) + ] + allowed = {filter_.content for filter_ in all_triggers[ListType.ALLOW]} + unknown_invites.update({ + code: invite for code, invite in invites_for_inspection.items() if invite.guild.id not in allowed + }) + + if not triggered and not unknown_invites: + return None, [], all_triggers + + actions = None + if unknown_invites: # There are invites which weren't allowed but aren't explicitly blocked. + actions = self[ListType.ALLOW].defaults.actions + # Blocked invites come second so that their actions have preference. + if triggered: + if actions: + actions = actions.union(self[ListType.DENY].merge_actions(triggered)) + else: + actions = self[ListType.DENY].merge_actions(triggered) + all_triggers[ListType.DENY] = triggered + + blocked_invites |= unknown_invites + ctx.matches += {match[0] for match in matches if match.group("invite") in blocked_invites} + ctx.alert_embeds += (self._guild_embed(invite) for invite in blocked_invites.values() if invite) + messages = self[ListType.DENY].format_messages(triggered) + messages += [ + f"`{code} - {invite.guild.id}`" if invite else f"`{code}`" for code, invite in unknown_invites.items() + ] + return actions, messages, all_triggers + + @staticmethod + def _guild_embed(invite: Invite) -> Embed: + """Return an embed representing the guild invites to.""" + embed = Embed() + if invite.guild: + embed.title = invite.guild.name + embed.set_thumbnail(url=invite.guild.icon.url) + embed.set_footer(text=f"Guild ID: {invite.guild.id}") + else: + embed.title = "Group DM" + + embed.description = ( + f"**Invite Code:** {invite.code}\n" + f"**Members:** {invite.approximate_member_count}\n" + f"**Active:** {invite.approximate_presence_count}" + ) + + return embed diff --git a/bot/exts/filtering/_filter_lists/token.py b/bot/exts/filtering/_filter_lists/token.py new file mode 100644 index 000000000..f5da28bb5 --- /dev/null +++ b/bot/exts/filtering/_filter_lists/token.py @@ -0,0 +1,72 @@ +from __future__ import annotations + +import re +import typing + +from bot.exts.filtering._filter_context import Event, FilterContext +from bot.exts.filtering._filter_lists.filter_list import FilterList, ListType +from bot.exts.filtering._filters.filter import Filter +from bot.exts.filtering._filters.token import TokenFilter +from bot.exts.filtering._settings import ActionSettings +from bot.exts.filtering._utils import clean_input + +if typing.TYPE_CHECKING: + from bot.exts.filtering.filtering import Filtering + +SPOILER_RE = re.compile(r"(\|\|.+?\|\|)", re.DOTALL) + + +class TokensList(FilterList[TokenFilter]): + """ + A list of filters, each looking for a specific token in the given content given as regex. + + The blacklist defaults dictate what happens by default when a filter is matched, and can be overridden by + individual filters. + + Usually, if blocking literal strings, the literals themselves can be specified as the filter's value. + But since this is a list of regex patterns, be careful of the items added. For example, a dot needs to be escaped + to function as a literal dot. 
+ """ + + name = "token" + + def __init__(self, filtering_cog: Filtering): + super().__init__() + filtering_cog.subscribe(self, Event.MESSAGE, Event.MESSAGE_EDIT, Event.NICKNAME) + + def get_filter_type(self, content: str) -> type[Filter]: + """Get a subclass of filter matching the filter list and the filter's content.""" + return TokenFilter + + @property + def filter_types(self) -> set[type[Filter]]: + """Return the types of filters used by this list.""" + return {TokenFilter} + + async def actions_for( + self, ctx: FilterContext + ) -> tuple[ActionSettings | None, list[str], dict[ListType, list[Filter]]]: + """Dispatch the given event to the list's filters, and return actions to take and messages to relay to mods.""" + text = ctx.content + if not text: + return None, [], {} + if SPOILER_RE.search(text): + text = self._expand_spoilers(text) + text = clean_input(text) + ctx = ctx.replace(content=text) + + triggers = await self[ListType.DENY].filter_list_result(ctx) + actions = None + messages = [] + if triggers: + actions = self[ListType.DENY].merge_actions(triggers) + messages = self[ListType.DENY].format_messages(triggers) + return actions, messages, {ListType.DENY: triggers} + + @staticmethod + def _expand_spoilers(text: str) -> str: + """Return a string containing all interpretations of a spoilered message.""" + split_text = SPOILER_RE.split(text) + return ''.join( + split_text[0::2] + split_text[1::2] + split_text + ) diff --git a/bot/exts/filtering/_filter_lists/unique.py b/bot/exts/filtering/_filter_lists/unique.py new file mode 100644 index 000000000..a5a04d25a --- /dev/null +++ b/bot/exts/filtering/_filter_lists/unique.py @@ -0,0 +1,39 @@ +from pydis_core.utils.logging import get_logger + +from bot.exts.filtering._filter_context import FilterContext +from bot.exts.filtering._filter_lists.filter_list import ListType, UniquesListBase +from bot.exts.filtering._filters.filter import Filter, UniqueFilter +from bot.exts.filtering._filters.unique import unique_filter_types +from bot.exts.filtering._settings import ActionSettings + +log = get_logger(__name__) + + +class UniquesList(UniquesListBase): + """ + A list of unique filters. + + Unique filters are ones that should only be run once in a given context. + Each unique filter subscribes to a subset of events to respond to. 
+ """ + + name = "unique" + + def get_filter_type(self, content: str) -> type[UniqueFilter] | None: + """Get a subclass of filter matching the filter list and the filter's content.""" + try: + return unique_filter_types[content] + except KeyError: + return None + + async def actions_for( + self, ctx: FilterContext + ) -> tuple[ActionSettings | None, list[str], dict[ListType, list[Filter]]]: + """Dispatch the given event to the list's filters, and return actions to take and messages to relay to mods.""" + triggers = await self[ListType.DENY].filter_list_result(ctx) + actions = None + messages = [] + if triggers: + actions = self[ListType.DENY].merge_actions(triggers) + messages = self[ListType.DENY].format_messages(triggers) + return actions, messages, {ListType.DENY: triggers} diff --git a/bot/exts/filtering/_filters/__init__.py b/bot/exts/filtering/_filters/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/bot/exts/filtering/_filters/__init__.py diff --git a/bot/exts/filtering/_filters/antispam/__init__.py b/bot/exts/filtering/_filters/antispam/__init__.py new file mode 100644 index 000000000..637bcd410 --- /dev/null +++ b/bot/exts/filtering/_filters/antispam/__init__.py @@ -0,0 +1,9 @@ +from os.path import dirname + +from bot.exts.filtering._filters.filter import UniqueFilter +from bot.exts.filtering._utils import subclasses_in_package + +antispam_filter_types = subclasses_in_package(dirname(__file__), f"{__name__}.", UniqueFilter) +antispam_filter_types = {filter_.name: filter_ for filter_ in antispam_filter_types} + +__all__ = [antispam_filter_types] diff --git a/bot/exts/filtering/_filters/antispam/attachments.py b/bot/exts/filtering/_filters/antispam/attachments.py new file mode 100644 index 000000000..216d9b886 --- /dev/null +++ b/bot/exts/filtering/_filters/antispam/attachments.py @@ -0,0 +1,43 @@ +from datetime import timedelta +from itertools import takewhile +from typing import ClassVar + +import arrow +from pydantic import BaseModel + +from bot.exts.filtering._filter_context import Event, FilterContext +from bot.exts.filtering._filters.filter import UniqueFilter + + +class ExtraAttachmentsSettings(BaseModel): + """Extra settings for when to trigger the antispam rule.""" + + interval_description: ClassVar[str] = ( + "Look for rule violations in messages from the last `interval` number of seconds." + ) + threshold_description: ClassVar[str] = "Maximum number of attachments before the filter is triggered." 
+ + interval: int = 10 + threshold: int = 6 + + +class AttachmentsFilter(UniqueFilter): + """Detects too many attachments sent by a single user.""" + + name = "attachments" + events = (Event.MESSAGE,) + extra_fields_type = ExtraAttachmentsSettings + + async def triggered_on(self, ctx: FilterContext) -> bool: + """Search for the filter's content within a given context.""" + earliest_relevant_at = arrow.utcnow() - timedelta(seconds=self.extra_fields.interval) + relevant_messages = list(takewhile(lambda msg: msg.created_at > earliest_relevant_at, ctx.content)) + + detected_messages = {msg for msg in relevant_messages if msg.author == ctx.author and len(msg.attachments) > 0} + total_recent_attachments = sum(len(msg.attachments) for msg in detected_messages) + + if total_recent_attachments > self.extra_fields.threshold: + ctx.related_messages |= detected_messages + ctx.filter_info[self] = f"sent {total_recent_attachments} attachments" + return True + return False diff --git a/bot/exts/filtering/_filters/antispam/burst.py b/bot/exts/filtering/_filters/antispam/burst.py new file mode 100644 index 000000000..d78107d0a --- /dev/null +++ b/bot/exts/filtering/_filters/antispam/burst.py @@ -0,0 +1,41 @@ +from datetime import timedelta +from itertools import takewhile +from typing import ClassVar + +import arrow +from pydantic import BaseModel + +from bot.exts.filtering._filter_context import Event, FilterContext +from bot.exts.filtering._filters.filter import UniqueFilter + + +class ExtraBurstSettings(BaseModel): + """Extra settings for when to trigger the antispam rule.""" + + interval_description: ClassVar[str] = ( + "Look for rule violations in messages from the last `interval` number of seconds." + ) + threshold_description: ClassVar[str] = "Maximum number of messages before the filter is triggered." + + interval: int = 10 + threshold: int = 7 + + +class BurstFilter(UniqueFilter): + """Detects too many messages sent by a single user.""" + + name = "burst" + events = (Event.MESSAGE,) + extra_fields_type = ExtraBurstSettings + + async def triggered_on(self, ctx: FilterContext) -> bool: + """Search for the filter's content within a given context.""" + earliest_relevant_at = arrow.utcnow() - timedelta(seconds=self.extra_fields.interval) + relevant_messages = list(takewhile(lambda msg: msg.created_at > earliest_relevant_at, ctx.content)) + + detected_messages = {msg for msg in relevant_messages if msg.author == ctx.author} + if len(detected_messages) > self.extra_fields.threshold: + ctx.related_messages |= detected_messages + ctx.filter_info[self] = f"sent {len(detected_messages)} messages" + return True + return False diff --git a/bot/exts/filtering/_filters/antispam/chars.py b/bot/exts/filtering/_filters/antispam/chars.py new file mode 100644 index 000000000..5c4fa201c --- /dev/null +++ b/bot/exts/filtering/_filters/antispam/chars.py @@ -0,0 +1,43 @@ +from datetime import timedelta +from itertools import takewhile +from typing import ClassVar + +import arrow +from pydantic import BaseModel + +from bot.exts.filtering._filter_context import Event, FilterContext +from bot.exts.filtering._filters.filter import UniqueFilter + + +class ExtraCharsSettings(BaseModel): + """Extra settings for when to trigger the antispam rule.""" + + interval_description: ClassVar[str] = ( + "Look for rule violations in messages from the last `interval` number of seconds." + ) + threshold_description: ClassVar[str] = "Maximum number of characters before the filter is triggered." 
+
+    interval: int = 5
+    threshold: int = 4_200
+
+
+class CharsFilter(UniqueFilter):
+    """Detects too many characters sent by a single user."""
+
+    name = "chars"
+    events = (Event.MESSAGE,)
+    extra_fields_type = ExtraCharsSettings
+
+    async def triggered_on(self, ctx: FilterContext) -> bool:
+        """Search for the filter's content within a given context."""
+        earliest_relevant_at = arrow.utcnow() - timedelta(seconds=self.extra_fields.interval)
+        relevant_messages = list(takewhile(lambda msg: msg.created_at > earliest_relevant_at, ctx.content))
+
+        detected_messages = {msg for msg in relevant_messages if msg.author == ctx.author}
+        total_recent_chars = sum(len(msg.content) for msg in detected_messages)
+
+        if total_recent_chars > self.extra_fields.threshold:
+            ctx.related_messages |= detected_messages
+            ctx.filter_info[self] = f"sent {total_recent_chars} characters"
+            return True
+        return False
diff --git a/bot/exts/filtering/_filters/antispam/duplicates.py b/bot/exts/filtering/_filters/antispam/duplicates.py
new file mode 100644
index 000000000..60d5c322c
--- /dev/null
+++ b/bot/exts/filtering/_filters/antispam/duplicates.py
@@ -0,0 +1,44 @@
+from datetime import timedelta
+from itertools import takewhile
+from typing import ClassVar
+
+import arrow
+from pydantic import BaseModel
+
+from bot.exts.filtering._filter_context import Event, FilterContext
+from bot.exts.filtering._filters.filter import UniqueFilter
+
+
+class ExtraDuplicatesSettings(BaseModel):
+    """Extra settings for when to trigger the antispam rule."""
+
+    interval_description: ClassVar[str] = (
+        "Look for rule violations in messages from the last `interval` number of seconds."
+    )
+    threshold_description: ClassVar[str] = "Maximum number of duplicate messages before the filter is triggered."
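+    # With the defaults below, a fourth identical message inside a 10-second window is
+    # what actually trips this filter, since `triggered_on` uses a strict greater-than
+    # comparison against `threshold`.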
+ + interval: int = 10 + threshold: int = 3 + + +class DuplicatesFilter(UniqueFilter): + """Detects duplicated messages sent by a single user.""" + + name = "duplicates" + events = (Event.MESSAGE,) + extra_fields_type = ExtraDuplicatesSettings + + async def triggered_on(self, ctx: FilterContext) -> bool: + """Search for the filter's content within a given context.""" + earliest_relevant_at = arrow.utcnow() - timedelta(seconds=self.extra_fields.interval) + relevant_messages = list(takewhile(lambda msg: msg.created_at > earliest_relevant_at, ctx.content)) + + detected_messages = { + msg for msg in relevant_messages + if msg.author == ctx.author and msg.content == ctx.message.content and msg.content + } + if len(detected_messages) > self.extra_fields.threshold: + ctx.related_messages |= detected_messages + ctx.filter_info[self] = f"sent {len(detected_messages)} duplicate messages" + return True + return False diff --git a/bot/exts/filtering/_filters/antispam/emoji.py b/bot/exts/filtering/_filters/antispam/emoji.py new file mode 100644 index 000000000..0511e4a7b --- /dev/null +++ b/bot/exts/filtering/_filters/antispam/emoji.py @@ -0,0 +1,53 @@ +import re +from datetime import timedelta +from itertools import takewhile +from typing import ClassVar + +import arrow +from emoji import demojize +from pydantic import BaseModel + +from bot.exts.filtering._filter_context import Event, FilterContext +from bot.exts.filtering._filters.filter import UniqueFilter + +DISCORD_EMOJI_RE = re.compile(r"<:\w+:\d+>|:\w+:") +CODE_BLOCK_RE = re.compile(r"```.*?```", flags=re.DOTALL) + + +class ExtraEmojiSettings(BaseModel): + """Extra settings for when to trigger the antispam rule.""" + + interval_description: ClassVar[str] = ( + "Look for rule violations in messages from the last `interval` number of seconds." + ) + threshold_description: ClassVar[str] = "Maximum number of emojis before the filter is triggered." + + interval: int = 10 + threshold: int = 20 + + +class EmojiFilter(UniqueFilter): + """Detects too many emojis sent by a single user.""" + + name = "emoji" + events = (Event.MESSAGE,) + extra_fields_type = ExtraEmojiSettings + + async def triggered_on(self, ctx: FilterContext) -> bool: + """Search for the filter's content within a given context.""" + earliest_relevant_at = arrow.utcnow() - timedelta(seconds=self.extra_fields.interval) + relevant_messages = list(takewhile(lambda msg: msg.created_at > earliest_relevant_at, ctx.content)) + detected_messages = {msg for msg in relevant_messages if msg.author == ctx.author} + + # Get rid of code blocks in the message before searching for emojis. + # Convert Unicode emojis to :emoji: format to get their count. 
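+        # e.g. demojize("🐍<:parrot:123>") -> ":snake:<:parrot:123>", so Unicode and
+        # custom emoji alike end up in a form DISCORD_EMOJI_RE can count (assuming the
+        # `emoji` package's default ":" delimiters).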
+        total_emojis = sum(
+            len(DISCORD_EMOJI_RE.findall(demojize(CODE_BLOCK_RE.sub("", msg.content))))
+            for msg in detected_messages
+        )
+
+        if total_emojis > self.extra_fields.threshold:
+            ctx.related_messages |= detected_messages
+            ctx.filter_info[self] = f"sent {total_emojis} emojis"
+            return True
+        return False
diff --git a/bot/exts/filtering/_filters/antispam/links.py b/bot/exts/filtering/_filters/antispam/links.py
new file mode 100644
index 000000000..76fe53e70
--- /dev/null
+++ b/bot/exts/filtering/_filters/antispam/links.py
@@ -0,0 +1,52 @@
+import re
+from datetime import timedelta
+from itertools import takewhile
+from typing import ClassVar
+
+import arrow
+from pydantic import BaseModel
+
+from bot.exts.filtering._filter_context import Event, FilterContext
+from bot.exts.filtering._filters.filter import UniqueFilter
+
+LINK_RE = re.compile(r"(https?://\S+)")
+
+
+class ExtraLinksSettings(BaseModel):
+    """Extra settings for when to trigger the antispam rule."""
+
+    interval_description: ClassVar[str] = (
+        "Look for rule violations in messages from the last `interval` number of seconds."
+    )
+    threshold_description: ClassVar[str] = "Maximum number of links before the filter is triggered."
+
+    interval: int = 10
+    threshold: int = 10
+
+
+class LinksFilter(UniqueFilter):
+    """Detects too many links sent by a single user."""
+
+    name = "links"
+    events = (Event.MESSAGE,)
+    extra_fields_type = ExtraLinksSettings
+
+    async def triggered_on(self, ctx: FilterContext) -> bool:
+        """Search for the filter's content within a given context."""
+        earliest_relevant_at = arrow.utcnow() - timedelta(seconds=self.extra_fields.interval)
+        relevant_messages = list(takewhile(lambda msg: msg.created_at > earliest_relevant_at, ctx.content))
+        detected_messages = {msg for msg in relevant_messages if msg.author == ctx.author}
+
+        total_links = 0
+        messages_with_links = 0
+        for msg in detected_messages:
+            total_matches = len(LINK_RE.findall(msg.content))
+            if total_matches:
+                messages_with_links += 1
+            total_links += total_matches
+
+        if total_links > self.extra_fields.threshold and messages_with_links > 1:
+            ctx.related_messages |= detected_messages
+            ctx.filter_info[self] = f"sent {total_links} links"
+            return True
+        return False
diff --git a/bot/exts/filtering/_filters/antispam/mentions.py b/bot/exts/filtering/_filters/antispam/mentions.py
new file mode 100644
index 000000000..f3c945e16
--- /dev/null
+++ b/bot/exts/filtering/_filters/antispam/mentions.py
@@ -0,0 +1,90 @@
+from datetime import timedelta
+from itertools import takewhile
+from typing import ClassVar
+
+import arrow
+from discord import DeletedReferencedMessage, MessageType, NotFound
+from pydantic import BaseModel
+from pydis_core.utils.logging import get_logger
+
+import bot
+from bot.exts.filtering._filter_context import Event, FilterContext
+from bot.exts.filtering._filters.filter import UniqueFilter
+
+log = get_logger(__name__)
+
+
+class ExtraMentionsSettings(BaseModel):
+    """Extra settings for when to trigger the antispam rule."""
+
+    interval_description: ClassVar[str] = (
+        "Look for rule violations in messages from the last `interval` number of seconds."
+    )
+    threshold_description: ClassVar[str] = "Maximum number of distinct mentions before the filter is triggered."
+
+    interval: int = 10
+    threshold: int = 5
+
+
+class MentionsFilter(UniqueFilter):
+    """
+    Detects total mentions exceeding the limit sent by a single user.
+
+    Excludes mentions of bots, of the author themselves, and of users being replied to.
+
+    In very rare cases, it may not be possible to determine that a
+    mention was a reply, in which case it is not ignored.
+    """
+
+    name = "mentions"
+    events = (Event.MESSAGE,)
+    extra_fields_type = ExtraMentionsSettings
+
+    async def triggered_on(self, ctx: FilterContext) -> bool:
+        """Search for the filter's content within a given context."""
+        earliest_relevant_at = arrow.utcnow() - timedelta(seconds=self.extra_fields.interval)
+        relevant_messages = list(takewhile(lambda msg: msg.created_at > earliest_relevant_at, ctx.content))
+        detected_messages = {msg for msg in relevant_messages if msg.author == ctx.author}
+
+        # We use `msg.mentions` here as that is supplied by the API itself to determine who was mentioned.
+        # Additionally, `msg.mentions` includes the user replied to, even if the mention doesn't occur in the body.
+        # In order to exclude users who are mentioned as a reply, we check if the msg has a reference.
+        #
+        # While we could use regex to parse the message content, and get a list of
+        # the mentions, that solution is very prone to breaking.
+        # We would need to deal with codeblocks, escaping markdown, and any discrepancies between
+        # our implementation and discord's Markdown parser which would cause false positives or false negatives.
+        total_recent_mentions = 0
+        for msg in detected_messages:
+            # We check if the message is a reply, and if it is try to get the author,
+            # since we ignore mentions of a user that we're replying to.
+            reply_author = None
+
+            if msg.type == MessageType.reply:
+                ref = msg.reference
+
+                if not (resolved := ref.resolved):
+                    # It is possible, in a very unusual situation, for a message to have a reference
+                    # that is both not in the cache, and deleted while running this function.
+                    # In such a situation, this will throw an error which we catch.
+                    try:
+                        resolved = await bot.instance.get_partial_messageable(ref.channel_id).fetch_message(
+                            ref.message_id
+                        )
+                    except NotFound:
+                        log.info("Could not fetch the reference message as it has been deleted.")
+
+                if resolved and not isinstance(resolved, DeletedReferencedMessage):
+                    reply_author = resolved.author
+
+            for user in msg.mentions:
+                # Don't count bot or self mentions, or the user being replied to (if applicable).
+                if user.bot or user in {msg.author, reply_author}:
+                    continue
+                total_recent_mentions += 1
+
+        if total_recent_mentions > self.extra_fields.threshold:
+            ctx.related_messages |= detected_messages
+            ctx.filter_info[self] = f"sent {total_recent_mentions} mentions"
+            return True
+        return False
diff --git a/bot/exts/filtering/_filters/antispam/newlines.py b/bot/exts/filtering/_filters/antispam/newlines.py
new file mode 100644
index 000000000..b15a35219
--- /dev/null
+++ b/bot/exts/filtering/_filters/antispam/newlines.py
@@ -0,0 +1,61 @@
+import re
+from datetime import timedelta
+from itertools import takewhile
+from typing import ClassVar
+
+import arrow
+from pydantic import BaseModel
+
+from bot.exts.filtering._filter_context import Event, FilterContext
+from bot.exts.filtering._filters.filter import UniqueFilter
+
+NEWLINES = re.compile(r"(\n+)")
+
+
+class ExtraNewlinesSettings(BaseModel):
+    """Extra settings for when to trigger the antispam rule."""
+
+    interval_description: ClassVar[str] = (
+        "Look for rule violations in messages from the last `interval` number of seconds."
+    )
+    threshold_description: ClassVar[str] = "Maximum number of newlines before the filter is triggered."
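+    # Two independent limits are configured for this filter: a total-newlines budget
+    # and a consecutive-run budget (described just below), checked in that order.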
+    consecutive_threshold_description: ClassVar[str] = (
+        "Maximum number of consecutive newlines before the filter is triggered."
+    )
+
+    interval: int = 10
+    threshold: int = 100
+    consecutive_threshold: int = 10
+
+
+class NewlinesFilter(UniqueFilter):
+    """Detects too many newlines sent by a single user."""
+
+    name = "newlines"
+    events = (Event.MESSAGE,)
+    extra_fields_type = ExtraNewlinesSettings
+
+    async def triggered_on(self, ctx: FilterContext) -> bool:
+        """Search for the filter's content within a given context."""
+        earliest_relevant_at = arrow.utcnow() - timedelta(seconds=self.extra_fields.interval)
+        relevant_messages = list(takewhile(lambda msg: msg.created_at > earliest_relevant_at, ctx.content))
+        detected_messages = {msg for msg in relevant_messages if msg.author == ctx.author}
+
+        # Identify groups of newline characters and get group & total counts.
+        newline_counts = []
+        for msg in detected_messages:
+            newline_counts += [len(group) for group in NEWLINES.findall(msg.content)]
+        total_recent_newlines = sum(newline_counts)
+        # Get maximum newline group size.
+        max_newline_group = max(newline_counts, default=0)
+
+        # Check first for total newlines, if this passes then check for large groupings.
+        if total_recent_newlines > self.extra_fields.threshold:
+            ctx.related_messages |= detected_messages
+            ctx.filter_info[self] = f"sent {total_recent_newlines} newlines"
+            return True
+        if max_newline_group > self.extra_fields.consecutive_threshold:
+            ctx.related_messages |= detected_messages
+            ctx.filter_info[self] = f"sent {max_newline_group} consecutive newlines"
+            return True
+        return False
diff --git a/bot/exts/filtering/_filters/antispam/role_mentions.py b/bot/exts/filtering/_filters/antispam/role_mentions.py
new file mode 100644
index 000000000..49de642fa
--- /dev/null
+++ b/bot/exts/filtering/_filters/antispam/role_mentions.py
@@ -0,0 +1,42 @@
+from datetime import timedelta
+from itertools import takewhile
+from typing import ClassVar
+
+import arrow
+from pydantic import BaseModel
+
+from bot.exts.filtering._filter_context import Event, FilterContext
+from bot.exts.filtering._filters.filter import UniqueFilter
+
+
+class ExtraRoleMentionsSettings(BaseModel):
+    """Extra settings for when to trigger the antispam rule."""
+
+    interval_description: ClassVar[str] = (
+        "Look for rule violations in messages from the last `interval` number of seconds."
+    )
+    threshold_description: ClassVar[str] = "Maximum number of role mentions before the filter is triggered."
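+    # With the defaults below, a fourth role mention within 10 seconds trips the
+    # filter (the comparison is strictly greater-than the threshold).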
+
+    interval: int = 10
+    threshold: int = 3
+
+
+class RoleMentionsFilter(UniqueFilter):
+    """Detects too many role mentions sent by a single user."""
+
+    name = "role_mentions"
+    events = (Event.MESSAGE,)
+    extra_fields_type = ExtraRoleMentionsSettings
+
+    async def triggered_on(self, ctx: FilterContext) -> bool:
+        """Search for the filter's content within a given context."""
+        earliest_relevant_at = arrow.utcnow() - timedelta(seconds=self.extra_fields.interval)
+        relevant_messages = list(takewhile(lambda msg: msg.created_at > earliest_relevant_at, ctx.content))
+        detected_messages = {msg for msg in relevant_messages if msg.author == ctx.author}
+        total_recent_mentions = sum(len(msg.role_mentions) for msg in detected_messages)
+
+        if total_recent_mentions > self.extra_fields.threshold:
+            ctx.related_messages |= detected_messages
+            ctx.filter_info[self] = f"sent {total_recent_mentions} role mentions"
+            return True
+        return False
diff --git a/bot/exts/filtering/_filters/domain.py b/bot/exts/filtering/_filters/domain.py
new file mode 100644
index 000000000..7c229fdcb
--- /dev/null
+++ b/bot/exts/filtering/_filters/domain.py
@@ -0,0 +1,62 @@
+import re
+from typing import ClassVar, Optional
+from urllib.parse import urlparse
+
+import tldextract
+from discord.ext.commands import BadArgument
+from pydantic import BaseModel
+
+from bot.exts.filtering._filter_context import FilterContext
+from bot.exts.filtering._filters.filter import Filter
+
+URL_RE = re.compile(r"(?:https?://)?(\S+?)[\\/]*", flags=re.IGNORECASE)
+
+
+class ExtraDomainSettings(BaseModel):
+    """Extra settings for how domains should be matched in a message."""
+
+    subdomains_description: ClassVar[str] = (
+        "A boolean. If True, will only trigger for subdomains and subpaths, and not for the domain itself."
+    )
+
+    # Whether to trigger only for subdomains and subpaths, and not the specified domain itself.
+    subdomains: Optional[bool] = False
+
+
+class DomainFilter(Filter):
+    """
+    A filter which looks for a specific domain given by URL.
+
+    The scheme (http, https) does not need to be included in the filter.
+    Will also match subdomains.
+    """
+
+    name = "domain"
+    extra_fields_type = ExtraDomainSettings
+
+    async def triggered_on(self, ctx: FilterContext) -> bool:
+        """Searches for a domain within a given context."""
+        domain = tldextract.extract(self.content).registered_domain
+
+        for found_url in ctx.content:
+            extract = tldextract.extract(found_url)
+            if self.content in found_url and extract.registered_domain == domain:
+                if self.extra_fields.subdomains:
+                    if not extract.subdomain and not urlparse(f"https://{found_url}").path:
+                        return False
+                ctx.matches.append(self.content)
+                ctx.notification_domain = self.content
+                return True
+        return False
+
+    @classmethod
+    async def process_input(cls, content: str, description: str) -> tuple[str, str]:
+        """
+        Process the content and description into a form which will work with the filtering.
+
+        A BadArgument should be raised if the content can't be used.
+ """ + match = URL_RE.fullmatch(content) + if not match or not match.group(1): + raise BadArgument(f"`{content}` is not a URL.") + return match.group(1), description diff --git a/bot/exts/filtering/_filters/extension.py b/bot/exts/filtering/_filters/extension.py new file mode 100644 index 000000000..97eddc406 --- /dev/null +++ b/bot/exts/filtering/_filters/extension.py @@ -0,0 +1,27 @@ +from bot.exts.filtering._filter_context import FilterContext +from bot.exts.filtering._filters.filter import Filter + + +class ExtensionFilter(Filter): + """ + A filter which looks for a specific attachment extension in messages. + + The filter stores the extension preceded by a dot. + """ + + name = "extension" + + async def triggered_on(self, ctx: FilterContext) -> bool: + """Searches for an attachment extension in the context content, given as a set of extensions.""" + return self.content in ctx.content + + @classmethod + async def process_input(cls, content: str, description: str) -> tuple[str, str]: + """ + Process the content and description into a form which will work with the filtering. + + A BadArgument should be raised if the content can't be used. + """ + if not content.startswith("."): + content = f".{content}" + return content, description diff --git a/bot/exts/filtering/_filters/filter.py b/bot/exts/filtering/_filters/filter.py new file mode 100644 index 000000000..b5f4c127a --- /dev/null +++ b/bot/exts/filtering/_filters/filter.py @@ -0,0 +1,94 @@ +from abc import abstractmethod +from typing import Any + +import arrow +from pydantic import ValidationError + +from bot.exts.filtering._filter_context import Event, FilterContext +from bot.exts.filtering._settings import Defaults, create_settings +from bot.exts.filtering._utils import FieldRequiring + + +class Filter(FieldRequiring): + """ + A class representing a filter. + + Each filter looks for a specific attribute within an event (such as message sent), + and defines what action should be performed if it is triggered. + """ + + # Each subclass must define a name which will be used to fetch its description. + # Names must be unique across all types of filters. + name = FieldRequiring.MUST_SET_UNIQUE + # If a subclass uses extra fields, it should assign the pydantic model type to this variable. 
+ extra_fields_type = None + + def __init__(self, filter_data: dict, defaults: Defaults | None = None): + self.id = filter_data["id"] + self.content = filter_data["content"] + self.description = filter_data["description"] + self.created_at = arrow.get(filter_data["created_at"]) + self.updated_at = arrow.get(filter_data["updated_at"]) + self.actions, self.validations = create_settings(filter_data["settings"], defaults=defaults) + if self.extra_fields_type: + self.extra_fields = self.extra_fields_type.parse_raw(filter_data["additional_field"] or "{}") # noqa: P103 + else: + self.extra_fields = None + + @property + def overrides(self) -> tuple[dict[str, Any], dict[str, Any]]: + """Return a tuple of setting overrides and filter setting overrides.""" + settings = {} + if self.actions: + settings = self.actions.overrides + if self.validations: + settings |= self.validations.overrides + + filter_settings = {} + if self.extra_fields: + filter_settings = self.extra_fields.dict(exclude_unset=True) + + return settings, filter_settings + + @abstractmethod + async def triggered_on(self, ctx: FilterContext) -> bool: + """Search for the filter's content within a given context.""" + + @classmethod + def validate_filter_settings(cls, extra_fields: dict) -> tuple[bool, str | None]: + """Validate whether the supplied fields are valid for the filter, and provide the error message if not.""" + if cls.extra_fields_type is None: + return True, None + + try: + cls.extra_fields_type(**extra_fields) + except ValidationError as e: + return False, repr(e) + else: + return True, None + + @classmethod + async def process_input(cls, content: str, description: str) -> tuple[str, str]: + """ + Process the content and description into a form which will work with the filtering. + + A BadArgument should be raised if the content can't be used. + """ + return content, description + + def __str__(self) -> str: + """A string representation of the filter.""" + string = f"{self.id}. `{self.content}`" + if self.description: + string += f" - {self.description}" + return string + + +class UniqueFilter(Filter): + """ + Unique filters are ones that should only be run once in a given context. + + This is as opposed to say running many domain filters on the same message. + """ + + events: tuple[Event, ...] = FieldRequiring.MUST_SET diff --git a/bot/exts/filtering/_filters/invite.py b/bot/exts/filtering/_filters/invite.py new file mode 100644 index 000000000..799a302b9 --- /dev/null +++ b/bot/exts/filtering/_filters/invite.py @@ -0,0 +1,48 @@ +from discord import NotFound +from discord.ext.commands import BadArgument +from pydis_core.utils.regex import DISCORD_INVITE + +import bot +from bot.exts.filtering._filter_context import FilterContext +from bot.exts.filtering._filters.filter import Filter + + +class InviteFilter(Filter): + """ + A filter which looks for invites to a specific guild in messages. + + The filter stores the guild ID which is allowed or denied. + """ + + name = "invite" + + def __init__(self, filter_data: dict, defaults_data: dict | None = None): + super().__init__(filter_data, defaults_data) + self.content = int(self.content) + + async def triggered_on(self, ctx: FilterContext) -> bool: + """Searches for a guild ID in the context content, given as a set of IDs.""" + return self.content in ctx.content + + @classmethod + async def process_input(cls, content: str, description: str) -> tuple[str, str]: + """ + Process the content and description into a form which will work with the filtering. 
+ + A BadArgument should be raised if the content can't be used. + """ + match = DISCORD_INVITE.fullmatch(content) + if not match or not match.group("invite"): + raise BadArgument(f"`{content}` is not a valid Discord invite.") + invite_code = match.group("invite") + try: + invite = await bot.instance.fetch_invite(invite_code) + except NotFound: + raise BadArgument(f"`{invite_code}` is not a valid Discord invite code.") + if not invite.guild: + raise BadArgument("Did you just try to add a group DM?") + + guild_name = invite.guild.name if hasattr(invite.guild, "name") else "" + if guild_name.lower() not in description.lower(): + description = " - ".join(part for part in (f'Guild "{guild_name}"', description) if part) + return str(invite.guild.id), description diff --git a/bot/exts/filtering/_filters/token.py b/bot/exts/filtering/_filters/token.py new file mode 100644 index 000000000..3cd9b909d --- /dev/null +++ b/bot/exts/filtering/_filters/token.py @@ -0,0 +1,35 @@ +import re + +from discord.ext.commands import BadArgument + +from bot.exts.filtering._filter_context import FilterContext +from bot.exts.filtering._filters.filter import Filter + + +class TokenFilter(Filter): + """A filter which looks for a specific token given by regex.""" + + name = "token" + + async def triggered_on(self, ctx: FilterContext) -> bool: + """Searches for a regex pattern within a given context.""" + pattern = self.content + + match = re.search(pattern, ctx.content, flags=re.IGNORECASE) + if match: + ctx.matches.append(match[0]) + return True + return False + + @classmethod + async def process_input(cls, content: str, description: str) -> tuple[str, str]: + """ + Process the content and description into a form which will work with the filtering. + + A BadArgument should be raised if the content can't be used. 
+ """ + try: + re.compile(content) + except re.error as e: + raise BadArgument(str(e)) + return content, description diff --git a/bot/exts/filtering/_filters/unique/__init__.py b/bot/exts/filtering/_filters/unique/__init__.py new file mode 100644 index 000000000..ce78d6922 --- /dev/null +++ b/bot/exts/filtering/_filters/unique/__init__.py @@ -0,0 +1,9 @@ +from os.path import dirname + +from bot.exts.filtering._filters.filter import UniqueFilter +from bot.exts.filtering._utils import subclasses_in_package + +unique_filter_types = subclasses_in_package(dirname(__file__), f"{__name__}.", UniqueFilter) +unique_filter_types = {filter_.name: filter_ for filter_ in unique_filter_types} + +__all__ = [unique_filter_types] diff --git a/bot/exts/filters/token_remover.py b/bot/exts/filtering/_filters/unique/discord_token.py index 29f80671d..6174ee30b 100644 --- a/bot/exts/filters/token_remover.py +++ b/bot/exts/filtering/_filters/unique/discord_token.py @@ -1,38 +1,34 @@ import base64 import re -import typing as t - -from discord import Colour, Message, NotFound -from discord.ext.commands import Cog - -from bot import utils -from bot.bot import Bot -from bot.constants import Channels, Colours, Event, Icons +from collections.abc import Callable, Coroutine +from typing import ClassVar, NamedTuple + +import discord +from pydantic import BaseModel, Field +from pydis_core.utils.logging import get_logger +from pydis_core.utils.members import get_or_fetch_member + +import bot +from bot import constants, utils +from bot.constants import Guild +from bot.exts.filtering._filter_context import Event, FilterContext +from bot.exts.filtering._filters.filter import UniqueFilter +from bot.exts.filtering._utils import resolve_mention from bot.exts.moderation.modlog import ModLog -from bot.log import get_logger -from bot.utils.members import get_or_fetch_member from bot.utils.messages import format_user log = get_logger(__name__) + LOG_MESSAGE = ( - "Censored a seemingly valid token sent by {author} in {channel}, " - "token was `{user_id}.{timestamp}.{hmac}`" + "Censored a seemingly valid token sent by {author} in {channel}. " + "Token was: `{user_id}.{timestamp}.{hmac}`." ) UNKNOWN_USER_LOG_MESSAGE = "Decoded user ID: `{user_id}` (Not present in server)." KNOWN_USER_LOG_MESSAGE = ( "Decoded user ID: `{user_id}` **(Present in server)**.\n" "This matches `{user_name}` and means this is likely a valid **{kind}** token." ) -DELETION_MESSAGE_TEMPLATE = ( - "Hey {mention}! I noticed you posted a seemingly valid Discord API " - "token in your message and have removed your message. " - "This means that your token has been **compromised**. " - "Please change your token **immediately** at: " - "<https://discord.com/developers/applications>\n\n" - "Feel free to re-post it with the token removed. " - "If you believe this was a mistake, please let us know!" -) DISCORD_EPOCH = 1_420_070_400 TOKEN_EPOCH = 1_293_840_000 @@ -43,7 +39,17 @@ TOKEN_EPOCH = 1_293_840_000 TOKEN_RE = re.compile(r"([\w-]{10,})\.([\w-]{5,})\.([\w-]{10,})") -class Token(t.NamedTuple): +class ExtraDiscordTokenSettings(BaseModel): + """Extra settings for who should be pinged when a Discord token is detected.""" + + pings_for_bot_description: ClassVar[str] = "A sequence. Who should be pinged if the token found belongs to a bot." + pings_for_user_description: ClassVar[str] = "A sequence. Who should be pinged if the token found belongs to a user." 
+ + pings_for_bot: set[str] = Field(default_factory=set) + pings_for_user: set[str] = Field(default_factory=lambda: {"Moderators"}) + + +class Token(NamedTuple): """A Discord Bot token.""" user_id: str @@ -51,84 +57,64 @@ class Token(t.NamedTuple): hmac: str -class TokenRemover(Cog): +class DiscordTokenFilter(UniqueFilter): """Scans messages for potential discord client tokens and removes them.""" - def __init__(self, bot: Bot): - self.bot = bot + name = "discord_token" + events = (Event.MESSAGE, Event.MESSAGE_EDIT) + extra_fields_type = ExtraDiscordTokenSettings @property - def mod_log(self) -> ModLog: + def mod_log(self) -> ModLog | None: """Get currently loaded ModLog cog instance.""" - return self.bot.get_cog("ModLog") - - @Cog.listener() - async def on_message(self, msg: Message) -> None: - """ - Check each message for a string that matches Discord's token pattern. + return bot.instance.get_cog("ModLog") - See: https://discordapp.com/developers/docs/reference#snowflakes - """ - # Ignore DMs; can't delete messages in there anyway. - if not msg.guild or msg.author.bot: - return - - found_token = self.find_token_in_message(msg) - if found_token: - await self.take_action(msg, found_token) - - @Cog.listener() - async def on_message_edit(self, before: Message, after: Message) -> None: - """ - Check each edit for a string that matches Discord's token pattern. - - See: https://discordapp.com/developers/docs/reference#snowflakes - """ - await self.on_message(after) - - async def take_action(self, msg: Message, found_token: Token) -> None: - """Remove the `msg` containing the `found_token` and send a mod log message.""" - self.mod_log.ignore(Event.message_delete, msg.id) - - try: - await msg.delete() - except NotFound: - log.debug(f"Failed to remove token in message {msg.id}: message already deleted.") - return - - await msg.channel.send(DELETION_MESSAGE_TEMPLATE.format(mention=msg.author.mention)) - - log_message = self.format_log_message(msg, found_token) - userid_message, mention_everyone = await self.format_userid_log_message(msg, found_token) - log.debug(log_message) - - # Send pretty mod log embed to mod-alerts - await self.mod_log.send_log_message( - icon_url=Icons.token_removed, - colour=Colour(Colours.soft_red), - title="Token removed!", - text=log_message + "\n" + userid_message, - thumbnail=msg.author.display_avatar.url, - channel_id=Channels.mod_alerts, - ping_everyone=mention_everyone, - ) + async def triggered_on(self, ctx: FilterContext) -> bool: + """Return whether the message contains Discord client tokens.""" + found_token = self.find_token_in_message(ctx.content) + if not found_token: + return False - self.bot.stats.incr("tokens.removed_tokens") + if ctx.message and (mod_log := self.mod_log): + mod_log.ignore(constants.Event.message_delete, ctx.message.id) + ctx.content = ctx.content.replace(found_token.hmac, self.censor_hmac(found_token.hmac)) + ctx.additional_actions.append(self._create_token_alert_embed_wrapper(found_token)) + return True + + def _create_token_alert_embed_wrapper(self, found_token: Token) -> Callable[[FilterContext], Coroutine]: + """Create the action to perform when an alert should be sent for a message containing a Discord token.""" + async def _create_token_alert_embed(ctx: FilterContext) -> None: + """Add an alert embed to the context with info about the token sent.""" + userid_message, is_user = await self.format_userid_log_message(found_token) + log_message = self.format_log_message(ctx.author, ctx.channel, found_token) + log.debug(log_message) + + 
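+            # Route the alert by token owner: a user token is treated as the more
+            # severe case (red embed, `pings_for_user`), a bot token as informational
+            # (blue embed, `pings_for_bot`).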
if is_user: + mentions = map(resolve_mention, self.extra_fields.pings_for_user) + color = discord.Colour.red() + else: + mentions = map(resolve_mention, self.extra_fields.pings_for_bot) + color = discord.Colour.blue() + unmentioned = [mention for mention in mentions if mention not in ctx.alert_content] + if unmentioned: + ctx.alert_content = f"{' '.join(unmentioned)} {ctx.alert_content}" + ctx.alert_embeds.append(discord.Embed(colour=color, description=userid_message)) + + return _create_token_alert_embed @classmethod - async def format_userid_log_message(cls, msg: Message, token: Token) -> t.Tuple[str, bool]: + async def format_userid_log_message(cls, token: Token) -> tuple[str, bool]: """ Format the portion of the log message that includes details about the detected user ID. If the user is resolved to a member, the format includes the user ID, name, and the kind of user detected. - - If we resolve to a member and it is not a bot, we also return True to ping everyone. - - Returns a tuple of (log_message, mention_everyone) + If it is resolved to a user or a member, and it is not a bot, also return True. + Returns a tuple of (log_message, is_user) """ user_id = cls.extract_user_id(token.user_id) - user = await get_or_fetch_member(msg.guild, user_id) + guild = bot.instance.get_guild(Guild.id) + user = await get_or_fetch_member(guild, user_id) if user: return KNOWN_USER_LOG_MESSAGE.format( @@ -140,22 +126,27 @@ class TokenRemover(Cog): return UNKNOWN_USER_LOG_MESSAGE.format(user_id=user_id), False @staticmethod - def format_log_message(msg: Message, token: Token) -> str: + def censor_hmac(hmac: str) -> str: + """Return a censored version of the hmac.""" + return 'x' * (len(hmac) - 3) + hmac[-3:] + + @classmethod + def format_log_message(cls, author: discord.User, channel: discord.abc.GuildChannel, token: Token) -> str: """Return the generic portion of the log message to send for `token` being censored in `msg`.""" return LOG_MESSAGE.format( - author=format_user(msg.author), - channel=msg.channel.mention, + author=format_user(author), + channel=channel.mention, user_id=token.user_id, timestamp=token.timestamp, - hmac='x' * (len(token.hmac) - 3) + token.hmac[-3:], + hmac=cls.censor_hmac(token.hmac), ) @classmethod - def find_token_in_message(cls, msg: Message) -> t.Optional[Token]: - """Return a seemingly valid token found in `msg` or `None` if no token is found.""" + def find_token_in_message(cls, content: str) -> Token | None: + """Return a seemingly valid token found in `content` or `None` if no token is found.""" # Use finditer rather than search to guard against method calls prematurely returning the # token check (e.g. `message.channel.send` also matches our token pattern) - for match in TOKEN_RE.finditer(msg.content): + for match in TOKEN_RE.finditer(content): token = Token(*match.groups()) if ( (cls.extract_user_id(token.user_id) is not None) @@ -169,7 +160,7 @@ class TokenRemover(Cog): return None @staticmethod - def extract_user_id(b64_content: str) -> t.Optional[int]: + def extract_user_id(b64_content: str) -> int | None: """Return a user ID integer from part of a potential token, or None if it couldn't be decoded.""" b64_content = utils.pad_base64(b64_content) @@ -214,7 +205,7 @@ class TokenRemover(Cog): """ Determine if a given HMAC portion of a token is potentially valid. 
- If the HMAC has 3 or less characters, it's probably a dummy value like "xxxxxxxxxx", + If the HMAC has 3 or fewer characters, it's probably a dummy value like "xxxxxxxxxx", and thus the token can probably be skipped. """ unique = len(set(b64_content.lower())) @@ -226,8 +217,3 @@ class TokenRemover(Cog): return False else: return True - - -async def setup(bot: Bot) -> None: - """Load the TokenRemover cog.""" - await bot.add_cog(TokenRemover(bot)) diff --git a/bot/exts/filtering/_filters/unique/everyone.py b/bot/exts/filtering/_filters/unique/everyone.py new file mode 100644 index 000000000..a32e67cc5 --- /dev/null +++ b/bot/exts/filtering/_filters/unique/everyone.py @@ -0,0 +1,28 @@ +import re + +from bot.constants import Guild +from bot.exts.filtering._filter_context import Event, FilterContext +from bot.exts.filtering._filters.filter import UniqueFilter + +EVERYONE_PING_RE = re.compile(rf"@everyone|<@&{Guild.id}>|@here") +CODE_BLOCK_RE = re.compile( + r"(?P<delim>``?)[^`]+?(?P=delim)(?!`+)" # Inline codeblock + r"|```(.+?)```", # Multiline codeblock + re.DOTALL | re.MULTILINE +) + + +class EveryoneFilter(UniqueFilter): + """Filter messages which contain `@everyone` and `@here` tags outside a codeblock.""" + + name = "everyone" + events = (Event.MESSAGE, Event.MESSAGE_EDIT) + + async def triggered_on(self, ctx: FilterContext) -> bool: + """Search for the filter's content within a given context.""" + # First pass to avoid running re.sub on every message + if not EVERYONE_PING_RE.search(ctx.content): + return False + + content_without_codeblocks = CODE_BLOCK_RE.sub("", ctx.content) + return bool(EVERYONE_PING_RE.search(content_without_codeblocks)) diff --git a/bot/exts/filtering/_filters/unique/rich_embed.py b/bot/exts/filtering/_filters/unique/rich_embed.py new file mode 100644 index 000000000..2ee469f51 --- /dev/null +++ b/bot/exts/filtering/_filters/unique/rich_embed.py @@ -0,0 +1,51 @@ +import re + +from pydis_core.utils.logging import get_logger + +from bot.exts.filtering._filter_context import Event, FilterContext +from bot.exts.filtering._filters.filter import UniqueFilter +from bot.utils.helpers import remove_subdomain_from_url + +log = get_logger(__name__) + +URL_RE = re.compile(r"(https?://\S+)", flags=re.IGNORECASE) + + +class RichEmbedFilter(UniqueFilter): + """Filter messages which contain rich embeds not auto-generated from a URL.""" + + name = "rich_embed" + events = (Event.MESSAGE, Event.MESSAGE_EDIT) + + async def triggered_on(self, ctx: FilterContext) -> bool: + """Determine if `msg` contains any rich embeds not auto-generated from a URL.""" + if ctx.embeds: + if ctx.event == Event.MESSAGE_EDIT: + if not ctx.message.edited_at: # This might happen, apparently. + return False + # If the edit delta is less than 100 microseconds, it's probably a double filter trigger. 
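+                # (Discord is known to dispatch an extra message-update event when an
+                # embed resolves after sending; an assumption based on this guard.)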
+                delta = ctx.message.edited_at - (ctx.before_message.edited_at or ctx.before_message.created_at)
+                if delta.total_seconds() < 0.0001:
+                    return False
+
+            for embed in ctx.embeds:
+                if embed.type == "rich":
+                    urls = URL_RE.findall(ctx.content)
+                    final_urls = set(urls)
+                    # This is due to the way Discord renders relative URLs in embeds:
+                    # if the following URL is sent: https://mobile.twitter.com/something,
+                    # Discord renders it as https://twitter.com/something.
+                    for url in urls:
+                        final_urls.add(remove_subdomain_from_url(url))
+                    if not embed.url or embed.url not in final_urls:
+                        # If `embed.url` does not exist or if `embed.url` is not part of the content
+                        # of the message, it's unlikely to be an auto-generated embed by Discord.
+                        ctx.alert_embeds.extend(ctx.embeds)
+                        return True
+                else:
+                    log.trace(
+                        "Found a rich embed sent by a regular user account, "
+                        "but it was likely just an automatic URL embed."
+                    )
+
+        return False
diff --git a/bot/exts/filtering/_filters/unique/webhook.py b/bot/exts/filtering/_filters/unique/webhook.py
new file mode 100644
index 000000000..965ef42eb
--- /dev/null
+++ b/bot/exts/filtering/_filters/unique/webhook.py
@@ -0,0 +1,63 @@
+import re
+from collections.abc import Callable, Coroutine
+
+from pydis_core.utils.logging import get_logger
+
+import bot
+from bot import constants
+from bot.exts.filtering._filter_context import Event, FilterContext
+from bot.exts.filtering._filters.filter import UniqueFilter
+from bot.exts.moderation.modlog import ModLog
+
+log = get_logger(__name__)
+
+
+WEBHOOK_URL_RE = re.compile(
+    r"((?:https?://)?(?:ptb\.|canary\.)?discord(?:app)?\.com/api/webhooks/\d+/)\S+/?",
+    re.IGNORECASE
+)
+
+
+class WebhookFilter(UniqueFilter):
+    """Scan messages to detect Discord webhook links."""
+
+    name = "webhook"
+    events = (Event.MESSAGE, Event.MESSAGE_EDIT)
+
+    @property
+    def mod_log(self) -> ModLog | None:
+        """Get current instance of `ModLog`."""
+        return bot.instance.get_cog("ModLog")
+
+    async def triggered_on(self, ctx: FilterContext) -> bool:
+        """Search for a webhook in the given content. If found, attempt to delete it."""
+        matches = set(WEBHOOK_URL_RE.finditer(ctx.content))
+        if not matches:
+            return False
+
+        # Don't log the deletion of the offending message.
+        if ctx.message and (mod_log := self.mod_log):
+            mod_log.ignore(constants.Event.message_delete, ctx.message.id)
+
+        for i, match in enumerate(matches, start=1):
+            extra = "" if len(matches) == 1 else f" ({i})"
+            # Queue the webhook for deletion.
+            ctx.additional_actions.append(self._delete_webhook_wrapper(match[0], extra))
+            # Don't show the full webhook in places such as the mod alert.
+            ctx.content = ctx.content.replace(match[0], match[1] + "xxx")
+
+        return True
+
+    @staticmethod
+    def _delete_webhook_wrapper(webhook_url: str, extra_message: str) -> Callable[[FilterContext], Coroutine]:
+        """Create the action to perform when a webhook should be deleted."""
+        async def _delete_webhook(ctx: FilterContext) -> None:
+            """Delete the given webhook and update the filter context."""
+            async with bot.instance.http_session.delete(webhook_url) as resp:
+                # The Discord API returns a 204 NO CONTENT response on success.
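+                # Deleting a webhook by its full URL needs no bot auth header: the
+                # token embedded in the URL itself authorizes the DELETE request.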
+                if resp.status == 204:
+                    ctx.action_descriptions.append("webhook deleted" + extra_message)
+                else:
+                    ctx.action_descriptions.append("failed to delete webhook" + extra_message)
+
+        return _delete_webhook
diff --git a/bot/exts/filtering/_settings.py b/bot/exts/filtering/_settings.py
new file mode 100644
index 000000000..75e810df5
--- /dev/null
+++ b/bot/exts/filtering/_settings.py
@@ -0,0 +1,233 @@
+from __future__ import annotations
+
+import operator
+import traceback
+from abc import abstractmethod
+from copy import copy
+from functools import reduce
+from typing import Any, NamedTuple, Optional, TypeVar
+
+from typing_extensions import Self
+
+from bot.exts.filtering._filter_context import FilterContext
+from bot.exts.filtering._settings_types import settings_types
+from bot.exts.filtering._settings_types.settings_entry import ActionEntry, SettingsEntry, ValidationEntry
+from bot.exts.filtering._utils import FieldRequiring
+from bot.log import get_logger
+
+TSettings = TypeVar("TSettings", bound="Settings")
+
+log = get_logger(__name__)
+
+_already_warned: set[str] = set()
+
+T = TypeVar("T", bound=SettingsEntry)
+
+
+def create_settings(
+    settings_data: dict, *, defaults: Defaults | None = None, keep_empty: bool = False
+) -> tuple[Optional[ActionSettings], Optional[ValidationSettings]]:
+    """
+    Create and return instances of the Settings subclasses from the given data.
+
+    Additionally, warn for data entries with no matching class.
+
+    In case these are setting overrides, the defaults can be provided to keep track of the correct values.
+    """
+    action_data = {}
+    validation_data = {}
+    for entry_name, entry_data in settings_data.items():
+        if entry_name in settings_types["ActionEntry"]:
+            action_data[entry_name] = entry_data
+        elif entry_name in settings_types["ValidationEntry"]:
+            validation_data[entry_name] = entry_data
+        elif entry_name not in _already_warned:
+            log.warning(
+                f"A setting named {entry_name} was loaded from the database, but no matching class was found."
+            )
+            _already_warned.add(entry_name)
+    if defaults is None:
+        default_actions = None
+        default_validations = None
+    else:
+        default_actions, default_validations = defaults
+    return (
+        ActionSettings.create(action_data, defaults=default_actions, keep_empty=keep_empty),
+        ValidationSettings.create(validation_data, defaults=default_validations, keep_empty=keep_empty)
+    )
+
+
+class Settings(FieldRequiring, dict[str, T]):
+    """
+    A collection of settings.
+
+    For processing the settings parts in the database and evaluating them on given contexts.
+
+    Each filter list and filter has its own settings.
+
+    A filter doesn't have to have its own settings. For every undefined setting, it falls back to the value defined in
+    the filter list which contains the filter.
+    """
+
+    entry_type = T
+
+    _already_warned: set[str] = set()
+
+    @abstractmethod
+    def __init__(self, settings_data: dict, *, defaults: Settings | None = None, keep_empty: bool = False):
+        super().__init__()
+
+        entry_classes = settings_types.get(self.entry_type.__name__)
+        for entry_name, entry_data in settings_data.items():
+            try:
+                entry_cls = entry_classes[entry_name]
+            except KeyError:
+                if entry_name not in self._already_warned:
+                    log.warning(
+                        f"A setting named {entry_name} was loaded from the database, "
+                        f"but no matching {self.entry_type.__name__} class was found."
+ ) + self._already_warned.add(entry_name) + else: + try: + entry_defaults = None if defaults is None else defaults[entry_name] + new_entry = entry_cls.create( + entry_data, defaults=entry_defaults, keep_empty=keep_empty + ) + if new_entry: + self[entry_name] = new_entry + except TypeError as e: + raise TypeError( + f"Attempted to load a {entry_name} setting, but the response is malformed: {entry_data}" + ) from e + + @property + def overrides(self) -> dict[str, Any]: + """Return a dictionary of overrides across all entries.""" + return reduce(operator.or_, (entry.overrides for entry in self.values() if entry), {}) + + def copy(self: TSettings) -> TSettings: + """Create a shallow copy of the object.""" + return copy(self) + + def get_setting(self, key: str, default: Optional[Any] = None) -> Any: + """Get the setting matching the key, or fall back to the default value if the key is missing.""" + for entry in self.values(): + if hasattr(entry, key): + return getattr(entry, key) + return default + + @classmethod + def create( + cls, settings_data: dict, *, defaults: Settings | None = None, keep_empty: bool = False + ) -> Optional[Settings]: + """ + Returns a Settings object from `settings_data` if it holds any value, None otherwise. + + Use this method to create Settings objects instead of the init. + The None value is significant for how a filter list iterates over its filters. + """ + settings = cls(settings_data, defaults=defaults, keep_empty=keep_empty) + # If an entry doesn't hold any values, its `create` method will return None. + # If all entries are None, then the settings object holds no values. + if not keep_empty and not any(settings.values()): + return None + + return settings + + +class ValidationSettings(Settings[ValidationEntry]): + """ + A collection of validation settings. + + A filter is triggered only if all of its validation settings (e.g whether to invoke in DM) approve + (the check returns True). + """ + + entry_type = ValidationEntry + + def __init__(self, settings_data: dict, *, defaults: Settings | None = None, keep_empty: bool = False): + super().__init__(settings_data, defaults=defaults, keep_empty=keep_empty) + + def evaluate(self, ctx: FilterContext) -> tuple[set[str], set[str]]: + """Evaluates for each setting whether the context is relevant to the filter.""" + passed = set() + failed = set() + + for name, validation in self.items(): + if validation: + if validation.triggers_on(ctx): + passed.add(name) + else: + failed.add(name) + + return passed, failed + + +class ActionSettings(Settings[ActionEntry]): + """ + A collection of action settings. + + If a filter is triggered, its action settings (e.g how to infract the user) are combined with the action settings of + other triggered filters in the same event, and action is taken according to the combined action settings. + """ + + entry_type = ActionEntry + + def __init__(self, settings_data: dict, *, defaults: Settings | None = None, keep_empty: bool = False): + super().__init__(settings_data, defaults=defaults, keep_empty=keep_empty) + + def union(self, other: Self) -> Self: + """Combine the entries of two collections of settings into a new ActionsSettings.""" + actions = {} + # A settings object doesn't necessarily have all types of entries (e.g in the case of filter overrides). 
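+        # Merge sketch: entries present in both collections are union-ed pairwise,
+        # while entries unique to either side are carried over as-is.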
+ for entry in self: + if entry in other: + actions[entry] = self[entry].union(other[entry]) + else: + actions[entry] = self[entry] + for entry in other: + if entry not in actions: + actions[entry] = other[entry] + + result = ActionSettings({}) + result.update(actions) + return result + + async def action(self, ctx: FilterContext) -> None: + """Execute the action of every action entry stored, as well as any additional actions in the context.""" + for entry in self.values(): + try: + await entry.action(ctx) + # Filtering should not stop even if one type of action raised an exception. + # For example, if deleting the message raised somehow, it should still try to infract the user. + except Exception: + log.exception(traceback.format_exc()) + + for action in ctx.additional_actions: + try: + await action(ctx) + except Exception: + log.exception(traceback.format_exc()) + + def fallback_to(self, fallback: ActionSettings) -> ActionSettings: + """Fill in missing entries from `fallback`.""" + new_actions = self.copy() + for entry_name, entry_value in fallback.items(): + if entry_name not in self: + new_actions[entry_name] = entry_value + return new_actions + + +class Defaults(NamedTuple): + """Represents an atomic list's default settings.""" + + actions: ActionSettings + validations: ValidationSettings + + def dict(self) -> dict[str, Any]: + """Return a dict representation of the stored fields across all entries.""" + dict_ = {} + for settings in self: + dict_ = reduce(operator.or_, (entry.dict() for entry in settings.values()), dict_) + return dict_ diff --git a/bot/exts/filtering/_settings_types/__init__.py b/bot/exts/filtering/_settings_types/__init__.py new file mode 100644 index 000000000..61b5737d4 --- /dev/null +++ b/bot/exts/filtering/_settings_types/__init__.py @@ -0,0 +1,9 @@ +from bot.exts.filtering._settings_types.actions import action_types +from bot.exts.filtering._settings_types.validations import validation_types + +settings_types = { + "ActionEntry": {settings_type.name: settings_type for settings_type in action_types}, + "ValidationEntry": {settings_type.name: settings_type for settings_type in validation_types} +} + +__all__ = [settings_types] diff --git a/bot/exts/filtering/_settings_types/actions/__init__.py b/bot/exts/filtering/_settings_types/actions/__init__.py new file mode 100644 index 000000000..a8175b976 --- /dev/null +++ b/bot/exts/filtering/_settings_types/actions/__init__.py @@ -0,0 +1,8 @@ +from os.path import dirname + +from bot.exts.filtering._settings_types.settings_entry import ActionEntry +from bot.exts.filtering._utils import subclasses_in_package + +action_types = subclasses_in_package(dirname(__file__), f"{__name__}.", ActionEntry) + +__all__ = [action_types] diff --git a/bot/exts/filtering/_settings_types/actions/infraction_and_notification.py b/bot/exts/filtering/_settings_types/actions/infraction_and_notification.py new file mode 100644 index 000000000..c3c44616d --- /dev/null +++ b/bot/exts/filtering/_settings_types/actions/infraction_and_notification.py @@ -0,0 +1,203 @@ +from datetime import timedelta +from enum import Enum, auto +from typing import ClassVar + +import arrow +import discord.abc +from discord import Colour, Embed, Member, User +from discord.errors import Forbidden +from pydantic import validator +from pydis_core.utils.logging import get_logger +from pydis_core.utils.members import get_or_fetch_member +from typing_extensions import Self + +import bot as bot_module +from bot.constants import Channels +from bot.exts.filtering._filter_context 
import FilterContext
+from bot.exts.filtering._settings_types.settings_entry import ActionEntry
+from bot.exts.filtering._utils import FakeContext
+
+log = get_logger(__name__)
+
+passive_form = {
+    "BAN": "banned",
+    "KICK": "kicked",
+    "MUTE": "muted",
+    "VOICE_MUTE": "voice muted",
+    "SUPERSTAR": "superstarred",
+    "WARNING": "warned",
+    "WATCH": "watched",
+    "NOTE": "noted",
+}
+
+
+class Infraction(Enum):
+    """An enumeration of infraction types. The lower the value, the higher it is on the hierarchy."""
+
+    BAN = auto()
+    KICK = auto()
+    MUTE = auto()
+    VOICE_MUTE = auto()
+    SUPERSTAR = auto()
+    WARNING = auto()
+    WATCH = auto()
+    NOTE = auto()
+    NONE = auto()
+
+    def __str__(self) -> str:
+        return self.name
+
+    async def invoke(
+        self,
+        user: Member | User,
+        channel: discord.abc.GuildChannel | discord.DMChannel,
+        alerts_channel: discord.TextChannel,
+        duration: float,
+        reason: str
+    ) -> None:
+        """Invokes the command matching the infraction name."""
+        command_name = self.name.lower()
+        command = bot_module.instance.get_command(command_name)
+        if not command:
+            await alerts_channel.send(f":warning: Could not apply {command_name} to {user.mention}: command not found.")
+            log.warning(f"Could not apply {command_name} to {user.mention}: command not found.")
+            return
+
+        if isinstance(user, discord.User):  # For example because a message was sent in a DM.
+            member = await get_or_fetch_member(channel.guild, user.id)
+            if member:
+                user = member
+        ctx = FakeContext(channel, command)
+        if self.name in ("KICK", "WARNING", "WATCH", "NOTE"):
+            await command(ctx, user, reason=reason or None)
+        else:
+            duration = arrow.utcnow() + timedelta(seconds=duration) if duration else None
+            await command(ctx, user, duration, reason=reason or None)
+
+
+class InfractionAndNotification(ActionEntry):
+    """
+    A setting entry which specifies what infraction to issue and the notification to DM the user.
+
+    Since a DM cannot be sent when a user is banned or kicked, these two functions need to be grouped together.
+    """
+
+    name: ClassVar[str] = "infraction_and_notification"
+    description: ClassVar[dict[str, str]] = {
+        "infraction_type": (
+            "The type of infraction to issue when the filter triggers, or 'NONE'. "
+            "If two infractions are triggered for the same message, "
+            "the harsher one will be applied (by type or duration).\n\n"
+            "Valid infraction types in order of harshness: "
+        ) + ", ".join(infraction.name for infraction in Infraction),
+        "infraction_duration": "How long the infraction should last for in seconds. 0 for permanent.",
+        "infraction_reason": "The reason delivered with the infraction.",
+        "infraction_channel": (
+            "The channel ID in which to invoke the infraction (and send the confirmation message). "
+            "If 0, the infraction will be sent in the context channel. If the ID otherwise fails to resolve, "
+            "it will default to the mod-alerts channel."
+        ),
+        "dm_content": "The contents of a message to be DMed to the offending user.",
+        "dm_embed": "The contents of the embed to be DMed to the offending user."
+ } + + dm_content: str + dm_embed: str + infraction_type: Infraction + infraction_reason: str + infraction_duration: float + infraction_channel: int + + @validator("infraction_type", pre=True) + @classmethod + def convert_infraction_name(cls, infr_type: str | Infraction) -> Infraction: + """Convert the string to an Infraction by name.""" + if isinstance(infr_type, Infraction): + return infr_type + return Infraction[infr_type.replace(" ", "_").upper()] + + async def action(self, ctx: FilterContext) -> None: + """Send the notification to the user, and apply any specified infractions.""" + # If there is no infraction to apply, any DM contents already provided in the context take precedence. + if self.infraction_type == Infraction.NONE and (ctx.dm_content or ctx.dm_embed): + dm_content = ctx.dm_content + dm_embed = ctx.dm_embed + else: + dm_content = self.dm_content + dm_embed = self.dm_embed + + if dm_content or dm_embed: + formatting = {"domain": ctx.notification_domain} + dm_content = f"Hey {ctx.author.mention}!\n{dm_content.format(**formatting)}" + if dm_embed: + dm_embed = Embed(description=dm_embed.format(**formatting), colour=Colour.og_blurple()) + else: + dm_embed = None + + try: + await ctx.author.send(dm_content, embed=dm_embed) + ctx.action_descriptions.append("notified") + except Forbidden: + ctx.action_descriptions.append("failed to notify") + + if self.infraction_type != Infraction.NONE: + alerts_channel = bot_module.instance.get_channel(Channels.mod_alerts) + if self.infraction_channel: + channel = bot_module.instance.get_channel(self.infraction_channel) + if not channel: + log.info(f"Could not find a channel with ID {self.infraction_channel}, infracting in mod-alerts.") + channel = alerts_channel + elif not ctx.channel: + channel = alerts_channel + else: + channel = ctx.channel + if not channel: # If somehow it's set to `alerts_channel` and it can't be found. + log.error(f"Unable to apply infraction as the context channel {channel} can't be found.") + return + + await self.infraction_type.invoke( + ctx.author, channel, alerts_channel, self.infraction_duration, self.infraction_reason + ) + ctx.action_descriptions.append(passive_form[self.infraction_type.name]) + + def union(self, other: Self) -> Self: + """ + Combines two actions of the same type. Each type of action is executed once per filter. + + If the infractions are different, take the data of the one higher up the hierarchy. + + There is no clear way to properly combine several notification messages, especially when it's in two parts. + To avoid bombarding the user with several notifications, the message with the more significant infraction + is used. If the more significant infraction has no accompanying message, use the one from the other infraction, + if it exists. + """ + # Lower number -> higher in the hierarchy + if self.infraction_type is None: + return other.copy() + elif other.infraction_type is None: + return self.copy() + + if self.infraction_type.value < other.infraction_type.value: + result = self.copy() + elif self.infraction_type.value > other.infraction_type.value: + result = other.copy() + other = self + else: + if self.infraction_duration is None or ( + other.infraction_duration is not None and self.infraction_duration > other.infraction_duration + ): + result = self.copy() + else: + result = other.copy() + other = self + + # If the winner has no message but the loser does, copy the message to the winner. 
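+        # ("Winner" is the copy whose infraction survived the comparison above; note
+        # that `other` was reassigned there, so it always refers to the losing side.)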
+        result_overrides = result.overrides
+        if "dm_content" not in result_overrides and "dm_embed" not in result_overrides:
+            other_overrides = other.overrides
+            if "dm_content" in other_overrides:
+                result.dm_content = other_overrides["dm_content"]
+            if "dm_embed" in other_overrides:
+                result.dm_embed = other_overrides["dm_embed"]
+
+        return result
diff --git a/bot/exts/filtering/_settings_types/actions/ping.py b/bot/exts/filtering/_settings_types/actions/ping.py
new file mode 100644
index 000000000..ee40c54fe
--- /dev/null
+++ b/bot/exts/filtering/_settings_types/actions/ping.py
@@ -0,0 +1,45 @@
+from typing import ClassVar
+
+from pydantic import validator
+from typing_extensions import Self
+
+from bot.exts.filtering._filter_context import FilterContext
+from bot.exts.filtering._settings_types.settings_entry import ActionEntry
+from bot.exts.filtering._utils import resolve_mention
+
+
+class Ping(ActionEntry):
+    """A setting entry which adds the appropriate pings to the alert."""
+
+    name: ClassVar[str] = "mentions"
+    description: ClassVar[dict[str, str]] = {
+        "guild_pings": (
+            "A list of role IDs/role names/user IDs/user names/here/everyone. "
+            "If a mod-alert is generated for a filter triggered in a public channel, these will be pinged."
+        ),
+        "dm_pings": (
+            "A list of role IDs/role names/user IDs/user names/here/everyone. "
+            "If a mod-alert is generated for a filter triggered in DMs, these will be pinged."
+        )
+    }
+
+    guild_pings: set[str]
+    dm_pings: set[str]
+
+    @validator("*", pre=True)
+    @classmethod
+    def init_sequence_if_none(cls, pings: list[str] | None) -> list[str]:
+        """Initialize an empty sequence if the value is None."""
+        if pings is None:
+            return []
+        return pings
+
+    async def action(self, ctx: FilterContext) -> None:
+        """Add the stored pings to the alert message content."""
+        mentions = self.guild_pings if not ctx.channel or ctx.channel.guild else self.dm_pings
+        new_content = " ".join([resolve_mention(mention) for mention in mentions])
+        ctx.alert_content = f"{new_content} {ctx.alert_content}"
+
+    def union(self, other: Self) -> Self:
+        """Combines two actions of the same type. Each type of action is executed once per filter."""
+        return Ping(guild_pings=self.guild_pings | other.guild_pings, dm_pings=self.dm_pings | other.dm_pings)
diff --git a/bot/exts/filtering/_settings_types/actions/remove_context.py b/bot/exts/filtering/_settings_types/actions/remove_context.py
new file mode 100644
index 000000000..3f219248c
--- /dev/null
+++ b/bot/exts/filtering/_settings_types/actions/remove_context.py
@@ -0,0 +1,113 @@
+from collections import defaultdict
+from typing import ClassVar
+
+from discord import Message
+from discord.errors import HTTPException
+from pydis_core.utils import scheduling
+from pydis_core.utils.logging import get_logger
+from typing_extensions import Self
+
+import bot
+from bot.constants import Channels
+from bot.exts.filtering._filter_context import Event, FilterContext
+from bot.exts.filtering._settings_types.settings_entry import ActionEntry
+from bot.exts.filtering._utils import FakeContext
+from bot.utils.messages import send_attachments
+
+log = get_logger(__name__)
+
+SUPERSTAR_REASON = (
+    "Your nickname was found to be in violation of our code of conduct. "
+    "If you believe this is a mistake, please let us know."
+) + + +async def upload_messages_attachments(ctx: FilterContext, messages: list[Message]) -> None: + """Re-upload the messages' attachments for future logging.""" + if not messages: + return + destination = messages[0].guild.get_channel(Channels.attachment_log) + for message in messages: + if message.attachments and message.id not in ctx.attachments: + ctx.attachments[message.id] = await send_attachments(message, destination, link_large=False) + + +class RemoveContext(ActionEntry): + """A setting entry which tells whether to delete the offending message(s).""" + + name: ClassVar[str] = "remove_context" + description: ClassVar[str] = ( + "A boolean field. If True, the filter being triggered will cause the offending context to be removed. " + "An offending message will be deleted, while an offending nickname will be superstarified." + ) + + remove_context: bool + + async def action(self, ctx: FilterContext) -> None: + """Remove the offending context.""" + if not self.remove_context: + return + + if ctx.event in (Event.MESSAGE, Event.MESSAGE_EDIT): + await self._handle_messages(ctx) + elif ctx.event == Event.NICKNAME: + await self._handle_nickname(ctx) + + @staticmethod + async def _handle_messages(ctx: FilterContext) -> None: + """Delete any messages involved in this context.""" + if not ctx.message or not ctx.message.guild: + return + + # If deletion somehow fails at least this will allow scheduling for deletion. + ctx.messages_deletion = True + channel_messages = defaultdict(set) # Duplicates will cause batch deletion to fail. + for message in {ctx.message} | ctx.related_messages: + channel_messages[message.channel].add(message) + + success = fail = 0 + deleted = list() + for channel, messages in channel_messages.items(): + try: + await channel.delete_messages(messages) + except HTTPException: + fail += len(messages) + else: + success += len(messages) + deleted.extend(messages) + scheduling.create_task(upload_messages_attachments(ctx, deleted)) + + if not fail: + if success == 1: + ctx.action_descriptions.append("deleted") + else: + ctx.action_descriptions.append("deleted all") + elif not success: + if fail == 1: + ctx.action_descriptions.append("failed to delete") + else: + ctx.action_descriptions.append("all failed to delete") + else: + ctx.action_descriptions.append(f"{success} deleted, {fail} failed to delete") + + @staticmethod + async def _handle_nickname(ctx: FilterContext) -> None: + """Apply a superstar infraction to remove the user's nickname.""" + alerts_channel = bot.instance.get_channel(Channels.mod_alerts) + if not alerts_channel: + log.error(f"Unable to apply superstar as the context channel {alerts_channel} can't be found.") + return + command = bot.instance.get_command("superstar") + if not command: + user = ctx.author + await alerts_channel.send(f":warning: Could not apply superstar to {user.mention}: command not found.") + log.warning(f":warning: Could not apply superstar to {user.mention}: command not found.") + ctx.action_descriptions.append("failed to superstar") + return + + await command(FakeContext(alerts_channel, command), ctx.author, None, reason=SUPERSTAR_REASON) + ctx.action_descriptions.append("superstar") + + def union(self, other: Self) -> Self: + """Combines two actions of the same type. 
Each type of action is executed once per filter."""
+        return RemoveContext(remove_context=self.remove_context or other.remove_context)
diff --git a/bot/exts/filtering/_settings_types/actions/send_alert.py b/bot/exts/filtering/_settings_types/actions/send_alert.py
new file mode 100644
index 000000000..f554cdd4d
--- /dev/null
+++ b/bot/exts/filtering/_settings_types/actions/send_alert.py
@@ -0,0 +1,23 @@
+from typing import ClassVar
+
+from typing_extensions import Self
+
+from bot.exts.filtering._filter_context import FilterContext
+from bot.exts.filtering._settings_types.settings_entry import ActionEntry
+
+
+class SendAlert(ActionEntry):
+    """A setting entry which tells whether to send an alert message."""
+
+    name: ClassVar[str] = "send_alert"
+    description: ClassVar[str] = "A boolean. If every triggered filter sets this to False, no mod-alert will be created."
+
+    send_alert: bool
+
+    async def action(self, ctx: FilterContext) -> None:
+        """Set in the context whether a mod-alert should be sent."""
+        ctx.send_alert = self.send_alert
+
+    def union(self, other: Self) -> Self:
+        """Combines two actions of the same type. Each type of action is executed once per filter."""
+        return SendAlert(send_alert=self.send_alert or other.send_alert)
diff --git a/bot/exts/filtering/_settings_types/settings_entry.py b/bot/exts/filtering/_settings_types/settings_entry.py
new file mode 100644
index 000000000..e41ef5c7a
--- /dev/null
+++ b/bot/exts/filtering/_settings_types/settings_entry.py
@@ -0,0 +1,90 @@
+from __future__ import annotations
+
+from abc import abstractmethod
+from typing import Any, ClassVar, Union
+
+from pydantic import BaseModel, PrivateAttr
+from typing_extensions import Self
+
+from bot.exts.filtering._filter_context import FilterContext
+from bot.exts.filtering._utils import FieldRequiring
+
+
+class SettingsEntry(BaseModel, FieldRequiring):
+    """
+    A basic entry in the settings field appearing in every filter list and filter.
+
+    For a filter list, this is the default setting for it. For a filter, it's an override of the default entry.
+    """
+
+    # Each subclass must define a name matching the entry name we're expecting to receive from the database.
+    # Names must be unique across all filter lists.
+    name: ClassVar[str] = FieldRequiring.MUST_SET_UNIQUE
+    # Each subclass must define a description of what it does. If the data an entry type receives comprises
+    # several DB fields, the value should be a dictionary of field names and their descriptions.
+    description: ClassVar[Union[str, dict[str, str]]] = FieldRequiring.MUST_SET
+
+    _overrides: set[str] = PrivateAttr(default_factory=set)
+
+    def __init__(self, defaults: SettingsEntry | None = None, /, **data):
+        overrides = set()
+        if defaults:
+            defaults_dict = defaults.dict()
+            for field_name, field_value in list(data.items()):
+                if field_value is None:
+                    data[field_name] = defaults_dict[field_name]
+                else:
+                    overrides.add(field_name)
+        super().__init__(**data)
+        self._overrides |= overrides
+
+    @property
+    def overrides(self) -> dict[str, Any]:
+        """Return a dictionary of overrides."""
+        return {name: getattr(self, name) for name in self._overrides}
+
+    @classmethod
+    def create(
+        cls, entry_data: dict[str, Any] | None, *, defaults: SettingsEntry | None = None, keep_empty: bool = False
+    ) -> SettingsEntry | None:
+        """
+        Returns a SettingsEntry object from `entry_data` if it holds any value, None otherwise.
+
+        Use this method to create SettingsEntry objects instead of the init.
+ The None value is significant for how a filter list iterates over its filters. + """ + if entry_data is None: + return None + if not keep_empty and hasattr(entry_data, "values") and all(value is None for value in entry_data.values()): + return None + + if not isinstance(entry_data, dict): + entry_data = {cls.name: entry_data} + return cls(defaults, **entry_data) + + +class ValidationEntry(SettingsEntry): + """A setting entry to validate whether the filter should be triggered in the given context.""" + + @abstractmethod + def triggers_on(self, ctx: FilterContext) -> bool: + """Return whether the filter should be triggered with this setting in the given context.""" + ... + + +class ActionEntry(SettingsEntry): + """A setting entry defining what the bot should do if the filter it belongs to is triggered.""" + + @abstractmethod + async def action(self, ctx: FilterContext) -> None: + """Execute an action that should be taken when the filter this setting belongs to is triggered.""" + ... + + @abstractmethod + def union(self, other: Self) -> Self: + """ + Combine two actions of the same type. Each type of action is executed once per filter. + + The following condition must hold: if self == other, then self | other == self. + """ + ... diff --git a/bot/exts/filtering/_settings_types/validations/__init__.py b/bot/exts/filtering/_settings_types/validations/__init__.py new file mode 100644 index 000000000..5c44e8b27 --- /dev/null +++ b/bot/exts/filtering/_settings_types/validations/__init__.py @@ -0,0 +1,8 @@ +from os.path import dirname + +from bot.exts.filtering._settings_types.settings_entry import ValidationEntry +from bot.exts.filtering._utils import subclasses_in_package + +validation_types = subclasses_in_package(dirname(__file__), f"{__name__}.", ValidationEntry) + +__all__ = [validation_types] diff --git a/bot/exts/filtering/_settings_types/validations/bypass_roles.py b/bot/exts/filtering/_settings_types/validations/bypass_roles.py new file mode 100644 index 000000000..d42e6407c --- /dev/null +++ b/bot/exts/filtering/_settings_types/validations/bypass_roles.py @@ -0,0 +1,24 @@ +from typing import ClassVar, Union + +from discord import Member + +from bot.exts.filtering._filter_context import FilterContext +from bot.exts.filtering._settings_types.settings_entry import ValidationEntry + + +class RoleBypass(ValidationEntry): + """A setting entry which tells whether the roles the member has allow them to bypass the filter.""" + + name: ClassVar[str] = "bypass_roles" + description: ClassVar[str] = "A list of role IDs or role names. Users with these roles will not trigger the filter." 
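+    # A minimal usage sketch (hypothetical role ID and name): a member holding
+    # either a role with ID 123456789 or one named "Moderators" will not trigger
+    # filters configured with
+    #     RoleBypass(bypass_roles={123456789, "Moderators"})
+    # since triggers_on() below returns False as soon as any of the member's
+    # roles matches by ID or by name.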
+
+    bypass_roles: set[Union[int, str]]
+
+    def triggers_on(self, ctx: FilterContext) -> bool:
+        """Return whether the filter should be triggered on this user given their roles."""
+        if not isinstance(ctx.author, Member):
+            return True
+        return all(
+            member_role.id not in self.bypass_roles and member_role.name not in self.bypass_roles
+            for member_role in ctx.author.roles
+        )
diff --git a/bot/exts/filtering/_settings_types/validations/channel_scope.py b/bot/exts/filtering/_settings_types/validations/channel_scope.py
new file mode 100644
index 000000000..d37efaa09
--- /dev/null
+++ b/bot/exts/filtering/_settings_types/validations/channel_scope.py
@@ -0,0 +1,70 @@
+from typing import ClassVar, Union
+
+from pydantic import validator
+
+from bot.exts.filtering._filter_context import FilterContext
+from bot.exts.filtering._settings_types.settings_entry import ValidationEntry
+
+
+class ChannelScope(ValidationEntry):
+    """A setting entry which tells whether the filter was invoked in a whitelisted channel or category."""
+
+    name: ClassVar[str] = "channel_scope"
+    description: ClassVar[dict[str, str]] = {
+        "disabled_channels": (
+            "A list of channel IDs or channel names. "
+            "The filter will not trigger in these channels even if the category is expressly enabled."
+        ),
+        "disabled_categories": (
+            "A list of category IDs or category names. The filter will not trigger in these categories."
+        ),
+        "enabled_channels": (
+            "A list of channel IDs or channel names. "
+            "The filter can trigger in these channels even if the category is disabled or not expressly enabled."
+        ),
+        "enabled_categories": (
+            "A list of category IDs or category names. "
+            "If the list is not empty, filters will trigger only in channels of these categories, "
+            "unless the channel is expressly disabled."
+        )
+    }
+
+    disabled_channels: set[Union[int, str]]
+    disabled_categories: set[Union[int, str]]
+    enabled_channels: set[Union[int, str]]
+    enabled_categories: set[Union[int, str]]
+
+    @validator("*", pre=True)
+    @classmethod
+    def init_if_sequence_none(cls, sequence: list[str] | None) -> list[str]:
+        """Initialize an empty sequence if the value is None."""
+        if sequence is None:
+            return []
+        return sequence
+
+    def triggers_on(self, ctx: FilterContext) -> bool:
+        """
+        Return whether the filter should be triggered in the given channel.
+
+        The filter is invoked by default.
+        If the channel is explicitly enabled, it bypasses the set disabled channels and categories.
+        """
+        channel = ctx.channel
+
+        if not channel:
+            return True
+        if not hasattr(channel, "category"):  # This is not a guild channel, outside the scope of this setting.
+ return True + if hasattr(channel, "parent"): + channel = channel.parent + + enabled_channel = channel.id in self.enabled_channels or channel.name in self.enabled_channels + disabled_channel = channel.id in self.disabled_channels or channel.name in self.disabled_channels + enabled_category = channel.category and (not self.enabled_categories or ( + channel.category.id in self.enabled_categories or channel.category.name in self.enabled_categories + )) + disabled_category = channel.category and ( + channel.category.id in self.disabled_categories or channel.category.name in self.disabled_categories + ) + + return enabled_channel or (enabled_category and not disabled_channel and not disabled_category) diff --git a/bot/exts/filtering/_settings_types/validations/enabled.py b/bot/exts/filtering/_settings_types/validations/enabled.py new file mode 100644 index 000000000..3b5e3e446 --- /dev/null +++ b/bot/exts/filtering/_settings_types/validations/enabled.py @@ -0,0 +1,19 @@ +from typing import ClassVar + +from bot.exts.filtering._filter_context import FilterContext +from bot.exts.filtering._settings_types.settings_entry import ValidationEntry + + +class Enabled(ValidationEntry): + """A setting entry which tells whether the filter is enabled.""" + + name: ClassVar[str] = "enabled" + description: ClassVar[str] = ( + "A boolean field. Setting it to False allows disabling the filter without deleting it entirely." + ) + + enabled: bool + + def triggers_on(self, ctx: FilterContext) -> bool: + """Return whether the filter is enabled.""" + return self.enabled diff --git a/bot/exts/filtering/_settings_types/validations/filter_dm.py b/bot/exts/filtering/_settings_types/validations/filter_dm.py new file mode 100644 index 000000000..9961984d6 --- /dev/null +++ b/bot/exts/filtering/_settings_types/validations/filter_dm.py @@ -0,0 +1,20 @@ +from typing import ClassVar + +from bot.exts.filtering._filter_context import FilterContext +from bot.exts.filtering._settings_types.settings_entry import ValidationEntry + + +class FilterDM(ValidationEntry): + """A setting entry which tells whether to apply the filter to DMs.""" + + name: ClassVar[str] = "filter_dm" + description: ClassVar[str] = "A boolean field. If True, the filter can trigger for messages sent to the bot in DMs." + + filter_dm: bool + + def triggers_on(self, ctx: FilterContext) -> bool: + """Return whether the filter should be triggered even if it was triggered in DMs.""" + if not ctx.channel: # No channel - out of scope for this setting. 
+ return True + + return ctx.channel.guild is not None or self.filter_dm diff --git a/bot/exts/filtering/_ui/__init__.py b/bot/exts/filtering/_ui/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/bot/exts/filtering/_ui/__init__.py diff --git a/bot/exts/filtering/_ui/filter.py b/bot/exts/filtering/_ui/filter.py new file mode 100644 index 000000000..1ef25f17a --- /dev/null +++ b/bot/exts/filtering/_ui/filter.py @@ -0,0 +1,464 @@ +from __future__ import annotations + +from typing import Any, Callable + +import discord +import discord.ui +from discord import Embed, Interaction, User +from discord.ext.commands import BadArgument +from discord.ui.select import SelectOption +from pydis_core.site_api import ResponseCodeError + +from bot.exts.filtering._filter_lists.filter_list import FilterList, ListType +from bot.exts.filtering._filters.filter import Filter +from bot.exts.filtering._ui.ui import ( + COMPONENT_TIMEOUT, CustomCallbackSelect, EditBaseView, MAX_EMBED_DESCRIPTION, MISSING, SETTINGS_DELIMITER, + SINGLE_SETTING_PATTERN, format_response_error, parse_value, populate_embed_from_dict +) +from bot.exts.filtering._utils import repr_equals, to_serializable +from bot.log import get_logger + +log = get_logger(__name__) + + +def build_filter_repr_dict( + filter_list: FilterList, + list_type: ListType, + filter_type: type[Filter], + settings_overrides: dict, + extra_fields_overrides: dict +) -> dict: + """Build a dictionary of field names and values to pass to `populate_embed_from_dict`.""" + # Get filter list settings + default_setting_values = {} + for settings_group in filter_list[list_type].defaults: + for _, setting in settings_group.items(): + default_setting_values.update(to_serializable(setting.dict())) + + # Add overrides. It's done in this way to preserve field order, since the filter won't have all settings. + total_values = {} + for name, value in default_setting_values.items(): + if name not in settings_overrides or repr_equals(settings_overrides[name], value): + total_values[name] = value + else: + total_values[f"{name}*"] = settings_overrides[name] + + # Add the filter-specific settings. + if filter_type.extra_fields_type: + # This iterates over the default values of the extra fields model. 
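+        # The resulting keys follow the embed's display convention (hypothetical
+        # names): "infraction_reason" is a list-level setting, a trailing "*" as
+        # in "infraction_reason*" marks an overridden value, and filter-specific
+        # fields are qualified with the filter type, e.g. "invite/guild".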
+        for name, value in filter_type.extra_fields_type().dict().items():
+            if name not in extra_fields_overrides or repr_equals(extra_fields_overrides[name], value):
+                total_values[f"{filter_type.name}/{name}"] = value
+            else:
+                total_values[f"{filter_type.name}/{name}*"] = extra_fields_overrides[name]
+
+    return total_values
+
+
+class EditContentModal(discord.ui.Modal, title="Edit Content"):
+    """A modal to input a filter's content."""
+
+    content = discord.ui.TextInput(label="Content")
+
+    def __init__(self, embed_view: FilterEditView, message: discord.Message):
+        super().__init__(timeout=COMPONENT_TIMEOUT)
+        self.embed_view = embed_view
+        self.message = message
+
+    async def on_submit(self, interaction: Interaction) -> None:
+        """Update the embed with the new content."""
+        await interaction.response.defer()
+        await self.embed_view.update_embed(self.message, content=self.content.value)
+
+
+class EditDescriptionModal(discord.ui.Modal, title="Edit Description"):
+    """A modal to input a filter's description."""
+
+    description = discord.ui.TextInput(label="Description")
+
+    def __init__(self, embed_view: FilterEditView, message: discord.Message):
+        super().__init__(timeout=COMPONENT_TIMEOUT)
+        self.embed_view = embed_view
+        self.message = message
+
+    async def on_submit(self, interaction: Interaction) -> None:
+        """Update the embed with the new description."""
+        await interaction.response.defer()
+        await self.embed_view.update_embed(self.message, description=self.description.value)
+
+
+class TemplateModal(discord.ui.Modal, title="Template"):
+    """A modal to enter a filter ID to copy its overrides over."""
+
+    template = discord.ui.TextInput(label="Template Filter ID")
+
+    def __init__(self, embed_view: FilterEditView, message: discord.Message):
+        super().__init__(timeout=COMPONENT_TIMEOUT)
+        self.embed_view = embed_view
+        self.message = message
+
+    async def on_submit(self, interaction: Interaction) -> None:
+        """Apply the template's overrides to the filter being edited."""
+        await self.embed_view.apply_template(self.template.value, self.message, interaction)
+
+
+class FilterEditView(EditBaseView):
+    """A view used to edit a filter's settings before updating the database."""
+
+    class _REMOVE:
+        """Sentinel value for when an override should be removed."""
+
+    def __init__(
+        self,
+        filter_list: FilterList,
+        list_type: ListType,
+        filter_type: type[Filter],
+        content: str | None,
+        description: str | None,
+        settings_overrides: dict,
+        filter_settings_overrides: dict,
+        loaded_settings: dict,
+        loaded_filter_settings: dict,
+        author: User,
+        embed: Embed,
+        confirm_callback: Callable
+    ):
+        super().__init__(author)
+        self.filter_list = filter_list
+        self.list_type = list_type
+        self.filter_type = filter_type
+        self.content = content
+        self.description = description
+        self.settings_overrides = settings_overrides
+        self.filter_settings_overrides = filter_settings_overrides
+        self.loaded_settings = loaded_settings
+        self.loaded_filter_settings = loaded_filter_settings
+        self.embed = embed
+        self.confirm_callback = confirm_callback
+
+        all_settings_repr_dict = build_filter_repr_dict(
+            filter_list, list_type, filter_type, settings_overrides, filter_settings_overrides
+        )
+        populate_embed_from_dict(embed, all_settings_repr_dict)
+
+        self.type_per_setting_name = {setting: info[2] for setting, info in loaded_settings.items()}
+        self.type_per_setting_name.update({
+            f"{filter_type.name}/{name}": type_
+            for name, (_, _, type_) in loaded_filter_settings.get(filter_type.name, {}).items()
+        })
+
+        add_select = CustomCallbackSelect(
+            self._prompt_new_value,
+            placeholder="Select a setting to edit",
+            options=[SelectOption(label=name) for name in sorted(self.type_per_setting_name)],
+            row=1
+        )
+        self.add_item(add_select)
+
+        if settings_overrides or filter_settings_overrides:
+            override_names = (
+                list(settings_overrides) + [f"{filter_list.name}/{setting}" for setting in filter_settings_overrides]
+            )
+            remove_select = CustomCallbackSelect(
+                self._remove_override,
+                placeholder="Select an override to remove",
+                options=[SelectOption(label=name) for name in sorted(override_names)],
+                row=2
+            )
+            self.add_item(remove_select)
+
+    @discord.ui.button(label="Edit Content", row=3)
+    async def edit_content(self, interaction: Interaction, button: discord.ui.Button) -> None:
+        """A button to edit the filter's content. Pressing the button invokes a modal."""
+        modal = EditContentModal(self, interaction.message)
+        await interaction.response.send_modal(modal)
+
+    @discord.ui.button(label="Edit Description", row=3)
+    async def edit_description(self, interaction: Interaction, button: discord.ui.Button) -> None:
+        """A button to edit the filter's description. Pressing the button invokes a modal."""
+        modal = EditDescriptionModal(self, interaction.message)
+        await interaction.response.send_modal(modal)
+
+    @discord.ui.button(label="Empty Description", row=3)
+    async def empty_description(self, interaction: Interaction, button: discord.ui.Button) -> None:
+        """A button to empty the filter's description."""
+        await self.update_embed(interaction, description=self._REMOVE)
+
+    @discord.ui.button(label="Template", row=3)
+    async def enter_template(self, interaction: Interaction, button: discord.ui.Button) -> None:
+        """A button to enter a filter template ID and copy its overrides over."""
+        modal = TemplateModal(self, interaction.message)
+        await interaction.response.send_modal(modal)
+
+    @discord.ui.button(label="✅ Confirm", style=discord.ButtonStyle.green, row=4)
+    async def confirm(self, interaction: Interaction, button: discord.ui.Button) -> None:
+        """Confirm the content, description, and settings, and update the filters database."""
+        if self.content is None:
+            await interaction.response.send_message(":x: Cannot add a filter with no content.", ephemeral=True)
+            return
+        if self.description is None:
+            self.description = ""
+        await interaction.response.edit_message(view=None)  # Make sure the interaction succeeds first.
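+        # The interaction is acknowledged above before the callback runs, since
+        # interactions must be answered promptly and the callback may make slow
+        # API or database calls. On failure the view is re-attached for a retry.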
+ try: + await self.confirm_callback( + interaction.message, + self.filter_list, + self.list_type, + self.filter_type, + self.content, + self.description, + self.settings_overrides, + self.filter_settings_overrides + ) + except ResponseCodeError as e: + await interaction.message.reply(embed=format_response_error(e)) + await interaction.message.edit(view=self) + except BadArgument as e: + await interaction.message.reply( + embed=Embed(colour=discord.Colour.red(), title="Bad Argument", description=str(e)) + ) + await interaction.message.edit(view=self) + else: + self.stop() + + @discord.ui.button(label="🚫 Cancel", style=discord.ButtonStyle.red, row=4) + async def cancel(self, interaction: Interaction, button: discord.ui.Button) -> None: + """Cancel the operation.""" + await interaction.response.edit_message(content="🚫 Operation canceled.", embed=None, view=None) + self.stop() + + def current_value(self, setting_name: str) -> Any: + """Get the current value stored for the setting or MISSING if none found.""" + if setting_name in self.settings_overrides: + return self.settings_overrides[setting_name] + if "/" in setting_name: + _, setting_name = setting_name.split("/", maxsplit=1) + if setting_name in self.filter_settings_overrides: + return self.filter_settings_overrides[setting_name] + return MISSING + + async def update_embed( + self, + interaction_or_msg: discord.Interaction | discord.Message, + *, + content: str | None = None, + description: str | type[FilterEditView._REMOVE] | None = None, + setting_name: str | None = None, + setting_value: str | type[FilterEditView._REMOVE] | None = None, + ) -> None: + """ + Update the embed with the new information. + + If a setting name is provided with a _REMOVE value, remove the override. + If `interaction_or_msg` is a Message, the invoking Interaction must be deferred before calling this function. + """ + if content is not None or description is not None: + if content is not None: + filter_type = self.filter_list.get_filter_type(content) + if not filter_type: + if isinstance(interaction_or_msg, discord.Message): + send_method = interaction_or_msg.channel.send + else: + send_method = interaction_or_msg.response.send_message + await send_method(f":x: Could not find a filter type appropriate for `{content}`.") + return + self.content = content + self.filter_type = filter_type + else: + content = self.content # If there's no content or description, use the existing values. + if description is self._REMOVE: + self.description = None + elif description is not None: + self.description = description + else: + description = self.description + + # Update the embed with the new content and/or description. + self.embed.description = f"`{content}`" if content else "*No content*" + if description and description is not self._REMOVE: + self.embed.description += f" - {description}" + if len(self.embed.description) > MAX_EMBED_DESCRIPTION: + self.embed.description = self.embed.description[:MAX_EMBED_DESCRIPTION - 5] + "[...]" + + if setting_name: + # Find the right dictionary to update. 
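+            # A plain name such as "infraction_reason" targets the list-level
+            # overrides, while a qualified name such as "token/phrase"
+            # (hypothetical filter type/field) targets the filter's own extra
+            # field overrides.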
+ if "/" in setting_name: + filter_name, setting_name = setting_name.split("/", maxsplit=1) + dict_to_edit = self.filter_settings_overrides + default_value = self.filter_type.extra_fields_type().dict()[setting_name] + else: + dict_to_edit = self.settings_overrides + default_value = self.filter_list[self.list_type].default(setting_name) + # Update the setting override value or remove it + if setting_value is not self._REMOVE: + if not repr_equals(setting_value, default_value): + dict_to_edit[setting_name] = setting_value + # If there's already an override, remove it, since the new value is the same as the default. + elif setting_name in dict_to_edit: + dict_to_edit.pop(setting_name) + elif setting_name in dict_to_edit: + dict_to_edit.pop(setting_name) + + # This is inefficient, but otherwise the selects go insane if the user attempts to edit the same setting + # multiple times, even when replacing the select with a new one. + self.embed.clear_fields() + new_view = self.copy() + + try: + if isinstance(interaction_or_msg, discord.Interaction): + await interaction_or_msg.response.edit_message(embed=self.embed, view=new_view) + else: + await interaction_or_msg.edit(embed=self.embed, view=new_view) + except discord.errors.HTTPException: # Various unexpected errors. + pass + else: + self.stop() + + async def edit_setting_override(self, interaction: Interaction, setting_name: str, override_value: Any) -> None: + """ + Update the overrides with the new value and edit the embed. + + The interaction needs to be the selection of the setting attached to the embed. + """ + await self.update_embed(interaction, setting_name=setting_name, setting_value=override_value) + + async def apply_template(self, template_id: str, embed_message: discord.Message, interaction: Interaction) -> None: + """Replace any non-overridden settings with overrides from the given filter.""" + try: + settings, filter_settings = template_settings( + template_id, self.filter_list, self.list_type, self.filter_type + ) + except BadArgument as e: # The interaction object is necessary to send an ephemeral message. + await interaction.response.send_message(f":x: {e}", ephemeral=True) + return + else: + await interaction.response.defer() + + self.settings_overrides = settings | self.settings_overrides + self.filter_settings_overrides = filter_settings | self.filter_settings_overrides + self.embed.clear_fields() + await embed_message.edit(embed=self.embed, view=self.copy()) + self.stop() + + async def _remove_override(self, interaction: Interaction, select: discord.ui.Select) -> None: + """ + Remove the override for the setting the user selected, and edit the embed. + + The interaction needs to be the selection of the setting attached to the embed. 
+ """ + await self.update_embed(interaction, setting_name=select.values[0], setting_value=self._REMOVE) + + def copy(self) -> FilterEditView: + """Create a copy of this view.""" + return FilterEditView( + self.filter_list, + self.list_type, + self.filter_type, + self.content, + self.description, + self.settings_overrides, + self.filter_settings_overrides, + self.loaded_settings, + self.loaded_filter_settings, + self.author, + self.embed, + self.confirm_callback + ) + + +def description_and_settings_converter( + filter_list: FilterList, + list_type: ListType, + filter_type: type[Filter], + loaded_settings: dict, + loaded_filter_settings: dict, + input_data: str +) -> tuple[str, dict[str, Any], dict[str, Any]]: + """Parse a string representing a possible description and setting overrides, and validate the setting names.""" + if not input_data: + return "", {}, {} + + parsed = SETTINGS_DELIMITER.split(input_data) + if not parsed: + return "", {}, {} + + description = "" + if not SINGLE_SETTING_PATTERN.match(parsed[0]): + description, *parsed = parsed + + settings = {setting: value for setting, value in [part.split("=", maxsplit=1) for part in parsed]} + template = None + if "--template" in settings: + template = settings.pop("--template") + + filter_settings = {} + for setting, _ in list(settings.items()): + if setting in loaded_settings: # It's a filter list setting + type_ = loaded_settings[setting][2] + try: + parsed_value = parse_value(settings.pop(setting), type_) + if not repr_equals(parsed_value, filter_list[list_type].default(setting)): + settings[setting] = parsed_value + except (TypeError, ValueError) as e: + raise BadArgument(e) + elif "/" not in setting: + raise BadArgument(f"{setting!r} is not a recognized setting.") + else: # It's a filter setting + filter_name, filter_setting_name = setting.split("/", maxsplit=1) + if filter_name.lower() != filter_type.name.lower(): + raise BadArgument( + f"A setting for a {filter_name!r} filter was provided, but the filter name is {filter_type.name!r}" + ) + if filter_setting_name not in loaded_filter_settings[filter_type.name]: + raise BadArgument(f"{setting!r} is not a recognized setting.") + type_ = loaded_filter_settings[filter_type.name][filter_setting_name][2] + try: + parsed_value = parse_value(settings.pop(setting), type_) + if not repr_equals(parsed_value, getattr(filter_type.extra_fields_type(), filter_setting_name)): + filter_settings[filter_setting_name] = parsed_value + except (TypeError, ValueError) as e: + raise BadArgument(e) + + # Pull templates settings and apply them. 
+    if template is not None:
+        try:
+            t_settings, t_filter_settings = template_settings(template, filter_list, list_type, filter_type)
+        except ValueError as e:
+            raise BadArgument(str(e))
+        else:
+            # The specified settings go on top of the template
+            settings = t_settings | settings
+            filter_settings = t_filter_settings | filter_settings
+
+    return description, settings, filter_settings
+
+
+def filter_serializable_overrides(filter_: Filter) -> tuple[dict, dict]:
+    """Get a serializable version of the filter's overrides."""
+    overrides_values, extra_fields_overrides = filter_.overrides
+    return to_serializable(overrides_values), to_serializable(extra_fields_overrides)
+
+
+def template_settings(
+    filter_id: str, filter_list: FilterList, list_type: ListType, filter_type: type[Filter]
+) -> tuple[dict, dict]:
+    """Find the filter with the specified ID, and return its settings."""
+    try:
+        filter_id = int(filter_id)
+        if filter_id < 0:
+            raise ValueError()
+    except ValueError:
+        raise BadArgument("Template value must be a non-negative integer.")
+
+    if filter_id not in filter_list[list_type].filters:
+        raise BadArgument(
+            f"Could not find filter with ID `{filter_id}` in the {list_type.name} {filter_list.name} list."
+        )
+    filter_ = filter_list[list_type].filters[filter_id]
+
+    if not isinstance(filter_, filter_type):
+        raise BadArgument(
+            f"The template filter name is {filter_.name!r}, but the target filter is {filter_type.name!r}"
+        )
+    return filter_serializable_overrides(filter_)
diff --git a/bot/exts/filtering/_ui/filter_list.py b/bot/exts/filtering/_ui/filter_list.py
new file mode 100644
index 000000000..a4526f090
--- /dev/null
+++ b/bot/exts/filtering/_ui/filter_list.py
@@ -0,0 +1,271 @@
+from __future__ import annotations
+
+from typing import Any, Callable
+
+import discord
+from discord import Embed, Interaction, SelectOption, User
+from discord.ext.commands import BadArgument
+from pydis_core.site_api import ResponseCodeError
+
+from bot.exts.filtering._filter_lists import FilterList, ListType
+from bot.exts.filtering._ui.ui import (
+    CustomCallbackSelect, EditBaseView, MISSING, SETTINGS_DELIMITER, format_response_error, parse_value,
+    populate_embed_from_dict
+)
+from bot.exts.filtering._utils import repr_equals, to_serializable
+
+
+def settings_converter(loaded_settings: dict, input_data: str) -> dict[str, Any]:
+    """Parse a string representing settings, and validate the setting names."""
+    if not input_data:
+        return {}
+
+    parsed = SETTINGS_DELIMITER.split(input_data)
+    if not parsed:
+        return {}
+
+    try:
+        settings = {setting: value for setting, value in [part.split("=", maxsplit=1) for part in parsed]}
+    except ValueError:
+        raise BadArgument("The settings provided are not in the correct format.")
+
+    # Iterate over a snapshot of the items: mutating the dict while iterating it directly would break the loop.
+    for setting, value in list(settings.items()):
+        if setting not in loaded_settings:
+            raise BadArgument(f"{setting!r} is not a recognized setting.")
+        type_ = loaded_settings[setting][2]
+        try:
+            settings[setting] = parse_value(value, type_)
+        except (TypeError, ValueError) as e:
+            raise BadArgument(e)
+
+    return settings
+
+
+def build_filterlist_repr_dict(filter_list: FilterList, list_type: ListType, new_settings: dict) -> dict:
+    """Build a dictionary of field names and values to pass to `populate_embed_from_dict`."""
+    # Get filter list settings
+    default_setting_values = {}
+    for settings_group in filter_list[list_type].defaults:
+        for _, setting in settings_group.items():
+            default_setting_values.update(to_serializable(setting.dict()))
+
+    # Add new values. It's done in this way to preserve field order, since `new_settings` won't have all settings.
+    total_values = {}
+    for name, value in default_setting_values.items():
+        if name not in new_settings or repr_equals(new_settings[name], value):
+            total_values[name] = value
+        else:
+            total_values[f"{name}~"] = new_settings[name]
+
+    return total_values
+
+
+class FilterListAddView(EditBaseView):
+    """A view used to add a new filter list."""
+
+    def __init__(
+        self,
+        list_name: str,
+        list_type: ListType,
+        settings: dict,
+        loaded_settings: dict,
+        author: User,
+        embed: Embed,
+        confirm_callback: Callable
+    ):
+        super().__init__(author)
+        self.list_name = list_name
+        self.list_type = list_type
+        self.settings = settings
+        self.loaded_settings = loaded_settings
+        self.embed = embed
+        self.confirm_callback = confirm_callback
+
+        self.settings_repr_dict = {name: to_serializable(value) for name, value in settings.items()}
+        populate_embed_from_dict(embed, self.settings_repr_dict)
+
+        self.type_per_setting_name = {setting: info[2] for setting, info in loaded_settings.items()}
+
+        edit_select = CustomCallbackSelect(
+            self._prompt_new_value,
+            placeholder="Select a setting to edit",
+            options=[SelectOption(label=name) for name in sorted(settings)],
+            row=0
+        )
+        self.add_item(edit_select)
+
+    @discord.ui.button(label="✅ Confirm", style=discord.ButtonStyle.green, row=1)
+    async def confirm(self, interaction: Interaction, button: discord.ui.Button) -> None:
+        """Confirm the content, description, and settings, and update the filters database."""
+        await interaction.response.edit_message(view=None)  # Make sure the interaction succeeds first.
+        try:
+            await self.confirm_callback(interaction.message, self.list_name, self.list_type, self.settings)
+        except ResponseCodeError as e:
+            await interaction.message.reply(embed=format_response_error(e))
+            await interaction.message.edit(view=self)
+        else:
+            self.stop()
+
+    @discord.ui.button(label="🚫 Cancel", style=discord.ButtonStyle.red, row=1)
+    async def cancel(self, interaction: Interaction, button: discord.ui.Button) -> None:
+        """Cancel the operation."""
+        await interaction.response.edit_message(content="🚫 Operation canceled.", embed=None, view=None)
+        self.stop()
+
+    def current_value(self, setting_name: str) -> Any:
+        """Get the current value stored for the setting or MISSING if none found."""
+        if setting_name in self.settings:
+            return self.settings[setting_name]
+        return MISSING
+
+    async def update_embed(
+        self,
+        interaction_or_msg: discord.Interaction | discord.Message,
+        *,
+        setting_name: str | None = None,
+        setting_value: str | None = None,
+    ) -> None:
+        """
+        Update the embed with the new information.
+
+        If `interaction_or_msg` is a Message, the invoking Interaction must be deferred before calling this function.
+        """
+        if not setting_name:  # Obligatory check to match the signature in the parent class.
+            return
+
+        self.settings[setting_name] = setting_value
+
+        self.embed.clear_fields()
+        new_view = self.copy()
+
+        try:
+            if isinstance(interaction_or_msg, discord.Interaction):
+                await interaction_or_msg.response.edit_message(embed=self.embed, view=new_view)
+            else:
+                await interaction_or_msg.edit(embed=self.embed, view=new_view)
+        except discord.errors.HTTPException:  # Various unexpected errors.
+ pass + else: + self.stop() + + def copy(self) -> FilterListAddView: + """Create a copy of this view.""" + return FilterListAddView( + self.list_name, + self.list_type, + self.settings, + self.loaded_settings, + self.author, + self.embed, + self.confirm_callback + ) + + +class FilterListEditView(EditBaseView): + """A view used to edit a filter list's settings before updating the database.""" + + def __init__( + self, + filter_list: FilterList, + list_type: ListType, + new_settings: dict, + loaded_settings: dict, + author: User, + embed: Embed, + confirm_callback: Callable + ): + super().__init__(author) + self.filter_list = filter_list + self.list_type = list_type + self.settings = new_settings + self.loaded_settings = loaded_settings + self.embed = embed + self.confirm_callback = confirm_callback + + self.settings_repr_dict = build_filterlist_repr_dict(filter_list, list_type, new_settings) + populate_embed_from_dict(embed, self.settings_repr_dict) + + self.type_per_setting_name = {setting: info[2] for setting, info in loaded_settings.items()} + + edit_select = CustomCallbackSelect( + self._prompt_new_value, + placeholder="Select a setting to edit", + options=[SelectOption(label=name) for name in sorted(self.type_per_setting_name)], + row=0 + ) + self.add_item(edit_select) + + @discord.ui.button(label="✅ Confirm", style=discord.ButtonStyle.green, row=1) + async def confirm(self, interaction: Interaction, button: discord.ui.Button) -> None: + """Confirm the content, description, and settings, and update the filters database.""" + await interaction.response.edit_message(view=None) # Make sure the interaction succeeds first. + try: + await self.confirm_callback(interaction.message, self.filter_list, self.list_type, self.settings) + except ResponseCodeError as e: + await interaction.message.reply(embed=format_response_error(e)) + await interaction.message.edit(view=self) + else: + self.stop() + + @discord.ui.button(label="🚫 Cancel", style=discord.ButtonStyle.red, row=1) + async def cancel(self, interaction: Interaction, button: discord.ui.Button) -> None: + """Cancel the operation.""" + await interaction.response.edit_message(content="🚫 Operation canceled.", embed=None, view=None) + self.stop() + + def current_value(self, setting_name: str) -> Any: + """Get the current value stored for the setting or MISSING if none found.""" + if setting_name in self.settings: + return self.settings[setting_name] + if setting_name in self.settings_repr_dict: + return self.settings_repr_dict[setting_name] + return MISSING + + async def update_embed( + self, + interaction_or_msg: discord.Interaction | discord.Message, + *, + setting_name: str | None = None, + setting_value: str | None = None, + ) -> None: + """ + Update the embed with the new information. + + If `interaction_or_msg` is a Message, the invoking Interaction must be deferred before calling this function. + """ + if not setting_name: # Obligatory check to match the signature in the parent class. + return + + default_value = self.filter_list[self.list_type].default(setting_name) + if not repr_equals(setting_value, default_value): + self.settings[setting_name] = setting_value + # If there's already a new value, remove it, since the new value is the same as the default. 
+ elif setting_name in self.settings: + self.settings.pop(setting_name) + + self.embed.clear_fields() + new_view = self.copy() + + try: + if isinstance(interaction_or_msg, discord.Interaction): + await interaction_or_msg.response.edit_message(embed=self.embed, view=new_view) + else: + await interaction_or_msg.edit(embed=self.embed, view=new_view) + except discord.errors.HTTPException: # Various errors such as embed description being too long. + pass + else: + self.stop() + + def copy(self) -> FilterListEditView: + """Create a copy of this view.""" + return FilterListEditView( + self.filter_list, + self.list_type, + self.settings, + self.loaded_settings, + self.author, + self.embed, + self.confirm_callback + ) diff --git a/bot/exts/filtering/_ui/search.py b/bot/exts/filtering/_ui/search.py new file mode 100644 index 000000000..d553c28ea --- /dev/null +++ b/bot/exts/filtering/_ui/search.py @@ -0,0 +1,365 @@ +from __future__ import annotations + +from collections.abc import Callable +from typing import Any + +import discord +from discord import Interaction, SelectOption +from discord.ext.commands import BadArgument + +from bot.exts.filtering._filter_lists import FilterList, ListType +from bot.exts.filtering._filters.filter import Filter +from bot.exts.filtering._settings_types.settings_entry import SettingsEntry +from bot.exts.filtering._ui.filter import filter_serializable_overrides +from bot.exts.filtering._ui.ui import ( + COMPONENT_TIMEOUT, CustomCallbackSelect, EditBaseView, MISSING, SETTINGS_DELIMITER, parse_value, + populate_embed_from_dict +) + + +def search_criteria_converter( + filter_lists: dict, + loaded_filters: dict, + loaded_settings: dict, + loaded_filter_settings: dict, + filter_type: type[Filter] | None, + input_data: str +) -> tuple[dict[str, Any], dict[str, Any], type[Filter]]: + """Parse a string representing setting overrides, and validate the setting names.""" + if not input_data: + return {}, {}, filter_type + + parsed = SETTINGS_DELIMITER.split(input_data) + if not parsed: + return {}, {}, filter_type + + try: + settings = {setting: value for setting, value in [part.split("=", maxsplit=1) for part in parsed]} + except ValueError: + raise BadArgument("The settings provided are not in the correct format.") + + template = None + if "--template" in settings: + template = settings.pop("--template") + + filter_settings = {} + for setting, _ in list(settings.items()): + if setting in loaded_settings: # It's a filter list setting + type_ = loaded_settings[setting][2] + try: + settings[setting] = parse_value(settings[setting], type_) + except (TypeError, ValueError) as e: + raise BadArgument(e) + elif "/" not in setting: + raise BadArgument(f"{setting!r} is not a recognized setting.") + else: # It's a filter setting + filter_name, filter_setting_name = setting.split("/", maxsplit=1) + if not filter_type: + if filter_name in loaded_filters: + filter_type = loaded_filters[filter_name] + else: + raise BadArgument(f"There's no filter type named {filter_name!r}.") + if filter_name.lower() != filter_type.name.lower(): + raise BadArgument( + f"A setting for a {filter_name!r} filter was provided, " + f"but the filter name is {filter_type.name!r}" + ) + if filter_setting_name not in loaded_filter_settings[filter_type.name]: + raise BadArgument(f"{setting!r} is not a recognized setting.") + type_ = loaded_filter_settings[filter_type.name][filter_setting_name][2] + try: + filter_settings[filter_setting_name] = parse_value(settings.pop(setting), type_) + except (TypeError, ValueError) as 
e: + raise BadArgument(e) + + # Pull templates settings and apply them. + if template is not None: + try: + t_settings, t_filter_settings, filter_type = template_settings(template, filter_lists, filter_type) + except ValueError as e: + raise BadArgument(str(e)) + else: + # The specified settings go on top of the template + settings = t_settings | settings + filter_settings = t_filter_settings | filter_settings + + return settings, filter_settings, filter_type + + +def get_filter(filter_id: int, filter_lists: dict) -> tuple[Filter, FilterList, ListType] | None: + """Return a filter with the specific filter_id, if found.""" + for filter_list in filter_lists.values(): + for list_type, sublist in filter_list.items(): + if filter_id in sublist.filters: + return sublist.filters[filter_id], filter_list, list_type + return None + + +def template_settings( + filter_id: str, filter_lists: dict, filter_type: type[Filter] | None +) -> tuple[dict, dict, type[Filter]]: + """Find a filter with the specified ID and filter type, and return its settings and (maybe newly found) type.""" + try: + filter_id = int(filter_id) + if filter_id < 0: + raise ValueError() + except ValueError: + raise BadArgument("Template value must be a non-negative integer.") + + result = get_filter(filter_id, filter_lists) + if not result: + raise BadArgument(f"Could not find a filter with ID `{filter_id}`.") + filter_, filter_list, list_type = result + + if filter_type and not isinstance(filter_, filter_type): + raise BadArgument(f"The filter with ID `{filter_id}` is not of type {filter_type.name!r}.") + + settings, filter_settings = filter_serializable_overrides(filter_) + return settings, filter_settings, type(filter_) + + +def build_search_repr_dict( + settings: dict[str, Any], filter_settings: dict[str, Any], filter_type: type[Filter] | None +) -> dict: + """Build a dictionary of field names and values to pass to `populate_embed_from_dict`.""" + total_values = settings.copy() + if filter_type: + for setting_name, value in filter_settings.items(): + total_values[f"{filter_type.name}/{setting_name}"] = value + + return total_values + + +class SearchEditView(EditBaseView): + """A view used to edit the search criteria before performing the search.""" + + class _REMOVE: + """Sentinel value for when an override should be removed.""" + + def __init__( + self, + filter_type: type[Filter] | None, + settings: dict[str, Any], + filter_settings: dict[str, Any], + loaded_filter_lists: dict[str, FilterList], + loaded_filters: dict[str, type[Filter]], + loaded_settings: dict[str, tuple[str, SettingsEntry, type]], + loaded_filter_settings: dict[str, dict[str, tuple[str, SettingsEntry, type]]], + author: discord.User | discord.Member, + embed: discord.Embed, + confirm_callback: Callable + ): + super().__init__(author) + self.filter_type = filter_type + self.settings = settings + self.filter_settings = filter_settings + self.loaded_filter_lists = loaded_filter_lists + self.loaded_filters = loaded_filters + self.loaded_settings = loaded_settings + self.loaded_filter_settings = loaded_filter_settings + self.embed = embed + self.confirm_callback = confirm_callback + + title = "Filters Search" + if filter_type: + title += f" - {filter_type.name.title()}" + embed.set_author(name=title) + + settings_repr_dict = build_search_repr_dict(settings, filter_settings, filter_type) + populate_embed_from_dict(embed, settings_repr_dict) + + self.type_per_setting_name = {setting: info[2] for setting, info in loaded_settings.items()} + if filter_type: + 
self.type_per_setting_name.update({ + f"{filter_type.name}/{name}": type_ + for name, (_, _, type_) in loaded_filter_settings.get(filter_type.name, {}).items() + }) + + add_select = CustomCallbackSelect( + self._prompt_new_value, + placeholder="Add or edit criterion", + options=[SelectOption(label=name) for name in sorted(self.type_per_setting_name)], + row=0 + ) + self.add_item(add_select) + + if settings_repr_dict: + remove_select = CustomCallbackSelect( + self._remove_criterion, + placeholder="Select a criterion to remove", + options=[SelectOption(label=name) for name in sorted(settings_repr_dict)], + row=1 + ) + self.add_item(remove_select) + + @discord.ui.button(label="Template", row=2) + async def enter_template(self, interaction: Interaction, button: discord.ui.Button) -> None: + """A button to enter a filter template ID and copy its overrides over.""" + modal = TemplateModal(self, interaction.message) + await interaction.response.send_modal(modal) + + @discord.ui.button(label="Filter Type", row=2) + async def enter_filter_type(self, interaction: Interaction, button: discord.ui.Button) -> None: + """A button to enter a filter type.""" + modal = FilterTypeModal(self, interaction.message) + await interaction.response.send_modal(modal) + + @discord.ui.button(label="✅ Confirm", style=discord.ButtonStyle.green, row=3) + async def confirm(self, interaction: Interaction, button: discord.ui.Button) -> None: + """Confirm the search criteria and perform the search.""" + await interaction.response.edit_message(view=None) # Make sure the interaction succeeds first. + try: + await self.confirm_callback(interaction.message, self.filter_type, self.settings, self.filter_settings) + except BadArgument as e: + await interaction.message.reply( + embed=discord.Embed(colour=discord.Colour.red(), title="Bad Argument", description=str(e)) + ) + await interaction.message.edit(view=self) + else: + self.stop() + + @discord.ui.button(label="🚫 Cancel", style=discord.ButtonStyle.red, row=3) + async def cancel(self, interaction: Interaction, button: discord.ui.Button) -> None: + """Cancel the operation.""" + await interaction.response.edit_message(content="🚫 Operation canceled.", embed=None, view=None) + self.stop() + + def current_value(self, setting_name: str) -> Any: + """Get the current value stored for the setting or MISSING if none found.""" + if setting_name in self.settings: + return self.settings[setting_name] + if "/" in setting_name: + _, setting_name = setting_name.split("/", maxsplit=1) + if setting_name in self.filter_settings: + return self.filter_settings[setting_name] + return MISSING + + async def update_embed( + self, + interaction_or_msg: discord.Interaction | discord.Message, + *, + setting_name: str | None = None, + setting_value: str | type[SearchEditView._REMOVE] | None = None, + ) -> None: + """ + Update the embed with the new information. + + If a setting name is provided with a _REMOVE value, remove the override. + If `interaction_or_msg` is a Message, the invoking Interaction must be deferred before calling this function. + """ + if not setting_name: # Can be None just to make the function signature compatible with the parent class. 
+ return + + if "/" in setting_name: + filter_name, setting_name = setting_name.split("/", maxsplit=1) + dict_to_edit = self.filter_settings + else: + dict_to_edit = self.settings + + # Update the criterion value or remove it + if setting_value is not self._REMOVE: + dict_to_edit[setting_name] = setting_value + elif setting_name in dict_to_edit: + dict_to_edit.pop(setting_name) + + self.embed.clear_fields() + new_view = self.copy() + + try: + if isinstance(interaction_or_msg, discord.Interaction): + await interaction_or_msg.response.edit_message(embed=self.embed, view=new_view) + else: + await interaction_or_msg.edit(embed=self.embed, view=new_view) + except discord.errors.HTTPException: # Just in case of faulty input. + pass + else: + self.stop() + + async def _remove_criterion(self, interaction: Interaction, select: discord.ui.Select) -> None: + """ + Remove the criterion the user selected, and edit the embed. + + The interaction needs to be the selection of the setting attached to the embed. + """ + await self.update_embed(interaction, setting_name=select.values[0], setting_value=self._REMOVE) + + async def apply_template(self, template_id: str, embed_message: discord.Message, interaction: Interaction) -> None: + """Set any unset criteria with settings values from the given filter.""" + try: + settings, filter_settings, self.filter_type = template_settings( + template_id, self.loaded_filter_lists, self.filter_type + ) + except BadArgument as e: # The interaction object is necessary to send an ephemeral message. + await interaction.response.send_message(f":x: {e}", ephemeral=True) + return + else: + await interaction.response.defer() + + self.settings = settings | self.settings + self.filter_settings = filter_settings | self.filter_settings + self.embed.clear_fields() + await embed_message.edit(embed=self.embed, view=self.copy()) + self.stop() + + async def apply_filter_type(self, type_name: str, embed_message: discord.Message, interaction: Interaction) -> None: + """Set a new filter type and reset any criteria for settings of the old filter type.""" + if type_name.lower() not in self.loaded_filters: + if type_name.lower()[:-1] not in self.loaded_filters: # In case the user entered the plural form. 
+ await interaction.response.send_message(f":x: No such filter type {type_name!r}.", ephemeral=True) + return + type_name = type_name[:-1] + type_name = type_name.lower() + await interaction.response.defer() + + if self.filter_type and type_name == self.filter_type.name: + return + self.filter_type = self.loaded_filters[type_name] + self.filter_settings = {} + self.embed.clear_fields() + await embed_message.edit(embed=self.embed, view=self.copy()) + self.stop() + + def copy(self) -> SearchEditView: + """Create a copy of this view.""" + return SearchEditView( + self.filter_type, + self.settings, + self.filter_settings, + self.loaded_filter_lists, + self.loaded_filters, + self.loaded_settings, + self.loaded_filter_settings, + self.author, + self.embed, + self.confirm_callback + ) + + +class TemplateModal(discord.ui.Modal, title="Template"): + """A modal to enter a filter ID to copy its overrides over.""" + + template = discord.ui.TextInput(label="Template Filter ID", required=False) + + def __init__(self, embed_view: SearchEditView, message: discord.Message): + super().__init__(timeout=COMPONENT_TIMEOUT) + self.embed_view = embed_view + self.message = message + + async def on_submit(self, interaction: Interaction) -> None: + """Copy the template filter's overrides into the view's criteria.""" + await self.embed_view.apply_template(self.template.value, self.message, interaction) + + +class FilterTypeModal(discord.ui.Modal, title="Filter Type"): + """A modal to enter a filter type.""" + + filter_type = discord.ui.TextInput(label="Filter Type") + + def __init__(self, embed_view: SearchEditView, message: discord.Message): + super().__init__(timeout=COMPONENT_TIMEOUT) + self.embed_view = embed_view + self.message = message + + async def on_submit(self, interaction: Interaction) -> None: + """Apply the entered filter type to the view.""" + await self.embed_view.apply_filter_type(self.filter_type.value, self.message, interaction) diff --git a/bot/exts/filtering/_ui/ui.py b/bot/exts/filtering/_ui/ui.py new file mode 100644 index 000000000..6d4a08d93 --- /dev/null +++ b/bot/exts/filtering/_ui/ui.py @@ -0,0 +1,565 @@ +from __future__ import annotations + +import re +from abc import ABC, abstractmethod +from collections.abc import Iterable +from enum import EnumMeta +from functools import partial +from typing import Any, Callable, Coroutine, Optional, TypeVar + +import discord +from discord import Embed, Interaction +from discord.ext.commands import Context +from discord.ui.select import MISSING as SELECT_MISSING, SelectOption +from discord.utils import escape_markdown +from pydis_core.site_api import ResponseCodeError +from pydis_core.utils import scheduling +from pydis_core.utils.logging import get_logger +from pydis_core.utils.members import get_or_fetch_member + +import bot +from bot.constants import Colours +from bot.exts.filtering._filter_context import FilterContext +from bot.exts.filtering._filter_lists import FilterList +from bot.exts.filtering._utils import FakeContext +from bot.utils.messages import format_channel, format_user, upload_log + +log = get_logger(__name__) + + +# Max number of characters in a Discord embed field value, minus 6 characters for a placeholder. +MAX_FIELD_SIZE = 1018 +# Max number of characters for an embed field's value before it should take its own line. +MAX_INLINE_SIZE = 50 +# Number of seconds before a settings editing view times out. +EDIT_TIMEOUT = 600 +# Number of seconds before an editing component times out.
+COMPONENT_TIMEOUT = 180 +# Number of seconds given to confirm a deletion before the view times out. +DELETION_TIMEOUT = 60 +# Max length of modal title +MAX_MODAL_TITLE_LENGTH = 45 +# Max number of items in a select +MAX_SELECT_ITEMS = 25 +# Max number of characters in an embed description, leaving a small margin under Discord's 4096 limit. +MAX_EMBED_DESCRIPTION = 4080 +# Number of seconds before timeout of the alert view +ALERT_VIEW_TIMEOUT = 3600 + +SETTINGS_DELIMITER = re.compile(r"\s+(?=\S+=\S+)") +SINGLE_SETTING_PATTERN = re.compile(r"[\w/]+=.+") + +# Sentinel value to denote that a value is missing +MISSING = object() + +T = TypeVar('T') + + +async def _build_alert_message_content(ctx: FilterContext, current_message_length: int) -> str: + """Build the content section of the alert.""" + # For multiple messages and those with attachments or excessive newlines, use the logs API + if ctx.messages_deletion and ctx.upload_deletion_logs and any(( + ctx.related_messages, + len(ctx.attachments) > 0, + ctx.content.count('\n') > 15 + )): + url = await upload_log(ctx.related_messages, bot.instance.user.id, ctx.attachments) + return f"A complete log of the offending messages can be found [here]({url})" + + alert_content = escape_markdown(ctx.content) + remaining_chars = MAX_EMBED_DESCRIPTION - current_message_length + + if len(alert_content) > remaining_chars: + if ctx.messages_deletion and ctx.upload_deletion_logs: + url = await upload_log([ctx.message], bot.instance.user.id, ctx.attachments) + log_site_msg = f"The full message can be found [here]({url})" + # 7 because that's the length of "[...]\n\n" + return alert_content[:remaining_chars - (7 + len(log_site_msg))] + "[...]\n\n" + log_site_msg + else: + return alert_content[:remaining_chars - 5] + "[...]" + + return alert_content + + +async def build_mod_alert(ctx: FilterContext, triggered_filters: dict[FilterList, Iterable[str]]) -> Embed: + """Build an alert message from the filter context.""" + embed = Embed(color=Colours.soft_orange) + embed.set_thumbnail(url=ctx.author.display_avatar.url) + triggered_by = f"**Triggered by:** {format_user(ctx.author)}" + if ctx.channel: + if ctx.channel.guild: + triggered_in = f"**Triggered in:** {format_channel(ctx.channel)}\n" + else: + triggered_in = "**Triggered in:** :warning:**DM**:warning:\n" + if len(ctx.related_channels) > 1: + triggered_in += f"**Channels:** {', '.join(channel.mention for channel in ctx.related_channels)}\n" + else: + triggered_by += "\n" + triggered_in = "" + + filters = [] + for filter_list, list_message in triggered_filters.items(): + if list_message: + filters.append(f"**{filter_list.name.title()} Filters:** {', '.join(list_message)}") + filters = "\n".join(filters) + + matches = "**Matches:** " + ", ".join(repr(match) for match in ctx.matches) if ctx.matches else "" + actions = "\n**Actions Taken:** " + (", ".join(ctx.action_descriptions) if ctx.action_descriptions else "-") + + mod_alert_message = "\n".join(part for part in (triggered_by, triggered_in, filters, matches, actions) if part) + log.debug(f"{ctx.event.name} Filter:\n{mod_alert_message}") + + if ctx.message: + mod_alert_message += f"\n**[Original Content]({ctx.message.jump_url})**:\n" + else: + mod_alert_message += "\n**Original Content**:\n" + mod_alert_message += await _build_alert_message_content(ctx, len(mod_alert_message)) + + embed.description = mod_alert_message + return embed + + +def populate_embed_from_dict(embed: Embed, data: dict) -> None: + """Populate a Discord embed's fields from the given dict.""" + for setting, value in data.items(): + if setting.startswith("_"): + continue + if isinstance(value, (list, set,
tuple)): + value = f"[{', '.join(map(str, value))}]" + else: + value = str(value) if value not in ("", None) else "-" + if len(value) > MAX_FIELD_SIZE: + value = value[:MAX_FIELD_SIZE] + " [...]" + embed.add_field(name=setting, value=value, inline=len(value) < MAX_INLINE_SIZE) + + +def parse_value(value: str, type_: type[T]) -> T: + """Parse the value and attempt to convert it to the provided type.""" + if hasattr(type_, "__origin__"): # In case this is a types.GenericAlias or a typing._GenericAlias + type_ = type_.__origin__ + if value == '""': + return type_() + if type_ in (tuple, list, set): + return list(value.split(",")) + if type_ is bool: + return value.lower() == "true" or value == "1" + if isinstance(type_, EnumMeta): + return type_[value.upper()] + + return type_(value) + + +def format_response_error(e: ResponseCodeError) -> Embed: + """Format the response error into an embed.""" + description = "" + if isinstance(e.response_json, list): + description = "\n".join(f"• {error}" for error in e.response_json) + elif isinstance(e.response_json, dict): + if "non_field_errors" in e.response_json: + non_field_errors = e.response_json.pop("non_field_errors") + description += "\n".join(f"• {error}" for error in non_field_errors) + "\n" + for field, errors in e.response_json.items(): + description += "\n".join(f"• {field} - {error}" for error in errors) + "\n" + + description = description.strip() + if len(description) > MAX_EMBED_DESCRIPTION: + description = description[:MAX_EMBED_DESCRIPTION] + "[...]" + if not description: + description = "Something unexpected happened, check the logs." + + embed = Embed(colour=discord.Colour.red(), title="Oops...", description=description) + return embed + + +class ArgumentCompletionSelect(discord.ui.Select): + """A select detailing the options that can be picked to assign to a missing argument.""" + + def __init__( + self, + ctx: Context, + args: list, + arg_name: str, + options: list[str], + position: int, + converter: Optional[Callable] = None + ): + super().__init__( + placeholder=f"Select a value for {arg_name!r}", + options=[discord.SelectOption(label=option) for option in options] + ) + self.ctx = ctx + self.args = args + self.position = position + self.converter = converter + + async def callback(self, interaction: discord.Interaction) -> None: + """Re-invoke the context command with the completed argument value.""" + await interaction.response.defer() + value = interaction.data["values"][0] + if self.converter: + value = self.converter(value) + args = self.args.copy() # This makes the view reusable. + args.insert(self.position, value) + log.trace(f"Argument filled with the value {value}.
Re-invoking command") + await self.ctx.invoke(self.ctx.command, *args) + + +class ArgumentCompletionView(discord.ui.View): + """A view used to complete a missing argument in an invoked command.""" + + def __init__( + self, + ctx: Context, + args: list, + arg_name: str, + options: list[str], + position: int, + converter: Optional[Callable] = None + ): + super().__init__() + log.trace(f"The {arg_name} argument was designated missing in the invocation {ctx.view.buffer!r}") + self.add_item(ArgumentCompletionSelect(ctx, args, arg_name, options, position, converter)) + self.ctx = ctx + + async def interaction_check(self, interaction: discord.Interaction) -> bool: + """Check to ensure that the interacting user is the user who invoked the command.""" + if interaction.user != self.ctx.author: + embed = discord.Embed(description="Sorry, but this dropdown menu can only be used by the original author.") + await interaction.response.send_message(embed=embed, ephemeral=True) + return False + return True + + +class CustomCallbackSelect(discord.ui.Select): + """A selection which calls the provided callback on interaction.""" + + def __init__( + self, + callback: Callable[[Interaction, discord.ui.Select], Coroutine[Any, Any, None]], + *, + custom_id: str = SELECT_MISSING, + placeholder: str | None = None, + min_values: int = 1, + max_values: int = 1, + options: list[SelectOption] = SELECT_MISSING, + disabled: bool = False, + row: int | None = None, + ): + super().__init__( + custom_id=custom_id, + placeholder=placeholder, + min_values=min_values, + max_values=max_values, + options=options, + disabled=disabled, + row=row + ) + self.custom_callback = callback + + async def callback(self, interaction: Interaction) -> Any: + """Invoke the provided callback.""" + await self.custom_callback(interaction, self) + + +class BooleanSelectView(discord.ui.View): + """A view containing an instance of BooleanSelect.""" + + class BooleanSelect(discord.ui.Select): + """Select a true or false value and send it to the supplied callback.""" + + def __init__(self, setting_name: str, update_callback: Callable): + super().__init__(options=[SelectOption(label="True"), SelectOption(label="False")]) + self.setting_name = setting_name + self.update_callback = update_callback + + async def callback(self, interaction: Interaction) -> Any: + """Respond to the interaction by sending the boolean value to the update callback.""" + await interaction.response.edit_message(content="✅ Edit confirmed", view=None) + value = self.values[0] == "True" + await self.update_callback(setting_name=self.setting_name, setting_value=value) + + def __init__(self, setting_name: str, update_callback: Callable): + super().__init__(timeout=COMPONENT_TIMEOUT) + self.add_item(self.BooleanSelect(setting_name, update_callback)) + + +class FreeInputModal(discord.ui.Modal): + """A modal to freely enter a value for a setting.""" + + def __init__(self, setting_name: str, type_: type, update_callback: Callable): + title = f"{setting_name} Input" if len(setting_name) < MAX_MODAL_TITLE_LENGTH - 6 else "Setting Input" + super().__init__(timeout=COMPONENT_TIMEOUT, title=title) + + self.setting_name = setting_name + self.type_ = type_ + self.update_callback = update_callback + + label = setting_name if len(setting_name) < MAX_MODAL_TITLE_LENGTH else "Value" + self.setting_input = discord.ui.TextInput(label=label, style=discord.TextStyle.paragraph, required=False) + self.add_item(self.setting_input)
+ + async def on_submit(self, interaction: Interaction) -> None: + """Update the setting with the new value in the embed.""" + try: + if not self.setting_input.value: + value = self.type_() + else: + value = self.type_(self.setting_input.value) + except (ValueError, TypeError): + await interaction.response.send_message( + f"Could not process the input value for `{self.setting_name}`.", ephemeral=True + ) + else: + await interaction.response.defer() + await self.update_callback(setting_name=self.setting_name, setting_value=value) + + +class SequenceEditView(discord.ui.View): + """A view to modify the contents of a sequence of values.""" + + class SingleItemModal(discord.ui.Modal): + """A modal to enter a single list item.""" + + new_item = discord.ui.TextInput(label="New Item") + + def __init__(self, view: SequenceEditView): + super().__init__(title="Item Addition", timeout=COMPONENT_TIMEOUT) + self.view = view + + async def on_submit(self, interaction: Interaction) -> None: + """Send the submitted value to be added to the list.""" + await self.view.apply_addition(interaction, self.new_item.value) + + class NewListModal(discord.ui.Modal): + """A modal to enter new contents for the list.""" + + new_value = discord.ui.TextInput(label="Enter comma separated values", style=discord.TextStyle.paragraph) + + def __init__(self, view: SequenceEditView): + super().__init__(title="New List", timeout=COMPONENT_TIMEOUT) + self.view = view + + async def on_submit(self, interaction: Interaction) -> None: + """Send the submitted values to replace the contents of the list.""" + await self.view.apply_edit(interaction, self.new_value.value) + + def __init__(self, setting_name: str, starting_value: list, update_callback: Callable): + super().__init__(timeout=COMPONENT_TIMEOUT) + self.setting_name = setting_name + self.stored_value = starting_value + self.update_callback = update_callback + + options = [SelectOption(label=item) for item in self.stored_value[:MAX_SELECT_ITEMS]] + self.removal_select = CustomCallbackSelect( + self.apply_removal, placeholder="Enter an item to remove", options=options, row=1 + ) + if self.stored_value: + self.add_item(self.removal_select)
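`CustomCallbackSelect` is the workhorse of the views in this file: rather than subclassing `Select` for every menu, a coroutine is injected and invoked from `callback`, as `SequenceEditView.__init__` does above with `apply_removal`. A minimal sketch of the same pattern, using hypothetical names:

```python
import discord
from discord import Interaction
from discord.ui.select import SelectOption

class ColourPickView(discord.ui.View):
    """A hypothetical view wiring an arbitrary coroutine into a select menu."""

    def __init__(self):
        super().__init__()
        self.add_item(CustomCallbackSelect(
            self.on_pick,  # Any coroutine taking (Interaction, Select) can be injected.
            placeholder="Pick a colour",
            options=[SelectOption(label=colour) for colour in ("red", "green", "blue")],
        ))

    async def on_pick(self, interaction: Interaction, select: discord.ui.Select) -> None:
        await interaction.response.send_message(f"You picked {select.values[0]}.")
```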
+ + async def apply_removal(self, interaction: Interaction, select: discord.ui.Select) -> None: + """Remove an item from the list.""" + # The value might not be stored as a string. + for i, element in enumerate(self.stored_value): + if str(element) == select.values[0]: + self.stored_value.pop(i) + break + + await interaction.response.edit_message( + content=f"Current list: [{', '.join(map(str, self.stored_value))}]", view=self.copy() + ) + self.stop() + + async def apply_addition(self, interaction: Interaction, item: str) -> None: + """Add an item to the list.""" + if item in self.stored_value: # Ignore duplicates + await interaction.response.defer() + return + + self.stored_value.append(item) + await interaction.response.edit_message( + content=f"Current list: [{', '.join(map(str, self.stored_value))}]", view=self.copy() + ) + self.stop() + + async def apply_edit(self, interaction: Interaction, new_list: str) -> None: + """Change the contents of the list.""" + self.stored_value = list(set(part.strip() for part in new_list.split(",") if part.strip())) + await interaction.response.edit_message(content=f"Current list: {self.stored_value}", view=self.copy()) + self.stop() + + @discord.ui.button(label="Add Value") + async def add_value(self, interaction: Interaction, button: discord.ui.Button) -> None: + """A button to add an item to the list.""" + await interaction.response.send_modal(self.SingleItemModal(self)) + + @discord.ui.button(label="Free Input") + async def free_input(self, interaction: Interaction, button: discord.ui.Button) -> None: + """A button to change the entire list.""" + await interaction.response.send_modal(self.NewListModal(self))
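Note the pattern shared by `SequenceEditView` and the other views here: every state change edits the message with a fresh `copy()` of the view and stops the old one, so a stale view never keeps responding. Schematically (a hypothetical view, same flow):

```python
import discord
from discord import Interaction

class CounterView(discord.ui.View):
    """A hypothetical view using the same stop-and-copy flow."""

    def __init__(self, count: int = 0):
        super().__init__()
        self.count = count

    @discord.ui.button(label="+1")
    async def increment(self, interaction: Interaction, button: discord.ui.Button) -> None:
        # Re-render with a new view instance carrying the new state, then retire this one.
        new_view = CounterView(self.count + 1)
        await interaction.response.edit_message(content=str(new_view.count), view=new_view)
        self.stop()
```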
+ + @discord.ui.button(label="✅ Confirm", style=discord.ButtonStyle.green) + async def confirm(self, interaction: Interaction, button: discord.ui.Button) -> None: + """Send the final value to the embed editor.""" + # Edit the message first; the interaction might time out otherwise. + await interaction.response.edit_message(content="✅ Edit confirmed", view=None) + await self.update_callback(setting_name=self.setting_name, setting_value=self.stored_value) + self.stop() + + @discord.ui.button(label="🚫 Cancel", style=discord.ButtonStyle.red) + async def cancel(self, interaction: Interaction, button: discord.ui.Button) -> None: + """Cancel the list editing.""" + await interaction.response.edit_message(content="🚫 Canceled", view=None) + self.stop() + + def copy(self) -> SequenceEditView: + """Return a copy of this view.""" + return SequenceEditView(self.setting_name, self.stored_value, self.update_callback) + + +class EnumSelectView(discord.ui.View): + """A view containing an instance of EnumSelect.""" + + class EnumSelect(discord.ui.Select): + """Select an enum value and send it to the supplied callback.""" + + def __init__(self, setting_name: str, enum_cls: EnumMeta, update_callback: Callable): + super().__init__(options=[SelectOption(label=elem.name) for elem in enum_cls]) + self.setting_name = setting_name + self.enum_cls = enum_cls + self.update_callback = update_callback + + async def callback(self, interaction: Interaction) -> Any: + """Respond to the interaction by sending the enum value to the update callback.""" + await interaction.response.edit_message(content="✅ Edit confirmed", view=None) + await self.update_callback(setting_name=self.setting_name, setting_value=self.values[0]) + + def __init__(self, setting_name: str, enum_cls: EnumMeta, update_callback: Callable): + super().__init__(timeout=COMPONENT_TIMEOUT) + self.add_item(self.EnumSelect(setting_name, enum_cls, update_callback)) + + +class EditBaseView(ABC, discord.ui.View): + """A view used to edit embed fields based on a provided type.""" + + def __init__(self, author: discord.User): + super().__init__(timeout=EDIT_TIMEOUT) + self.author = author + self.type_per_setting_name = {} + + async def interaction_check(self, interaction: Interaction) -> bool: + """Only allow interactions from the command invoker.""" + return interaction.user.id == self.author.id + + async def _prompt_new_value(self, interaction: Interaction, select: discord.ui.Select) -> None: + """Prompt the user to give an override value for the setting they selected, and respond to the interaction.""" + setting_name = select.values[0] + type_ = self.type_per_setting_name[setting_name] + if hasattr(type_, "__origin__"): # In case this is a types.GenericAlias or a typing._GenericAlias + type_ = type_.__origin__ + new_view = self.copy() + # This is scheduled rather than awaited so as not to block the interaction response. There's a potential + # race condition, since a method of the view is used without a guarantee that the edit task has completed, + # but as the next step depends on user input, it realistically shouldn't happen.
+ scheduling.create_task(interaction.message.edit(view=new_view)) + update_callback = partial(new_view.update_embed, interaction_or_msg=interaction.message) + if type_ is bool: + view = BooleanSelectView(setting_name, update_callback) + await interaction.response.send_message(f"Choose a value for `{setting_name}`:", view=view, ephemeral=True) + elif type_ in (set, list, tuple): + if (current_value := self.current_value(setting_name)) is not MISSING: + current_list = list(current_value) + else: + current_list = [] + await interaction.response.send_message( + f"Current list: [{', '.join(current_list)}]", + view=SequenceEditView(setting_name, current_list, update_callback), + ephemeral=True + ) + elif isinstance(type_, EnumMeta): + view = EnumSelectView(setting_name, type_, update_callback) + await interaction.response.send_message(f"Choose a value for `{setting_name}`:", view=view, ephemeral=True) + else: + await interaction.response.send_modal(FreeInputModal(setting_name, type_, update_callback)) + self.stop() + + @abstractmethod + def current_value(self, setting_name: str) -> Any: + """Get the current value stored for the setting or MISSING if none found.""" + + @abstractmethod + async def update_embed(self, interaction_or_msg: Interaction | discord.Message) -> None: + """ + Update the embed with the new information. + + If `interaction_or_msg` is a Message, the invoking Interaction must be deferred before calling this function. + """ + + @abstractmethod + def copy(self) -> EditBaseView: + """Create a copy of this view.""" + + +class DeleteConfirmationView(discord.ui.View): + """A view to confirm a deletion.""" + + def __init__(self, author: discord.Member | discord.User, callback: Callable): + super().__init__(timeout=DELETION_TIMEOUT) + self.author = author + self.callback = callback + + async def interaction_check(self, interaction: Interaction) -> bool: + """Only allow interactions from the command invoker.""" + return interaction.user.id == self.author.id + + @discord.ui.button(label="Delete", style=discord.ButtonStyle.red, row=0) + async def confirm(self, interaction: Interaction, button: discord.ui.Button) -> None: + """Invoke the filter list deletion.""" + await interaction.response.edit_message(view=None) + await self.callback() + + @discord.ui.button(label="Cancel", row=0) + async def cancel(self, interaction: Interaction, button: discord.ui.Button) -> None: + """Cancel the filter list deletion.""" + await interaction.response.edit_message(content="🚫 Operation canceled.", view=None) + + +class AlertView(discord.ui.View): + """A view providing info about the offending user.""" + + def __init__(self, ctx: FilterContext): + super().__init__(timeout=ALERT_VIEW_TIMEOUT) + self.ctx = ctx + + @discord.ui.button(label="ID") + async def user_id(self, interaction: Interaction, button: discord.ui.Button) -> None: + """Reply with the ID of the offending user.""" + await interaction.response.send_message(self.ctx.author.id, ephemeral=True) + + @discord.ui.button(emoji="👤") + async def user_info(self, interaction: Interaction, button: discord.ui.Button) -> None: + """Send the info embed of the offending user.""" + command = bot.instance.get_command("user") + if not command: + await interaction.response.send_message("The command `user` is not loaded.", ephemeral=True) + return + + await interaction.response.defer() + fake_ctx = FakeContext(interaction.channel, command, author=interaction.user) + # Get the most updated user/member object every time the button is pressed. 
+ author = await get_or_fetch_member(interaction.guild, self.ctx.author.id) + if author is None: + author = await bot.instance.fetch_user(self.ctx.author.id) + await command(fake_ctx, author) + + @discord.ui.button(emoji="⚠") + async def user_infractions(self, interaction: Interaction, button: discord.ui.Button) -> None: + """Send the infractions embed of the offending user.""" + command = bot.instance.get_command("infraction search") + if not command: + await interaction.response.send_message("The command `infraction search` is not loaded.", ephemeral=True) + return + + await interaction.response.defer() + fake_ctx = FakeContext(interaction.channel, command, author=interaction.user) + await command(fake_ctx, self.ctx.author) diff --git a/bot/exts/filtering/_utils.py b/bot/exts/filtering/_utils.py new file mode 100644 index 000000000..b96767fbf --- /dev/null +++ b/bot/exts/filtering/_utils.py @@ -0,0 +1,223 @@ +import importlib +import importlib.util +import inspect +import pkgutil +import types +from abc import ABC, abstractmethod +from collections import defaultdict +from dataclasses import dataclass +from functools import cache +from typing import Any, Iterable, TypeVar, Union, get_args, get_origin + +import discord +import regex +from discord.ext.commands import Command + +import bot +from bot.bot import Bot +from bot.constants import Guild + +VARIATION_SELECTORS = r"\uFE00-\uFE0F\U000E0100-\U000E01EF" +INVISIBLE_RE = regex.compile(rf"[{VARIATION_SELECTORS}\p{{UNASSIGNED}}\p{{FORMAT}}\p{{CONTROL}}--\s]", regex.V1) +ZALGO_RE = regex.compile(rf"[\p{{NONSPACING MARK}}\p{{ENCLOSING MARK}}--[{VARIATION_SELECTORS}]]", regex.V1) + + +T = TypeVar('T') + + +def subclasses_in_package(package: str, prefix: str, parent: T) -> set[T]: + """Return all the subclasses of class `parent`, found in the top-level of `package`, given by absolute path.""" + subclasses = set() + + # Find all modules in the package. + for module_info in pkgutil.iter_modules([package], prefix): + if not module_info.ispkg: + module = importlib.import_module(module_info.name) + # Find all classes in each module... + for _, class_ in inspect.getmembers(module, inspect.isclass): + # That are a subclass of the given class. + if parent in class_.__mro__: + subclasses.add(class_) + + return subclasses
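`subclasses_in_package` is how the extension discovers its filter and settings implementations at load time, instead of maintaining hard-coded registries. A sketch of a call (the paths mirror the package layout but are illustrative here):

```python
from bot.exts.filtering._filters.filter import Filter

# Collect every Filter subclass defined at the top level of the _filters package.
filter_classes = subclasses_in_package(
    "bot/exts/filtering/_filters",   # filesystem path handed to pkgutil.iter_modules
    "bot.exts.filtering._filters.",  # import prefix prepended to each module name
    Filter,                          # only classes with Filter in their MRO are kept
)
```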
+ + +def clean_input(string: str) -> str: + """Remove zalgo and invisible characters from `string`.""" + # For future consideration: remove characters in the Mc, Sk, and Lm categories too. + # Can be normalised with form C to merge char + combining char into a single char to avoid + # removing legit diacritics, but this would open up a way to bypass _filters. + no_zalgo = ZALGO_RE.sub("", string) + return INVISIBLE_RE.sub("", no_zalgo) + + +def past_tense(word: str) -> str: + """Return the past tense form of the input word.""" + if not word: + return word + if word.endswith("e"): + return word + "d" + if word.endswith("y") and len(word) > 1 and word[-2] not in "aeiou": + return word[:-1] + "ied" + return word + "ed" + + +def to_serializable(item: Any) -> Union[bool, int, float, str, list, dict, None]: + """Convert the item into an object that can be converted to JSON.""" + if isinstance(item, (bool, int, float, str, type(None))): + return item + if isinstance(item, dict): + result = {} + for key, value in item.items(): + if not isinstance(key, (bool, int, float, str, type(None))): + key = str(key) + result[key] = to_serializable(value) + return result + if isinstance(item, Iterable): + return [to_serializable(subitem) for subitem in item] + return str(item) + + +@cache +def resolve_mention(mention: str) -> str: + """Return the appropriate formatting for the mention, be it a literal, a user ID, or a role ID.""" + guild = bot.instance.get_guild(Guild.id) + if mention in ("here", "everyone"): + return f"@{mention}" + try: + mention = int(mention) # It's an ID. + except ValueError: + pass + else: + if any(mention == role.id for role in guild.roles): + return f"<@&{mention}>" + else: + return f"<@{mention}>" + + # It's a name + for role in guild.roles: + if role.name == mention: + return role.mention + for member in guild.members: + if str(member) == mention: + return member.mention + return mention + + +def repr_equals(override: Any, default: Any) -> bool: + """Return whether the override and the default have the same representation.""" + if override is None: # It's not an override + return True + + override_is_sequence = isinstance(override, (tuple, list, set)) + default_is_sequence = isinstance(default, (tuple, list, set)) + if override_is_sequence != default_is_sequence: # One is a sequence and the other isn't. + return False + if override_is_sequence: + if len(override) != len(default): + return False + # Compare sorted string representations - sets have no reliable iteration order to zip over. + return sorted(map(str, override)) == sorted(map(str, default)) + return str(override) == str(default) + + +def starting_value(type_: type[T]) -> T: + """Return a value of the given type.""" + if get_origin(type_) in (Union, types.UnionType): # In case of a Union + args = get_args(type_) + if type(None) in args: + return None + type_ = args[0] # Pick one, doesn't matter + if origin := get_origin(type_): # In case of a parameterized List, Set, Dict etc. + type_ = origin + + try: + return type_() + except TypeError: # In case it all fails, return a string and let the user handle it. + return "" + + +class FieldRequiring(ABC): + """A mixin class that can force its concrete subclasses to set a value for specific class attributes.""" + + # Sentinel value that mustn't remain in a concrete subclass. + MUST_SET = object() + + # Sentinel value that mustn't remain in a concrete subclass. + # Overriding value must be unique in the subclasses of the abstract class in which the attribute was set. + MUST_SET_UNIQUE = object() + + # A mapping of the attributes which must be unique, and their unique values, per FieldRequiring subclass. + __unique_attributes: defaultdict[type, dict[str, set]] = defaultdict(dict) + + @abstractmethod + def __init__(self): + ...
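A sketch of how `FieldRequiring` is meant to be used (illustrative classes; the enforcement itself lives in `__init_subclass__` just below):

```python
class Setting(FieldRequiring):
    # `Setting` is still abstract (it doesn't override __init__), so this line only
    # registers `name` as an attribute every concrete subclass must set to a unique value.
    name = FieldRequiring.MUST_SET_UNIQUE

class MentionsSetting(Setting):
    name = "mentions"  # OK: concrete and unique among Setting subclasses.

    def __init__(self):
        ...

# A concrete subclass that leaves `name` untouched would raise a ValueError
# ("You must set a unique value to attribute 'name'...") at class-creation time.
```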
+ + def __init_subclass__(cls, **kwargs): + def inherited(attr: str) -> bool: + """True if `attr` was inherited from a parent class.""" + for parent in cls.__mro__[1:-1]: # The first element is the class itself, last element is object. + if hasattr(parent, attr): # The attribute was inherited. + return True + return False + + # If a new attribute with the value MUST_SET_UNIQUE was defined in an abstract class, record it. + if inspect.isabstract(cls): + for attribute in dir(cls): + if getattr(cls, attribute, None) is FieldRequiring.MUST_SET_UNIQUE: + if not inherited(attribute): + # A new attribute with the value MUST_SET_UNIQUE. + FieldRequiring.__unique_attributes[cls][attribute] = set() + return + + for attribute in dir(cls): + if attribute.startswith("__") or attribute in ("MUST_SET", "MUST_SET_UNIQUE"): + continue + value = getattr(cls, attribute) + if value is FieldRequiring.MUST_SET and inherited(attribute): + raise ValueError(f"You must set attribute {attribute!r} when creating {cls!r}") + elif value is FieldRequiring.MUST_SET_UNIQUE and inherited(attribute): + raise ValueError(f"You must set a unique value to attribute {attribute!r} when creating {cls!r}") + else: + # Check if the value needs to be unique. + for parent in cls.__mro__[1:-1]: + # Find the parent class the attribute was first defined in. + if attribute in FieldRequiring.__unique_attributes[parent]: + if value in FieldRequiring.__unique_attributes[parent][attribute]: + raise ValueError(f"Value of {attribute!r} in {cls!r} is not unique for parent {parent!r}.") + else: + # Add to the set of unique values for that field. + FieldRequiring.__unique_attributes[parent][attribute].add(value) + + +@dataclass +class FakeContext: + """ + A class representing a context-like object that can be sent to infraction commands. + + The goal is to be able to apply infractions without depending on the existence of a message or an interaction + (which are the two ways to create a Context), e.g. in API events which aren't message-driven, or in custom filtering + events. 
+ """ + + channel: discord.abc.Messageable + command: Command | None + bot: Bot | None = None + guild: discord.Guild | None = None + author: discord.Member | discord.User | None = None + me: discord.Member | None = None + + def __post_init__(self): + """Initialize the missing information.""" + if not self.bot: + self.bot = bot.instance + if not self.guild: + self.guild = self.bot.get_guild(Guild.id) + if not self.me: + self.me = self.guild.me + if not self.author: + self.author = self.me + + async def send(self, *args, **kwargs) -> discord.Message: + """A wrapper for channel.send.""" + return await self.channel.send(*args, **kwargs) diff --git a/bot/exts/filtering/filtering.py b/bot/exts/filtering/filtering.py new file mode 100644 index 000000000..c4417e5e0 --- /dev/null +++ b/bot/exts/filtering/filtering.py @@ -0,0 +1,1424 @@ +import datetime +import json +import re +import unicodedata +from collections import defaultdict +from collections.abc import Iterable, Mapping +from functools import partial, reduce +from io import BytesIO +from operator import attrgetter +from typing import Literal, Optional, get_type_hints + +import arrow +import discord +from async_rediscache import RedisCache +from discord import Colour, Embed, HTTPException, Message, MessageType +from discord.ext import commands, tasks +from discord.ext.commands import BadArgument, Cog, Context, command, has_any_role +from pydis_core.site_api import ResponseCodeError +from pydis_core.utils import scheduling + +import bot +import bot.exts.filtering._ui.filter as filters_ui +from bot import constants +from bot.bot import Bot +from bot.constants import Channels, Guild, MODERATION_ROLES, Roles +from bot.exts.backend.branding._repository import HEADERS, PARAMS +from bot.exts.filtering._filter_context import Event, FilterContext +from bot.exts.filtering._filter_lists import FilterList, ListType, filter_list_types, list_type_converter +from bot.exts.filtering._filter_lists.filter_list import AtomicList +from bot.exts.filtering._filters.filter import Filter, UniqueFilter +from bot.exts.filtering._settings import ActionSettings +from bot.exts.filtering._settings_types.actions.infraction_and_notification import Infraction +from bot.exts.filtering._ui.filter import ( + build_filter_repr_dict, description_and_settings_converter, filter_serializable_overrides, populate_embed_from_dict +) +from bot.exts.filtering._ui.filter_list import FilterListAddView, FilterListEditView, settings_converter +from bot.exts.filtering._ui.search import SearchEditView, search_criteria_converter +from bot.exts.filtering._ui.ui import ( + AlertView, ArgumentCompletionView, DeleteConfirmationView, build_mod_alert, format_response_error +) +from bot.exts.filtering._utils import past_tense, repr_equals, starting_value, to_serializable +from bot.exts.moderation.infraction.infractions import COMP_BAN_DURATION, COMP_BAN_REASON +from bot.log import get_logger +from bot.pagination import LinePaginator +from bot.utils.channel import is_mod_channel +from bot.utils.lock import lock_arg +from bot.utils.message_cache import MessageCache + +log = get_logger(__name__) + +WEBHOOK_ICON_URL = r"https://github.com/python-discord/branding/raw/main/icons/filter/filter_pfp.png" +WEBHOOK_NAME = "Filtering System" +CACHE_SIZE = 1000 +HOURS_BETWEEN_NICKNAME_ALERTS = 1 +OFFENSIVE_MSG_DELETE_TIME = datetime.timedelta(days=7) +WEEKLY_REPORT_ISO_DAY = 3 # 1=Monday, 7=Sunday + + +class Filtering(Cog): + """Filtering and alerting for content posted on the server.""" + + # A set of filter 
diff --git a/bot/exts/filtering/filtering.py b/bot/exts/filtering/filtering.py new file mode 100644 index 000000000..c4417e5e0 --- /dev/null +++ b/bot/exts/filtering/filtering.py @@ -0,0 +1,1424 @@ +import datetime +import json +import re +import unicodedata +from collections import defaultdict +from collections.abc import Iterable, Mapping +from functools import partial, reduce +from io import BytesIO +from operator import attrgetter +from typing import Literal, Optional, get_type_hints + +import arrow +import discord +from async_rediscache import RedisCache +from discord import Colour, Embed, HTTPException, Message, MessageType +from discord.ext import commands, tasks +from discord.ext.commands import BadArgument, Cog, Context, command, has_any_role +from pydis_core.site_api import ResponseCodeError +from pydis_core.utils import scheduling + +import bot +import bot.exts.filtering._ui.filter as filters_ui +from bot import constants +from bot.bot import Bot +from bot.constants import Channels, Guild, MODERATION_ROLES, Roles +from bot.exts.backend.branding._repository import HEADERS, PARAMS +from bot.exts.filtering._filter_context import Event, FilterContext +from bot.exts.filtering._filter_lists import FilterList, ListType, filter_list_types, list_type_converter +from bot.exts.filtering._filter_lists.filter_list import AtomicList +from bot.exts.filtering._filters.filter import Filter, UniqueFilter +from bot.exts.filtering._settings import ActionSettings +from bot.exts.filtering._settings_types.actions.infraction_and_notification import Infraction +from bot.exts.filtering._ui.filter import ( + build_filter_repr_dict, description_and_settings_converter, filter_serializable_overrides, populate_embed_from_dict +) +from bot.exts.filtering._ui.filter_list import FilterListAddView, FilterListEditView, settings_converter +from bot.exts.filtering._ui.search import SearchEditView, search_criteria_converter +from bot.exts.filtering._ui.ui import ( + AlertView, ArgumentCompletionView, DeleteConfirmationView, build_mod_alert, format_response_error +) +from bot.exts.filtering._utils import past_tense, repr_equals, starting_value, to_serializable +from bot.exts.moderation.infraction.infractions import COMP_BAN_DURATION, COMP_BAN_REASON +from bot.log import get_logger +from bot.pagination import LinePaginator +from bot.utils.channel import is_mod_channel +from bot.utils.lock import lock_arg +from bot.utils.message_cache import MessageCache + +log = get_logger(__name__) + +WEBHOOK_ICON_URL = r"https://github.com/python-discord/branding/raw/main/icons/filter/filter_pfp.png" +WEBHOOK_NAME = "Filtering System" +CACHE_SIZE = 1000 +HOURS_BETWEEN_NICKNAME_ALERTS = 1 +OFFENSIVE_MSG_DELETE_TIME = datetime.timedelta(days=7) +WEEKLY_REPORT_ISO_DAY = 3 # 1=Monday, 7=Sunday + + +class Filtering(Cog): + """Filtering and alerting for content posted on the server.""" + + # A set of filter list names with missing implementations that already caused a warning. + already_warned = set() + + # Redis cache mapping a user ID to the last timestamp a bad nickname alert was sent. + name_alerts = RedisCache() + + # region: init + + def __init__(self, bot: Bot): + self.bot = bot + self.filter_lists: dict[str, FilterList] = {} + self._subscriptions: defaultdict[Event, list[FilterList]] = defaultdict(list) + self.delete_scheduler = scheduling.Scheduler(self.__class__.__name__) + self.webhook: discord.Webhook | None = None + + self.loaded_settings = {} + self.loaded_filters = {} + self.loaded_filter_settings = {} + + self.message_cache = MessageCache(CACHE_SIZE, newest_first=True) + + async def cog_load(self) -> None: + """ + Fetch the filter data from the API, parse it, and load it into the appropriate data structures. + + Additionally, fetch the alerting webhook. + """ + await self.bot.wait_until_guild_available() + + log.trace("Loading filtering information from the database.") + raw_filter_lists = await self.bot.api_client.get("bot/filter/filter_lists") + example_list = None + for raw_filter_list in raw_filter_lists: + loaded_list = self._load_raw_filter_list(raw_filter_list) + if not example_list and loaded_list: + example_list = loaded_list + + # The webhook must be generated by the bot to send messages with components through it. + self.webhook = await self._fetch_or_generate_filtering_webhook() + + self.collect_loaded_types(example_list) + await self.schedule_offending_messages_deletion() + self.weekly_auto_infraction_report_task.start() + + def subscribe(self, filter_list: FilterList, *events: Event) -> None: + """ + Subscribe a filter list to the given events. + + The filter list is added to a list for each event. When the event is triggered, the filter context will be + dispatched to the subscribed filter lists. + + While it's possible to just have each filter list check the context's event, the subscriptions describe only + the events a filter list expects to receive from the filtering cog; there isn't an actual limitation on the + kinds of events a filter list can handle, as long as the filter context is built properly. If for whatever + reason we want to invoke a filter list outside of the usual procedure with the filtering cog, hard-coding the + events into each filter list would make that more difficult. + """ + for event in events: + if filter_list not in self._subscriptions[event]: + self._subscriptions[event].append(filter_list) + + def unsubscribe(self, filter_list: FilterList, *events: Event) -> None: + """Unsubscribe a filter list from the given events. If no events are given, unsubscribe from every event.""" + if not events: + events = list(self._subscriptions) + + for event in events: + if filter_list in self._subscriptions.get(event, []): + self._subscriptions[event].remove(filter_list) + + def collect_loaded_types(self, example_list: AtomicList) -> None: + """ + Go over the classes used in initialization and collect them into dictionaries. + + The information that is collected is about the types actually used to load the API response, not all types + available in the filtering extension. + + Any filter list has the fields for all settings in the DB schema, so picking any one of them is enough. + """ + # Get the filter types used by each filter list. + for filter_list in self.filter_lists.values(): + self.loaded_filters.update({filter_type.name: filter_type for filter_type in filter_list.filter_types}) + + # Get the setting types used by each filter list.
+ if self.filter_lists: + settings_entries = set() + # The settings are split between actions and validations. + for settings_group in example_list.defaults: + settings_entries.update(type(setting) for _, setting in settings_group.items()) + + for setting_entry in settings_entries: + type_hints = get_type_hints(setting_entry) + # The description should be either a string or a dictionary. + if isinstance(setting_entry.description, str): + # If it's a string, then the settings entry matches a single field in the DB, + # and its name is the setting type's name attribute. + self.loaded_settings[setting_entry.name] = ( + setting_entry.description, setting_entry, type_hints[setting_entry.name] + ) + else: + # Otherwise, the setting entry works with compound settings. + self.loaded_settings.update({ + subsetting: (description, setting_entry, type_hints[subsetting]) + for subsetting, description in setting_entry.description.items() + }) + + # Get the settings per filter as well. + for filter_name, filter_type in self.loaded_filters.items(): + extra_fields_type = filter_type.extra_fields_type + if not extra_fields_type: + continue + type_hints = get_type_hints(extra_fields_type) + # A class var with a `_description` suffix is expected per field name. + self.loaded_filter_settings[filter_name] = { + field_name: ( + getattr(extra_fields_type, f"{field_name}_description", ""), + extra_fields_type, + type_hints[field_name] + ) + for field_name in extra_fields_type.__fields__ + } + + async def schedule_offending_messages_deletion(self) -> None: + """Load the messages that need to be scheduled for deletion from the database.""" + response = await self.bot.api_client.get('bot/offensive-messages') + + now = arrow.utcnow() + for msg in response: + delete_at = arrow.get(msg['delete_date']) + if delete_at < now: + await self._delete_offensive_msg(msg) + else: + self._schedule_msg_delete(msg) + + async def cog_check(self, ctx: Context) -> bool: + """Only allow moderators to invoke the commands in this cog.""" + return await has_any_role(*MODERATION_ROLES).predicate(ctx) + + # endregion + # region: listeners and event handlers + + @Cog.listener() + async def on_message(self, msg: Message) -> None: + """Filter the contents of a sent message.""" + if msg.author.bot or msg.webhook_id or msg.type == MessageType.auto_moderation_action: + return + self.message_cache.append(msg) + + ctx = FilterContext.from_message(Event.MESSAGE, msg, None, self.message_cache) + result_actions, list_messages, triggers = await self._resolve_action(ctx) + self.message_cache.update(msg, metadata=triggers) + if result_actions: + await result_actions.action(ctx) + if ctx.send_alert: + await self._send_alert(ctx, list_messages) + + nick_ctx = FilterContext.from_message(Event.NICKNAME, msg) + nick_ctx.content = msg.author.display_name + await self._check_bad_name(nick_ctx) + + await self._maybe_schedule_msg_delete(ctx, result_actions) + self._increment_stats(triggers) + + @Cog.listener() + async def on_message_edit(self, before: discord.Message, after: discord.Message) -> None: + """Filter the contents of an edited message. Don't reinvoke filters already invoked on the `before` version.""" + # Only check changes to the message contents/attachments and embed additions, not pin status etc. 
+ if all(( + before.content == after.content, # content hasn't changed + before.attachments == after.attachments, # attachments haven't changed + len(before.embeds) >= len(after.embeds) # embeds haven't been added + )): + return + + # Update the cache first, it might be used by the antispam filter. + # No need to update the triggers, they're going to be updated inside the sublists if necessary. + self.message_cache.update(after) + ctx = FilterContext.from_message(Event.MESSAGE_EDIT, after, before, self.message_cache) + result_actions, list_messages, triggers = await self._resolve_action(ctx) + if result_actions: + await result_actions.action(ctx) + if ctx.send_alert: + await self._send_alert(ctx, list_messages) + await self._maybe_schedule_msg_delete(ctx, result_actions) + self._increment_stats(triggers) + + @Cog.listener() + async def on_voice_state_update(self, member: discord.Member, *_) -> None: + """Checks for bad words in usernames when users join, switch or leave a voice channel.""" + ctx = FilterContext(Event.NICKNAME, member, None, member.display_name, None) + await self._check_bad_name(ctx) + + async def filter_snekbox_output(self, snekbox_result: str, msg: Message) -> bool: + """ + Filter the result of a snekbox command to see if it violates any of our rules, and then respond accordingly. + + Also requires the original message, to check whether to filter and for alerting. + Any action (deletion, infraction) will be applied in the context of the original message. + + Returns whether a filter was triggered or not. + """ + ctx = FilterContext.from_message(Event.MESSAGE, msg).replace(content=snekbox_result) + result_actions, list_messages, triggers = await self._resolve_action(ctx) + if result_actions: + await result_actions.action(ctx) + if ctx.send_alert: + await self._send_alert(ctx, list_messages) + self._increment_stats(triggers) + + return result_actions is not None + + # endregion + # region: blacklist commands + + @commands.group(aliases=("bl", "blacklist", "denylist", "dl")) + async def blocklist(self, ctx: Context) -> None: + """Group for managing blacklisted items.""" + if not ctx.invoked_subcommand: + await ctx.send_help(ctx.command) + + @blocklist.command(name="list", aliases=("get",)) + async def bl_list(self, ctx: Context, list_name: Optional[str] = None) -> None: + """List the contents of a specified blacklist.""" + result = await self._resolve_list_type_and_name(ctx, ListType.DENY, list_name, exclude="list_type") + if not result: + return + list_type, filter_list = result + await self._send_list(ctx, filter_list, list_type) + + @blocklist.command(name="add", aliases=("a",)) + async def bl_add( + self, + ctx: Context, + noui: Optional[Literal["noui"]], + list_name: Optional[str], + content: str, + *, + description_and_settings: Optional[str] = None + ) -> None: + """ + Add a blocked filter to the specified filter list. + + Unless `noui` is specified, a UI will be provided to edit the content, description, and settings + before confirmation. + + The settings can be provided in the command itself, in the format of `setting_name=value` (no spaces around the + equal sign). The value doesn't need to (shouldn't) be surrounded in quotes even if it contains spaces. 
+ """ + result = await self._resolve_list_type_and_name(ctx, ListType.DENY, list_name, exclude="list_type") + if result is None: + return + list_type, filter_list = result + await self._add_filter(ctx, noui, list_type, filter_list, content, description_and_settings) + + # endregion + # region: whitelist commands + + @commands.group(aliases=("wl", "whitelist", "al")) + async def allowlist(self, ctx: Context) -> None: + """Group for managing blacklisted items.""" + if not ctx.invoked_subcommand: + await ctx.send_help(ctx.command) + + @allowlist.command(name="list", aliases=("get",)) + async def al_list(self, ctx: Context, list_name: Optional[str] = None) -> None: + """List the contents of a specified whitelist.""" + result = await self._resolve_list_type_and_name(ctx, ListType.ALLOW, list_name, exclude="list_type") + if not result: + return + list_type, filter_list = result + await self._send_list(ctx, filter_list, list_type) + + @allowlist.command(name="add", aliases=("a",)) + async def al_add( + self, + ctx: Context, + noui: Optional[Literal["noui"]], + list_name: Optional[str], + content: str, + *, + description_and_settings: Optional[str] = None + ) -> None: + """ + Add an allowed filter to the specified filter list. + + Unless `noui` is specified, a UI will be provided to edit the content, description, and settings + before confirmation. + + The settings can be provided in the command itself, in the format of `setting_name=value` (no spaces around the + equal sign). The value doesn't need to (shouldn't) be surrounded in quotes even if it contains spaces. + """ + result = await self._resolve_list_type_and_name(ctx, ListType.ALLOW, list_name, exclude="list_type") + if result is None: + return + list_type, filter_list = result + await self._add_filter(ctx, noui, list_type, filter_list, content, description_and_settings) + + # endregion + # region: filter commands + + @commands.group(aliases=("filters", "f"), invoke_without_command=True) + async def filter(self, ctx: Context, id_: Optional[int] = None) -> None: + """ + Group for managing filters. + + If a valid filter ID is provided, an embed describing the filter will be posted. + """ + if not ctx.invoked_subcommand and not id_: + await ctx.send_help(ctx.command) + return + + result = self._get_filter_by_id(id_) + if result is None: + await ctx.send(f":x: Could not find a filter with ID `{id_}`.") + return + filter_, filter_list, list_type = result + + overrides_values, extra_fields_overrides = filter_serializable_overrides(filter_) + + all_settings_repr_dict = build_filter_repr_dict( + filter_list, list_type, type(filter_), overrides_values, extra_fields_overrides + ) + embed = Embed(colour=Colour.blue()) + populate_embed_from_dict(embed, all_settings_repr_dict) + embed.description = f"`{filter_.content}`" + if filter_.description: + embed.description += f" - {filter_.description}" + embed.set_author(name=f"Filter {id_} - " + f"{filter_list[list_type].label}".title()) + embed.set_footer(text=( + "Field names with an asterisk have values which override the defaults of the containing filter list. " + f"To view all defaults of the list, " + f"run `{constants.Bot.prefix}filterlist describe {list_type.name} {filter_list.name}`." 
+ )) + await ctx.send(embed=embed) + + @filter.command(name="list", aliases=("get",)) + async def f_list( + self, ctx: Context, list_type: Optional[list_type_converter] = None, list_name: Optional[str] = None + ) -> None: + """List the contents of a specified list of filters.""" + result = await self._resolve_list_type_and_name(ctx, list_type, list_name) + if result is None: + return + list_type, filter_list = result + + await self._send_list(ctx, filter_list, list_type) + + @filter.command(name="describe", aliases=("explain", "manual")) + async def f_describe(self, ctx: Context, filter_name: Optional[str]) -> None: + """Show a description of the specified filter, or a list of possible values if no name is specified.""" + if not filter_name: + filter_names = [f"» {f}" for f in self.loaded_filters] + embed = Embed(colour=Colour.blue()) + embed.set_author(name="List of filter names") + await LinePaginator.paginate(filter_names, ctx, embed, max_lines=10, empty=False) + else: + filter_type = self.loaded_filters.get(filter_name) + if not filter_type: + filter_type = self.loaded_filters.get(filter_name[:-1]) # A plural form or a typo. + if not filter_type: + await ctx.send(f":x: There's no filter type named {filter_name!r}.") + return + # Use the class's docstring, and ignore single newlines. + embed = Embed(description=re.sub(r"(?<!\n)\n(?!\n)", " ", filter_type.__doc__), colour=Colour.blue()) + embed.set_author(name=f"Description of the {filter_name} filter") + await ctx.send(embed=embed) + + @filter.command(name="add", aliases=("a",)) + async def f_add( + self, + ctx: Context, + noui: Optional[Literal["noui"]], + list_type: Optional[list_type_converter], + list_name: Optional[str], + content: str, + *, + description_and_settings: Optional[str] = None + ) -> None: + """ + Add a filter to the specified filter list. + + Unless `noui` is specified, a UI will be provided to edit the content, description, and settings + before confirmation. + + The settings can be provided in the command itself, in the format of `setting_name=value` (no spaces around the + equal sign). The value doesn't need to (shouldn't) be surrounded in quotes even if it contains spaces. + + A template filter can be specified in the settings area to copy overrides from. The setting name is "--template" + and the value is the filter ID. The template will be used before applying any other override. + + Example: `!filter add denied token "Scaleios is great" remove_context=True send_alert=False --template=100` + """ + result = await self._resolve_list_type_and_name(ctx, list_type, list_name) + if result is None: + return + list_type, filter_list = result + await self._add_filter(ctx, noui, list_type, filter_list, content, description_and_settings) + + @filter.command(name="edit", aliases=("e",)) + async def f_edit( + self, + ctx: Context, + noui: Optional[Literal["noui"]], + filter_id: int, + *, + description_and_settings: Optional[str] = None + ) -> None: + """ + Edit a filter specified by its ID. + + Unless `noui` is specified, a UI will be provided to edit the content, description, and settings + before confirmation. + + The settings can be provided in the command itself, in the format of `setting_name=value` (no spaces around the + equal sign). The value doesn't need to (shouldn't) be surrounded in quotes even if it contains spaces. + + A template filter can be specified in the settings area to copy overrides from. The setting name is "--template" + and the value is the filter ID. 
The template will be used before applying any other override. + + To edit the filter's content, use the UI. + """ + result = self._get_filter_by_id(filter_id) + if result is None: + await ctx.send(f":x: Could not find a filter with ID `{filter_id}`.") + return + filter_, filter_list, list_type = result + filter_type = type(filter_) + settings, filter_settings = filter_serializable_overrides(filter_) + description, new_settings, new_filter_settings = description_and_settings_converter( + filter_list, + list_type, filter_type, + self.loaded_settings, + self.loaded_filter_settings, + description_and_settings + ) + + content = filter_.content + description = description or filter_.description + settings.update(new_settings) + filter_settings.update(new_filter_settings) + patch_func = partial(self._patch_filter, filter_) + + if noui: + try: + await patch_func( + ctx.message, filter_list, list_type, filter_type, content, description, settings, filter_settings + ) + except ResponseCodeError as e: + await ctx.reply(embed=format_response_error(e)) + return + + embed = Embed(colour=Colour.blue()) + embed.description = f"`{filter_.content}`" + if description: + embed.description += f" - {description}" + embed.set_author( + name=f"Filter {filter_id} - {filter_list[list_type].label}".title()) + embed.set_footer(text=( + "Field names with an asterisk have values which override the defaults of the containing filter list. " + f"To view all defaults of the list, " + f"run `{constants.Bot.prefix}filterlist describe {list_type.name} {filter_list.name}`." + )) + + view = filters_ui.FilterEditView( + filter_list, + list_type, + filter_type, + content, + description, + settings, + filter_settings, + self.loaded_settings, + self.loaded_filter_settings, + ctx.author, + embed, + patch_func + ) + await ctx.send(embed=embed, reference=ctx.message, view=view) + + @filter.command(name="delete", aliases=("d", "remove")) + async def f_delete(self, ctx: Context, filter_id: int) -> None: + """Delete the filter specified by its ID.""" + async def delete_list() -> None: + """The actual removal routine.""" + await bot.instance.api_client.delete(f'bot/filter/filters/{filter_id}') + log.info(f"Successfully deleted filter with ID {filter_id}.") + filter_list[list_type].filters.pop(filter_id) + await ctx.reply(f"✅ Deleted filter: {filter_}") + + result = self._get_filter_by_id(filter_id) + if result is None: + await ctx.send(f":x: Could not find a filter with ID `{filter_id}`.") + return + filter_, filter_list, list_type = result + await ctx.reply( + f"Are you sure you want to delete filter {filter_}?", + view=DeleteConfirmationView(ctx.author, delete_list) + ) + + @filter.command(aliases=("settings",)) + async def setting(self, ctx: Context, setting_name: str | None) -> None: + """Show a description of the specified setting, or a list of possible settings if no name is specified.""" + if not setting_name: + settings_list = [f"» {setting_name}" for setting_name in self.loaded_settings] + for filter_name, filter_settings in self.loaded_filter_settings.items(): + settings_list.extend(f"» {filter_name}/{setting}" for setting in filter_settings) + embed = Embed(colour=Colour.blue()) + embed.set_author(name="List of setting names") + await LinePaginator.paginate(settings_list, ctx, embed, max_lines=10, empty=False) + + else: + # The setting is either in a SettingsEntry subclass, or a pydantic model. 
+ setting_data = self.loaded_settings.get(setting_name) + description = None + if setting_data: + description = setting_data[0] + elif "/" in setting_name: # It's a filter specific setting. + filter_name, filter_setting_name = setting_name.split("/", maxsplit=1) + if filter_name in self.loaded_filter_settings: + if filter_setting_name in self.loaded_filter_settings[filter_name]: + description = self.loaded_filter_settings[filter_name][filter_setting_name][0] + if description is None: + await ctx.send(f":x: There's no setting type named {setting_name!r}.") + return + embed = Embed(colour=Colour.blue(), description=description) + embed.set_author(name=f"Description of the {setting_name} setting") + await ctx.send(embed=embed) + + @filter.command(name="match") + async def f_match( + self, ctx: Context, no_user: bool | None, message: Message | None, *, string: str | None + ) -> None: + """ + Post any responses from the filter lists for the given message or string. + + If there's a `message`, the `string` will be ignored. Note that if a `message` is provided, it will go through + all validations appropriate to where it was sent and who sent it. To check for matches regardless of the author + (for example if the message was sent by another staff member or yourself) set `no_user` to '1' or 'True'. + + If a `string` is provided, it will be validated in the context of a user with no roles in python-general. + """ + if not message and not string: + raise BadArgument("Please provide input.") + if message: + user = None if no_user else message.author + filter_ctx = FilterContext(Event.MESSAGE, user, message.channel, message.content, message, message.embeds) + else: + python_general = ctx.guild.get_channel(Channels.python_general) + filter_ctx = FilterContext(Event.MESSAGE, None, python_general, string, None) + + _, _, triggers = await self._resolve_action(filter_ctx) + lines = [] + for sublist, sublist_triggers in triggers.items(): + if sublist_triggers: + triggers_repr = map(str, sublist_triggers) + lines.extend([f"**{sublist.label.title()}s**", *triggers_repr, "\n"]) + lines = lines[:-1] # Remove last newline. + + embed = Embed(colour=Colour.blue(), title="Match results") + await LinePaginator.paginate(lines, ctx, embed, max_lines=10, empty=False) + + @filter.command(name="search") + async def f_search( + self, + ctx: Context, + noui: Literal["noui"] | None, + filter_type_name: str | None, + *, + settings: str = "" + ) -> None: + """ + Find filters with the provided settings. The format is identical to that of the add and edit commands. + + If a list type and/or a list name are provided, the search will be limited to those parameters. A list name must + be provided in order to search by filter-specific settings. + """ + filter_type = None + if filter_type_name: + filter_type_name = filter_type_name.lower() + filter_type = self.loaded_filters.get(filter_type_name) + if not filter_type: + filter_type = self.loaded_filters.get(filter_type_name[:-1]) # In case the user specified the plural form. + # If settings were provided with no filter_type, discord.py will capture the first word as the filter type.
+ if filter_type is None and filter_type_name is not None:
+ if settings:
+ settings = f"{filter_type_name} {settings}"
+ else:
+ settings = filter_type_name
+ filter_type_name = None
+
+ settings, filter_settings, filter_type = search_criteria_converter(
+ self.filter_lists,
+ self.loaded_filters,
+ self.loaded_settings,
+ self.loaded_filter_settings,
+ filter_type,
+ settings
+ )
+
+ if noui:
+ await self._search_filters(ctx.message, filter_type, settings, filter_settings)
+ return
+
+ embed = Embed(colour=Colour.blue())
+ view = SearchEditView(
+ filter_type,
+ settings,
+ filter_settings,
+ self.filter_lists,
+ self.loaded_filters,
+ self.loaded_settings,
+ self.loaded_filter_settings,
+ ctx.author,
+ embed,
+ self._search_filters
+ )
+ await ctx.send(embed=embed, reference=ctx.message, view=view)
+
+ @filter.command(root_aliases=("compfilter", "compf"))
+ async def compadd(
+ self, ctx: Context, list_name: Optional[str], content: str, *, description: Optional[str] = "Phishing"
+ ) -> None:
+ """Add a filter to detect a compromised account. Will apply the equivalent of a compban if triggered."""
+ result = await self._resolve_list_type_and_name(ctx, ListType.DENY, list_name, exclude="list_type")
+ if result is None:
+ return
+ list_type, filter_list = result
+
+ settings = (
+ "remove_context=True "
+ "dm_pings=Moderators "
+ "infraction_type=BAN "
+ "infraction_channel=1 " # Post the ban in #mod-alerts
+ f"infraction_duration={COMP_BAN_DURATION.total_seconds()} "
+ f"infraction_reason={COMP_BAN_REASON}"
+ )
+ description_and_settings = f"{description} {settings}"
+ await self._add_filter(ctx, "noui", list_type, filter_list, content, description_and_settings)
+
+ # endregion
+ # region: filterlist group
+
+ @commands.group(aliases=("fl",))
+ async def filterlist(self, ctx: Context) -> None:
+ """Group for managing filter lists."""
+ if not ctx.invoked_subcommand:
+ await ctx.send_help(ctx.command)
+
+ @filterlist.command(name="describe", aliases=("explain", "manual", "id"))
+ async def fl_describe(
+ self, ctx: Context, list_type: Optional[list_type_converter] = None, list_name: Optional[str] = None
+ ) -> None:
+ """Show a description of the specified filter list, or a list of possible values if no values are provided."""
+ if not list_type and not list_name:
+ list_names = [f"» {fl}" for fl in self.filter_lists]
+ embed = Embed(colour=Colour.blue())
+ embed.set_author(name="List of filter list names")
+ await LinePaginator.paginate(list_names, ctx, embed, max_lines=10, empty=False)
+ return
+
+ result = await self._resolve_list_type_and_name(ctx, list_type, list_name)
+ if result is None:
+ return
+ list_type, filter_list = result
+
+ setting_values = {}
+ for settings_group in filter_list[list_type].defaults:
+ for _, setting in settings_group.items():
+ setting_values.update(to_serializable(setting.dict()))
+
+ embed = Embed(colour=Colour.blue())
+ populate_embed_from_dict(embed, setting_values)
+ # Use the class's docstring, and ignore single newlines.
+ embed.description = re.sub(r"(?<!\n)\n(?!\n)", " ", filter_list.__doc__)
+ embed.set_author(
+ name=f"Description of the {filter_list[list_type].label} filter list"
+ )
+ await ctx.send(embed=embed)
+
+ @filterlist.command(name="add", aliases=("a",))
+ @has_any_role(Roles.admins)
+ async def fl_add(self, ctx: Context, list_type: list_type_converter, list_name: str) -> None:
+ """Add a new filter list."""
+ # Check if there's an implementation.
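+ # `filter_list_types` maps each implemented list name to its FilterList class.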
+ if list_name.lower() not in filter_list_types:
+ if list_name.lower()[:-1] not in filter_list_types: # Maybe the name was given with uppercase or in plural?
+ await ctx.reply(f":x: Cannot add a `{list_name}` filter list, as there is no matching implementation.")
+ return
+ else:
+ list_name = list_name.lower()[:-1]
+
+ # Check it doesn't already exist.
+ list_description = f"{past_tense(list_type.name.lower())} {list_name.lower()}"
+ if list_name in self.filter_lists:
+ filter_list = self.filter_lists[list_name]
+ if list_type in filter_list:
+ await ctx.reply(f":x: The {list_description} filter list already exists.")
+ return
+
+ embed = Embed(colour=Colour.blue())
+ embed.set_author(name=f"New Filter List - {list_description.title()}")
+ settings = {name: starting_value(value[2]) for name, value in self.loaded_settings.items()}
+
+ view = FilterListAddView(
+ list_name,
+ list_type,
+ settings,
+ self.loaded_settings,
+ ctx.author,
+ embed,
+ self._post_filter_list
+ )
+ await ctx.send(embed=embed, reference=ctx.message, view=view)
+
+ @filterlist.command(name="edit", aliases=("e",))
+ @has_any_role(Roles.admins)
+ async def fl_edit(
+ self,
+ ctx: Context,
+ noui: Optional[Literal["noui"]],
+ list_type: Optional[list_type_converter] = None,
+ list_name: Optional[str] = None,
+ *,
+ settings: str | None = None
+ ) -> None:
+ """
+ Edit the filter list.
+
+ Unless `noui` is specified, a UI will be provided to edit the settings before confirmation.
+
+ The settings can be provided in the command itself, in the format of `setting_name=value` (no spaces around the
+ equal sign). The value doesn't need to be (and shouldn't be) wrapped in quotes, even if it contains spaces.
+ """
+ result = await self._resolve_list_type_and_name(ctx, list_type, list_name)
+ if result is None:
+ return
+ list_type, filter_list = result
+ settings = settings_converter(self.loaded_settings, settings)
+ if noui:
+ try:
+ await self._patch_filter_list(ctx.message, filter_list, list_type, settings)
+ except ResponseCodeError as e:
+ await ctx.reply(embed=format_response_error(e))
+ return
+
+ embed = Embed(colour=Colour.blue())
+ embed.set_author(name=f"{filter_list[list_type].label.title()} Filter List")
+ embed.set_footer(text="Field names with a ~ have values which change the existing value in the filter list.")
+
+ view = FilterListEditView(
+ filter_list,
+ list_type,
+ settings,
+ self.loaded_settings,
+ ctx.author,
+ embed,
+ self._patch_filter_list
+ )
+ await ctx.send(embed=embed, reference=ctx.message, view=view)
+
+ @filterlist.command(name="delete", aliases=("remove",))
+ @has_any_role(Roles.admins)
+ async def fl_delete(
+ self, ctx: Context, list_type: Optional[list_type_converter] = None, list_name: Optional[str] = None
+ ) -> None:
+ """Remove the filter list and all of its filters from the database."""
+ async def delete_list() -> None:
+ """The actual removal routine."""
+ list_data = await bot.instance.api_client.get(f"bot/filter/filter_lists/{list_id}")
+ file = discord.File(BytesIO(json.dumps(list_data, indent=4).encode("utf-8")), f"{list_description}.json")
+ message = await ctx.send("⏳ Annihilation in progress, please hold...", file=file)
+ # Unload the filter list.
+ filter_list.pop(list_type)
+ if not filter_list: # There's nothing left, remove from the cog.
+ self.filter_lists.pop(filter_list.name)
+ self.unsubscribe(filter_list)
+
+ await bot.instance.api_client.delete(f"bot/filter/filter_lists/{list_id}")
+ log.info(f"Successfully deleted the {list_description} filterlist.")
+ await message.edit(content=f"✅ The {list_description} list has been deleted.")
+
+ result = await self._resolve_list_type_and_name(ctx, list_type, list_name)
+ if result is None:
+ return
+ list_type, filter_list = result
+ list_id = filter_list[list_type].id
+ list_description = filter_list[list_type].label
+ await ctx.reply(
+ f"Are you sure you want to delete the {list_description} list?",
+ view=DeleteConfirmationView(ctx.author, delete_list)
+ )
+
+ # endregion
+ # region: utility commands
+
+ @command(name="filter_report")
+ async def force_send_weekly_report(self, ctx: Context) -> None:
+ """Respond with a list of auto-infractions added in the last 7 days."""
+ await self.send_weekly_auto_infraction_report(ctx.channel)
+
+ # endregion
+ # region: helper functions
+
+ def _load_raw_filter_list(self, list_data: dict) -> AtomicList | None:
+ """Load the raw list data to the cog."""
+ list_name = list_data["name"]
+ if list_name not in self.filter_lists:
+ if list_name not in filter_list_types:
+ if list_name not in self.already_warned:
+ log.warning(
+ f"A filter list named {list_name} was loaded from the database, but it has no matching class."
+ )
+ self.already_warned.add(list_name)
+ return None
+ self.filter_lists[list_name] = filter_list_types[list_name](self)
+ return self.filter_lists[list_name].add_list(list_data)
+
+ async def _fetch_or_generate_filtering_webhook(self) -> discord.Webhook | None:
+ """Find the existing filtering webhook, or generate a new one with the filtering avatar."""
+ alerts_channel = self.bot.get_guild(Guild.id).get_channel(Channels.mod_alerts)
+ # Try to find an existing webhook.
+ for webhook in await alerts_channel.webhooks():
+ if webhook.name == WEBHOOK_NAME and webhook.user == self.bot.user and webhook.is_authenticated():
+ log.trace(f"Found existing filters webhook with ID {webhook.id}.")
+ return webhook
+
+ # Download the filtering avatar from the branding repository.
+ webhook_icon = None
+ async with self.bot.http_session.get(WEBHOOK_ICON_URL, params=PARAMS, headers=HEADERS) as response:
+ if response.status == 200:
+ log.debug("Successfully fetched filtering webhook icon, reading payload.")
+ webhook_icon = await response.read()
+ else:
+ log.warning(f"Failed to fetch filtering webhook icon due to status: {response.status}")
+
+ # Generate a new webhook.
+ try:
+ webhook = await alerts_channel.create_webhook(name=WEBHOOK_NAME, avatar=webhook_icon)
+ log.trace(f"Generated new filters webhook with ID {webhook.id}.")
+ return webhook
+ except HTTPException as e:
+ log.error(f"Failed to create filters webhook: {e}")
+ return None
+
+ async def _resolve_action(
+ self, ctx: FilterContext
+ ) -> tuple[ActionSettings | None, dict[FilterList, list[str]], dict[AtomicList, list[Filter]]]:
+ """
+ Return the actions that should be taken for all filter lists in the given context.
+
+ Additionally, a message is possibly provided from each filter list describing the triggers,
+ which should be relayed to the moderators.
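+
+ The returned triggers map each atomic list to the filters it triggered; this is used for
+ incrementing filter stats and for displaying match results.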
+ """ + actions = [] + messages = {} + triggers = {} + for filter_list in self._subscriptions[ctx.event]: + list_actions, list_message, list_triggers = await filter_list.actions_for(ctx) + triggers.update({filter_list[list_type]: filters for list_type, filters in list_triggers.items()}) + if list_actions: + actions.append(list_actions) + if list_message: + messages[filter_list] = list_message + + result_actions = None + if actions: + result_actions = reduce(ActionSettings.union, actions) + + return result_actions, messages, triggers + + async def _send_alert(self, ctx: FilterContext, triggered_filters: dict[FilterList, Iterable[str]]) -> None: + """Build an alert message from the filter context, and send it via the alert webhook.""" + if not self.webhook: + return + + name = f"{ctx.event.name.replace('_', ' ').title()} Filter" + embed = await build_mod_alert(ctx, triggered_filters) + # There shouldn't be more than 10, but if there are it's not very useful to send them all. + await self.webhook.send( + username=name, content=ctx.alert_content, embeds=[embed, *ctx.alert_embeds][:10], view=AlertView(ctx) + ) + + def _increment_stats(self, triggered_filters: dict[AtomicList, list[Filter]]) -> None: + """Increment the stats for every filter triggered.""" + for filters in triggered_filters.values(): + for filter_ in filters: + if isinstance(filter_, UniqueFilter): + self.bot.stats.incr(f"filters.{filter_.name}") + + async def _recently_alerted_name(self, member: discord.Member) -> bool: + """When it hasn't been `HOURS_BETWEEN_NICKNAME_ALERTS` since last alert, return False, otherwise True.""" + if last_alert := await self.name_alerts.get(member.id): + last_alert = arrow.get(last_alert) + if arrow.utcnow() - last_alert < datetime.timedelta(days=HOURS_BETWEEN_NICKNAME_ALERTS): + log.trace(f"Last alert was too recent for {member}'s nickname.") + return True + + return False + + @lock_arg("filtering.check_bad_name", "ctx", attrgetter("author.id")) + async def _check_bad_name(self, ctx: FilterContext) -> None: + """Check filter triggers in the passed context - a member's display name.""" + if await self._recently_alerted_name(ctx.author): + return + + name = ctx.content + normalised_name = unicodedata.normalize("NFKC", name) + cleaned_normalised_name = "".join([c for c in normalised_name if not unicodedata.combining(c)]) + + # Run filters against normalised, cleaned normalised and the original name, + # in case there are filters for one but not another. + names_to_check = (name, normalised_name, cleaned_normalised_name) + + new_ctx = ctx.replace(content=" ".join(names_to_check)) + result_actions, list_messages, triggers = await self._resolve_action(new_ctx) + if result_actions: + await result_actions.action(ctx) + if ctx.send_alert: + await self._send_alert(ctx, list_messages) # `ctx` has the original content. + # Update time when alert sent + await self.name_alerts.set(ctx.author.id, arrow.utcnow().timestamp()) + self._increment_stats(triggers) + + async def _resolve_list_type_and_name( + self, ctx: Context, list_type: ListType | None = None, list_name: str | None = None, *, exclude: str = "" + ) -> tuple[ListType, FilterList] | None: + """Prompt the user to complete the list type or list name if one of them is missing.""" + if list_name is None: + args = [list_type] if exclude != "list_type" else [] + await ctx.send( + "The **list_name** argument is unspecified. 
Please pick a value from the options below:", + view=ArgumentCompletionView(ctx, args, "list_name", list(self.filter_lists), 1, None) + ) + return None + + filter_list = self._get_list_by_name(list_name) + if list_type is None: + if len(filter_list) > 1: + args = [list_name] if exclude != "list_name" else [] + await ctx.send( + "The **list_type** argument is unspecified. Please pick a value from the options below:", + view=ArgumentCompletionView( + ctx, args, "list_type", [option.name for option in ListType], 0, list_type_converter + ) + ) + return None + list_type = list(filter_list)[0] + return list_type, filter_list + + def _get_list_by_name(self, list_name: str) -> FilterList: + """Get a filter list by its name, or raise an error if there's no such list.""" + log.trace(f"Getting the filter list matching the name {list_name}") + filter_list = self.filter_lists.get(list_name) + if not filter_list: + if list_name.endswith("s"): # The user may have attempted to use the plural form. + filter_list = self.filter_lists.get(list_name[:-1]) + if not filter_list: + raise BadArgument(f"There's no filter list named {list_name!r}.") + log.trace(f"Found list named {filter_list.name}") + return filter_list + + @staticmethod + async def _send_list(ctx: Context, filter_list: FilterList, list_type: ListType) -> None: + """Show the list of filters identified by the list name and type.""" + if list_type not in filter_list: + await ctx.send(f":x: There is no list of {past_tense(list_type.name.lower())} {filter_list.name}s.") + return + + lines = list(map(str, filter_list[list_type].filters.values())) + log.trace(f"Sending a list of {len(lines)} filters.") + + embed = Embed(colour=Colour.blue()) + embed.set_author(name=f"List of {filter_list[list_type].label}s ({len(lines)} total)") + + await LinePaginator.paginate(lines, ctx, embed, max_lines=15, empty=False, reply=True) + + def _get_filter_by_id(self, id_: int) -> Optional[tuple[Filter, FilterList, ListType]]: + """Get the filter object corresponding to the provided ID, along with its containing list and list type.""" + for filter_list in self.filter_lists.values(): + for list_type, sublist in filter_list.items(): + if id_ in sublist.filters: + return sublist.filters[id_], filter_list, list_type + + async def _add_filter( + self, + ctx: Context, + noui: Optional[Literal["noui"]], + list_type: ListType, + filter_list: FilterList, + content: str, + description_and_settings: Optional[str] = None + ) -> None: + """Add a filter to the database.""" + # Validations. + if list_type not in filter_list: + await ctx.reply(f":x: There is no list of {past_tense(list_type.name.lower())} {filter_list.name}s.") + return + filter_type = filter_list.get_filter_type(content) + if not filter_type: + await ctx.reply(f":x: Could not find a filter type appropriate for `{content}`.") + return + # Parse the description and settings. + description, settings, filter_settings = description_and_settings_converter( + filter_list, + list_type, + filter_type, + self.loaded_settings, + self.loaded_filter_settings, + description_and_settings + ) + + if noui: # Add directly with no UI. + try: + await self._post_new_filter( + ctx.message, filter_list, list_type, filter_type, content, description, settings, filter_settings + ) + except ResponseCodeError as e: + await ctx.reply(embed=format_response_error(e)) + except ValueError as e: + raise BadArgument(str(e)) + return + # Bring up the UI. 
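+ # The embed previews the filter as it will be added; the view lets the invoker adjust the
+ # settings before confirming with `_post_new_filter`.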
+ embed = Embed(colour=Colour.blue())
+ embed.description = f"`{content}`" if content else "*No content*"
+ if description:
+ embed.description += f" - {description}"
+ embed.set_author(
+ name=f"New Filter - {filter_list[list_type].label}".title())
+ embed.set_footer(text=(
+ "Field names with an asterisk have values which override the defaults of the containing filter list. "
+ f"To view all defaults of the list, "
+ f"run `{constants.Bot.prefix}filterlist describe {list_type.name} {filter_list.name}`."
+ ))
+
+ view = filters_ui.FilterEditView(
+ filter_list,
+ list_type,
+ filter_type,
+ content,
+ description,
+ settings,
+ filter_settings,
+ self.loaded_settings,
+ self.loaded_filter_settings,
+ ctx.author,
+ embed,
+ self._post_new_filter
+ )
+ await ctx.send(embed=embed, reference=ctx.message, view=view)
+
+ @staticmethod
+ def _identical_filters_message(content: str, filter_list: FilterList, list_type: ListType, filter_: Filter) -> str:
+ """Return a warning message listing any other filters in the list whose content is identical to the given content."""
+ if list_type not in filter_list:
+ return ""
+ duplicates = [
+ f for f in filter_list[list_type].filters.values()
+ if f.content == content and f.id != filter_.id
+ ]
+ msg = ""
+ if duplicates:
+ msg = f"\n:warning: The filter(s) #{', #'.join(str(dup.id) for dup in duplicates)} have the same content. "
+ msg += "Please make sure this is intentional."
+
+ return msg
+
+ @staticmethod
+ async def _maybe_alert_auto_infraction(
+ filter_list: FilterList, list_type: ListType, filter_: Filter, old_filter: Filter | None = None
+ ) -> None:
+ """If the filter is new and applies an auto-infraction, or was edited to apply a different one, log it."""
+ infraction_type = filter_.overrides[0].get("infraction_type")
+ if not infraction_type:
+ infraction_type = filter_list[list_type].default("infraction_type")
+ if old_filter:
+ old_infraction_type = old_filter.overrides[0].get("infraction_type")
+ if not old_infraction_type:
+ old_infraction_type = filter_list[list_type].default("infraction_type")
+ if infraction_type == old_infraction_type:
+ return
+
+ if infraction_type != Infraction.NONE:
+ filter_log = bot.instance.get_channel(Channels.filter_log)
+ if filter_log:
+ await filter_log.send(
+ f":warning: Heads up! The new {filter_list[list_type].label} filter "
+ f"({filter_}) will automatically {infraction_type.name.lower()} users."
+ )
+
+ async def _post_new_filter(
+ self,
+ msg: Message,
+ filter_list: FilterList,
+ list_type: ListType,
+ filter_type: type[Filter],
+ content: str,
+ description: str | None,
+ settings: dict,
+ filter_settings: dict
+ ) -> None:
+ """POST the data of the new filter to the site API."""
+ valid, error_msg = filter_type.validate_filter_settings(filter_settings)
+ if not valid:
+ raise BadArgument(f"Error while validating filter-specific settings: {error_msg}")
+
+ content, description = await filter_type.process_input(content, description)
+
+ list_id = filter_list[list_type].id
+ description = description or None
+ payload = {
+ "filter_list": list_id, "content": content, "description": description,
+ "additional_field": json.dumps(filter_settings), **settings
+ }
+ response = await bot.instance.api_client.post('bot/filter/filters', json=to_serializable(payload))
+ new_filter = filter_list.add_filter(list_type, response)
+ if new_filter:
+ log.info(f"Added new filter: {new_filter}.")
+ await self._maybe_alert_auto_infraction(filter_list, list_type, new_filter)
+ extra_msg = Filtering._identical_filters_message(content, filter_list, list_type, new_filter)
+ await msg.reply(f"✅ Added filter: {new_filter}" + extra_msg)
+ else:
+ await msg.reply(":x: Could not create the filter. Are you sure it's implemented?")
+
+ async def _patch_filter(
+ self,
+ filter_: Filter,
+ msg: Message,
+ filter_list: FilterList,
+ list_type: ListType,
+ filter_type: type[Filter],
+ content: str,
+ description: str | None,
+ settings: dict,
+ filter_settings: dict
+ ) -> None:
+ """PATCH the new data of the filter to the site API."""
+ valid, error_msg = filter_type.validate_filter_settings(filter_settings)
+ if not valid:
+ raise BadArgument(f"Error while validating filter-specific settings: {error_msg}")
+
+ if content != filter_.content:
+ content, description = await filter_type.process_input(content, description)
+
+ # If the setting is not in `settings`, the override was either removed, or there wasn't one in the first place.
+ for current_settings in (filter_.actions, filter_.validations):
+ if current_settings:
+ for setting_entry in current_settings.values():
+ settings.update({setting: None for setting in setting_entry.dict() if setting not in settings})
+
+ # Even though the list ID remains unchanged, it still needs to be provided for correct serializer validation.
+ list_id = filter_list[list_type].id
+ description = description or None
+ payload = {
+ "filter_list": list_id, "content": content, "description": description,
+ "additional_field": json.dumps(filter_settings), **settings
+ }
+ response = await bot.instance.api_client.patch(
+ f'bot/filter/filters/{filter_.id}', json=to_serializable(payload)
+ )
+ # Return type can be None, but if it's being edited then it's not supposed to be.
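+ # Re-adding a filter with the same ID replaces the cached copy with the patched version.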
+ edited_filter = filter_list.add_filter(list_type, response) + log.info(f"Successfully patched filter {edited_filter}.") + await self._maybe_alert_auto_infraction(filter_list, list_type, edited_filter, filter_) + extra_msg = Filtering._identical_filters_message(content, filter_list, list_type, edited_filter) + await msg.reply(f"✅ Edited filter: {edited_filter}" + extra_msg) + + async def _post_filter_list(self, msg: Message, list_name: str, list_type: ListType, settings: dict) -> None: + """POST the new data of the filter list to the site API.""" + payload = {"name": list_name, "list_type": list_type.value, **to_serializable(settings)} + filterlist_name = f"{past_tense(list_type.name.lower())} {list_name}" + response = await bot.instance.api_client.post('bot/filter/filter_lists', json=payload) + log.info(f"Successfully posted the new {filterlist_name} filterlist.") + self._load_raw_filter_list(response) + await msg.reply(f"✅ Added a new filter list: {filterlist_name}") + + @staticmethod + async def _patch_filter_list(msg: Message, filter_list: FilterList, list_type: ListType, settings: dict) -> None: + """PATCH the new data of the filter list to the site API.""" + list_id = filter_list[list_type].id + response = await bot.instance.api_client.patch( + f'bot/filter/filter_lists/{list_id}', json=to_serializable(settings) + ) + log.info(f"Successfully patched the {filter_list[list_type].label} filterlist, reloading...") + filter_list.pop(list_type, None) + filter_list.add_list(response) + await msg.reply(f"✅ Edited filter list: {filter_list[list_type].label}") + + def _filter_match_query( + self, filter_: Filter, settings_query: dict, filter_settings_query: dict, differ_by_default: set[str] + ) -> bool: + """Return whether the given filter matches the query.""" + override_matches = set() + overrides, _ = filter_.overrides + for setting_name, setting_value in settings_query.items(): + if setting_name not in overrides: + continue + if repr_equals(overrides[setting_name], setting_value): + override_matches.add(setting_name) + else: # If an override doesn't match then the filter doesn't match. + return False + if not (differ_by_default <= override_matches): # The overrides didn't cover for the default mismatches. + return False + + filter_settings = filter_.extra_fields.dict() if filter_.extra_fields else {} + # If the dict changes then some fields were not the same. + return (filter_settings | filter_settings_query) == filter_settings + + def _search_filter_list( + self, atomic_list: AtomicList, filter_type: type[Filter] | None, settings: dict, filter_settings: dict + ) -> list[Filter]: + """Find all filters in the filter list which match the settings.""" + # If the default answers are known, only the overrides need to be checked for each filter. 
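+ # Split the queried settings by whether the list's defaults already satisfy them: a filter
+ # matches iff its overrides agree with the query and cover every setting that differs by default.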
+ all_defaults = atomic_list.defaults.dict()
+ match_by_default = set()
+ differ_by_default = set()
+ for setting_name, setting_value in settings.items():
+ if repr_equals(all_defaults[setting_name], setting_value):
+ match_by_default.add(setting_name)
+ else:
+ differ_by_default.add(setting_name)
+
+ result_filters = []
+ for filter_ in atomic_list.filters.values():
+ if filter_type and not isinstance(filter_, filter_type):
+ continue
+ if self._filter_match_query(filter_, settings, filter_settings, differ_by_default):
+ result_filters.append(filter_)
+
+ return result_filters
+
+ async def _search_filters(
+ self, message: Message, filter_type: type[Filter] | None, settings: dict, filter_settings: dict
+ ) -> None:
+ """Find all filters which match the settings and display them."""
+ lines = []
+ result_count = 0
+ for filter_list in self.filter_lists.values():
+ if filter_type and filter_type not in filter_list.filter_types:
+ continue
+ for atomic_list in filter_list.values():
+ list_results = self._search_filter_list(atomic_list, filter_type, settings, filter_settings)
+ if list_results:
+ lines.append(f"**{atomic_list.label.title()}**")
+ lines.extend(map(str, list_results))
+ lines.append("")
+ result_count += len(list_results)
+
+ embed = Embed(colour=Colour.blue())
+ embed.set_author(name=f"Search Results ({result_count} total)")
+ ctx = await bot.instance.get_context(message)
+ await LinePaginator.paginate(lines, ctx, embed, max_lines=15, empty=False, reply=True)
+
+ async def _delete_offensive_msg(self, msg: Mapping[str, int]) -> None:
+ """Delete an offensive message, and then delete it from the DB."""
+ try:
+ channel = self.bot.get_channel(msg['channel_id'])
+ if channel:
+ msg_obj = await channel.fetch_message(msg['id'])
+ await msg_obj.delete()
+ except discord.NotFound:
+ log.info(
+ f"Tried to delete message {msg['id']}, but the message can't be found "
+ f"(it has probably already been deleted)."
+ )
+ except HTTPException as e:
+ log.warning(f"Failed to delete message {msg['id']}: status {e.status}")
+
+ await self.bot.api_client.delete(f'bot/offensive-messages/{msg["id"]}')
+ log.info(f"Deleted the offensive message with id {msg['id']}.")
+
+ def _schedule_msg_delete(self, msg: dict) -> None:
+ """Delete an offensive message once its deletion date is reached."""
+ delete_at = arrow.get(msg['delete_date']).datetime
+ self.delete_scheduler.schedule_at(delete_at, msg['id'], self._delete_offensive_msg(msg))
+
+ async def _maybe_schedule_msg_delete(self, ctx: FilterContext, actions: ActionSettings | None) -> None:
+ """Post the message to the database and schedule it for deletion if it's not set to be deleted already."""
+ msg = ctx.message
+ if not msg or not actions or actions.get_setting("remove_context", True):
+ return
+
+ delete_date = (msg.created_at + OFFENSIVE_MSG_DELETE_TIME).isoformat()
+ data = {
+ 'id': msg.id,
+ 'channel_id': msg.channel.id,
+ 'delete_date': delete_date
+ }
+
+ try:
+ await self.bot.api_client.post('bot/offensive-messages', json=data)
+ except ResponseCodeError as e:
+ if e.status == 400 and "already exists" in e.response_json.get("id", [""])[0]:
+ log.debug(f"Offensive message {msg.id} already exists.")
+ else:
+ log.error(f"Offensive message {msg.id} failed to post: {e}")
+ else:
+ self._schedule_msg_delete(data)
+ log.trace(f"Offensive message {msg.id} will be deleted on {delete_date}")
+
+ # endregion
+ # region: tasks
+
+ @tasks.loop(time=datetime.time(hour=18))
+ async def weekly_auto_infraction_report_task(self) -> None:
+ """Trigger an auto-infraction report to be sent if it is the desired day of the week (WEEKLY_REPORT_ISO_DAY)."""
+ if arrow.utcnow().isoweekday() != WEEKLY_REPORT_ISO_DAY:
+ return
+
+ await self.send_weekly_auto_infraction_report()
+
+ async def send_weekly_auto_infraction_report(
+ self, channel: discord.TextChannel | discord.Thread | None = None
+ ) -> None:
+ """
+ Send a list of auto-infractions added in the last 7 days to the specified channel.
+
+ If `channel` is not specified, it is sent to #mod-meta.
+ """
+ log.trace("Preparing weekly auto-infraction report.")
+ seven_days_ago = arrow.utcnow().shift(days=-7)
+ if not channel:
+ log.info("Auto-infraction report: no channel specified, defaulting to #mod-meta.")
+ channel = self.bot.get_channel(Channels.mod_meta)
+ elif not is_mod_channel(channel):
+ # Silently fail if output is going to be a non-mod channel.
+ log.info(f"Auto-infraction report: the channel {channel} is not a mod channel.") + return + + found_filters = defaultdict(list) + # Extract all auto-infraction filters added in the past 7 days from each filter type + for filter_list in self.filter_lists.values(): + for sublist in filter_list.values(): + default_infraction_type = sublist.default("infraction_type") + for filter_ in sublist.filters.values(): + if max(filter_.created_at, filter_.updated_at) < seven_days_ago: + continue + infraction_type = filter_.overrides[0].get("infraction_type") + if ( + (infraction_type and infraction_type != Infraction.NONE) + or (not infraction_type and default_infraction_type != Infraction.NONE) + ): + found_filters[sublist.label].append((filter_, infraction_type or default_infraction_type)) + + # Nicely format the output so each filter list type is grouped + lines = [f"**Auto-infraction filters added since {seven_days_ago.format('YYYY-MM-DD')}**"] + for list_label, filters in found_filters.items(): + lines.append("\n".join([f"**{list_label.title()}**"]+[f"{filter_} ({infr})" for filter_, infr in filters])) + + if len(lines) == 1: + lines.append("Nothing to show") + + await channel.send("\n\n".join(lines)) + log.info("Successfully sent auto-infraction report.") + + # endregion + + async def cog_unload(self) -> None: + """Cancel the weekly auto-infraction filter report and deletion scheduling on cog unload.""" + self.weekly_auto_infraction_report_task.cancel() + self.delete_scheduler.cancel_all() + + +async def setup(bot: Bot) -> None: + """Load the Filtering cog.""" + await bot.add_cog(Filtering(bot)) diff --git a/bot/exts/filters/antimalware.py b/bot/exts/filters/antimalware.py deleted file mode 100644 index ff39700a6..000000000 --- a/bot/exts/filters/antimalware.py +++ /dev/null @@ -1,106 +0,0 @@ -import typing as t -from os.path import splitext - -from discord import Embed, Message, NotFound -from discord.ext.commands import Cog - -from bot.bot import Bot -from bot.constants import Channels, Filter, URLs -from bot.exts.events.code_jams._channels import CATEGORY_NAME as JAM_CATEGORY_NAME -from bot.log import get_logger - -log = get_logger(__name__) - -PY_EMBED_DESCRIPTION = ( - "It looks like you tried to attach a Python file - " - f"please use a code-pasting service such as {URLs.site_schema}{URLs.site_paste}" -) - -TXT_LIKE_FILES = {".txt", ".csv", ".json"} -TXT_EMBED_DESCRIPTION = ( - "You either uploaded a `{blocked_extension}` file or entered a message that was too long. " - f"Please use our [paste bin]({URLs.site_schema}{URLs.site_paste}) instead." -) - -DISALLOWED_EMBED_DESCRIPTION = ( - "It looks like you tried to attach file type(s) that we do not allow ({blocked_extensions_str}). " - "We currently allow the following file types: **{joined_whitelist}**.\n\n" - "Feel free to ask in {meta_channel_mention} if you think this is a mistake." 
-) - - -class AntiMalware(Cog): - """Delete messages which contain attachments with non-whitelisted file extensions.""" - - def __init__(self, bot: Bot): - self.bot = bot - - def _get_whitelisted_file_formats(self) -> list: - """Get the file formats currently on the whitelist.""" - return self.bot.filter_list_cache['FILE_FORMAT.True'].keys() - - def _get_disallowed_extensions(self, message: Message) -> t.Iterable[str]: - """Get an iterable containing all the disallowed extensions of attachments.""" - file_extensions = {splitext(attachment.filename.lower())[1] for attachment in message.attachments} - extensions_blocked = file_extensions - set(self._get_whitelisted_file_formats()) - return extensions_blocked - - @Cog.listener() - async def on_message(self, message: Message) -> None: - """Identify messages with prohibited attachments.""" - # Return when message don't have attachment and don't moderate DMs - if not message.attachments or not message.guild: - return - - # Ignore webhook and bot messages - if message.webhook_id or message.author.bot: - return - - # Ignore code jam channels - if getattr(message.channel, "category", None) and message.channel.category.name == JAM_CATEGORY_NAME: - return - - # Check if user is staff, if is, return - # Since we only care that roles exist to iterate over, check for the attr rather than a User/Member instance - if hasattr(message.author, "roles") and any(role.id in Filter.role_whitelist for role in message.author.roles): - return - - embed = Embed() - extensions_blocked = self._get_disallowed_extensions(message) - blocked_extensions_str = ', '.join(extensions_blocked) - if ".py" in extensions_blocked: - # Short-circuit on *.py files to provide a pastebin link - embed.description = PY_EMBED_DESCRIPTION - elif extensions := TXT_LIKE_FILES.intersection(extensions_blocked): - # Work around Discord AutoConversion of messages longer than 2000 chars to .txt - cmd_channel = self.bot.get_channel(Channels.bot_commands) - embed.description = TXT_EMBED_DESCRIPTION.format( - blocked_extension=extensions.pop(), - cmd_channel_mention=cmd_channel.mention - ) - elif extensions_blocked: - meta_channel = self.bot.get_channel(Channels.meta) - embed.description = DISALLOWED_EMBED_DESCRIPTION.format( - joined_whitelist=', '.join(self._get_whitelisted_file_formats()), - blocked_extensions_str=blocked_extensions_str, - meta_channel_mention=meta_channel.mention, - ) - - if embed.description: - log.info( - f"User '{message.author}' ({message.author.id}) uploaded blacklisted file(s): {blocked_extensions_str}", - extra={"attachment_list": [attachment.filename for attachment in message.attachments]} - ) - - await message.channel.send(f"Hey {message.author.mention}!", embed=embed) - - # Delete the offending message: - try: - await message.delete() - except NotFound: - log.info(f"Tried to delete message `{message.id}`, but message could not be found.") - - -async def setup(bot: Bot) -> None: - """Load the AntiMalware cog.""" - await bot.add_cog(AntiMalware(bot)) diff --git a/bot/exts/filters/antispam.py b/bot/exts/filters/antispam.py deleted file mode 100644 index d7783292d..000000000 --- a/bot/exts/filters/antispam.py +++ /dev/null @@ -1,324 +0,0 @@ -import asyncio -from collections import defaultdict -from collections.abc import Mapping -from dataclasses import dataclass, field -from datetime import timedelta -from itertools import takewhile -from operator import attrgetter, itemgetter -from typing import Dict, Iterable, List, Set - -import arrow -from discord import Colour, 
Member, Message, MessageType, NotFound, Object, TextChannel -from discord.ext.commands import Cog -from pydis_core.utils import scheduling - -from bot import rules -from bot.bot import Bot -from bot.constants import ( - AntiSpam as AntiSpamConfig, Channels, Colours, DEBUG_MODE, Event, Filter, Guild as GuildConfig, Icons -) -from bot.converters import Duration -from bot.exts.events.code_jams._channels import CATEGORY_NAME as JAM_CATEGORY_NAME -from bot.exts.moderation.modlog import ModLog -from bot.log import get_logger -from bot.utils import lock -from bot.utils.message_cache import MessageCache -from bot.utils.messages import format_user, send_attachments - -log = get_logger(__name__) - -RULE_FUNCTION_MAPPING = { - 'attachments': rules.apply_attachments, - 'burst': rules.apply_burst, - # burst shared is temporarily disabled due to a bug - # 'burst_shared': rules.apply_burst_shared, - 'chars': rules.apply_chars, - 'discord_emojis': rules.apply_discord_emojis, - 'duplicates': rules.apply_duplicates, - 'links': rules.apply_links, - 'mentions': rules.apply_mentions, - 'newlines': rules.apply_newlines, - 'role_mentions': rules.apply_role_mentions, -} - - -@dataclass -class DeletionContext: - """Represents a Deletion Context for a single spam event.""" - - members: frozenset[Member] - triggered_in: TextChannel - channels: set[TextChannel] = field(default_factory=set) - rules: Set[str] = field(default_factory=set) - messages: Dict[int, Message] = field(default_factory=dict) - attachments: List[List[str]] = field(default_factory=list) - - async def add(self, rule_name: str, channels: Iterable[TextChannel], messages: Iterable[Message]) -> None: - """Adds new rule violation events to the deletion context.""" - self.rules.add(rule_name) - - self.channels.update(channels) - - for message in messages: - if message.id not in self.messages: - self.messages[message.id] = message - - # Re-upload attachments - destination = message.guild.get_channel(Channels.attachment_log) - urls = await send_attachments(message, destination, link_large=False) - self.attachments.append(urls) - - async def upload_messages(self, actor_id: int, modlog: ModLog) -> None: - """Method that takes care of uploading the queue and posting modlog alert.""" - triggered_by_users = ", ".join(format_user(m) for m in self.members) - triggered_in_channel = f"**Triggered in:** {self.triggered_in.mention}\n" if len(self.channels) > 1 else "" - channels_description = ", ".join(channel.mention for channel in self.channels) - - mod_alert_message = ( - f"**Triggered by:** {triggered_by_users}\n" - f"{triggered_in_channel}" - f"**Channels:** {channels_description}\n" - f"**Rules:** {', '.join(rule for rule in self.rules)}\n" - ) - - messages_as_list = list(self.messages.values()) - first_message = messages_as_list[0] - # For multiple messages and those with attachments or excessive newlines, use the logs API - if any(( - len(messages_as_list) > 1, - len(first_message.attachments) > 0, - first_message.content.count('\n') > 15 - )): - url = await modlog.upload_log(self.messages.values(), actor_id, self.attachments) - mod_alert_message += f"A complete log of the offending messages can be found [here]({url})" - else: - mod_alert_message += "Message:\n" - content = first_message.clean_content - remaining_chars = 4080 - len(mod_alert_message) - - if len(content) > remaining_chars: - url = await modlog.upload_log([first_message], actor_id, self.attachments) - log_site_msg = f"The full message can be found [here]({url})" - content = 
content[:remaining_chars - (3 + len(log_site_msg))] + "..." - - mod_alert_message += content - - await modlog.send_log_message( - content=", ".join(str(m.id) for m in self.members), # quality-of-life improvement for mobile moderators - icon_url=Icons.filtering, - colour=Colour(Colours.soft_red), - title="Spam detected!", - text=mod_alert_message, - thumbnail=first_message.author.display_avatar.url, - channel_id=Channels.mod_alerts, - ping_everyone=AntiSpamConfig.ping_everyone - ) - - -class AntiSpam(Cog): - """Cog that controls our anti-spam measures.""" - - def __init__(self, bot: Bot, validation_errors: Dict[str, str]) -> None: - self.bot = bot - self.validation_errors = validation_errors - role_id = AntiSpamConfig.punishment['role_id'] - self.muted_role = Object(role_id) - self.expiration_date_converter = Duration() - - self.message_deletion_queue = dict() - - # Fetch the rule configuration with the highest rule interval. - max_interval_config = max( - AntiSpamConfig.rules.values(), - key=itemgetter('interval') - ) - self.max_interval = max_interval_config['interval'] - self.cache = MessageCache(AntiSpamConfig.cache_size, newest_first=True) - - @property - def mod_log(self) -> ModLog: - """Allows for easy access of the ModLog cog.""" - return self.bot.get_cog("ModLog") - - async def cog_load(self) -> None: - """Unloads the cog and alerts admins if configuration validation failed.""" - await self.bot.wait_until_guild_available() - if self.validation_errors: - body = "**The following errors were encountered:**\n" - body += "\n".join(f"- {error}" for error in self.validation_errors.values()) - body += "\n\n**The cog has been unloaded.**" - - await self.mod_log.send_log_message( - title="Error: AntiSpam configuration validation failed!", - text=body, - ping_everyone=True, - icon_url=Icons.token_removed, - colour=Colour.red() - ) - - await self.bot.remove_cog(self.__class__.__name__) - return - - @Cog.listener() - async def on_message(self, message: Message) -> None: - """Applies the antispam rules to each received message.""" - if ( - not message.guild - or message.guild.id != GuildConfig.id - or message.author.bot - or (getattr(message.channel, "category", None) and message.channel.category.name == JAM_CATEGORY_NAME) - or (message.channel.id in Filter.channel_whitelist and not DEBUG_MODE) - or (any(role.id in Filter.role_whitelist for role in message.author.roles) and not DEBUG_MODE) - or message.type == MessageType.auto_moderation_action - ): - return - - self.cache.append(message) - - earliest_relevant_at = arrow.utcnow() - timedelta(seconds=self.max_interval) - relevant_messages = list(takewhile(lambda msg: msg.created_at > earliest_relevant_at, self.cache)) - - for rule_name in AntiSpamConfig.rules: - rule_config = AntiSpamConfig.rules[rule_name] - rule_function = RULE_FUNCTION_MAPPING[rule_name] - - # Create a list of messages that were sent in the interval that the rule cares about. - latest_interesting_stamp = arrow.utcnow() - timedelta(seconds=rule_config['interval']) - messages_for_rule = list( - takewhile(lambda msg: msg.created_at > latest_interesting_stamp, relevant_messages) # noqa: B023 - ) - - result = await rule_function(message, messages_for_rule, rule_config) - - # If the rule returns `None`, that means the message didn't violate it. - # If it doesn't, it returns a tuple in the form `(str, Iterable[discord.Member])` - # which contains the reason for why the message violated the rule and - # an iterable of all members that violated the rule. 
- if result is not None: - self.bot.stats.incr(f"mod_alerts.{rule_name}") - reason, members, relevant_messages = result - full_reason = f"`{rule_name}` rule: {reason}" - - # If there's no spam event going on for this channel, start a new Message Deletion Context - authors_set = frozenset(members) - if authors_set not in self.message_deletion_queue: - log.trace(f"Creating queue for members `{authors_set}`") - self.message_deletion_queue[authors_set] = DeletionContext(authors_set, message.channel) - scheduling.create_task( - self._process_deletion_context(authors_set), - name=f"AntiSpam._process_deletion_context({authors_set})" - ) - - # Add the relevant of this trigger to the Deletion Context - await self.message_deletion_queue[authors_set].add( - rule_name=rule_name, - channels=set(message.channel for message in relevant_messages), - messages=relevant_messages - ) - - for member in members: - scheduling.create_task( - self.punish(message, member, full_reason), - name=f"AntiSpam.punish(message={message.id}, member={member.id}, rule={rule_name})" - ) - - await self.maybe_delete_messages(relevant_messages) - break - - @lock.lock_arg("antispam.punish", "member", attrgetter("id")) - async def punish(self, msg: Message, member: Member, reason: str) -> None: - """Punishes the given member for triggering an antispam rule.""" - if not any(role.id == self.muted_role.id for role in member.roles): - remove_role_after = AntiSpamConfig.punishment['remove_after'] - - # Get context and make sure the bot becomes the actor of infraction by patching the `author` attributes - context = await self.bot.get_context(msg) - context.author = self.bot.user - - # Since we're going to invoke the tempmute command directly, we need to manually call the converter. - dt_remove_role_after = await self.expiration_date_converter.convert(context, f"{remove_role_after}S") - await context.invoke( - self.bot.get_command('tempmute'), - member, - dt_remove_role_after, - reason=reason - ) - - async def maybe_delete_messages(self, messages: List[Message]) -> None: - """Cleans the messages if cleaning is configured.""" - if AntiSpamConfig.clean_offending: - # If we have more than one message, we can use bulk delete. - if len(messages) > 1: - message_ids = [message.id for message in messages] - self.mod_log.ignore(Event.message_delete, *message_ids) - channel_messages = defaultdict(list) - for message in messages: - channel_messages[message.channel].append(message) - for channel, messages in channel_messages.items(): - try: - await channel.delete_messages(messages) - except NotFound: - # In the rare case where we found messages matching the - # spam filter across multiple channels, it is possible - # that a single channel will only contain a single message - # to delete. If that should be the case, discord.py will - # use the "delete single message" endpoint instead of the - # bulk delete endpoint, and the single message deletion - # endpoint will complain if you give it that does not exist. - # As this means that we have no other message to delete in - # this channel (and message deletes work per-channel), - # we can just log an exception and carry on with business. - log.info(f"Tried to delete message `{messages[0].id}`, but message could not be found.") - - # Otherwise, the bulk delete endpoint will throw up. - # Delete the message directly instead. 
- else: - self.mod_log.ignore(Event.message_delete, messages[0].id) - try: - await messages[0].delete() - except NotFound: - log.info(f"Tried to delete message `{messages[0].id}`, but message could not be found.") - - async def _process_deletion_context(self, context_id: frozenset) -> None: - """Processes the Deletion Context queue.""" - log.trace("Sleeping before processing message deletion queue.") - await asyncio.sleep(10) - - if context_id not in self.message_deletion_queue: - log.error(f"Started processing deletion queue for context `{context_id}`, but it was not found!") - return - - deletion_context = self.message_deletion_queue.pop(context_id) - await deletion_context.upload_messages(self.bot.user.id, self.mod_log) - - @Cog.listener() - async def on_message_edit(self, before: Message, after: Message) -> None: - """Updates the message in the cache, if it's cached.""" - self.cache.update(after) - - -def validate_config(rules_: Mapping = AntiSpamConfig.rules) -> Dict[str, str]: - """Validates the antispam configs.""" - validation_errors = {} - for name, config in rules_.items(): - if name not in RULE_FUNCTION_MAPPING: - log.error( - f"Unrecognized antispam rule `{name}`. " - f"Valid rules are: {', '.join(RULE_FUNCTION_MAPPING)}" - ) - validation_errors[name] = f"`{name}` is not recognized as an antispam rule." - continue - for required_key in ('interval', 'max'): - if required_key not in config: - log.error( - f"`{required_key}` is required but was not " - f"set in rule `{name}`'s configuration." - ) - validation_errors[name] = f"Key `{required_key}` is required but not set for rule `{name}`" - return validation_errors - - -async def setup(bot: Bot) -> None: - """Validate the AntiSpam configs and load the AntiSpam cog.""" - validation_errors = validate_config() - await bot.add_cog(AntiSpam(bot, validation_errors)) diff --git a/bot/exts/filters/filter_lists.py b/bot/exts/filters/filter_lists.py deleted file mode 100644 index 538744204..000000000 --- a/bot/exts/filters/filter_lists.py +++ /dev/null @@ -1,359 +0,0 @@ -import datetime -import re -from collections import defaultdict -from typing import Optional - -import arrow -import discord -from discord.ext import tasks -from discord.ext.commands import BadArgument, Cog, Context, IDConverter, command, group, has_any_role -from pydis_core.site_api import ResponseCodeError - -from bot import constants -from bot.bot import Bot -from bot.constants import Channels, Colours -from bot.converters import ValidDiscordServerInvite, ValidFilterListType -from bot.log import get_logger -from bot.pagination import LinePaginator -from bot.utils.channel import is_mod_channel - -log = get_logger(__name__) -WEEKLY_REPORT_ISO_DAY = 3 # 1=Monday, 7=Sunday - - -class FilterLists(Cog): - """Commands for blacklisting and whitelisting things.""" - - methods_with_filterlist_types = [ - "allow_add", - "allow_delete", - "allow_get", - "deny_add", - "deny_delete", - "deny_get", - ] - - def __init__(self, bot: Bot) -> None: - self.bot = bot - - async def cog_load(self) -> None: - """Add the valid FilterList types to the docstrings, so they'll appear in !help invocations.""" - await self.bot.wait_until_guild_available() - self.weekly_autoban_report_task.start() - - # Add valid filterlist types to the docstrings - valid_types = await ValidFilterListType.get_valid_types(self.bot) - valid_types = [f"`{type_.lower()}`" for type_ in valid_types] - - for method_name in self.methods_with_filterlist_types: - command = getattr(self, method_name) - command.help = ( - 
f"{command.help}\n\nValid **list_type** values are {', '.join(valid_types)}." - ) - - async def _add_data( - self, - ctx: Context, - allowed: bool, - list_type: ValidFilterListType, - content: str, - comment: Optional[str] = None, - ) -> None: - """Add an item to a filterlist.""" - allow_type = "whitelist" if allowed else "blacklist" - - # If this is a guild invite, we gotta validate it. - if list_type == "GUILD_INVITE": - guild_data = await self._validate_guild_invite(ctx, content) - content = guild_data.get("id") - - # Some guild invites are autoban filters, which require the mod - # to set a comment which includes [autoban]. - # Having the guild name in the comment is still useful when reviewing - # filter list, so prepend it to the set comment in case some mod forgets. - guild_name_part = f'Guild "{guild_data["name"]}"' if "name" in guild_data else None - - comment = " - ".join( - comment_part - for comment_part in (guild_name_part, comment) - if comment_part - ) - - # If it's a file format, let's make sure it has a leading dot. - elif list_type == "FILE_FORMAT" and not content.startswith("."): - content = f".{content}" - - # If it's a filter token, validate the passed regex - elif list_type == "FILTER_TOKEN": - try: - re.compile(content) - except re.error as e: - await ctx.message.add_reaction("❌") - await ctx.send( - f"{ctx.author.mention} that's not a valid regex! " - f"Regex error message: {e.msg}." - ) - return - - # Try to add the item to the database - log.trace(f"Trying to add the {content} item to the {list_type} {allow_type}") - payload = { - "allowed": allowed, - "type": list_type, - "content": content, - "comment": comment, - } - - try: - item = await self.bot.api_client.post( - "bot/filter-lists", - json=payload - ) - except ResponseCodeError as e: - if e.status == 400: - await ctx.message.add_reaction("❌") - log.debug( - f"{ctx.author} tried to add data to a {allow_type}, but the API returned 400, " - "probably because the request violated the UniqueConstraint." - ) - raise BadArgument( - f"Unable to add the item to the {allow_type}. " - "The item probably already exists. Keep in mind that a " - "blacklist and a whitelist for the same item cannot co-exist, " - "and we do not permit any duplicates." - ) - raise - - # If it is an autoban trigger we send a warning in #filter-log - if comment and "[autoban]" in comment: - await self.bot.get_channel(Channels.filter_log).send( - f":warning: Heads-up! The new `{list_type}` filter " - f"`{content}` (`{comment}`) will automatically ban users." - ) - - # Insert the item into the cache - self.bot.insert_item_into_filter_list_cache(item) - await ctx.message.add_reaction("✅") - - async def _delete_data(self, ctx: Context, allowed: bool, list_type: ValidFilterListType, content: str) -> None: - """Remove an item from a filterlist.""" - allow_type = "whitelist" if allowed else "blacklist" - - # If this is a server invite, we need to convert it. - if list_type == "GUILD_INVITE" and not IDConverter()._get_id_match(content): - guild_data = await self._validate_guild_invite(ctx, content) - content = guild_data.get("id") - - # If it's a file format, let's make sure it has a leading dot. - elif list_type == "FILE_FORMAT" and not content.startswith("."): - content = f".{content}" - - # Find the content and delete it. 
- log.trace(f"Trying to delete the {content} item from the {list_type} {allow_type}") - item = self.bot.filter_list_cache[f"{list_type}.{allowed}"].get(content) - - if item is not None: - try: - await self.bot.api_client.delete( - f"bot/filter-lists/{item['id']}" - ) - del self.bot.filter_list_cache[f"{list_type}.{allowed}"][content] - await ctx.message.add_reaction("✅") - except ResponseCodeError as e: - log.debug( - f"{ctx.author} tried to delete an item with the id {item['id']}, but " - f"the API raised an unexpected error: {e}" - ) - await ctx.message.add_reaction("❌") - else: - await ctx.message.add_reaction("❌") - - async def _list_all_data(self, ctx: Context, allowed: bool, list_type: ValidFilterListType) -> None: - """Paginate and display all items in a filterlist.""" - allow_type = "whitelist" if allowed else "blacklist" - result = self.bot.filter_list_cache[f"{list_type}.{allowed}"] - - # Build a list of lines we want to show in the paginator - lines = [] - for content, metadata in result.items(): - line = f"• `{content}`" - - if comment := metadata.get("comment"): - line += f" - {comment}" - - lines.append(line) - lines = sorted(lines) - - # Build the embed - list_type_plural = list_type.lower().replace("_", " ").title() + "s" - embed = discord.Embed( - title=f"{allow_type.title()}ed {list_type_plural} ({len(result)} total)", - colour=Colours.blue - ) - log.trace(f"Trying to list {len(result)} items from the {list_type.lower()} {allow_type}") - - if result: - await LinePaginator.paginate(lines, ctx, embed, max_lines=15, empty=False) - else: - embed.description = "Hmmm, seems like there's nothing here yet." - await ctx.send(embed=embed) - await ctx.message.add_reaction("❌") - - async def _sync_data(self, ctx: Context) -> None: - """Syncs the filterlists with the API.""" - try: - log.trace("Attempting to sync FilterList cache with data from the API.") - await self.bot.cache_filter_list_data() - await ctx.message.add_reaction("✅") - except ResponseCodeError as e: - log.debug( - f"{ctx.author} tried to sync FilterList cache data but " - f"the API raised an unexpected error: {e}" - ) - await ctx.message.add_reaction("❌") - - @staticmethod - async def _validate_guild_invite(ctx: Context, invite: str) -> dict: - """ - Validates a guild invite, and returns the guild info as a dict. - - Will raise a BadArgument if the guild invite is invalid. - """ - log.trace(f"Attempting to validate whether or not {invite} is a guild invite.") - validator = ValidDiscordServerInvite() - guild_data = await validator.convert(ctx, invite) - - # If we make it this far without raising a BadArgument, the invite is - # valid. Let's return a dict of guild information. - log.trace(f"{invite} validated as server invite. 
Converting to ID.") - return guild_data - - @group(aliases=("allowlist", "allow", "al", "wl")) - async def whitelist(self, ctx: Context) -> None: - """Group for whitelisting commands.""" - if not ctx.invoked_subcommand: - await ctx.send_help(ctx.command) - - @group(aliases=("denylist", "deny", "bl", "dl")) - async def blacklist(self, ctx: Context) -> None: - """Group for blacklisting commands.""" - if not ctx.invoked_subcommand: - await ctx.send_help(ctx.command) - - @whitelist.command(name="add", aliases=("a", "set")) - async def allow_add( - self, - ctx: Context, - list_type: ValidFilterListType, - content: str, - *, - comment: Optional[str] = None, - ) -> None: - """Add an item to the specified allowlist.""" - await self._add_data(ctx, True, list_type, content, comment) - - @blacklist.command(name="add", aliases=("a", "set")) - async def deny_add( - self, - ctx: Context, - list_type: ValidFilterListType, - content: str, - *, - comment: Optional[str] = None, - ) -> None: - """Add an item to the specified denylist.""" - await self._add_data(ctx, False, list_type, content, comment) - - @whitelist.command(name="remove", aliases=("delete", "rm",)) - async def allow_delete(self, ctx: Context, list_type: ValidFilterListType, content: str) -> None: - """Remove an item from the specified allowlist.""" - await self._delete_data(ctx, True, list_type, content) - - @blacklist.command(name="remove", aliases=("delete", "rm",)) - async def deny_delete(self, ctx: Context, list_type: ValidFilterListType, content: str) -> None: - """Remove an item from the specified denylist.""" - await self._delete_data(ctx, False, list_type, content) - - @whitelist.command(name="get", aliases=("list", "ls", "fetch", "show")) - async def allow_get(self, ctx: Context, list_type: ValidFilterListType) -> None: - """Get the contents of a specified allowlist.""" - await self._list_all_data(ctx, True, list_type) - - @blacklist.command(name="get", aliases=("list", "ls", "fetch", "show")) - async def deny_get(self, ctx: Context, list_type: ValidFilterListType) -> None: - """Get the contents of a specified denylist.""" - await self._list_all_data(ctx, False, list_type) - - @whitelist.command(name="sync", aliases=("s",)) - async def allow_sync(self, ctx: Context) -> None: - """Syncs both allowlists and denylists with the API.""" - await self._sync_data(ctx) - - @blacklist.command(name="sync", aliases=("s",)) - async def deny_sync(self, ctx: Context) -> None: - """Syncs both allowlists and denylists with the API.""" - await self._sync_data(ctx) - - @command(name="filter_report") - async def force_send_weekly_report(self, ctx: Context) -> None: - """Respond with a list of autobans added in the last 7 days.""" - await self.send_weekly_autoban_report(ctx.channel) - - @tasks.loop(time=datetime.time(hour=18)) - async def weekly_autoban_report_task(self) -> None: - """Trigger autoban report to be sent if it is the desired day of the week (WEEKLY_REPORT_ISO_DAY).""" - if arrow.utcnow().isoweekday() != WEEKLY_REPORT_ISO_DAY: - return - - await self.send_weekly_autoban_report() - - async def send_weekly_autoban_report(self, channel: discord.abc.Messageable = None) -> None: - """ - Send a list of autobans added in the last 7 days to the specified channel. - - If chanel is not specified, it is sent to #mod-meta. - """ - seven_days_ago = arrow.utcnow().shift(days=-7) - if not channel: - channel = self.bot.get_channel(Channels.mod_meta) - elif not is_mod_channel(channel): - # Silently fail if output is going to be a non-mod channel. 
- return - - added_autobans = defaultdict(list) - # Extract all autoban filters added in the past 7 days from each filter type - for filter_list, filters in self.bot.filter_list_cache.items(): - filter_type, allow = filter_list.split(".") - allow_type = "Allow list" if allow.lower() == "true" else "Deny list" - - for filter_content, filter_details in filters.items(): - created_at = arrow.get(filter_details["created_at"]) - updated_at = arrow.get(filter_details["updated_at"]) - # Default to empty string so that the in check below doesn't error on None type - comment = filter_details["comment"] or "" - if max(created_at, updated_at) > seven_days_ago and "[autoban]" in comment: - line = f"`{filter_content}`: {comment}" - added_autobans[f"**{filter_type} {allow_type}**"].append(line) - - # Nicely format the output so each filter list type is grouped - lines = [f"**Autoban filters added since {seven_days_ago.format('YYYY-MM-DD')}**"] - for filter_list, recently_added_autobans in added_autobans.items(): - lines.append("\n".join([filter_list]+recently_added_autobans)) - - if len(lines) == 1: - lines.append("Nothing to show") - - await channel.send("\n\n".join(lines)) - - async def cog_check(self, ctx: Context) -> bool: - """Only allow moderators to invoke the commands in this cog.""" - return await has_any_role(*constants.MODERATION_ROLES).predicate(ctx) - - async def cog_unload(self) -> None: - """Cancel the weekly autoban filter report on cog unload.""" - self.weekly_autoban_report_task.cancel() - - -async def setup(bot: Bot) -> None: - """Load the FilterLists cog.""" - await bot.add_cog(FilterLists(bot)) diff --git a/bot/exts/filters/filtering.py b/bot/exts/filters/filtering.py deleted file mode 100644 index 23a6f2d92..000000000 --- a/bot/exts/filters/filtering.py +++ /dev/null @@ -1,743 +0,0 @@ -import asyncio -import re -import unicodedata -import urllib.parse -from datetime import timedelta -from typing import Any, Dict, List, Mapping, NamedTuple, Optional, Tuple, Union - -import arrow -import dateutil.parser -import regex -import tldextract -from async_rediscache import RedisCache -from dateutil.relativedelta import relativedelta -from discord import ChannelType, Colour, Embed, Forbidden, HTTPException, Member, Message, NotFound, TextChannel -from discord.ext.commands import Cog -from discord.utils import escape_markdown -from pydis_core.site_api import ResponseCodeError -from pydis_core.utils import scheduling -from pydis_core.utils.regex import DISCORD_INVITE - -from bot.bot import Bot -from bot.constants import Bot as BotConfig, Channels, Colours, Filter, Guild, Icons, URLs -from bot.exts.events.code_jams._channels import CATEGORY_NAME as JAM_CATEGORY_NAME -from bot.exts.moderation.modlog import ModLog -from bot.log import get_logger -from bot.utils.helpers import remove_subdomain_from_url -from bot.utils.messages import format_user - -log = get_logger(__name__) - - -# Regular expressions -CODE_BLOCK_RE = re.compile( - r"(?P<delim>``?)[^`]+?(?P=delim)(?!`+)" # Inline codeblock - r"|```(.+?)```", # Multiline codeblock - re.DOTALL | re.MULTILINE -) -EVERYONE_PING_RE = re.compile(rf"@everyone|<@&{Guild.id}>|@here") -SPOILER_RE = re.compile(r"(\|\|.+?\|\|)", re.DOTALL) -URL_RE = re.compile(r"(https?://[^\s]+)", flags=re.IGNORECASE) - -# Exclude variation selectors from zalgo because they're actually invisible. 
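-# (e.g. U+FE0F, the emoji variation selector, renders as nothing by itself, so counting it as zalgo would flag ordinary emoji.)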
-VARIATION_SELECTORS = r"\uFE00-\uFE0F\U000E0100-\U000E01EF" -INVISIBLE_RE = regex.compile(rf"[{VARIATION_SELECTORS}\p{{UNASSIGNED}}\p{{FORMAT}}\p{{CONTROL}}--\s]", regex.V1) -ZALGO_RE = regex.compile(rf"[\p{{NONSPACING MARK}}\p{{ENCLOSING MARK}}--[{VARIATION_SELECTORS}]]", regex.V1) - -# Other constants. -DAYS_BETWEEN_ALERTS = 3 -OFFENSIVE_MSG_DELETE_TIME = timedelta(days=Filter.offensive_msg_delete_days) - -# Autoban -LINK_PASSWORD = "https://support.discord.com/hc/en-us/articles/218410947-I-forgot-my-Password-Where-can-I-set-a-new-one" -LINK_2FA = "https://support.discord.com/hc/en-us/articles/219576828-Setting-up-Two-Factor-Authentication" -AUTO_BAN_REASON = ( - "Your account has been used to send links to a phishing website. You have been automatically banned. " - "If you are not aware of sending them, that means your account has been compromised.\n\n" - - f"Here is a guide from Discord on [how to change your password]({LINK_PASSWORD}).\n\n" - - f"We also highly recommend that you [enable 2 factor authentication on your account]({LINK_2FA}), " - "for heightened security.\n\n" - - "Once you have changed your password, feel free to follow the instructions at the bottom of " - "this message to appeal your ban." -) -AUTO_BAN_DURATION = timedelta(days=4) - -FilterMatch = Union[re.Match, dict, bool, List[Embed]] - - -class Stats(NamedTuple): - """Additional stats on a triggered filter to append to a mod log.""" - - message_content: str - additional_embeds: Optional[List[Embed]] - - -class Filtering(Cog): - """Filtering out invites, blacklisting domains, and warning us of certain regular expressions.""" - - # Redis cache mapping a user ID to the last timestamp a bad nickname alert was sent - name_alerts = RedisCache() - - def __init__(self, bot: Bot): - self.bot = bot - self.scheduler = scheduling.Scheduler(self.__class__.__name__) - self.name_lock = asyncio.Lock() - - staff_mistake_str = "If you believe this was a mistake, please let staff know!" - self.filters = { - "filter_zalgo": { - "enabled": Filter.filter_zalgo, - "function": self._has_zalgo, - "type": "filter", - "content_only": True, - "user_notification": Filter.notify_user_zalgo, - "notification_msg": ( - "Your post has been removed for abusing Unicode character rendering (aka Zalgo text). " - f"{staff_mistake_str}" - ), - "schedule_deletion": False - }, - "filter_invites": { - "enabled": Filter.filter_invites, - "function": self._has_invites, - "type": "filter", - "content_only": True, - "user_notification": Filter.notify_user_invites, - "notification_msg": ( - f"Per Rule 6, your invite link has been removed. {staff_mistake_str}\n\n" - r"Our server rules can be found here: <https://pythondiscord.com/pages/rules>" - ), - "schedule_deletion": False - }, - "filter_domains": { - "enabled": Filter.filter_domains, - "function": self._has_urls, - "type": "filter", - "content_only": True, - "user_notification": Filter.notify_user_domains, - "notification_msg": ( - f"Your URL has been removed because it matched a blacklisted domain. 
{staff_mistake_str}" - ), - "schedule_deletion": False - }, - "watch_regex": { - "enabled": Filter.watch_regex, - "function": self._has_watch_regex_match, - "type": "watchlist", - "content_only": True, - "schedule_deletion": True - }, - "watch_rich_embeds": { - "enabled": Filter.watch_rich_embeds, - "function": self._has_rich_embed, - "type": "watchlist", - "content_only": False, - "schedule_deletion": False - }, - "filter_everyone_ping": { - "enabled": Filter.filter_everyone_ping, - "function": self._has_everyone_ping, - "type": "filter", - "content_only": True, - "user_notification": Filter.notify_user_everyone_ping, - "notification_msg": ( - "Please don't try to ping `@everyone` or `@here`. " - f"Your message has been removed. {staff_mistake_str}" - ), - "schedule_deletion": False, - "ping_everyone": False - }, - } - - async def cog_unload(self) -> None: - """Cancel scheduled tasks.""" - self.scheduler.cancel_all() - - def _get_filterlist_items(self, list_type: str, *, allowed: bool) -> list: - """Fetch items from the filter_list_cache.""" - return self.bot.filter_list_cache[f"{list_type.upper()}.{allowed}"].keys() - - def _get_filterlist_value(self, list_type: str, value: Any, *, allowed: bool) -> dict: - """Fetch one specific value from filter_list_cache.""" - return self.bot.filter_list_cache[f"{list_type.upper()}.{allowed}"][value] - - @staticmethod - def _expand_spoilers(text: str) -> str: - """Return a string containing all interpretations of a spoilered message.""" - split_text = SPOILER_RE.split(text) - return ''.join( - split_text[0::2] + split_text[1::2] + split_text - ) - - @property - def mod_log(self) -> ModLog: - """Get currently loaded ModLog cog instance.""" - return self.bot.get_cog("ModLog") - - @Cog.listener() - async def on_message(self, msg: Message) -> None: - """Invoke message filter for new messages.""" - await self._filter_message(msg) - - # Ignore webhook messages. - if msg.webhook_id is None: - await self.check_bad_words_in_name(msg.author) - - @Cog.listener() - async def on_message_edit(self, before: Message, after: Message) -> None: - """ - Invoke message filter for message edits. - - Also calculates the time delta from the previous edit or when message was sent if there's no prior edits. - """ - # We only care about changes to the message contents/attachments and embed additions, not pin status etc. - if all(( - before.content == after.content, # content hasn't changed - before.attachments == after.attachments, # attachments haven't changed - len(before.embeds) >= len(after.embeds) # embeds haven't been added - )): - return - - if not before.edited_at: - delta = relativedelta(after.edited_at, before.created_at).microseconds - else: - delta = relativedelta(after.edited_at, before.edited_at).microseconds - await self._filter_message(after, delta) - - @Cog.listener() - async def on_voice_state_update(self, member: Member, *_) -> None: - """Checks for bad words in usernames when users join, switch or leave a voice channel.""" - await self.check_bad_words_in_name(member) - - def get_name_match(self, name: str) -> Optional[re.Match]: - """Check bad words from passed string (name). Return the first match found.""" - normalised_name = unicodedata.normalize("NFKC", name) - cleaned_normalised_name = "".join([c for c in normalised_name if not unicodedata.combining(c)]) - - # Run filters against normalised, cleaned normalised and the original name, - # in case we have filters for one but not the other. 
-        names_to_check = (name, normalised_name, cleaned_normalised_name)
-
-        watchlist_patterns = self._get_filterlist_items('filter_token', allowed=False)
-        for pattern in watchlist_patterns:
-            for name in names_to_check:
-                if match := re.search(pattern, name, flags=re.IGNORECASE):
-                    return match
-        return None
-
-    async def check_send_alert(self, member: Member) -> bool:
-        """Return `False` if fewer than 3 days have passed since the last alert, otherwise `True`."""
-        if last_alert := await self.name_alerts.get(member.id):
-            last_alert = arrow.get(last_alert)
-            if arrow.utcnow() - timedelta(days=DAYS_BETWEEN_ALERTS) < last_alert:
-                log.trace(f"Last alert was too recent for {member}'s nickname.")
-                return False
-
-        return True
-
-    async def check_bad_words_in_name(self, member: Member) -> None:
-        """Send a mod alert every 3 days if a username still matches a watchlist pattern."""
-        # Use lock to avoid race conditions
-        async with self.name_lock:
-            # Check if we recently alerted about this user first,
-            # to avoid running all the filter tokens against their name again.
-            if not await self.check_send_alert(member):
-                return
-
-            # Check whether the user's display name contains any words in our blacklist
-            match = self.get_name_match(member.display_name)
-            if not match:
-                return
-
-            log.info(f"Sending bad nickname alert for '{member.display_name}' ({member.id}).")
-
-            log_string = (
-                f"**User:** {format_user(member)}\n"
-                f"**Display Name:** {escape_markdown(member.display_name)}\n"
-                f"**Bad Match:** {match.group()}"
-            )
-
-            await self.mod_log.send_log_message(
-                content=str(member.id),  # quality-of-life improvement for mobile moderators
-                icon_url=Icons.token_removed,
-                colour=Colours.soft_red,
-                title="Username filtering alert",
-                text=log_string,
-                channel_id=Channels.mod_alerts,
-                thumbnail=member.display_avatar.url,
-                ping_everyone=True
-            )
-
-            # Update time when alert sent
-            await self.name_alerts.set(member.id, arrow.utcnow().timestamp())
-
-    async def filter_snekbox_output(self, result: str, msg: Message) -> bool:
-        """
-        Filter the result of a snekbox command to see if it violates any of our rules, and then respond accordingly.
-
-        Also requires the original message, to check whether to filter and for mod logs.
-        Returns whether a filter was triggered or not.
-        """
-        filter_triggered = False
-        # Should we filter this message?
-        if self._check_filter(msg):
-            for filter_name, _filter in self.filters.items():
-                # Is this specific filter enabled in the config?
-                # We also do not need to worry about filters that take the full message,
-                # since all we have is an arbitrary string.
-                if _filter["enabled"] and _filter["content_only"]:
-                    filter_result = await _filter["function"](result)
-                    reason = None
-
-                    if isinstance(filter_result, tuple):
-                        match, reason = filter_result
-                    else:
-                        match = filter_result
-
-                    if match:
-                        # If this is a filter (not a watchlist), we set the variable so we know
-                        # that it has been triggered
-                        if _filter["type"] == "filter":
-                            filter_triggered = True
-
-                        stats = self._add_stats(filter_name, match, result)
-                        await self._send_log(filter_name, _filter, msg, stats, reason, is_eval=True)
-
-                        break  # We don't want multiple filters to trigger
-
-        return filter_triggered
-
-    async def _filter_message(self, msg: Message, delta: Optional[int] = None) -> None:
-        """Filter the input message to see if it violates any of our rules, and then respond accordingly."""
-        # Should we filter this message?
-        if self._check_filter(msg):
-            for filter_name, _filter in self.filters.items():
-                # Is this specific filter enabled in the config?
-                if _filter["enabled"]:
-                    # Double trigger check for the embeds filter
-                    if filter_name == "watch_rich_embeds":
-                        # If the edit delta is less than 100 microseconds, then we're probably dealing
-                        # with a double filter trigger.
-                        if delta is not None and delta < 100:
-                            continue
-
-                    if filter_name in ("filter_invites", "filter_everyone_ping"):
-                        # Disable invites filter in codejam team channels
-                        category = getattr(msg.channel, "category", None)
-                        if category and category.name == JAM_CATEGORY_NAME:
-                            continue
-
-                    # Does the filter only need the message content or the full message?
-                    if _filter["content_only"]:
-                        payload = msg.content
-                    else:
-                        payload = msg
-
-                    result = await _filter["function"](payload)
-                    reason = None
-
-                    if isinstance(result, tuple):
-                        match, reason = result
-                    else:
-                        match = result
-
-                    if match:
-                        is_private = msg.channel.type is ChannelType.private
-
-                        # If this is a filter (not a watchlist) and not in a DM, delete the message.
-                        if _filter["type"] == "filter" and not is_private:
-                            try:
-                                # Embeds (can?) trigger both the `on_message` and `on_message_edit`
-                                # event handlers, triggering filtering twice for the same message.
-                                #
-                                # If `on_message`-triggered filtering already deleted the message,
-                                # then `on_message_edit`-triggered filtering will raise an exception
-                                # since the message no longer exists.
-                                #
-                                # In addition, to avoid sending two notifications to the user, the
-                                # logs, and mod_alert, we return if the message no longer exists.
-                                await msg.delete()
-                            except NotFound:
-                                return
-
-                        # Notify the user if the filter specifies
-                        if _filter["user_notification"]:
-                            await self.notify_member(msg.author, _filter["notification_msg"], msg.channel)
-
-                        # If the message is classed as offensive, we store it in the site db and
-                        # it will be deleted after one week.
-                        if _filter["schedule_deletion"] and not is_private:
-                            delete_date = (msg.created_at + OFFENSIVE_MSG_DELETE_TIME).isoformat()
-                            data = {
-                                'id': msg.id,
-                                'channel_id': msg.channel.id,
-                                'delete_date': delete_date
-                            }
-
-                            try:
-                                await self.bot.api_client.post('bot/offensive-messages', json=data)
-                            except ResponseCodeError as e:
-                                if e.status == 400 and "already exists" in e.response_json.get("id", [""])[0]:
-                                    log.debug(f"Offensive message {msg.id} already exists.")
-                                else:
-                                    log.error(f"Offensive message {msg.id} failed to post: {e}")
-                            else:
-                                self.schedule_msg_delete(data)
-                                log.trace(f"Offensive message {msg.id} will be deleted on {delete_date}")
-
-                        stats = self._add_stats(filter_name, match, msg.content)
-
-                        # If the filter reason contains `[autoban]`, we want to auto-ban the user.
-                        # Also pass this to _send_log so mods are not pinged for filter matches that are auto-actioned.
-                        autoban = reason and "[autoban]" in reason.lower()
-                        if not autoban and filter_name == "filter_invites" and isinstance(result, dict):
-                            autoban = any(
-                                "[autoban]" in invite_info["reason"].lower()
-                                for invite_info in result.values()
-                                if invite_info.get("reason")
-                            )
-
-                        await self._send_log(filter_name, _filter, msg, stats, reason, autoban=autoban)
-
-                        if autoban:
-                            # Create a new context, with the bot as the author and #mod-alerts as the channel.
-                            # This sends the ban confirmation directly under the watchlist trigger embed, to inform
-                            # mods that the user was auto-banned for the message.
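-                            # (Context.invoke runs the command callback directly, skipping its permission checks.)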
- context = await self.bot.get_context(msg) - context.guild = self.bot.get_guild(Guild.id) - context.author = context.guild.get_member(self.bot.user.id) - context.channel = self.bot.get_channel(Channels.mod_alerts) - context.command = self.bot.get_command("tempban") - - await context.invoke( - context.command, - msg.author, - (arrow.utcnow() + AUTO_BAN_DURATION).datetime, - reason=AUTO_BAN_REASON - ) - - break # We don't want multiple filters to trigger - - async def _send_log( - self, - filter_name: str, - _filter: Dict[str, Any], - msg: Message, - stats: Stats, - reason: Optional[str] = None, - *, - is_eval: bool = False, - autoban: bool = False, - ) -> None: - """Send a mod log for a triggered filter.""" - if msg.channel.type is ChannelType.private: - channel_str = "via DM" - ping_everyone = False - else: - channel_str = f"in {msg.channel.mention}" - # Allow specific filters to override ping_everyone - ping_everyone = Filter.ping_everyone and _filter.get("ping_everyone", True) - - content = str(msg.author.id) # quality-of-life improvement for mobile moderators - - # If we are going to autoban, we don't want to ping and don't need the user ID - if autoban: - ping_everyone = False - content = None - - eval_msg = f"using {BotConfig.prefix}eval " if is_eval else "" - footer = f"Reason: {reason}" if reason else None - message = ( - f"The {filter_name} {_filter['type']} was triggered by {format_user(msg.author)} " - f"{channel_str} {eval_msg}with [the following message]({msg.jump_url}):\n\n" - f"{stats.message_content}" - ) - - log.debug(message) - - # Send pretty mod log embed to mod-alerts - await self.mod_log.send_log_message( - content=content, - icon_url=Icons.filtering, - colour=Colour(Colours.soft_red), - title=f"{_filter['type'].title()} triggered!", - text=message, - thumbnail=msg.author.display_avatar.url, - channel_id=Channels.mod_alerts, - ping_everyone=ping_everyone, - additional_embeds=stats.additional_embeds, - footer=footer, - ) - - def _add_stats(self, name: str, match: FilterMatch, content: str) -> Stats: - """Adds relevant statistical information to the relevant filter and increments the bot's stats.""" - # Word and match stats for watch_regex - if name == "watch_regex": - surroundings = match.string[max(match.start() - 10, 0): match.end() + 10] - message_content = ( - f"**Match:** '{match[0]}'\n" - f"**Location:** '...{escape_markdown(surroundings)}...'\n" - f"\n**Original Message:**\n{escape_markdown(content)}" - ) - else: # Use original content - message_content = content - - additional_embeds = None - - self.bot.stats.incr(f"filters.{name}") - - # The function returns True for invalid invites. - # They have no data so additional embeds can't be created for them. - if name == "filter_invites" and match is not True: - additional_embeds = [] - for _, data in match.items(): - reason = f"Reason: {data['reason']} | " if data.get('reason') else "" - embed = Embed(description=( - f"**Members:**\n{data['members']}\n" - f"**Active:**\n{data['active']}" - )) - embed.set_author(name=data["name"]) - embed.set_thumbnail(url=data["icon"]) - embed.set_footer(text=f"{reason}Guild ID: {data['id']}") - additional_embeds.append(embed) - - elif name == "watch_rich_embeds": - additional_embeds = match - - return Stats(message_content, additional_embeds) - - @staticmethod - def _check_filter(msg: Message) -> bool: - """Check whitelists to see if we should filter this message.""" - role_whitelisted = False - - if type(msg.author) is Member: # Only Member has roles, not User. 
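-            # (In DMs, msg.author is a plain User object, which has no roles attribute.)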
-            for role in msg.author.roles:
-                if role.id in Filter.role_whitelist:
-                    role_whitelisted = True
-
-        return (
-            msg.channel.id not in Filter.channel_whitelist  # Channel not in whitelist
-            and not role_whitelisted  # Role not in whitelist
-            and not msg.author.bot  # Author not a bot
-        )
-
-    async def _has_watch_regex_match(self, text: str) -> Tuple[Union[bool, re.Match], Optional[str]]:
-        """
-        Return True if `text` matches any regex from `word_watchlist` or `token_watchlist` configs.
-
-        `word_watchlist`'s patterns are placed between word boundaries while `token_watchlist` is
-        matched as-is. Spoilers are expanded, if any, and URLs are ignored.
-        The second return value is the reason recorded in the database for the blacklist entry (can be None).
-        """
-        if SPOILER_RE.search(text):
-            text = self._expand_spoilers(text)
-
-        text = self.clean_input(text)
-
-        watchlist_patterns = self._get_filterlist_items('filter_token', allowed=False)
-        for pattern in watchlist_patterns:
-            match = re.search(pattern, text, flags=re.IGNORECASE)
-            if match:
-                return match, self._get_filterlist_value('filter_token', pattern, allowed=False)['comment']
-
-        return False, None
-
-    async def _has_urls(self, text: str) -> Tuple[bool, Optional[str]]:
-        """
-        Returns True if the text contains one of the blacklisted URLs from the config file.
-
-        The second return value is the reason the URL was blacklisted (can be None).
-        """
-        text = self.clean_input(text)
-
-        domain_blacklist = self._get_filterlist_items("domain_name", allowed=False)
-        for match in URL_RE.finditer(text):
-            for url in domain_blacklist:
-                if url.lower() in match.group(1).lower():
-                    blacklisted_parsed = tldextract.extract(url.lower())
-                    url_parsed = tldextract.extract(match.group(1).lower())
-                    if blacklisted_parsed.registered_domain == url_parsed.registered_domain:
-                        return True, self._get_filterlist_value("domain_name", url, allowed=False)["comment"]
-        return False, None
-
-    @staticmethod
-    async def _has_zalgo(text: str) -> bool:
-        """
-        Returns True if the text contains zalgo characters.
-
-        Zalgo range is \u0300 – \u036F and \u0489.
-        """
-        return bool(ZALGO_RE.search(text))
-
-    async def _has_invites(self, text: str) -> Union[dict, bool]:
-        """
-        Checks if there are any invites in the text content that aren't in the guild whitelist.
-
-        If any are detected, a dictionary of invite data is returned, with a key per invite.
-        If none are detected, False is returned.
-        If we are unable to process an invite, True is returned.
-
-        Attempts to catch some of the common ways to try to cheat the system.
-        """
-        text = self.clean_input(text)
-
-        # Remove backslashes to prevent escape character fuckaroundery like
-        # discord\.gg/gdudes-pony-farm
-        text = text.replace("\\", "")
-
-        invites = [m.group("invite") for m in DISCORD_INVITE.finditer(text)]
-        invite_data = dict()
-        for invite in invites:
-            invite = urllib.parse.quote_plus(invite.rstrip("/"))
-            if invite in invite_data:
-                continue
-
-            response = await self.bot.http_session.get(
-                f"{URLs.discord_invite_api}/{invite}", params={"with_counts": "true"}
-            )
-            response = await response.json()
-            guild = response.get("guild")
-            if guild is None:
-                # Lack of a "guild" key in the JSON response indicates either a group DM invite, an
The API does not currently differentiate - # between invalid and expired invites - return True - - guild_id = guild.get("id") - guild_invite_whitelist = self._get_filterlist_items("guild_invite", allowed=True) - guild_invite_blacklist = self._get_filterlist_items("guild_invite", allowed=False) - - # Is this invite allowed? - guild_partnered_or_verified = ( - 'PARTNERED' in guild.get("features", []) - or 'VERIFIED' in guild.get("features", []) - ) - invite_not_allowed = ( - guild_id in guild_invite_blacklist # Blacklisted guilds are never permitted. - or guild_id not in guild_invite_whitelist # Whitelisted guilds are always permitted. - and not guild_partnered_or_verified # Otherwise guilds have to be Verified or Partnered. - ) - - if invite_not_allowed: - reason = None - if guild_id in guild_invite_blacklist: - reason = self._get_filterlist_value("guild_invite", guild_id, allowed=False)["comment"] - - guild_icon_hash = guild["icon"] - guild_icon = ( - "https://cdn.discordapp.com/icons/" - f"{guild_id}/{guild_icon_hash}.png?size=512" - ) - - invite_data[invite] = { - "name": guild["name"], - "id": guild['id'], - "icon": guild_icon, - "members": response["approximate_member_count"], - "active": response["approximate_presence_count"], - "reason": reason - } - - return invite_data if invite_data else False - - @staticmethod - async def _has_rich_embed(msg: Message) -> Union[bool, List[Embed]]: - """Determines if `msg` contains any rich embeds not auto-generated from a URL.""" - if msg.embeds: - for embed in msg.embeds: - if embed.type == "rich": - urls = URL_RE.findall(msg.content) - final_urls = set(urls) - # This is due to way discord renders relative urls in Embdes - # if we send the following url: https://mobile.twitter.com/something - # Discord renders it as https://twitter.com/something - for url in urls: - final_urls.add(remove_subdomain_from_url(url)) - if not embed.url or embed.url not in final_urls: - # If `embed.url` does not exist or if `embed.url` is not part of the content - # of the message, it's unlikely to be an auto-generated embed by Discord. - return msg.embeds - else: - log.trace( - "Found a rich embed sent by a regular user account, " - "but it was likely just an automatic URL embed." - ) - return False - return False - - @staticmethod - async def _has_everyone_ping(text: str) -> bool: - """Determines if `msg` contains an @everyone or @here ping outside of a codeblock.""" - # First pass to avoid running re.sub on every message - if not EVERYONE_PING_RE.search(text): - return False - - content_without_codeblocks = CODE_BLOCK_RE.sub("", text) - return bool(EVERYONE_PING_RE.search(content_without_codeblocks)) - - async def notify_member(self, filtered_member: Member, reason: str, channel: TextChannel) -> None: - """ - Notify filtered_member about a moderation action with the reason str. 
-
-        First attempts to DM the user, falls back to in-channel notification if the user has DMs disabled.
-        """
-        try:
-            await filtered_member.send(reason)
-        except Forbidden:
-            await channel.send(f"{filtered_member.mention} {reason}")
-
-    def schedule_msg_delete(self, msg: dict) -> None:
-        """Delete an offensive message once its deletion date is reached."""
-        delete_at = dateutil.parser.isoparse(msg['delete_date'])
-        self.scheduler.schedule_at(delete_at, msg['id'], self.delete_offensive_msg(msg))
-
-    async def cog_load(self) -> None:
-        """Get all the pending message deletions from the API and reschedule them."""
-        await self.bot.wait_until_ready()
-        response = await self.bot.api_client.get('bot/offensive-messages',)
-
-        now = arrow.utcnow()
-
-        for msg in response:
-            delete_at = dateutil.parser.isoparse(msg['delete_date'])
-
-            if delete_at < now:
-                await self.delete_offensive_msg(msg)
-            else:
-                self.schedule_msg_delete(msg)
-
-    async def delete_offensive_msg(self, msg: Mapping[str, int]) -> None:
-        """Delete an offensive message, and then delete it from the db."""
-        try:
-            channel = self.bot.get_channel(msg['channel_id'])
-            if channel:
-                msg_obj = await channel.fetch_message(msg['id'])
-                await msg_obj.delete()
-        except NotFound:
-            log.info(
-                f"Tried to delete message {msg['id']}, but the message can't be found "
-                f"(it has probably already been deleted)."
-            )
-        except HTTPException as e:
-            log.warning(f"Failed to delete message {msg['id']}: status {e.status}")
-
-        await self.bot.api_client.delete(f'bot/offensive-messages/{msg["id"]}')
-        log.info(f"Deleted the offensive message with id {msg['id']}.")
-
-    @staticmethod
-    def clean_input(string: str) -> str:
-        """Remove zalgo and invisible characters from `string`."""
-        # For future consideration: remove characters in the Mc, Sk, and Lm categories too.
-        # Can be normalised with form C to merge char + combining char into a single char to avoid
-        # removing legit diacritics, but this would open up a way to bypass filters.
-        no_zalgo = ZALGO_RE.sub("", string)
-        return INVISIBLE_RE.sub("", no_zalgo)
-
-
-async def setup(bot: Bot) -> None:
-    """Load the Filtering cog."""
-    await bot.add_cog(Filtering(bot))
diff --git a/bot/exts/filters/webhook_remover.py b/bot/exts/filters/webhook_remover.py
deleted file mode 100644
index b42613804..000000000
--- a/bot/exts/filters/webhook_remover.py
+++ /dev/null
@@ -1,94 +0,0 @@
-import re
-
-from discord import Colour, Message, NotFound
-from discord.ext.commands import Cog
-
-from bot.bot import Bot
-from bot.constants import Channels, Colours, Event, Icons
-from bot.exts.moderation.modlog import ModLog
-from bot.log import get_logger
-from bot.utils.messages import format_user
-
-WEBHOOK_URL_RE = re.compile(
-    r"((?:https?:\/\/)?(?:ptb\.|canary\.)?discord(?:app)?\.com\/api\/webhooks\/\d+\/)\S+\/?",
-    re.IGNORECASE
-)
-
-ALERT_MESSAGE_TEMPLATE = (
-    "{user}, looks like you posted a Discord webhook URL. Therefore, your "
-    "message has been removed, and your webhook has been deleted. "
-    "You can re-create it if you wish to. If you believe this was a "
-    "mistake, please let us know."
-)
-
-log = get_logger(__name__)
-
-
-class WebhookRemover(Cog):
-    """Scan messages to detect Discord webhook links."""
-
-    def __init__(self, bot: Bot):
-        self.bot = bot
-
-    @property
-    def mod_log(self) -> ModLog:
-        """Get current instance of `ModLog`."""
-        return self.bot.get_cog("ModLog")
-
-    async def delete_and_respond(self, msg: Message, redacted_url: str, *, webhook_deleted: bool) -> None:
-        """Delete `msg` and send a warning that it contained the Discord webhook `redacted_url`."""
-        # Don't log this deletion; it's done internally rather than by a user, and gets its own log entry below.
-        self.mod_log.ignore(Event.message_delete, msg.id)
-
-        try:
-            await msg.delete()
-        except NotFound:
-            log.debug(f"Failed to remove webhook in message {msg.id}: message already deleted.")
-            return
-
-        await msg.channel.send(ALERT_MESSAGE_TEMPLATE.format(user=msg.author.mention))
-        if webhook_deleted:
-            delete_state = "The webhook was successfully deleted."
-        else:
-            delete_state = "There was an error when deleting the webhook, it might have already been removed."
-        message = (
-            f"{format_user(msg.author)} posted a Discord webhook URL to {msg.channel.mention}. {delete_state} "
-            f"Webhook URL was `{redacted_url}`"
-        )
-        log.debug(message)
-
-        # Send entry to moderation alerts.
-        await self.mod_log.send_log_message(
-            icon_url=Icons.token_removed,
-            colour=Colour(Colours.soft_red),
-            title="Discord webhook URL removed!",
-            text=message,
-            thumbnail=msg.author.display_avatar.url,
-            channel_id=Channels.mod_alerts
-        )
-
-        self.bot.stats.incr("tokens.removed_webhooks")
-
-    @Cog.listener()
-    async def on_message(self, msg: Message) -> None:
-        """Check if a Discord webhook URL is in `message`."""
-        # Ignore DMs; can't delete messages in there anyway.
-        if not msg.guild or msg.author.bot:
-            return
-
-        matches = WEBHOOK_URL_RE.search(msg.content)
-        if matches:
-            async with self.bot.http_session.delete(matches[0]) as resp:
-                # The Discord API returns a 204 NO CONTENT response on success.
- deleted_successfully = resp.status == 204 - await self.delete_and_respond(msg, matches[1] + "xxx", webhook_deleted=deleted_successfully) - - @Cog.listener() - async def on_message_edit(self, before: Message, after: Message) -> None: - """Check if a Discord webhook URL is in the edited message `after`.""" - await self.on_message(after) - - -async def setup(bot: Bot) -> None: - """Load `WebhookRemover` cog.""" - await bot.add_cog(WebhookRemover(bot)) diff --git a/bot/exts/info/codeblock/_cog.py b/bot/exts/info/codeblock/_cog.py index a431175fd..d981b95f5 100644 --- a/bot/exts/info/codeblock/_cog.py +++ b/bot/exts/info/codeblock/_cog.py @@ -8,8 +8,8 @@ from pydis_core.utils import scheduling from bot import constants from bot.bot import Bot -from bot.exts.filters.token_remover import TokenRemover -from bot.exts.filters.webhook_remover import WEBHOOK_URL_RE +from bot.exts.filtering._filters.unique.discord_token import DiscordTokenFilter +from bot.exts.filtering._filters.unique.webhook import WEBHOOK_URL_RE from bot.exts.help_channels._channel import is_help_forum_post from bot.exts.info.codeblock._instructions import get_instructions from bot.log import get_logger @@ -135,7 +135,7 @@ class CodeBlockCog(Cog, name="Code Block"): not message.author.bot and self.is_valid_channel(message.channel) and has_lines(message.content, constants.CodeBlock.minimum_lines) - and not TokenRemover.find_token_in_message(message) + and not DiscordTokenFilter.find_token_in_message(message.content) and not WEBHOOK_URL_RE.search(message.content) ) diff --git a/bot/exts/moderation/clean.py b/bot/exts/moderation/clean.py index fd9404b1a..aee751345 100644 --- a/bot/exts/moderation/clean.py +++ b/bot/exts/moderation/clean.py @@ -19,6 +19,7 @@ from bot.converters import Age, ISODateTime from bot.exts.moderation.modlog import ModLog from bot.log import get_logger from bot.utils.channel import is_mod_channel +from bot.utils.messages import upload_log log = get_logger(__name__) @@ -351,7 +352,7 @@ class Clean(Cog): # Reverse the list to have reverse chronological order log_messages = reversed(messages) - log_url = await self.mod_log.upload_log(log_messages, ctx.author.id) + log_url = await upload_log(log_messages, ctx.author.id) # Build the embed and send it if channels == "*": diff --git a/bot/exts/moderation/infraction/infractions.py b/bot/exts/moderation/infraction/infractions.py index 60b4428b7..1ef43a945 100644 --- a/bot/exts/moderation/infraction/infractions.py +++ b/bot/exts/moderation/infraction/infractions.py @@ -1,5 +1,6 @@ import textwrap import typing as t +from datetime import timedelta import arrow import discord @@ -12,7 +13,6 @@ from bot.bot import Bot from bot.constants import Event from bot.converters import Age, Duration, DurationOrExpiry, MemberOrUser, UnambiguousMemberOrUser from bot.decorators import ensure_future_timestamp, respect_role_hierarchy -from bot.exts.filters.filtering import AUTO_BAN_DURATION, AUTO_BAN_REASON from bot.exts.moderation.infraction import _utils from bot.exts.moderation.infraction._scheduler import InfractionScheduler from bot.log import get_logger @@ -27,6 +27,24 @@ if t.TYPE_CHECKING: from bot.exts.moderation.watchchannels.bigbrother import BigBrother +# Comp ban +LINK_PASSWORD = "https://support.discord.com/hc/en-us/articles/218410947-I-forgot-my-Password-Where-can-I-set-a-new-one" +LINK_2FA = "https://support.discord.com/hc/en-us/articles/219576828-Setting-up-Two-Factor-Authentication" +COMP_BAN_REASON = ( + "Your account has been used to send links to a phishing website. 
You have been automatically banned. " + "If you are not aware of sending them, that means your account has been compromised.\n\n" + + f"Here is a guide from Discord on [how to change your password]({LINK_PASSWORD}).\n\n" + + f"We also highly recommend that you [enable 2 factor authentication on your account]({LINK_2FA}), " + "for heightened security.\n\n" + + "Once you have changed your password, feel free to follow the instructions at the bottom of " + "this message to appeal your ban." +) +COMP_BAN_DURATION = timedelta(days=4) + + class Infractions(InfractionScheduler, commands.Cog): """Apply and pardon infractions on users for moderation purposes.""" @@ -61,7 +79,7 @@ class Infractions(InfractionScheduler, commands.Cog): # region: Permanent infractions - @command() + @command(aliases=("warning",)) async def warn(self, ctx: Context, user: UnambiguousMemberOrUser, *, reason: t.Optional[str] = None) -> None: """Warn a user for the given reason.""" if not isinstance(user, Member): @@ -157,7 +175,7 @@ class Infractions(InfractionScheduler, commands.Cog): @command() async def compban(self, ctx: Context, user: UnambiguousMemberOrUser) -> None: """Same as cleanban, but specifically with the ban reason and duration used for compromised accounts.""" - await self.cleanban(ctx, user, duration=(arrow.utcnow() + AUTO_BAN_DURATION).datetime, reason=AUTO_BAN_REASON) + await self.cleanban(ctx, user, duration=(arrow.utcnow() + COMP_BAN_DURATION).datetime, reason=COMP_BAN_REASON) @command(aliases=("vban",)) async def voiceban(self, ctx: Context) -> None: diff --git a/bot/exts/moderation/modlog.py b/bot/exts/moderation/modlog.py index 2c94d1af8..47a21753c 100644 --- a/bot/exts/moderation/modlog.py +++ b/bot/exts/moderation/modlog.py @@ -3,7 +3,6 @@ import difflib import itertools import typing as t from datetime import datetime, timezone -from itertools import zip_longest import discord from dateutil.relativedelta import relativedelta @@ -12,14 +11,12 @@ from discord import Colour, Message, Thread from discord.abc import GuildChannel from discord.ext.commands import Cog, Context from discord.utils import escape_markdown, format_dt, snowflake_time -from pydis_core.site_api import ResponseCodeError -from sentry_sdk import add_breadcrumb from bot.bot import Bot -from bot.constants import Channels, Colours, Emojis, Event, Guild as GuildConstant, Icons, Roles, URLs +from bot.constants import Channels, Colours, Emojis, Event, Guild as GuildConstant, Icons, Roles from bot.log import get_logger from bot.utils import time -from bot.utils.messages import format_user +from bot.utils.messages import format_user, upload_log log = get_logger(__name__) @@ -45,48 +42,6 @@ class ModLog(Cog, name="ModLog"): self._cached_edits = [] - async def upload_log( - self, - messages: t.Iterable[discord.Message], - actor_id: int, - attachments: t.Iterable[t.List[str]] = None - ) -> str: - """Upload message logs to the database and return a URL to a page for viewing the logs.""" - if attachments is None: - attachments = [] - - deletedmessage_set = [ - { - "id": message.id, - "author": message.author.id, - "channel_id": message.channel.id, - "content": message.content.replace("\0", ""), # Null chars cause 400. 
- "embeds": [embed.to_dict() for embed in message.embeds], - "attachments": attachment, - } - for message, attachment in zip_longest(messages, attachments, fillvalue=[]) - ] - - try: - response = await self.bot.api_client.post( - "bot/deleted-messages", - json={ - "actor": actor_id, - "creation": datetime.now(timezone.utc).isoformat(), - "deletedmessage_set": deletedmessage_set, - } - ) - except ResponseCodeError as e: - add_breadcrumb( - category="api_error", - message=str(e), - level="error", - data=deletedmessage_set, - ) - raise - - return f"{URLs.site_logs_view}/{response['id']}" - def ignore(self, event: Event, *items: int) -> None: """Add event to ignored events to suppress log emission.""" for item in items: @@ -604,7 +559,7 @@ class ModLog(Cog, name="ModLog"): remaining_chars = 4090 - len(response) if len(content) > remaining_chars: - botlog_url = await self.upload_log(messages=[message], actor_id=message.author.id) + botlog_url = await upload_log(messages=[message], actor_id=message.author.id) ending = f"\n\nMessage truncated, [full message here]({botlog_url})." truncation_point = remaining_chars - len(ending) content = f"{content[:truncation_point]}...{ending}" diff --git a/bot/exts/moderation/watchchannels/_watchchannel.py b/bot/exts/moderation/watchchannels/_watchchannel.py index 2871eb5de..5cb696575 100644 --- a/bot/exts/moderation/watchchannels/_watchchannel.py +++ b/bot/exts/moderation/watchchannels/_watchchannel.py @@ -14,8 +14,8 @@ from pydis_core.utils import scheduling from bot.bot import Bot from bot.constants import BigBrother as BigBrotherConfig, Guild as GuildConfig, Icons -from bot.exts.filters.token_remover import TokenRemover -from bot.exts.filters.webhook_remover import WEBHOOK_URL_RE +from bot.exts.filtering._filters.unique.discord_token import DiscordTokenFilter +from bot.exts.filtering._filters.unique.webhook import WEBHOOK_URL_RE from bot.exts.moderation.modlog import ModLog from bot.log import CustomLogger, get_logger from bot.pagination import LinePaginator @@ -235,7 +235,7 @@ class WatchChannel(metaclass=CogABCMeta): await self.send_header(msg) - if TokenRemover.find_token_in_message(msg) or WEBHOOK_URL_RE.search(msg.content): + if DiscordTokenFilter.find_token_in_message(msg.content) or WEBHOOK_URL_RE.search(msg.content): cleaned_content = "Content is censored because it contains a bot or webhook token." elif cleaned_content := msg.clean_content: # Put all non-media URLs in a code block to prevent embeds diff --git a/bot/pagination.py b/bot/pagination.py index c39ce211b..679108933 100644 --- a/bot/pagination.py +++ b/bot/pagination.py @@ -204,6 +204,7 @@ class LinePaginator(Paginator): footer_text: str = None, url: str = None, exception_on_empty_embed: bool = False, + reply: bool = False, ) -> t.Optional[discord.Message]: """ Use a paginator and set of reactions to provide pagination over a set of lines. 
@@ -254,6 +255,8 @@ class LinePaginator(Paginator): embed.description = paginator.pages[current_page] + reference = ctx.message if reply else None + if len(paginator.pages) <= 1: if footer_text: embed.set_footer(text=footer_text) @@ -264,9 +267,10 @@ class LinePaginator(Paginator): log.trace(f"Setting embed url to '{url}'") log.debug("There's less than two pages, so we won't paginate - sending single page on its own") + if isinstance(ctx, discord.Interaction): return await ctx.response.send_message(embed=embed) - return await ctx.send(embed=embed) + return await ctx.send(embed=embed, reference=reference) else: if footer_text: embed.set_footer(text=f"{footer_text} (Page {current_page + 1}/{len(paginator.pages)})") @@ -279,11 +283,12 @@ class LinePaginator(Paginator): log.trace(f"Setting embed url to '{url}'") log.debug("Sending first page to channel...") + if isinstance(ctx, discord.Interaction): await ctx.response.send_message(embed=embed) message = await ctx.original_response() else: - message = await ctx.send(embed=embed) + message = await ctx.send(embed=embed, reference=reference) log.debug("Adding emoji reactions to message...") diff --git a/bot/rules/__init__.py b/bot/rules/__init__.py deleted file mode 100644 index a01ceae73..000000000 --- a/bot/rules/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# flake8: noqa - -from .attachments import apply as apply_attachments -from .burst import apply as apply_burst -from .burst_shared import apply as apply_burst_shared -from .chars import apply as apply_chars -from .discord_emojis import apply as apply_discord_emojis -from .duplicates import apply as apply_duplicates -from .links import apply as apply_links -from .mentions import apply as apply_mentions -from .newlines import apply as apply_newlines -from .role_mentions import apply as apply_role_mentions diff --git a/bot/rules/attachments.py b/bot/rules/attachments.py deleted file mode 100644 index 8903c385c..000000000 --- a/bot/rules/attachments.py +++ /dev/null @@ -1,26 +0,0 @@ -from typing import Dict, Iterable, List, Optional, Tuple - -from discord import Member, Message - - -async def apply( - last_message: Message, recent_messages: List[Message], config: Dict[str, int] -) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]: - """Detects total attachments exceeding the limit sent by a single user.""" - relevant_messages = tuple( - msg - for msg in recent_messages - if ( - msg.author == last_message.author - and len(msg.attachments) > 0 - ) - ) - total_recent_attachments = sum(len(msg.attachments) for msg in relevant_messages) - - if total_recent_attachments > config['max']: - return ( - f"sent {total_recent_attachments} attachments in {config['interval']}s", - (last_message.author,), - relevant_messages - ) - return None diff --git a/bot/rules/burst.py b/bot/rules/burst.py deleted file mode 100644 index 25c5a2f33..000000000 --- a/bot/rules/burst.py +++ /dev/null @@ -1,23 +0,0 @@ -from typing import Dict, Iterable, List, Optional, Tuple - -from discord import Member, Message - - -async def apply( - last_message: Message, recent_messages: List[Message], config: Dict[str, int] -) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]: - """Detects repeated messages sent by a single user.""" - relevant_messages = tuple( - msg - for msg in recent_messages - if msg.author == last_message.author - ) - total_relevant = len(relevant_messages) - - if total_relevant > config['max']: - return ( - f"sent {total_relevant} messages in {config['interval']}s", - (last_message.author,), - 
relevant_messages - ) - return None diff --git a/bot/rules/burst_shared.py b/bot/rules/burst_shared.py deleted file mode 100644 index bbe9271b3..000000000 --- a/bot/rules/burst_shared.py +++ /dev/null @@ -1,18 +0,0 @@ -from typing import Dict, Iterable, List, Optional, Tuple - -from discord import Member, Message - - -async def apply( - last_message: Message, recent_messages: List[Message], config: Dict[str, int] -) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]: - """Detects repeated messages sent by multiple users.""" - total_recent = len(recent_messages) - - if total_recent > config['max']: - return ( - f"sent {total_recent} messages in {config['interval']}s", - set(msg.author for msg in recent_messages), - recent_messages - ) - return None diff --git a/bot/rules/chars.py b/bot/rules/chars.py deleted file mode 100644 index 1f587422c..000000000 --- a/bot/rules/chars.py +++ /dev/null @@ -1,24 +0,0 @@ -from typing import Dict, Iterable, List, Optional, Tuple - -from discord import Member, Message - - -async def apply( - last_message: Message, recent_messages: List[Message], config: Dict[str, int] -) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]: - """Detects total message char count exceeding the limit sent by a single user.""" - relevant_messages = tuple( - msg - for msg in recent_messages - if msg.author == last_message.author - ) - - total_recent_chars = sum(len(msg.content) for msg in relevant_messages) - - if total_recent_chars > config['max']: - return ( - f"sent {total_recent_chars} characters in {config['interval']}s", - (last_message.author,), - relevant_messages - ) - return None diff --git a/bot/rules/discord_emojis.py b/bot/rules/discord_emojis.py deleted file mode 100644 index d979ac5e7..000000000 --- a/bot/rules/discord_emojis.py +++ /dev/null @@ -1,34 +0,0 @@ -import re -from typing import Dict, Iterable, List, Optional, Tuple - -from discord import Member, Message -from emoji import demojize - -DISCORD_EMOJI_RE = re.compile(r"<:\w+:\d+>|:\w+:") -CODE_BLOCK_RE = re.compile(r"```.*?```", flags=re.DOTALL) - - -async def apply( - last_message: Message, recent_messages: List[Message], config: Dict[str, int] -) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]: - """Detects total Discord emojis exceeding the limit sent by a single user.""" - relevant_messages = tuple( - msg - for msg in recent_messages - if msg.author == last_message.author - ) - - # Get rid of code blocks in the message before searching for emojis. - # Convert Unicode emojis to :emoji: format to get their count. 
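-    # (e.g. demojize turns a raw "🎉" into a ":shortcode:" token, which the second alternative of DISCORD_EMOJI_RE then matches.)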
- total_emojis = sum( - len(DISCORD_EMOJI_RE.findall(demojize(CODE_BLOCK_RE.sub("", msg.content)))) - for msg in relevant_messages - ) - - if total_emojis > config['max']: - return ( - f"sent {total_emojis} emojis in {config['interval']}s", - (last_message.author,), - relevant_messages - ) - return None diff --git a/bot/rules/duplicates.py b/bot/rules/duplicates.py deleted file mode 100644 index 8e4fbc12d..000000000 --- a/bot/rules/duplicates.py +++ /dev/null @@ -1,28 +0,0 @@ -from typing import Dict, Iterable, List, Optional, Tuple - -from discord import Member, Message - - -async def apply( - last_message: Message, recent_messages: List[Message], config: Dict[str, int] -) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]: - """Detects duplicated messages sent by a single user.""" - relevant_messages = tuple( - msg - for msg in recent_messages - if ( - msg.author == last_message.author - and msg.content == last_message.content - and msg.content - ) - ) - - total_duplicated = len(relevant_messages) - - if total_duplicated > config['max']: - return ( - f"sent {total_duplicated} duplicated messages in {config['interval']}s", - (last_message.author,), - relevant_messages - ) - return None diff --git a/bot/rules/links.py b/bot/rules/links.py deleted file mode 100644 index c46b783c5..000000000 --- a/bot/rules/links.py +++ /dev/null @@ -1,36 +0,0 @@ -import re -from typing import Dict, Iterable, List, Optional, Tuple - -from discord import Member, Message - -LINK_RE = re.compile(r"(https?://[^\s]+)") - - -async def apply( - last_message: Message, recent_messages: List[Message], config: Dict[str, int] -) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]: - """Detects total links exceeding the limit sent by a single user.""" - relevant_messages = tuple( - msg - for msg in recent_messages - if msg.author == last_message.author - ) - total_links = 0 - messages_with_links = 0 - - for msg in relevant_messages: - total_matches = len(LINK_RE.findall(msg.content)) - if total_matches: - messages_with_links += 1 - total_links += total_matches - - # Only apply the filter if we found more than one message with - # links to prevent wrongfully firing the rule on users posting - # e.g. an installation log of pip packages from GitHub. 
- if total_links > config['max'] and messages_with_links > 1: - return ( - f"sent {total_links} links in {config['interval']}s", - (last_message.author,), - relevant_messages - ) - return None diff --git a/bot/rules/newlines.py b/bot/rules/newlines.py deleted file mode 100644 index 4e66e1359..000000000 --- a/bot/rules/newlines.py +++ /dev/null @@ -1,45 +0,0 @@ -import re -from typing import Dict, Iterable, List, Optional, Tuple - -from discord import Member, Message - - -async def apply( - last_message: Message, recent_messages: List[Message], config: Dict[str, int] -) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]: - """Detects total newlines exceeding the set limit sent by a single user.""" - relevant_messages = tuple( - msg - for msg in recent_messages - if msg.author == last_message.author - ) - - # Identify groups of newline characters and get group & total counts - exp = r"(\n+)" - newline_counts = [] - for msg in relevant_messages: - newline_counts += [len(group) for group in re.findall(exp, msg.content)] - total_recent_newlines = sum(newline_counts) - - # Get maximum newline group size - if newline_counts: - max_newline_group = max(newline_counts) - else: - # If no newlines are found, newline_counts will be an empty list, which will error out max() - max_newline_group = 0 - - # Check first for total newlines, if this passes then check for large groupings - if total_recent_newlines > config['max']: - return ( - f"sent {total_recent_newlines} newlines in {config['interval']}s", - (last_message.author,), - relevant_messages - ) - elif max_newline_group > config['max_consecutive']: - return ( - f"sent {max_newline_group} consecutive newlines in {config['interval']}s", - (last_message.author,), - relevant_messages - ) - - return None diff --git a/bot/rules/role_mentions.py b/bot/rules/role_mentions.py deleted file mode 100644 index 0649540b6..000000000 --- a/bot/rules/role_mentions.py +++ /dev/null @@ -1,24 +0,0 @@ -from typing import Dict, Iterable, List, Optional, Tuple - -from discord import Member, Message - - -async def apply( - last_message: Message, recent_messages: List[Message], config: Dict[str, int] -) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]: - """Detects total role mentions exceeding the limit sent by a single user.""" - relevant_messages = tuple( - msg - for msg in recent_messages - if msg.author == last_message.author - ) - - total_recent_mentions = sum(len(msg.role_mentions) for msg in relevant_messages) - - if total_recent_mentions > config['max']: - return ( - f"sent {total_recent_mentions} role mentions in {config['interval']}s", - (last_message.author,), - relevant_messages - ) - return None diff --git a/bot/utils/message_cache.py b/bot/utils/message_cache.py index f68d280c9..5deb2376b 100644 --- a/bot/utils/message_cache.py +++ b/bot/utils/message_cache.py @@ -31,20 +31,23 @@ class MessageCache: self._start = 0 self._end = 0 - self._messages: list[t.Optional[Message]] = [None] * self.maxlen + self._messages: list[Message | None] = [None] * self.maxlen self._message_id_mapping = {} + self._message_metadata = {} - def append(self, message: Message) -> None: + def append(self, message: Message, *, metadata: dict | None = None) -> None: """Add the received message to the cache, depending on the order of messages defined by `newest_first`.""" if self.newest_first: self._appendleft(message) else: self._appendright(message) + self._message_metadata[message.id] = metadata def _appendright(self, message: Message) -> None: """Add the received 
message to the end of the cache.""" if self._is_full(): del self._message_id_mapping[self._messages[self._start].id] + del self._message_metadata[self._messages[self._start].id] self._start = (self._start + 1) % self.maxlen self._messages[self._end] = message @@ -56,6 +59,7 @@ class MessageCache: if self._is_full(): self._end = (self._end - 1) % self.maxlen del self._message_id_mapping[self._messages[self._end].id] + del self._message_metadata[self._messages[self._end].id] self._start = (self._start - 1) % self.maxlen self._messages[self._start] = message @@ -69,6 +73,7 @@ class MessageCache: self._end = (self._end - 1) % self.maxlen message = self._messages[self._end] del self._message_id_mapping[message.id] + del self._message_metadata[message.id] self._messages[self._end] = None return message @@ -80,6 +85,7 @@ class MessageCache: message = self._messages[self._start] del self._message_id_mapping[message.id] + del self._message_metadata[message.id] self._messages[self._start] = None self._start = (self._start + 1) % self.maxlen @@ -89,16 +95,21 @@ class MessageCache: """Remove all messages from the cache.""" self._messages = [None] * self.maxlen self._message_id_mapping = {} + self._message_metadata = {} self._start = 0 self._end = 0 - def get_message(self, message_id: int) -> t.Optional[Message]: + def get_message(self, message_id: int) -> Message | None: """Return the message that has the given message ID, if it is cached.""" index = self._message_id_mapping.get(message_id, None) return self._messages[index] if index is not None else None - def update(self, message: Message) -> bool: + def get_message_metadata(self, message_id: int) -> dict | None: + """Return the metadata of the message that has the given message ID, if it is cached.""" + return self._message_metadata.get(message_id, None) + + def update(self, message: Message, *, metadata: dict | None = None) -> bool: """ Update a cached message with new contents. @@ -108,13 +119,15 @@ class MessageCache: if index is None: return False self._messages[index] = message + if metadata is not None: + self._message_metadata[message.id] = metadata return True def __contains__(self, message_id: int) -> bool: """Return True if the cache contains a message with the given ID .""" return message_id in self._message_id_mapping - def __getitem__(self, item: t.Union[int, slice]) -> t.Union[Message, list[Message]]: + def __getitem__(self, item: int | slice) -> Message | list[Message]: """ Return the message(s) in the index or slice provided. 
diff --git a/bot/utils/messages.py b/bot/utils/messages.py
index f6bdceaef..8d765ebfc 100644
--- a/bot/utils/messages.py
+++ b/bot/utils/messages.py
@@ -1,16 +1,22 @@
 import asyncio
 import random
 import re
+from collections.abc import Iterable
+from datetime import datetime, timezone
 from functools import partial
 from io import BytesIO
+from itertools import zip_longest
 from typing import Callable, List, Optional, Sequence, Union
 
 import discord
+from discord import Message
 from discord.ext.commands import Context
+from pydis_core.site_api import ResponseCodeError
 from pydis_core.utils import scheduling
+from sentry_sdk import add_breadcrumb
 
 import bot
-from bot.constants import Emojis, MODERATION_ROLES, NEGATIVE_REPLIES
+from bot.constants import Emojis, MODERATION_ROLES, NEGATIVE_REPLIES, URLs
 from bot.log import get_logger
 
 log = get_logger(__name__)
@@ -241,6 +247,56 @@ async def send_denial(ctx: Context, reason: str) -> discord.Message:
     return await ctx.send(embed=embed)
 
 
-def format_user(user: discord.abc.User) -> str:
+def format_user(user: discord.User | discord.Member) -> str:
     """Return a string for `user` which has their mention and ID."""
     return f"{user.mention} (`{user.id}`)"
+
+
+def format_channel(channel: discord.abc.Messageable) -> str:
+    """Return a string for `channel` with its mention, category, and the parent channel if it is a thread."""
+    formatted = f"{channel.mention} ({channel.category}/#{channel}"
+    if hasattr(channel, "parent"):
+        formatted += f"/{channel.parent}"
+    formatted += ")"
+    return formatted
+
+
+async def upload_log(messages: Iterable[Message], actor_id: int, attachments: dict[int, list[str]] | None = None) -> str:
+    """Upload message logs to the database and return a URL to a page for viewing the logs."""
+    if attachments is None:
+        attachments = []
+    else:
+        attachments = [attachments.get(message.id, []) for message in messages]
+
+    deletedmessage_set = [
+        {
+            "id": message.id,
+            "author": message.author.id,
+            "channel_id": message.channel.id,
+            "content": message.content.replace("\0", ""),  # Null chars cause 400.
+            "embeds": [embed.to_dict() for embed in message.embeds],
+            "attachments": attachment,
+        }
+        # zip_longest pads with [] so that calls without attachments still upload every message.
+        for message, attachment in zip_longest(messages, attachments, fillvalue=[])
+    ]
+
+    try:
+        response = await bot.instance.api_client.post(
+            "bot/deleted-messages",
+            json={
+                "actor": actor_id,
+                "creation": datetime.now(timezone.utc).isoformat(),
+                "deletedmessage_set": deletedmessage_set,
+            }
+        )
+    except ResponseCodeError as e:
+        add_breadcrumb(
+            category="api_error",
+            message=str(e),
+            level="error",
+            data=deletedmessage_set,
+        )
+        raise
+
+    return f"{URLs.site_logs_view}/{response['id']}"
diff --git a/config-default.yml b/config-default.yml
index de0f7e4e8..3a1c3f612 100644
--- a/config-default.yml
+++ b/config-default.yml
@@ -326,45 +326,6 @@ guild:
 
     python_news:                &PYNEWS_WEBHOOK     704381182279942324
 
-filter:
-    # What do we filter?
-    filter_domains:             true
-    filter_everyone_ping:       true
-    filter_invites:             true
-    filter_zalgo:               false
-    watch_regex:                true
-    watch_rich_embeds:          true
-
-    # Notify user on filter?
-    # Notifications are not expected for "watchlist" type filters
-    notify_user_domains:        false
-    notify_user_everyone_ping:  true
-    notify_user_invites:        true
-    notify_user_zalgo:          false
-
-    # Filter configuration
-    offensive_msg_delete_days:  7     # How many days before deleting an offensive message?
- ping_everyone: true - - # Censor doesn't apply to these - channel_whitelist: - - *ADMINS - - *BB_LOGS - - *DEV_LOG - - *MESSAGE_LOG - - *MOD_LOG - - *STAFF_LOUNGE - - role_whitelist: - - *ADMINS_ROLE - - *HELPERS_ROLE - - *MODS_ROLE - - *OWNERS_ROLE - - *PY_COMMUNITY_ROLE - - *SPRINTERS - - *PY_PARTNER_ROLE - - keys: github: !ENV "GITHUB_API_KEY" site_api: !ENV "BOT_API_KEY" @@ -397,64 +358,6 @@ urls: github_bot_repo: "https://github.com/python-discord/bot" -anti_spam: - cache_size: 100 - - # Clean messages that violate a rule. - clean_offending: true - ping_everyone: true - - punishment: - remove_after: 600 - role_id: *MUTED_ROLE - - rules: - attachments: - interval: 10 - max: 6 - - burst: - interval: 10 - max: 7 - - # Burst shared it (temporarily) disabled to prevent - # the bug that triggers multiple infractions/DMs per - # user. It also tends to catch a lot of innocent users - # now that we're so big. - # burst_shared: - # interval: 10 - # max: 20 - - chars: - interval: 5 - max: 4_200 - - discord_emojis: - interval: 10 - max: 20 - - duplicates: - interval: 10 - max: 3 - - links: - interval: 10 - max: 10 - - mentions: - interval: 10 - max: 5 - - newlines: - interval: 10 - max: 100 - max_consecutive: 10 - - role_mentions: - interval: 10 - max: 3 - - metabase: username: !ENV "METABASE_USERNAME" password: !ENV "METABASE_PASSWORD" diff --git a/poetry.lock b/poetry.lock index db694b8f9..72859e4ac 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,3 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. - [[package]] name = "aiodns" version = "3.0.0" @@ -7,10 +5,6 @@ description = "Simple DNS resolver for asyncio" category = "main" optional = false python-versions = "*" -files = [ - {file = "aiodns-3.0.0-py3-none-any.whl", hash = "sha256:2b19bc5f97e5c936638d28e665923c093d8af2bf3aa88d35c43417fa25d136a2"}, - {file = "aiodns-3.0.0.tar.gz", hash = "sha256:946bdfabe743fceeeb093c8a010f5d1645f708a241be849e17edfb0e49e08cd6"}, -] [package.dependencies] pycares = ">=4.0.0" @@ -22,95 +16,6 @@ description = "Async http client/server framework (asyncio)" category = "main" optional = false python-versions = ">=3.6" -files = [ - {file = "aiohttp-3.8.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ba71c9b4dcbb16212f334126cc3d8beb6af377f6703d9dc2d9fb3874fd667ee9"}, - {file = "aiohttp-3.8.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d24b8bb40d5c61ef2d9b6a8f4528c2f17f1c5d2d31fed62ec860f6006142e83e"}, - {file = "aiohttp-3.8.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f88df3a83cf9df566f171adba39d5bd52814ac0b94778d2448652fc77f9eb491"}, - {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b97decbb3372d4b69e4d4c8117f44632551c692bb1361b356a02b97b69e18a62"}, - {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:309aa21c1d54b8ef0723181d430347d7452daaff93e8e2363db8e75c72c2fb2d"}, - {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad5383a67514e8e76906a06741febd9126fc7c7ff0f599d6fcce3e82b80d026f"}, - {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20acae4f268317bb975671e375493dbdbc67cddb5f6c71eebdb85b34444ac46b"}, - {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05a3c31c6d7cd08c149e50dc7aa2568317f5844acd745621983380597f027a18"}, - {file = 
"aiohttp-3.8.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d6f76310355e9fae637c3162936e9504b4767d5c52ca268331e2756e54fd4ca5"}, - {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:256deb4b29fe5e47893fa32e1de2d73c3afe7407738bd3c63829874661d4822d"}, - {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5c59fcd80b9049b49acd29bd3598cada4afc8d8d69bd4160cd613246912535d7"}, - {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:059a91e88f2c00fe40aed9031b3606c3f311414f86a90d696dd982e7aec48142"}, - {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2feebbb6074cdbd1ac276dbd737b40e890a1361b3cc30b74ac2f5e24aab41f7b"}, - {file = "aiohttp-3.8.3-cp310-cp310-win32.whl", hash = "sha256:5bf651afd22d5f0c4be16cf39d0482ea494f5c88f03e75e5fef3a85177fecdeb"}, - {file = "aiohttp-3.8.3-cp310-cp310-win_amd64.whl", hash = "sha256:653acc3880459f82a65e27bd6526e47ddf19e643457d36a2250b85b41a564715"}, - {file = "aiohttp-3.8.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:86fc24e58ecb32aee09f864cb11bb91bc4c1086615001647dbfc4dc8c32f4008"}, - {file = "aiohttp-3.8.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75e14eac916f024305db517e00a9252714fce0abcb10ad327fb6dcdc0d060f1d"}, - {file = "aiohttp-3.8.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d1fde0f44029e02d02d3993ad55ce93ead9bb9b15c6b7ccd580f90bd7e3de476"}, - {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ab94426ddb1ecc6a0b601d832d5d9d421820989b8caa929114811369673235c"}, - {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89d2e02167fa95172c017732ed7725bc8523c598757f08d13c5acca308e1a061"}, - {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:02f9a2c72fc95d59b881cf38a4b2be9381b9527f9d328771e90f72ac76f31ad8"}, - {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c7149272fb5834fc186328e2c1fa01dda3e1fa940ce18fded6d412e8f2cf76d"}, - {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:512bd5ab136b8dc0ffe3fdf2dfb0c4b4f49c8577f6cae55dca862cd37a4564e2"}, - {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7018ecc5fe97027214556afbc7c502fbd718d0740e87eb1217b17efd05b3d276"}, - {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:88c70ed9da9963d5496d38320160e8eb7e5f1886f9290475a881db12f351ab5d"}, - {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:da22885266bbfb3f78218dc40205fed2671909fbd0720aedba39b4515c038091"}, - {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:e65bc19919c910127c06759a63747ebe14f386cda573d95bcc62b427ca1afc73"}, - {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:08c78317e950e0762c2983f4dd58dc5e6c9ff75c8a0efeae299d363d439c8e34"}, - {file = "aiohttp-3.8.3-cp311-cp311-win32.whl", hash = "sha256:45d88b016c849d74ebc6f2b6e8bc17cabf26e7e40c0661ddd8fae4c00f015697"}, - {file = "aiohttp-3.8.3-cp311-cp311-win_amd64.whl", hash = "sha256:96372fc29471646b9b106ee918c8eeb4cca423fcbf9a34daa1b93767a88a2290"}, - {file = "aiohttp-3.8.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c971bf3786b5fad82ce5ad570dc6ee420f5b12527157929e830f51c55dc8af77"}, - {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ff25f48fc8e623d95eca0670b8cc1469a83783c924a602e0fbd47363bb54aaca"}, - {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e381581b37db1db7597b62a2e6b8b57c3deec95d93b6d6407c5b61ddc98aca6d"}, - {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:db19d60d846283ee275d0416e2a23493f4e6b6028825b51290ac05afc87a6f97"}, - {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25892c92bee6d9449ffac82c2fe257f3a6f297792cdb18ad784737d61e7a9a85"}, - {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:398701865e7a9565d49189f6c90868efaca21be65c725fc87fc305906be915da"}, - {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:4a4fbc769ea9b6bd97f4ad0b430a6807f92f0e5eb020f1e42ece59f3ecfc4585"}, - {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:b29bfd650ed8e148f9c515474a6ef0ba1090b7a8faeee26b74a8ff3b33617502"}, - {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:1e56b9cafcd6531bab5d9b2e890bb4937f4165109fe98e2b98ef0dcfcb06ee9d"}, - {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ec40170327d4a404b0d91855d41bfe1fe4b699222b2b93e3d833a27330a87a6d"}, - {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:2df5f139233060578d8c2c975128fb231a89ca0a462b35d4b5fcf7c501ebdbe1"}, - {file = "aiohttp-3.8.3-cp36-cp36m-win32.whl", hash = "sha256:f973157ffeab5459eefe7b97a804987876dd0a55570b8fa56b4e1954bf11329b"}, - {file = "aiohttp-3.8.3-cp36-cp36m-win_amd64.whl", hash = "sha256:437399385f2abcd634865705bdc180c8314124b98299d54fe1d4c8990f2f9494"}, - {file = "aiohttp-3.8.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:09e28f572b21642128ef31f4e8372adb6888846f32fecb288c8b0457597ba61a"}, - {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f3553510abdbec67c043ca85727396ceed1272eef029b050677046d3387be8d"}, - {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e168a7560b7c61342ae0412997b069753f27ac4862ec7867eff74f0fe4ea2ad9"}, - {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:db4c979b0b3e0fa7e9e69ecd11b2b3174c6963cebadeecfb7ad24532ffcdd11a"}, - {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e164e0a98e92d06da343d17d4e9c4da4654f4a4588a20d6c73548a29f176abe2"}, - {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8a78079d9a39ca9ca99a8b0ac2fdc0c4d25fc80c8a8a82e5c8211509c523363"}, - {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:21b30885a63c3f4ff5b77a5d6caf008b037cb521a5f33eab445dc566f6d092cc"}, - {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4b0f30372cef3fdc262f33d06e7b411cd59058ce9174ef159ad938c4a34a89da"}, - {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:8135fa153a20d82ffb64f70a1b5c2738684afa197839b34cc3e3c72fa88d302c"}, - {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:ad61a9639792fd790523ba072c0555cd6be5a0baf03a49a5dd8cfcf20d56df48"}, - {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:978b046ca728073070e9abc074b6299ebf3501e8dee5e26efacb13cec2b2dea0"}, - {file = 
"aiohttp-3.8.3-cp37-cp37m-win32.whl", hash = "sha256:0d2c6d8c6872df4a6ec37d2ede71eff62395b9e337b4e18efd2177de883a5033"}, - {file = "aiohttp-3.8.3-cp37-cp37m-win_amd64.whl", hash = "sha256:21d69797eb951f155026651f7e9362877334508d39c2fc37bd04ff55b2007091"}, - {file = "aiohttp-3.8.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ca9af5f8f5812d475c5259393f52d712f6d5f0d7fdad9acdb1107dd9e3cb7eb"}, - {file = "aiohttp-3.8.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d90043c1882067f1bd26196d5d2db9aa6d268def3293ed5fb317e13c9413ea4"}, - {file = "aiohttp-3.8.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d737fc67b9a970f3234754974531dc9afeea11c70791dcb7db53b0cf81b79784"}, - {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebf909ea0a3fc9596e40d55d8000702a85e27fd578ff41a5500f68f20fd32e6c"}, - {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5835f258ca9f7c455493a57ee707b76d2d9634d84d5d7f62e77be984ea80b849"}, - {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da37dcfbf4b7f45d80ee386a5f81122501ec75672f475da34784196690762f4b"}, - {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87f44875f2804bc0511a69ce44a9595d5944837a62caecc8490bbdb0e18b1342"}, - {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:527b3b87b24844ea7865284aabfab08eb0faf599b385b03c2aa91fc6edd6e4b6"}, - {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d5ba88df9aa5e2f806650fcbeedbe4f6e8736e92fc0e73b0400538fd25a4dd96"}, - {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e7b8813be97cab8cb52b1375f41f8e6804f6507fe4660152e8ca5c48f0436017"}, - {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:2dea10edfa1a54098703cb7acaa665c07b4e7568472a47f4e64e6319d3821ccf"}, - {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:713d22cd9643ba9025d33c4af43943c7a1eb8547729228de18d3e02e278472b6"}, - {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2d252771fc85e0cf8da0b823157962d70639e63cb9b578b1dec9868dd1f4f937"}, - {file = "aiohttp-3.8.3-cp38-cp38-win32.whl", hash = "sha256:66bd5f950344fb2b3dbdd421aaa4e84f4411a1a13fca3aeb2bcbe667f80c9f76"}, - {file = "aiohttp-3.8.3-cp38-cp38-win_amd64.whl", hash = "sha256:84b14f36e85295fe69c6b9789b51a0903b774046d5f7df538176516c3e422446"}, - {file = "aiohttp-3.8.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16c121ba0b1ec2b44b73e3a8a171c4f999b33929cd2397124a8c7fcfc8cd9e06"}, - {file = "aiohttp-3.8.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8d6aaa4e7155afaf994d7924eb290abbe81a6905b303d8cb61310a2aba1c68ba"}, - {file = "aiohttp-3.8.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43046a319664a04b146f81b40e1545d4c8ac7b7dd04c47e40bf09f65f2437346"}, - {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:599418aaaf88a6d02a8c515e656f6faf3d10618d3dd95866eb4436520096c84b"}, - {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92a2964319d359f494f16011e23434f6f8ef0434acd3cf154a6b7bec511e2fb7"}, - {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73a4131962e6d91109bca6536416aa067cf6c4efb871975df734f8d2fd821b37"}, - {file = 
"aiohttp-3.8.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598adde339d2cf7d67beaccda3f2ce7c57b3b412702f29c946708f69cf8222aa"}, - {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:75880ed07be39beff1881d81e4a907cafb802f306efd6d2d15f2b3c69935f6fb"}, - {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0239da9fbafd9ff82fd67c16704a7d1bccf0d107a300e790587ad05547681c8"}, - {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4e3a23ec214e95c9fe85a58470b660efe6534b83e6cbe38b3ed52b053d7cb6ad"}, - {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:47841407cc89a4b80b0c52276f3cc8138bbbfba4b179ee3acbd7d77ae33f7ac4"}, - {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:54d107c89a3ebcd13228278d68f1436d3f33f2dd2af5415e3feaeb1156e1a62c"}, - {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c37c5cce780349d4d51739ae682dec63573847a2a8dcb44381b174c3d9c8d403"}, - {file = "aiohttp-3.8.3-cp39-cp39-win32.whl", hash = "sha256:f178d2aadf0166be4df834c4953da2d7eef24719e8aec9a65289483eeea9d618"}, - {file = "aiohttp-3.8.3-cp39-cp39-win_amd64.whl", hash = "sha256:88e5be56c231981428f4f506c68b6a46fa25c4123a2e86d156c58a8369d31ab7"}, - {file = "aiohttp-3.8.3.tar.gz", hash = "sha256:3828fb41b7203176b82fe5d699e0d845435f2374750a44b480ea6b930f6be269"}, -] [package.dependencies] aiosignal = ">=1.1.2" @@ -122,7 +27,7 @@ multidict = ">=4.5,<7.0" yarl = ">=1.0,<2.0" [package.extras] -speedups = ["Brotli", "aiodns", "cchardet"] +speedups = ["aiodns", "brotli", "cchardet"] [[package]] name = "aiosignal" @@ -131,10 +36,6 @@ description = "aiosignal: a list of registered asynchronous callbacks" category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, -] [package.dependencies] frozenlist = ">=1.1.0" @@ -146,10 +47,6 @@ description = "Better dates & times for Python" category = "main" optional = false python-versions = ">=3.6" -files = [ - {file = "arrow-1.2.3-py3-none-any.whl", hash = "sha256:5a49ab92e3b7b71d96cd6bfcc4df14efefc9dfa96ea19045815914a6ab6b1fe2"}, - {file = "arrow-1.2.3.tar.gz", hash = "sha256:3934b30ca1b9f292376d9db15b19446088d12ec58629bc3f0da28fd55fb633a1"}, -] [package.dependencies] python-dateutil = ">=2.7.0" @@ -161,10 +58,6 @@ description = "An easy to use asynchronous Redis cache" category = "main" optional = false python-versions = "~=3.7" -files = [ - {file = "async-rediscache-1.0.0rc2.tar.gz", hash = "sha256:65b1f67df0bd92defe37a3e645ea4c868da29eb41bfa493643a3b4ae7c0e109c"}, - {file = "async_rediscache-1.0.0rc2-py3-none-any.whl", hash = "sha256:b156cc42b3285e1bd620487c594d7238552f95e48dc07b4e5d0b1c095c3acc86"}, -] [package.dependencies] fakeredis = {version = ">=1.7.1", extras = ["lua"], optional = true, markers = "extra == \"fakeredis\""} @@ -180,28 +73,22 @@ description = "Timeout context manager for asyncio programs" category = "main" optional = false python-versions = ">=3.6" -files = [ - {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, - {file = "async_timeout-4.0.2-py3-none-any.whl", hash = 
"sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, -] [[package]] name = "attrs" -version = "22.1.0" +version = "22.2.0" description = "Classes Without Boilerplate" category = "main" optional = false -python-versions = ">=3.5" -files = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, -] +python-versions = ">=3.6" [package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] +cov = ["attrs", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs"] +docs = ["furo", "sphinx", "myst-parser", "zope.interface", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] +tests = ["attrs", "zope.interface"] +tests-no-zope = ["hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist", "cloudpickle", "mypy (>=0.971,<0.990)", "pytest-mypy-plugins"] +tests_no_zope = ["hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist", "cloudpickle", "mypy (>=0.971,<0.990)", "pytest-mypy-plugins"] [[package]] name = "beautifulsoup4" @@ -210,10 +97,6 @@ description = "Screen-scraping library" category = "main" optional = false python-versions = ">=3.6.0" -files = [ - {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, - {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, -] [package.dependencies] soupsieve = ">1.2" @@ -229,10 +112,6 @@ description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false python-versions = ">=3.6" -files = [ - {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, - {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, -] [[package]] name = "cffi" @@ -241,72 +120,6 @@ description = "Foreign Function Interface for Python calling C code." 
category = "main" optional = false python-versions = "*" -files = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, -] [package.dependencies] pycparser = "*" @@ -318,10 +131,6 @@ description = "Validate configuration and produce human readable error messages. category = "dev" optional = false python-versions = ">=3.6.1" -files = [ - {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, - {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, -] [[package]] name = "charset-normalizer" @@ -330,13 +139,9 @@ description = "The Real First Universal Charset Detector. 
Open, modern and activ category = "main" optional = false python-versions = ">=3.6.0" -files = [ - {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, - {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, -] [package.extras] -unicode-backport = ["unicodedata2"] +unicode_backport = ["unicodedata2"] [[package]] name = "colorama" @@ -345,10 +150,6 @@ description = "Cross-platform colored terminal text." category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] [[package]] name = "coloredlogs" @@ -357,10 +158,6 @@ description = "Colored terminal output for Python's logging module" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"}, - {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"}, -] [package.dependencies] humanfriendly = ">=9.1" @@ -375,58 +172,6 @@ description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.7" -files = [ - {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, - {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, - {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, - {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, - {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, - {file = 
"coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, - {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, - {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, - {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, - {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, - {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, 
- {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, - {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, - {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, - {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, - {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, - {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, - {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, -] [package.dependencies] tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} @@ -441,36 +186,28 @@ description = "Deep Difference and Search of any Python object/data. 
Recreate ob category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "deepdiff-6.2.1-py3-none-any.whl", hash = "sha256:8ba27c185f9197b78c316ce7bb0c743d25d14f7cdb8ec3b340437dbc93dcbff2"}, - {file = "deepdiff-6.2.1.tar.gz", hash = "sha256:3fe134dde5b3922ff8c51fc1e95a972e659c853797231b836a5ccf15532fd516"}, -] [package.dependencies] ordered-set = ">=4.0.2,<4.2.0" [package.extras] -cli = ["clevercsv (==0.7.4)", "click (==8.1.3)", "pyyaml (==6.0)", "toml (==0.10.2)"] +cli = ["click (==8.1.3)", "pyyaml (==6.0)", "toml (==0.10.2)", "clevercsv (==0.7.4)"] [[package]] -name = "discord-py" +name = "discord.py" version = "2.2.0" description = "A Python wrapper for the Discord API" category = "main" optional = false python-versions = ">=3.8.0" -files = [ - {file = "discord.py-2.2.0-py3-none-any.whl", hash = "sha256:012e98571af6847467e81f9501bbe4c6ebfe292c842f5ef8e951908839ee1cd0"}, - {file = "discord.py-2.2.0.tar.gz", hash = "sha256:a92d69ab6f982998693d0c371ea19235fa0f9900b50068fc461086d02c33e6bb"}, -] [package.dependencies] aiohttp = ">=3.7.4,<4" [package.extras] docs = ["sphinx (==4.4.0)", "sphinxcontrib-trio (==1.1.2)", "sphinxcontrib-websupport", "typing-extensions (>=4.3,<5)"] -speed = ["Brotli", "aiodns (>=1.1)", "cchardet (==2.1.7)", "orjson (>=3.5.4)"] -test = ["coverage[toml]", "pytest", "pytest-asyncio", "pytest-cov", "pytest-mock", "typing-extensions (>=4.3,<5)"] +speed = ["orjson (>=3.5.4)", "aiodns (>=1.1)", "brotli", "cchardet (==2.1.7)"] +test = ["coverage", "pytest", "pytest-asyncio", "pytest-cov", "pytest-mock", "typing-extensions (>=4.3,<5)"] voice = ["PyNaCl (>=1.3.0,<1.6)"] [[package]] @@ -480,10 +217,6 @@ description = "Distribution utilities" category = "dev" optional = false python-versions = "*" -files = [ - {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"}, - {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"}, -] [[package]] name = "emoji" @@ -492,24 +225,17 @@ description = "Emoji for Python" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "emoji-2.2.0.tar.gz", hash = "sha256:a2986c21e4aba6b9870df40ef487a17be863cb7778dcf1c01e25917b7cd210bb"}, -] [package.extras] -dev = ["coverage", "coveralls", "pytest"] +dev = ["pytest", "coverage", "coveralls"] [[package]] name = "exceptiongroup" -version = "1.0.4" +version = "1.1.0" description = "Backport of PEP 654 (exception groups)" category = "dev" optional = false python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"}, - {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"}, -] [package.extras] test = ["pytest (>=6)"] @@ -521,10 +247,6 @@ description = "execnet: rapid multi-Python deployment" category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"}, - {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"}, -] [package.extras] testing = ["pre-commit"] @@ -536,10 +258,6 @@ description = "Fake implementation of redis API for testing purposes." 
category = "main" optional = false python-versions = ">=3.7,<4.0" -files = [ - {file = "fakeredis-2.0.0-py3-none-any.whl", hash = "sha256:fb3186cbbe4c549f922b0f08eb84b09c0e51ecf8efbed3572d20544254f93a97"}, - {file = "fakeredis-2.0.0.tar.gz", hash = "sha256:6d1dc2417921b7ce56a80877afa390d6335a3154146f201a86e3a14417bdc79e"}, -] [package.dependencies] lupa = {version = ">=1.13,<2.0", optional = true, markers = "extra == \"lua\""} @@ -557,29 +275,21 @@ description = "Universal feed parser, handles RSS 0.9x, RSS 1.0, RSS 2.0, CDF, A category = "main" optional = false python-versions = ">=3.6" -files = [ - {file = "feedparser-6.0.10-py3-none-any.whl", hash = "sha256:79c257d526d13b944e965f6095700587f27388e50ea16fd245babe4dfae7024f"}, - {file = "feedparser-6.0.10.tar.gz", hash = "sha256:27da485f4637ce7163cdeab13a80312b93b7d0c1b775bef4a47629a3110bca51"}, -] [package.dependencies] sgmllib3k = "*" [[package]] name = "filelock" -version = "3.8.0" +version = "3.9.0" description = "A platform independent file lock." category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, - {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, -] [package.extras] -docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] -testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2022.12.7)", "sphinx-autodoc-typehints (>=1.19.5)", "sphinx (>=5.3)"] +testing = ["covdefaults (>=2.2.2)", "coverage (>=7.0.1)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)", "pytest (>=7.2)"] [[package]] name = "flake8" @@ -588,10 +298,6 @@ description = "the modular source code checker: pep8 pyflakes and co" category = "dev" optional = false python-versions = ">=3.8.1" -files = [ - {file = "flake8-6.0.0-py2.py3-none-any.whl", hash = "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7"}, - {file = "flake8-6.0.0.tar.gz", hash = "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"}, -] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" @@ -605,10 +311,6 @@ description = "Flake8 Type Annotation Checks" category = "dev" optional = false python-versions = ">=3.7,<4.0" -files = [ - {file = "flake8-annotations-2.9.1.tar.gz", hash = "sha256:11f09efb99ae63c8f9d6b492b75fe147fbc323179fddfe00b2e56eefeca42f57"}, - {file = "flake8_annotations-2.9.1-py3-none-any.whl", hash = "sha256:a4385158a7a9fc8af1d8820a2f4c8d03387997006a83f5f8bfe5bc6085bdf88a"}, -] [package.dependencies] attrs = ">=21.4" @@ -621,17 +323,13 @@ description = "A plugin for flake8 finding likely bugs and design problems in yo category = "dev" optional = false python-versions = ">=3.7" -files = [ - {file = "flake8-bugbear-22.10.27.tar.gz", hash = "sha256:a6708608965c9e0de5fff13904fed82e0ba21ac929fe4896459226a797e11cd5"}, - {file = "flake8_bugbear-22.10.27-py3-none-any.whl", hash = "sha256:6ad0ab754507319060695e2f2be80e6d8977cfcea082293089a9226276bd825d"}, -] [package.dependencies] attrs = ">=19.2.0" flake8 = ">=3.0.0" [package.extras] -dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "tox"] +dev = ["tox", "coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit"] [[package]] name = "flake8-docstrings" @@ -640,10 +338,6 @@ description = "Extension for flake8 which uses pydocstyle to check docstrings" category = "dev" 
optional = false python-versions = "*" -files = [ - {file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"}, - {file = "flake8_docstrings-1.6.0-py2.py3-none-any.whl", hash = "sha256:99cac583d6c7e32dd28bbfbef120a7c0d1b6dde4adb5a9fd441c4227a6534bde"}, -] [package.dependencies] flake8 = ">=3" @@ -656,10 +350,6 @@ description = "flake8 plugin that integrates isort ." category = "dev" optional = false python-versions = ">=3.7" -files = [ - {file = "flake8-isort-5.0.3.tar.gz", hash = "sha256:0951398c343c67f4933407adbbfb495d4df7c038650c5d05753a006efcfeb390"}, - {file = "flake8_isort-5.0.3-py3-none-any.whl", hash = "sha256:8c4ab431d87780d0c8336e9614e50ef11201bc848ef64ca017532dec39d4bf49"}, -] [package.dependencies] flake8 = "*" @@ -675,10 +365,6 @@ description = "string format checker, plugin for flake8" category = "dev" optional = false python-versions = "*" -files = [ - {file = "flake8-string-format-0.3.0.tar.gz", hash = "sha256:65f3da786a1461ef77fca3780b314edb2853c377f2e35069723348c8917deaa2"}, - {file = "flake8_string_format-0.3.0-py2.py3-none-any.whl", hash = "sha256:812ff431f10576a74c89be4e85b8e075a705be39bc40c4b4278b5b13e2afa9af"}, -] [package.dependencies] flake8 = "*" @@ -690,10 +376,6 @@ description = "A flake8 plugin that helps you write tidier imports." category = "dev" optional = false python-versions = ">=3.7" -files = [ - {file = "flake8-tidy-imports-4.8.0.tar.gz", hash = "sha256:df44f9c841b5dfb3a7a1f0da8546b319d772c2a816a1afefcce43e167a593d83"}, - {file = "flake8_tidy_imports-4.8.0-py3-none-any.whl", hash = "sha256:25bd9799358edefa0e010ce2c587b093c3aba942e96aeaa99b6d0500ae1bf09c"}, -] [package.dependencies] flake8 = ">=3.8.0" @@ -705,9 +387,6 @@ description = "TODO notes checker, plugin for flake8" category = "dev" optional = false python-versions = "*" -files = [ - {file = "flake8-todo-0.7.tar.gz", hash = "sha256:6e4c5491ff838c06fe5a771b0e95ee15fc005ca57196011011280fc834a85915"}, -] [package.dependencies] pycodestyle = ">=2.0.0,<3.0.0" @@ -719,82 +398,6 @@ description = "A list-like structure which implements collections.abc.MutableSeq category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4"}, - {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0"}, - {file = "frozenlist-1.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420"}, - {file = "frozenlist-1.3.3-cp310-cp310-win32.whl", hash = "sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642"}, - {file = "frozenlist-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1"}, - {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7"}, - {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678"}, - {file = "frozenlist-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81"}, - {file = "frozenlist-1.3.3-cp311-cp311-win32.whl", hash = "sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8"}, - {file = "frozenlist-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32"}, - {file = 
"frozenlist-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401"}, - {file = "frozenlist-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a"}, - {file = "frozenlist-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411"}, - {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a"}, - {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5"}, - {file = "frozenlist-1.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3"}, - {file = "frozenlist-1.3.3-cp38-cp38-win32.whl", hash = "sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b"}, - {file = "frozenlist-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef"}, - {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf"}, - {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1"}, - {file = "frozenlist-1.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1"}, - {file = "frozenlist-1.3.3-cp39-cp39-win32.whl", hash = "sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38"}, - {file = "frozenlist-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9"}, - {file = "frozenlist-1.3.3.tar.gz", hash = "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a"}, -] [[package]] name = "humanfriendly" @@ -803,25 +406,17 @@ description = "Human friendly output for text interfaces using 
Python" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"}, - {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, -] [package.dependencies] pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""} [[package]] name = "identify" -version = "2.5.9" +version = "2.5.18" description = "File identification library for Python" category = "dev" optional = false python-versions = ">=3.7" -files = [ - {file = "identify-2.5.9-py2.py3-none-any.whl", hash = "sha256:a390fb696e164dbddb047a0db26e57972ae52fbd037ae68797e5ae2f4492485d"}, - {file = "identify-2.5.9.tar.gz", hash = "sha256:906036344ca769539610436e40a684e170c3648b552194980bb7b617a8daeb9f"}, -] [package.extras] license = ["ukkonen"] @@ -833,40 +428,28 @@ description = "Internationalized Domain Names in Applications (IDNA)" category = "main" optional = false python-versions = ">=3.5" -files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, -] [[package]] name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" category = "dev" optional = false -python-versions = "*" -files = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, -] +python-versions = ">=3.7" [[package]] name = "isort" -version = "5.10.1" +version = "5.12.0" description = "A Python utility / library to sort Python imports." 
category = "dev" optional = false -python-versions = ">=3.6.1,<4.0" -files = [ - {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, - {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, -] +python-versions = ">=3.8.0" [package.extras] -colors = ["colorama (>=0.4.3,<0.5.0)"] -pipfile-deprecated-finder = ["pipreqs", "requirementslib"] -plugins = ["setuptools"] +colors = ["colorama (>=0.4.3)"] requirements-deprecated-finder = ["pip-api", "pipreqs"] +pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] +plugins = ["setuptools"] [[package]] name = "lupa" @@ -875,83 +458,6 @@ description = "Python wrapper around Lua and LuaJIT" category = "main" optional = false python-versions = "*" -files = [ - {file = "lupa-1.14.1-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:20b486cda76ff141cfb5f28df9c757224c9ed91e78c5242d402d2e9cb699d464"}, - {file = "lupa-1.14.1-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c685143b18c79a3a1fa25a4cc774a87b5a61c606f249bcf824d125d8accb6b2c"}, - {file = "lupa-1.14.1-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:3865f9dbe9a84bd6a471250e52068aaf1147f206a51905fb6d93e1db9efb00ee"}, - {file = "lupa-1.14.1-cp27-cp27m-win32.whl", hash = "sha256:2dacdddd5e28c6f5fd96a46c868ec5c34b0fad1ec7235b5bbb56f06183a37f20"}, - {file = "lupa-1.14.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e754cbc6cacc9bca6ff2b39025e9659a2098420639d214054b06b466825f4470"}, - {file = "lupa-1.14.1-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e36f3eb70705841bce9c15e12bc6fc3b2f4f68a41ba0e4af303b22fc4d8667c"}, - {file = "lupa-1.14.1-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0aac06098d46729edd2d04e80b55d9d310e902f042f27521308df77cb1ba0191"}, - {file = "lupa-1.14.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:9706a192339efa1a6b7d806389572a669dd9ae2250469ff1ce13f684085af0b4"}, - {file = "lupa-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d688a35f7fe614720ed7b820cbb739b37eff577a764c2003e229c2a752201cea"}, - {file = "lupa-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:36d888bd42589ecad21a5fb957b46bc799640d18eff2fd0c47a79ffb4a1b286c"}, - {file = "lupa-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:0423acd739cf25dbdbf1e33a0aa8026f35e1edea0573db63d156f14a082d77c8"}, - {file = "lupa-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7068ae0d6a1a35ea8718ef6e103955c1ee143181bf0684604a76acc67f69de55"}, - {file = "lupa-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5fef8b755591f0466438ad0a3e92ecb21dd6bb1f05d0215139b6ff8c87b2ce65"}, - {file = "lupa-1.14.1-cp310-cp310-win32.whl", hash = "sha256:4a44e1fd0e9f4a546fbddd2e0fd913c823c9ac58a5f3160fb4f9109f633cb027"}, - {file = "lupa-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:b83100cd7b48a7ca85dda4e9a6a5e7bc3312691e7f94c6a78d1f9a48a86a7fec"}, - {file = "lupa-1.14.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:1b8bda50c61c98ff9bb41d1f4934640c323e9f1539021810016a2eae25a66c3d"}, - {file = "lupa-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aa1449aa1ab46c557344867496dee324b47ede0c41643df8f392b00262d21b12"}, - {file = 
"lupa-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a17ebf91b3aa1c5c36661e34c9cf10e04bb4cc00076e8b966f86749647162050"}, - {file = "lupa-1.14.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:b1d9cfa469e7a2ad7e9a00fea7196b0022aa52f43a2043c2e0be92122e7bcfe8"}, - {file = "lupa-1.14.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bc4f5e84aee0d567aa2e116ff6844d06086ef7404d5102807e59af5ce9daf3c0"}, - {file = "lupa-1.14.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:40cf2eb90087dfe8ee002740469f2c4c5230d5e7d10ffb676602066d2f9b1ac9"}, - {file = "lupa-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:63a27c38295aa971730795941270fff2ce65576f68ec63cb3ecb90d7a4526d03"}, - {file = "lupa-1.14.1-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:457330e7a5456c4415fc6d38822036bd4cff214f9d8f7906200f6b588f1b2932"}, - {file = "lupa-1.14.1-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d61fb507a36e18dc68f2d9e9e2ea19e1114b1a5e578a36f18e9be7a17d2931d1"}, - {file = "lupa-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:f26b73d10130ad73e07d45dfe9b7c3833e3a2aa1871a4ecf5ce2dc1abeeae74d"}, - {file = "lupa-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:297d801ba8e4e882b295c25d92f1634dde5e76d07ec6c35b13882401248c485d"}, - {file = "lupa-1.14.1-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c8bddd22eaeea0ce9d302b390d8bc606f003bf6c51be68e8b007504433b91280"}, - {file = "lupa-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1661c890861cf0f7002d7a7e00f50c885577954c2d85a7173b218d3228fa3869"}, - {file = "lupa-1.14.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:2ee480d31555f00f8bf97dd949c596508bd60264cff1921a3797a03dd369e8cd"}, - {file = "lupa-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:1ff93560c2546d7627ab2f95b5e88f000705db70a3d6041ac29d050f094f2a35"}, - {file = "lupa-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:47f1459e2c98480c291ae3b70688d762f82dbb197ef121d529aa2c4e8bab1ba3"}, - {file = "lupa-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:8986dba002346505ee44c78303339c97a346b883015d5cf3aaa0d76d3b952744"}, - {file = "lupa-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8912459fddf691e70f2add799a128822bae725826cfb86f69720a38bdfa42410"}, - {file = "lupa-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:9b9d1b98391959ae531bbb8df7559ac2c408fcbd33721921b6a05fd6414161e0"}, - {file = "lupa-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:61ff409040fa3a6c358b7274c10e556ba22afeb3470f8d23cd0a6bf418fb30c9"}, - {file = "lupa-1.14.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:350ba2218eea800898854b02753dc0c9cfe83db315b30c0dc10ab17493f0321a"}, - {file = "lupa-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:46dcbc0eae63899468686bb1dfc2fe4ed21fe06f69416113f039d88aab18f5dc"}, - {file = "lupa-1.14.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:7ad96923e2092d8edbf0c1b274f9b522690b932ed47a70d9a0c1c329f169f107"}, - {file = "lupa-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:364b291bf2b55555c87b4bffb4db5a9619bcdb3c02e58aebde5319c3c59ec9b2"}, - {file = "lupa-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:0ed071efc8ee231fac1fcd6b6fce44dc6da75a352b9b78403af89a48d759743c"}, - {file = "lupa-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:bce60847bebb4aa9ed3436fab3e84585e9094e15e1cb8d32e16e041c4ef65331"}, - {file = "lupa-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5fbe7f83b0007cda3b158a93726c80dfd39003a8c5c5d608f6fdf8c60c42117f"}, - {file = "lupa-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4bd789967cbb5c84470f358c7fa8fcbf7464185adbd872a6c3de9b42d29a6d26"}, - {file = "lupa-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:ca58da94a6495dda0063ba975fe2e6f722c5e84c94f09955671b279c41cfde96"}, - {file = "lupa-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:51d6965663b2be1a593beabfa10803fdbbcf0b293aa4a53ea09a23db89787d0d"}, - {file = "lupa-1.14.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d251ba009996a47231615ea6b78123c88446979ae99b5585269ec46f7a9197aa"}, - {file = "lupa-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:abe3fc103d7bd34e7028d06db557304979f13ebf9050ad0ea6c1cc3a1caea017"}, - {file = "lupa-1.14.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:4ea185c394bf7d07e9643d868e50cc94a530bb298d4bdae4915672b3809cc72b"}, - {file = "lupa-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:6aff7257b5953de620db489899406cddb22093d1124fc5b31f8900e44a9dbc2a"}, - {file = "lupa-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d6f5bfbd8fc48c27786aef8f30c84fd9197747fa0b53761e69eb968d81156cbf"}, - {file = "lupa-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:dec7580b86975bc5bdf4cc54638c93daaec10143b4acc4a6c674c0f7e27dd363"}, - {file = "lupa-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:96a201537930813b34145daf337dcd934ddfaebeba6452caf8a32a418e145e82"}, - {file = "lupa-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c0efaae8e7276f4feb82cba43c3cd45c82db820c9dab3965a8f2e0cb8b0bc30b"}, - {file = "lupa-1.14.1-cp38-cp38-win32.whl", hash = "sha256:b6953854a343abdfe11aa52a2d021fadf3d77d0cd2b288b650f149b597e0d02d"}, - {file = "lupa-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:c79ced2aaf7577e3d06933cf0d323fa968e6864c498c376b0bd475ded86f01f3"}, - {file = "lupa-1.14.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:72589a21a3776c7dd4b05374780e7ecf1b49c490056077fc91486461935eaaa3"}, - {file = "lupa-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:30d356a433653b53f1fe29477faaf5e547b61953b971b010d2185a561f4ce82a"}, - {file = "lupa-1.14.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:2116eb467797d5a134b2c997dfc7974b9a84b3aa5776c17ba8578ed4f5f41a9b"}, - {file = "lupa-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:24d6c3435d38614083d197f3e7bcfe6d3d9eb02ee393d60a4ab9c719bc000162"}, - {file = "lupa-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9144ecfa5e363f03e4d1c1e678b081cd223438be08f96604fca478591c3e3b53"}, - {file = "lupa-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:69be1d6c3f3ab9fc988c9a0e5801f23f68e2c8b5900a8fd3ae57d1d0e9c5539c"}, - {file = "lupa-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:77b587043d0bee9cc738e00c12718095cf808dd269b171f852bd82026c664c69"}, - {file = 
"lupa-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:62530cf0a9c749a3cd13ad92b31eaf178939d642b6176b46cfcd98f6c5006383"}, - {file = "lupa-1.14.1-cp39-cp39-win32.whl", hash = "sha256:d891b43b8810191eb4c42a0bc57c32f481098029aac42b176108e09ffe118cdc"}, - {file = "lupa-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:cf643bc48a152e2c572d8be7fc1de1c417a6a9648d337ffedebf00f57016b786"}, - {file = "lupa-1.14.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0ac862c6d2eb542ac70d294a8e960b9ae7f46297559733b4c25f9e3c945e522a"}, - {file = "lupa-1.14.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:0a15680f425b91ec220eb84b0ab59d24c4bee69d15b88245a6998a7d38c78ba6"}, - {file = "lupa-1.14.1-pp37-pypy37_pp73-win32.whl", hash = "sha256:8a064d72991ba53aeea9720d95f2055f7f8a1e2f35b32a35d92248b63a94bcd1"}, - {file = "lupa-1.14.1-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6d87d6c51e6c3b6326d18af83e81f4860ba0b287cda1101b1ab8562389d598f5"}, - {file = "lupa-1.14.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:b3efe9d887cfdf459054308ecb716e0eb11acb9a96c3022ee4e677c1f510d244"}, - {file = "lupa-1.14.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:723fff6fcab5e7045e0fa79014729577f98082bd1fd1050f907f83a41e4c9865"}, - {file = "lupa-1.14.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:930092a27157241d07d6d09ff01d5530a9e4c0dd515228211f2902b7e88ec1f0"}, - {file = "lupa-1.14.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:7f6bc9852bdf7b16840c984a1e9f952815f7d4b3764585d20d2e062bd1128074"}, - {file = "lupa-1.14.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:8f65d2007092a04616c215fea5ad05ba8f661bd0f45cde5265d27150f64d3dd8"}, - {file = "lupa-1.14.1.tar.gz", hash = "sha256:d0fd4e60ad149fe25c90530e2a0e032a42a6f0455f29ca0edb8170d6ec751c6e"}, -] [[package]] name = "lxml" @@ -960,83 +466,11 @@ description = "Powerful and Pythonic XML processing library combining libxml2/li category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" -files = [ - {file = "lxml-4.9.1-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:98cafc618614d72b02185ac583c6f7796202062c41d2eeecdf07820bad3295ed"}, - {file = "lxml-4.9.1-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c62e8dd9754b7debda0c5ba59d34509c4688f853588d75b53c3791983faa96fc"}, - {file = "lxml-4.9.1-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:21fb3d24ab430fc538a96e9fbb9b150029914805d551deeac7d7822f64631dfc"}, - {file = "lxml-4.9.1-cp27-cp27m-win32.whl", hash = "sha256:86e92728ef3fc842c50a5cb1d5ba2bc66db7da08a7af53fb3da79e202d1b2cd3"}, - {file = "lxml-4.9.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4cfbe42c686f33944e12f45a27d25a492cc0e43e1dc1da5d6a87cbcaf2e95627"}, - {file = "lxml-4.9.1-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dad7b164905d3e534883281c050180afcf1e230c3d4a54e8038aa5cfcf312b84"}, - {file = "lxml-4.9.1-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a614e4afed58c14254e67862456d212c4dcceebab2eaa44d627c2ca04bf86837"}, - {file = "lxml-4.9.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f9ced82717c7ec65a67667bb05865ffe38af0e835cdd78728f1209c8fffe0cad"}, - {file = 
"lxml-4.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:d9fc0bf3ff86c17348dfc5d322f627d78273eba545db865c3cd14b3f19e57fa5"}, - {file = "lxml-4.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e5f66bdf0976ec667fc4594d2812a00b07ed14d1b44259d19a41ae3fff99f2b8"}, - {file = "lxml-4.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fe17d10b97fdf58155f858606bddb4e037b805a60ae023c009f760d8361a4eb8"}, - {file = "lxml-4.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8caf4d16b31961e964c62194ea3e26a0e9561cdf72eecb1781458b67ec83423d"}, - {file = "lxml-4.9.1-cp310-cp310-win32.whl", hash = "sha256:4780677767dd52b99f0af1f123bc2c22873d30b474aa0e2fc3fe5e02217687c7"}, - {file = "lxml-4.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:b122a188cd292c4d2fcd78d04f863b789ef43aa129b233d7c9004de08693728b"}, - {file = "lxml-4.9.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:be9eb06489bc975c38706902cbc6888f39e946b81383abc2838d186f0e8b6a9d"}, - {file = "lxml-4.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f1be258c4d3dc609e654a1dc59d37b17d7fef05df912c01fc2e15eb43a9735f3"}, - {file = "lxml-4.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:927a9dd016d6033bc12e0bf5dee1dde140235fc8d0d51099353c76081c03dc29"}, - {file = "lxml-4.9.1-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9232b09f5efee6a495a99ae6824881940d6447debe272ea400c02e3b68aad85d"}, - {file = "lxml-4.9.1-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:04da965dfebb5dac2619cb90fcf93efdb35b3c6994fea58a157a834f2f94b318"}, - {file = "lxml-4.9.1-cp35-cp35m-win32.whl", hash = "sha256:4d5bae0a37af799207140652a700f21a85946f107a199bcb06720b13a4f1f0b7"}, - {file = "lxml-4.9.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4878e667ebabe9b65e785ac8da4d48886fe81193a84bbe49f12acff8f7a383a4"}, - {file = "lxml-4.9.1-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:1355755b62c28950f9ce123c7a41460ed9743c699905cbe664a5bcc5c9c7c7fb"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:bcaa1c495ce623966d9fc8a187da80082334236a2a1c7e141763ffaf7a405067"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6eafc048ea3f1b3c136c71a86db393be36b5b3d9c87b1c25204e7d397cee9536"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:13c90064b224e10c14dcdf8086688d3f0e612db53766e7478d7754703295c7c8"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206a51077773c6c5d2ce1991327cda719063a47adc02bd703c56a662cdb6c58b"}, - {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e8f0c9d65da595cfe91713bc1222af9ecabd37971762cb830dea2fc3b3bb2acf"}, - {file = "lxml-4.9.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8f0a4d179c9a941eb80c3a63cdb495e539e064f8054230844dcf2fcb812b71d3"}, - {file = "lxml-4.9.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:830c88747dce8a3e7525defa68afd742b4580df6aa2fdd6f0855481e3994d391"}, - {file = "lxml-4.9.1-cp36-cp36m-win32.whl", hash = "sha256:1e1cf47774373777936c5aabad489fef7b1c087dcd1f426b621fda9dcc12994e"}, - {file = "lxml-4.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:5974895115737a74a00b321e339b9c3f45c20275d226398ae79ac008d908bff7"}, 
- {file = "lxml-4.9.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:1423631e3d51008871299525b541413c9b6c6423593e89f9c4cfbe8460afc0a2"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:2aaf6a0a6465d39b5ca69688fce82d20088c1838534982996ec46633dc7ad6cc"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:9f36de4cd0c262dd9927886cc2305aa3f2210db437aa4fed3fb4940b8bf4592c"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae06c1e4bc60ee076292e582a7512f304abdf6c70db59b56745cca1684f875a4"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:57e4d637258703d14171b54203fd6822fda218c6c2658a7d30816b10995f29f3"}, - {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6d279033bf614953c3fc4a0aa9ac33a21e8044ca72d4fa8b9273fe75359d5cca"}, - {file = "lxml-4.9.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a60f90bba4c37962cbf210f0188ecca87daafdf60271f4c6948606e4dabf8785"}, - {file = "lxml-4.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6ca2264f341dd81e41f3fffecec6e446aa2121e0b8d026fb5130e02de1402785"}, - {file = "lxml-4.9.1-cp37-cp37m-win32.whl", hash = "sha256:27e590352c76156f50f538dbcebd1925317a0f70540f7dc8c97d2931c595783a"}, - {file = "lxml-4.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:eea5d6443b093e1545ad0210e6cf27f920482bfcf5c77cdc8596aec73523bb7e"}, - {file = "lxml-4.9.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f05251bbc2145349b8d0b77c0d4e5f3b228418807b1ee27cefb11f69ed3d233b"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:487c8e61d7acc50b8be82bda8c8d21d20e133c3cbf41bd8ad7eb1aaeb3f07c97"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d1a92d8e90b286d491e5626af53afef2ba04da33e82e30744795c71880eaa21"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:b570da8cd0012f4af9fa76a5635cd31f707473e65a5a335b186069d5c7121ff2"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ef87fca280fb15342726bd5f980f6faf8b84a5287fcc2d4962ea8af88b35130"}, - {file = "lxml-4.9.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:93e414e3206779ef41e5ff2448067213febf260ba747fc65389a3ddaa3fb8715"}, - {file = "lxml-4.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6653071f4f9bac46fbc30f3c7838b0e9063ee335908c5d61fb7a4a86c8fd2036"}, - {file = "lxml-4.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:32a73c53783becdb7eaf75a2a1525ea8e49379fb7248c3eeefb9412123536387"}, - {file = "lxml-4.9.1-cp38-cp38-win32.whl", hash = "sha256:1a7c59c6ffd6ef5db362b798f350e24ab2cfa5700d53ac6681918f314a4d3b94"}, - {file = "lxml-4.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:1436cf0063bba7888e43f1ba8d58824f085410ea2025befe81150aceb123e345"}, - {file = "lxml-4.9.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:4beea0f31491bc086991b97517b9683e5cfb369205dac0148ef685ac12a20a67"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:41fb58868b816c202e8881fd0f179a4644ce6e7cbbb248ef0283a34b73ec73bb"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = 
"sha256:bd34f6d1810d9354dc7e35158aa6cc33456be7706df4420819af6ed966e85448"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:edffbe3c510d8f4bf8640e02ca019e48a9b72357318383ca60e3330c23aaffc7"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6d949f53ad4fc7cf02c44d6678e7ff05ec5f5552b235b9e136bd52e9bf730b91"}, - {file = "lxml-4.9.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:079b68f197c796e42aa80b1f739f058dcee796dc725cc9a1be0cdb08fc45b000"}, - {file = "lxml-4.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9c3a88d20e4fe4a2a4a84bf439a5ac9c9aba400b85244c63a1ab7088f85d9d25"}, - {file = "lxml-4.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4e285b5f2bf321fc0857b491b5028c5f276ec0c873b985d58d7748ece1d770dd"}, - {file = "lxml-4.9.1-cp39-cp39-win32.whl", hash = "sha256:ef72013e20dd5ba86a8ae1aed7f56f31d3374189aa8b433e7b12ad182c0d2dfb"}, - {file = "lxml-4.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:10d2017f9150248563bb579cd0d07c61c58da85c922b780060dcc9a3aa9f432d"}, - {file = "lxml-4.9.1-pp37-pypy37_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0538747a9d7827ce3e16a8fdd201a99e661c7dee3c96c885d8ecba3c35d1032c"}, - {file = "lxml-4.9.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0645e934e940107e2fdbe7c5b6fb8ec6232444260752598bc4d09511bd056c0b"}, - {file = "lxml-4.9.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6daa662aba22ef3258934105be2dd9afa5bb45748f4f702a3b39a5bf53a1f4dc"}, - {file = "lxml-4.9.1-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:603a464c2e67d8a546ddaa206d98e3246e5db05594b97db844c2f0a1af37cf5b"}, - {file = "lxml-4.9.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c4b2e0559b68455c085fb0f6178e9752c4be3bba104d6e881eb5573b399d1eb2"}, - {file = "lxml-4.9.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0f3f0059891d3254c7b5fb935330d6db38d6519ecd238ca4fce93c234b4a0f73"}, - {file = "lxml-4.9.1-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c852b1530083a620cb0de5f3cd6826f19862bafeaf77586f1aef326e49d95f0c"}, - {file = "lxml-4.9.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:287605bede6bd36e930577c5925fcea17cb30453d96a7b4c63c14a257118dbb9"}, - {file = "lxml-4.9.1.tar.gz", hash = "sha256:fe749b052bb7233fe5d072fcb549221a8cb1a16725c47c37e42b0b9cb3ff2c3f"}, -] [package.extras] cssselect = ["cssselect (>=0.7)"] html5 = ["html5lib"] -htmlsoup = ["BeautifulSoup4"] +htmlsoup = ["beautifulsoup4"] source = ["Cython (>=0.29.7)"] [[package]] @@ -1046,10 +480,6 @@ description = "Convert HTML to markdown." 
category = "main" optional = false python-versions = "*" -files = [ - {file = "markdownify-0.6.1-py3-none-any.whl", hash = "sha256:7489fd5c601536996a376c4afbcd1dd034db7690af807120681461e82fbc0acc"}, - {file = "markdownify-0.6.1.tar.gz", hash = "sha256:31d7c13ac2ada8bfc7535a25fee6622ca720e1b5f2d4a9cbc429d167c21f886d"}, -] [package.dependencies] beautifulsoup4 = "*" @@ -1062,10 +492,6 @@ description = "McCabe checker, plugin for flake8" category = "dev" optional = false python-versions = ">=3.6" -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] [[package]] name = "more-itertools" @@ -1074,10 +500,6 @@ description = "More routines for operating on iterables, beyond itertools" category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "more-itertools-9.0.0.tar.gz", hash = "sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab"}, - {file = "more_itertools-9.0.0-py3-none-any.whl", hash = "sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41"}, -] [[package]] name = "mslex" @@ -1086,79 +508,14 @@ description = "shlex for windows" category = "dev" optional = false python-versions = ">=3.5" -files = [ - {file = "mslex-0.3.0-py2.py3-none-any.whl", hash = "sha256:380cb14abf8fabf40e56df5c8b21a6d533dc5cbdcfe42406bbf08dda8f42e42a"}, - {file = "mslex-0.3.0.tar.gz", hash = "sha256:4a1ac3f25025cad78ad2fe499dd16d42759f7a3801645399cce5c404415daa97"}, -] [[package]] name = "multidict" -version = "6.0.2" +version = "6.0.4" description = "multidict implementation" category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "multidict-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b9e95a740109c6047602f4db4da9949e6c5945cefbad34a1299775ddc9a62e2"}, - {file = "multidict-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac0e27844758d7177989ce406acc6a83c16ed4524ebc363c1f748cba184d89d3"}, - {file = "multidict-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:041b81a5f6b38244b34dc18c7b6aba91f9cdaf854d9a39e5ff0b58e2b5773b9c"}, - {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fdda29a3c7e76a064f2477c9aab1ba96fd94e02e386f1e665bca1807fc5386f"}, - {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3368bf2398b0e0fcbf46d85795adc4c259299fec50c1416d0f77c0a843a3eed9"}, - {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4f052ee022928d34fe1f4d2bc743f32609fb79ed9c49a1710a5ad6b2198db20"}, - {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:225383a6603c086e6cef0f2f05564acb4f4d5f019a4e3e983f572b8530f70c88"}, - {file = "multidict-6.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50bd442726e288e884f7be9071016c15a8742eb689a593a0cac49ea093eef0a7"}, - {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:47e6a7e923e9cada7c139531feac59448f1f47727a79076c0b1ee80274cd8eee"}, - {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0556a1d4ea2d949efe5fd76a09b4a82e3a4a30700553a6725535098d8d9fb672"}, - {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:626fe10ac87851f4cffecee161fc6f8f9853f0f6f1035b59337a51d29ff3b4f9"}, - {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:8064b7c6f0af936a741ea1efd18690bacfbae4078c0c385d7c3f611d11f0cf87"}, - {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2d36e929d7f6a16d4eb11b250719c39560dd70545356365b494249e2186bc389"}, - {file = "multidict-6.0.2-cp310-cp310-win32.whl", hash = "sha256:fcb91630817aa8b9bc4a74023e4198480587269c272c58b3279875ed7235c293"}, - {file = "multidict-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:8cbf0132f3de7cc6c6ce00147cc78e6439ea736cee6bca4f068bcf892b0fd658"}, - {file = "multidict-6.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:05f6949d6169878a03e607a21e3b862eaf8e356590e8bdae4227eedadacf6e51"}, - {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2c2e459f7050aeb7c1b1276763364884595d47000c1cddb51764c0d8976e608"}, - {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0509e469d48940147e1235d994cd849a8f8195e0bca65f8f5439c56e17872a3"}, - {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:514fe2b8d750d6cdb4712346a2c5084a80220821a3e91f3f71eec11cf8d28fd4"}, - {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19adcfc2a7197cdc3987044e3f415168fc5dc1f720c932eb1ef4f71a2067e08b"}, - {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9d153e7f1f9ba0b23ad1568b3b9e17301e23b042c23870f9ee0522dc5cc79e8"}, - {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:aef9cc3d9c7d63d924adac329c33835e0243b5052a6dfcbf7732a921c6e918ba"}, - {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4571f1beddff25f3e925eea34268422622963cd8dc395bb8778eb28418248e43"}, - {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:d48b8ee1d4068561ce8033d2c344cf5232cb29ee1a0206a7b828c79cbc5982b8"}, - {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:45183c96ddf61bf96d2684d9fbaf6f3564d86b34cb125761f9a0ef9e36c1d55b"}, - {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:75bdf08716edde767b09e76829db8c1e5ca9d8bb0a8d4bd94ae1eafe3dac5e15"}, - {file = "multidict-6.0.2-cp37-cp37m-win32.whl", hash = "sha256:a45e1135cb07086833ce969555df39149680e5471c04dfd6a915abd2fc3f6dbc"}, - {file = "multidict-6.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6f3cdef8a247d1eafa649085812f8a310e728bdf3900ff6c434eafb2d443b23a"}, - {file = "multidict-6.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0327292e745a880459ef71be14e709aaea2f783f3537588fb4ed09b6c01bca60"}, - {file = "multidict-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e875b6086e325bab7e680e4316d667fc0e5e174bb5611eb16b3ea121c8951b86"}, - {file = "multidict-6.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feea820722e69451743a3d56ad74948b68bf456984d63c1a92e8347b7b88452d"}, - {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cc57c68cb9139c7cd6fc39f211b02198e69fb90ce4bc4a094cf5fe0d20fd8b0"}, - {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:497988d6b6ec6ed6f87030ec03280b696ca47dbf0648045e4e1d28b80346560d"}, - {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:89171b2c769e03a953d5969b2f272efa931426355b6c0cb508022976a17fd376"}, - {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:684133b1e1fe91eda8fa7447f137c9490a064c6b7f392aa857bba83a28cfb693"}, - {file = "multidict-6.0.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd9fc9c4849a07f3635ccffa895d57abce554b467d611a5009ba4f39b78a8849"}, - {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e07c8e79d6e6fd37b42f3250dba122053fddb319e84b55dd3a8d6446e1a7ee49"}, - {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4070613ea2227da2bfb2c35a6041e4371b0af6b0be57f424fe2318b42a748516"}, - {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:47fbeedbf94bed6547d3aa632075d804867a352d86688c04e606971595460227"}, - {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5774d9218d77befa7b70d836004a768fb9aa4fdb53c97498f4d8d3f67bb9cfa9"}, - {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2957489cba47c2539a8eb7ab32ff49101439ccf78eab724c828c1a54ff3ff98d"}, - {file = "multidict-6.0.2-cp38-cp38-win32.whl", hash = "sha256:e5b20e9599ba74391ca0cfbd7b328fcc20976823ba19bc573983a25b32e92b57"}, - {file = "multidict-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:8004dca28e15b86d1b1372515f32eb6f814bdf6f00952699bdeb541691091f96"}, - {file = "multidict-6.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2e4a0785b84fb59e43c18a015ffc575ba93f7d1dbd272b4cdad9f5134b8a006c"}, - {file = "multidict-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6701bf8a5d03a43375909ac91b6980aea74b0f5402fbe9428fc3f6edf5d9677e"}, - {file = "multidict-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a007b1638e148c3cfb6bf0bdc4f82776cef0ac487191d093cdc316905e504071"}, - {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07a017cfa00c9890011628eab2503bee5872f27144936a52eaab449be5eaf032"}, - {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c207fff63adcdf5a485969131dc70e4b194327666b7e8a87a97fbc4fd80a53b2"}, - {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:373ba9d1d061c76462d74e7de1c0c8e267e9791ee8cfefcf6b0b2495762c370c"}, - {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfba7c6d5d7c9099ba21f84662b037a0ffd4a5e6b26ac07d19e423e6fdf965a9"}, - {file = "multidict-6.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19d9bad105dfb34eb539c97b132057a4e709919ec4dd883ece5838bcbf262b80"}, - {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:de989b195c3d636ba000ee4281cd03bb1234635b124bf4cd89eeee9ca8fcb09d"}, - {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7c40b7bbece294ae3a87c1bc2abff0ff9beef41d14188cda94ada7bcea99b0fb"}, - {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:d16cce709ebfadc91278a1c005e3c17dd5f71f5098bfae1035149785ea6e9c68"}, - {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:a2c34a93e1d2aa35fbf1485e5010337c72c6791407d03aa5f4eed920343dd360"}, - {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:feba80698173761cddd814fa22e88b0661e98cb810f9f986c54aa34d281e4937"}, - {file = "multidict-6.0.2-cp39-cp39-win32.whl", hash = 
"sha256:23b616fdc3c74c9fe01d76ce0d1ce872d2d396d8fa8e4899398ad64fb5aa214a"}, - {file = "multidict-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:4bae31803d708f6f15fd98be6a6ac0b6958fcf68fda3c77a048a4f9073704aae"}, - {file = "multidict-6.0.2.tar.gz", hash = "sha256:5ff3bd75f38e4c43f1f470f2df7a4d430b821c4ce22be384e1459cb57d6bb013"}, -] [[package]] name = "nodeenv" @@ -1167,13 +524,6 @@ description = "Node.js virtual environment builder" category = "dev" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" -files = [ - {file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"}, - {file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"}, -] - -[package.dependencies] -setuptools = "*" [[package]] name = "ordered-set" @@ -1182,28 +532,17 @@ description = "An OrderedSet is a custom MutableSet that remembers its order, so category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "ordered-set-4.1.0.tar.gz", hash = "sha256:694a8e44c87657c59292ede72891eb91d34131f6531463aab3009191c77364a8"}, - {file = "ordered_set-4.1.0-py3-none-any.whl", hash = "sha256:046e1132c71fcf3330438a539928932caf51ddbc582496833e23de611de14562"}, -] [package.extras] -dev = ["black", "mypy", "pytest"] +dev = ["pytest", "black", "mypy"] [[package]] name = "packaging" -version = "21.3" +version = "23.0" description = "Core utilities for Python packages" category = "main" optional = false -python-versions = ">=3.6" -files = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, -] - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" +python-versions = ">=3.7" [[package]] name = "pep8-naming" @@ -1212,10 +551,6 @@ description = "Check PEP-8 naming conventions, plugin for flake8" category = "dev" optional = false python-versions = ">=3.7" -files = [ - {file = "pep8-naming-0.13.2.tar.gz", hash = "sha256:93eef62f525fd12a6f8c98f4dcc17fa70baae2f37fa1f73bec00e3e44392fa48"}, - {file = "pep8_naming-0.13.2-py3-none-any.whl", hash = "sha256:59e29e55c478db69cffbe14ab24b5bd2cd615c0413edf790d47d3fb7ba9a4e23"}, -] [package.dependencies] flake8 = ">=3.9.1" @@ -1227,10 +562,6 @@ description = "Dump the software license list of Python packages installed with category = "dev" optional = false python-versions = "~=3.8" -files = [ - {file = "pip-licenses-4.0.1.tar.gz", hash = "sha256:05a180f5610b262e2d56eea99f04e380db7080e79655abf1c916125f39fe207d"}, - {file = "pip_licenses-4.0.1-py3-none-any.whl", hash = "sha256:5896c18b7897e38fdd7be9a9ea0de02d6ff3264b7411967d6b679019ddc31878"}, -] [package.dependencies] prettytable = ">=2.3.0" @@ -1240,19 +571,15 @@ test = ["docutils", "pytest-cov", "pytest-pycodestyle", "pytest-runner"] [[package]] name = "platformdirs" -version = "2.5.4" +version = "3.0.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
category = "dev" optional = false python-versions = ">=3.7" -files = [ - {file = "platformdirs-2.5.4-py3-none-any.whl", hash = "sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10"}, - {file = "platformdirs-2.5.4.tar.gz", hash = "sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7"}, -] [package.extras] -docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"] -test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)", "sphinx (>=6.1.3)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest (>=7.2.1)"] [[package]] name = "pluggy" @@ -1261,10 +588,6 @@ description = "plugin and hook calling mechanisms for python" category = "dev" optional = false python-versions = ">=3.6" -files = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, -] [package.extras] dev = ["pre-commit", "tox"] @@ -1277,10 +600,6 @@ description = "A framework for managing and maintaining multi-language pre-commi category = "dev" optional = false python-versions = ">=3.7" -files = [ - {file = "pre_commit-2.20.0-py2.py3-none-any.whl", hash = "sha256:51a5ba7c480ae8072ecdb6933df22d2f812dc897d5fe848778116129a681aac7"}, - {file = "pre_commit-2.20.0.tar.gz", hash = "sha256:a978dac7bc9ec0bcee55c18a277d553b0f419d259dadb4b9418ff2d00eb43959"}, -] [package.dependencies] cfgv = ">=2.0.0" @@ -1292,15 +611,11 @@ virtualenv = ">=20.0.8" [[package]] name = "prettytable" -version = "3.5.0" +version = "3.6.0" description = "A simple Python library for easily displaying tabular data in a visually appealing ASCII table format" category = "dev" optional = false python-versions = ">=3.7" -files = [ - {file = "prettytable-3.5.0-py3-none-any.whl", hash = "sha256:fe391c3b545800028edf5dbb6a5360893feb398367fcc1cf8d7a5b29ce5c59a1"}, - {file = "prettytable-3.5.0.tar.gz", hash = "sha256:52f682ba4efe29dccb38ff0fe5bac8a23007d0780ff92a8b85af64bc4fc74d72"}, -] [package.dependencies] wcwidth = "*" @@ -1315,84 +630,17 @@ description = "Cross-platform lib for process and system monitoring in Python." 
category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "psutil-5.9.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8"}, - {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:68908971daf802203f3d37e78d3f8831b6d1014864d7a85937941bb35f09aefe"}, - {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ff89f9b835100a825b14c2808a106b6fdcc4b15483141482a12c725e7f78549"}, - {file = "psutil-5.9.4-cp27-cp27m-win32.whl", hash = "sha256:852dd5d9f8a47169fe62fd4a971aa07859476c2ba22c2254d4a1baa4e10b95ad"}, - {file = "psutil-5.9.4-cp27-cp27m-win_amd64.whl", hash = "sha256:9120cd39dca5c5e1c54b59a41d205023d436799b1c8c4d3ff71af18535728e94"}, - {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6b92c532979bafc2df23ddc785ed116fced1f492ad90a6830cf24f4d1ea27d24"}, - {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:efeae04f9516907be44904cc7ce08defb6b665128992a56957abc9b61dca94b7"}, - {file = "psutil-5.9.4-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:54d5b184728298f2ca8567bf83c422b706200bcbbfafdc06718264f9393cfeb7"}, - {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16653106f3b59386ffe10e0bad3bb6299e169d5327d3f187614b1cb8f24cf2e1"}, - {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54c0d3d8e0078b7666984e11b12b88af2db11d11249a8ac8920dd5ef68a66e08"}, - {file = "psutil-5.9.4-cp36-abi3-win32.whl", hash = "sha256:149555f59a69b33f056ba1c4eb22bb7bf24332ce631c44a319cec09f876aaeff"}, - {file = "psutil-5.9.4-cp36-abi3-win_amd64.whl", hash = "sha256:fd8522436a6ada7b4aad6638662966de0d61d241cb821239b2ae7013d41a43d4"}, - {file = "psutil-5.9.4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6001c809253a29599bc0dfd5179d9f8a5779f9dffea1da0f13c53ee568115e1e"}, - {file = "psutil-5.9.4.tar.gz", hash = "sha256:3d7f9739eb435d4b1338944abe23f49584bde5395f27487d2ee25ad9a8774a62"}, -] [package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] +test = ["ipaddress", "mock", "enum34", "pywin32", "wmi"] [[package]] name = "pycares" -version = "4.2.2" +version = "4.3.0" description = "Python interface for c-ares" category = "main" optional = false python-versions = "*" -files = [ - {file = "pycares-4.2.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5dc6418e87729105d93162155793002b3fa95490e2f2df33afec08b0b0d44989"}, - {file = "pycares-4.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9481ee42df7e34c9ef7b2f045e534062b980b2c971677868df9f17730b147ceb"}, - {file = "pycares-4.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05e029e594c27a0066cdb89dfc5bba28ba94e2b27b0ca7aceb94f9aea06812cd"}, - {file = "pycares-4.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0eb203ceedcf7f9865ed3abb6128dfbb3498c5e76342e3c820c4274cc0c8e873"}, - {file = "pycares-4.2.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4a01ba75e8a2947fc0b954850f8db9d52166634a206056febef2f833c8cfa1e"}, - {file = "pycares-4.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:064543e222e3587a92bccae704fcc5f4ce1ba1ce66aac96483c9cf504d554a67"}, - {file = "pycares-4.2.2-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:a5a28f1d041aa2102bd2512e7361671e4ef46bc927e95b6932ed95cc45273480"}, - {file = "pycares-4.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:650b16f025bd3dad6a331e63bb8c9d55994c1b5d0d289ebe03c0bc16edad114f"}, - {file = "pycares-4.2.2-cp310-cp310-win32.whl", hash = "sha256:f8b76c13275b319b850e28bb9b3f5815de7521b1e0a581453d1acf10011bafef"}, - {file = "pycares-4.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:bcfcafbb376376c9cca6d37a8497dfd6dbd82333bf37627067b34dcaf5039612"}, - {file = "pycares-4.2.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ae5accd693c6910bbd1a99d1f4551a9e99decd65d792a80f10c27b8fcc32b497"}, - {file = "pycares-4.2.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f1901b309cb5cf7ade5822d74b904f55c49369e4ff9328818e554d4c34b4714"}, - {file = "pycares-4.2.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bc61edb98aff9cb4b2e07c25383100b81459a676ca0b0bd5fe77226eb1f850e"}, - {file = "pycares-4.2.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:241155687db7b45cb4ef84a18755ebc78c3ad624fd2578b48ea52ac16a4c8d9f"}, - {file = "pycares-4.2.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:27a21184ba35fff12eec36375d5b064516a0c3401dbf66a7eded7da34c5ca282"}, - {file = "pycares-4.2.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:8a376e637ecd79db62761ca40cda080b9383a07d6dedbc799dd1a31e053862d9"}, - {file = "pycares-4.2.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:6c411610be8de17cd5257845ebba5104b8e6356c62e66768728985a2ac0e9d1c"}, - {file = "pycares-4.2.2-cp36-cp36m-win32.whl", hash = "sha256:6a5af6443a1cefb36ddca47af37e29cae94a734c6c7cea3eb94e5de5cc2a4f1a"}, - {file = "pycares-4.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:a01ab41405dd4dd8449f9323b2dac25e1d856ef02d85c8aedea0130b65275b2a"}, - {file = "pycares-4.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9a2053b34163d13d6d135248c65e71cefce3f25b3611677a1428ec7a57bae856"}, - {file = "pycares-4.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8064eaae5084e5155008b8f9d089055a432ff2115960273fc570f55dccedf666"}, - {file = "pycares-4.2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc045040c094068d5de28e61a6fd0babe8522e8f61829839b893f7aff928173b"}, - {file = "pycares-4.2.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:135a356d52773f02d7babd2b38ad64493418363274972cc786fdce847875ca03"}, - {file = "pycares-4.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:512fb2c04c28e0e5a7de0b61624ab9c15d2df52db113f63a0aba6c6f1174b92f"}, - {file = "pycares-4.2.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0eb374525c6231920509612f197ca47bdaa6ec9a0728aa199ba536dc0c25bb55"}, - {file = "pycares-4.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:47c6e18bbe6f2f4ce42fbdfa4ab2602268590f76110f06af60d02f964b72fada"}, - {file = "pycares-4.2.2-cp37-cp37m-win32.whl", hash = "sha256:a2c7fb5d3cb633e3f23344194da9b5caa54eb40da34dbe4465f0ebcede2e1e1a"}, - {file = "pycares-4.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:90f374fae2af5eb728841b4c2a0c8038a6889ba2a5a421e4c4e4e0f15bcc5912"}, - {file = "pycares-4.2.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c0a7e0f9371c47cf028e2389f11385e906ba2797900419509adfa86587a2ac"}, - {file = "pycares-4.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0fb3944af9492cfde6e1167780c9b8a701a56cf7d3fb29086cfb906b8261648f"}, - 
{file = "pycares-4.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7466315e76ab0ca4dc1354f3d7cb53f6d99d365b3778d9849e52643270beb6f2"}, - {file = "pycares-4.2.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46f58398bd9fa99cc2dd79f7fecddc85837ccb452d673168037ea603b15aa11b"}, - {file = "pycares-4.2.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47eae9809826cea5c0eb08eec9da584dd6330e51c075c2f6963ca2067555cd07"}, - {file = "pycares-4.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6cbd4df536d2c32d2d74b854db25f1d15cc61cdd182b03206afbd7ccbe7b8f11"}, - {file = "pycares-4.2.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3e4519bc51b744331c968eef0bd0071ca9c3e5863b8b8c1d99540ab8bfb04235"}, - {file = "pycares-4.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5e2af8ca3bc49894a87d2b5629b993c22b0e602ecb7fd2fad660ebb9be584829"}, - {file = "pycares-4.2.2-cp38-cp38-win32.whl", hash = "sha256:f6b5360e2278fae1e79479a4b56198fc7faf46ab350da18756c4de789835dbcd"}, - {file = "pycares-4.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:4304e5f0c10281abcee3c2547140a6b280c70866f2828956c9bcb2de6cffa211"}, - {file = "pycares-4.2.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9155e95cbe26b4b57ca691e9d8bfb5a002c7ce14ac02ddfcfe7849d4d349badb"}, - {file = "pycares-4.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:612a20514685a3d999dd0a99eede9da851be11171d599b211fac287eee452ff1"}, - {file = "pycares-4.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:075d4bdde10590a2d0456eab20028aab997207e45469d30dd01a4a65caa7f8da"}, - {file = "pycares-4.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f6eebdf34477c9bfb00497f8e58a674fd22b348bd928d19d29c84e8923554e1"}, - {file = "pycares-4.2.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:55d39f2c38d1285d1ae248b9d2d965b161dcf51a4b6eacf97ff056da6f09dd30"}, - {file = "pycares-4.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:64261640fd52910e7960f30888abeca4e6a7a91371d351ccebc70ac1625ca74e"}, - {file = "pycares-4.2.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:72184b1510866c9bc97a6daca7d8218a6954c4a78640197f0840e604ba1182f9"}, - {file = "pycares-4.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:02fdf5ce48b21da6eafc5cb4508d344a0d48ac1a31e8df178f7c2fb548fcbc14"}, - {file = "pycares-4.2.2-cp39-cp39-win32.whl", hash = "sha256:fe8e0f8ed7fd795868bfc2211e345963174a9f4d1e2125753e1715a60441c8a0"}, - {file = "pycares-4.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:bb09c084de909206e6db1f014d4c6d662c7df04194df31f4831088d426afe8f1"}, - {file = "pycares-4.2.2.tar.gz", hash = "sha256:e1f57a8004370080694bd6fb969a1ffc9171a59c6824d54f791c1b2e4d298385"}, -] [package.dependencies] cffi = ">=1.5.0" @@ -1407,10 +655,6 @@ description = "Python style guide checker" category = "dev" optional = false python-versions = ">=3.6" -files = [ - {file = "pycodestyle-2.10.0-py2.py3-none-any.whl", hash = "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"}, - {file = "pycodestyle-2.10.0.tar.gz", hash = "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053"}, -] [[package]] name = "pycparser" @@ -1419,10 +663,6 @@ description = "C parser in Python" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", 
hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, -] [[package]] name = "pydantic" @@ -1431,44 +671,6 @@ description = "Data validation and settings management using python type hints" category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "pydantic-1.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb6ad4489af1bac6955d38ebcb95079a836af31e4c4f74aba1ca05bb9f6027bd"}, - {file = "pydantic-1.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1f5a63a6dfe19d719b1b6e6106561869d2efaca6167f84f5ab9347887d78b98"}, - {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:352aedb1d71b8b0736c6d56ad2bd34c6982720644b0624462059ab29bd6e5912"}, - {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19b3b9ccf97af2b7519c42032441a891a5e05c68368f40865a90eb88833c2559"}, - {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9069e1b01525a96e6ff49e25876d90d5a563bc31c658289a8772ae186552236"}, - {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:355639d9afc76bcb9b0c3000ddcd08472ae75318a6eb67a15866b87e2efa168c"}, - {file = "pydantic-1.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:ae544c47bec47a86bc7d350f965d8b15540e27e5aa4f55170ac6a75e5f73b644"}, - {file = "pydantic-1.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a4c805731c33a8db4b6ace45ce440c4ef5336e712508b4d9e1aafa617dc9907f"}, - {file = "pydantic-1.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d49f3db871575e0426b12e2f32fdb25e579dea16486a26e5a0474af87cb1ab0a"}, - {file = "pydantic-1.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37c90345ec7dd2f1bcef82ce49b6235b40f282b94d3eec47e801baf864d15525"}, - {file = "pydantic-1.10.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b5ba54d026c2bd2cb769d3468885f23f43710f651688e91f5fb1edcf0ee9283"}, - {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05e00dbebbe810b33c7a7362f231893183bcc4251f3f2ff991c31d5c08240c42"}, - {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2d0567e60eb01bccda3a4df01df677adf6b437958d35c12a3ac3e0f078b0ee52"}, - {file = "pydantic-1.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:c6f981882aea41e021f72779ce2a4e87267458cc4d39ea990729e21ef18f0f8c"}, - {file = "pydantic-1.10.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4aac8e7103bf598373208f6299fa9a5cfd1fc571f2d40bf1dd1955a63d6eeb5"}, - {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a7b66c3f499108b448f3f004801fcd7d7165fb4200acb03f1c2402da73ce4c"}, - {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bedf309630209e78582ffacda64a21f96f3ed2e51fbf3962d4d488e503420254"}, - {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9300fcbebf85f6339a02c6994b2eb3ff1b9c8c14f502058b5bf349d42447dcf5"}, - {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:216f3bcbf19c726b1cc22b099dd409aa371f55c08800bcea4c44c8f74b73478d"}, - {file = "pydantic-1.10.2-cp37-cp37m-win_amd64.whl", hash = 
"sha256:dd3f9a40c16daf323cf913593083698caee97df2804aa36c4b3175d5ac1b92a2"}, - {file = "pydantic-1.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b97890e56a694486f772d36efd2ba31612739bc6f3caeee50e9e7e3ebd2fdd13"}, - {file = "pydantic-1.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9cabf4a7f05a776e7793e72793cd92cc865ea0e83a819f9ae4ecccb1b8aa6116"}, - {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06094d18dd5e6f2bbf93efa54991c3240964bb663b87729ac340eb5014310624"}, - {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc78cc83110d2f275ec1970e7a831f4e371ee92405332ebfe9860a715f8336e1"}, - {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ee433e274268a4b0c8fde7ad9d58ecba12b069a033ecc4645bb6303c062d2e9"}, - {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7c2abc4393dea97a4ccbb4ec7d8658d4e22c4765b7b9b9445588f16c71ad9965"}, - {file = "pydantic-1.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:0b959f4d8211fc964772b595ebb25f7652da3f22322c007b6fed26846a40685e"}, - {file = "pydantic-1.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c33602f93bfb67779f9c507e4d69451664524389546bacfe1bee13cae6dc7488"}, - {file = "pydantic-1.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5760e164b807a48a8f25f8aa1a6d857e6ce62e7ec83ea5d5c5a802eac81bad41"}, - {file = "pydantic-1.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6eb843dcc411b6a2237a694f5e1d649fc66c6064d02b204a7e9d194dff81eb4b"}, - {file = "pydantic-1.10.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b8795290deaae348c4eba0cebb196e1c6b98bdbe7f50b2d0d9a4a99716342fe"}, - {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e0bedafe4bc165ad0a56ac0bd7695df25c50f76961da29c050712596cf092d6d"}, - {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e05aed07fa02231dbf03d0adb1be1d79cabb09025dd45aa094aa8b4e7b9dcda"}, - {file = "pydantic-1.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:c1ba1afb396148bbc70e9eaa8c06c1716fdddabaf86e7027c5988bae2a829ab6"}, - {file = "pydantic-1.10.2-py3-none-any.whl", hash = "sha256:1b6ee725bd6e83ec78b1aa32c5b1fa67a3a65badddde3976bca5fe4568f27709"}, - {file = "pydantic-1.10.2.tar.gz", hash = "sha256:91b8e218852ef6007c2b98cd861601c6a09f1aa32bbbb74fab5b1c33d4a1e410"}, -] [package.dependencies] typing-extensions = ">=4.1.0" @@ -1484,10 +686,6 @@ description = "PyDis core provides core functionality and utility to the bots of category = "main" optional = false python-versions = ">=3.10.0,<3.12.0" -files = [ - {file = "pydis_core-9.5.0-py3-none-any.whl", hash = "sha256:35834274a80b86a5426f27cb546b3fada8a5711bbf01bbcf1b0a8860a2afee94"}, - {file = "pydis_core-9.5.0.tar.gz", hash = "sha256:1cf9c223af9b5377e08cc0eb046a12130518e4743afbdd4f052d5556b7dae805"}, -] [package.dependencies] aiodns = "3.0.0" @@ -1500,21 +698,17 @@ async-rediscache = ["async-rediscache[fakeredis] (==1.0.0rc2)"] [[package]] name = "pydocstyle" -version = "6.1.1" +version = "6.3.0" description = "Python docstring style checker" category = "dev" optional = false python-versions = ">=3.6" -files = [ - {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, - {file = "pydocstyle-6.1.1.tar.gz", hash = 
"sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"}, -] [package.dependencies] -snowballstemmer = "*" +snowballstemmer = ">=2.2.0" [package.extras] -toml = ["toml"] +toml = ["tomli (>=1.2.3)"] [[package]] name = "pyflakes" @@ -1523,25 +717,6 @@ description = "passive checker of Python programs" category = "dev" optional = false python-versions = ">=3.6" -files = [ - {file = "pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"}, - {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"}, -] - -[[package]] -name = "pyparsing" -version = "3.0.9" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "main" -optional = false -python-versions = ">=3.6.8" -files = [ - {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, - {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, -] - -[package.extras] -diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pyreadline3" @@ -1550,10 +725,6 @@ description = "A python implementation of GNU readline." category = "main" optional = false python-versions = "*" -files = [ - {file = "pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb"}, - {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"}, -] [[package]] name = "pytest" @@ -1562,10 +733,6 @@ description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.7" -files = [ - {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, - {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, -] [package.dependencies] attrs = ">=19.2.0" @@ -1586,17 +753,13 @@ description = "Pytest plugin for measuring coverage." 
category = "dev" optional = false python-versions = ">=3.6" -files = [ - {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, - {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"}, -] [package.dependencies] coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" [package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] +testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"] [[package]] name = "pytest-subtests" @@ -1605,10 +768,6 @@ description = "unittest subTest() support and subtests fixture" category = "dev" optional = false python-versions = ">=3.7" -files = [ - {file = "pytest-subtests-0.9.0.tar.gz", hash = "sha256:c0317cd5f6a5eb3e957e89dbe4fc3322a9afddba2db8414355ed2a2cb91a844e"}, - {file = "pytest_subtests-0.9.0-py3-none-any.whl", hash = "sha256:f5f616b92c13405909d210569d6d3914db6fe156333ff5426534f97d5b447861"}, -] [package.dependencies] pytest = ">=7.0" @@ -1620,10 +779,6 @@ description = "pytest xdist plugin for distributed testing and loop-on-failing m category = "dev" optional = false python-versions = ">=3.6" -files = [ - {file = "pytest-xdist-3.0.2.tar.gz", hash = "sha256:688da9b814370e891ba5de650c9327d1a9d861721a524eb917e620eec3e90291"}, - {file = "pytest_xdist-3.0.2-py3-none-any.whl", hash = "sha256:9feb9a18e1790696ea23e1434fa73b325ed4998b0e9fcb221f16fd1945e6df1b"}, -] [package.dependencies] execnet = ">=1.1" @@ -1641,10 +796,6 @@ description = "Extensions to the standard Python datetime module" category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, -] [package.dependencies] six = ">=1.5" @@ -1656,10 +807,6 @@ description = "Read key-value pairs from a .env file and set them as environment category = "dev" optional = false python-versions = ">=3.7" -files = [ - {file = "python-dotenv-0.21.0.tar.gz", hash = "sha256:b77d08274639e3d34145dfa6c7008e66df0f04b7be7a75fd0d5292c191d79045"}, - {file = "python_dotenv-0.21.0-py3-none-any.whl", hash = "sha256:1684eb44636dd462b66c3ee016599815514527ad99965de77f43e0944634a7e5"}, -] [package.extras] cli = ["click (>=5.0)"] @@ -1671,17 +818,13 @@ description = "Parse and manage posts with YAML (or other) frontmatter" category = "main" optional = false python-versions = "*" -files = [ - {file = "python-frontmatter-1.0.0.tar.gz", hash = "sha256:e98152e977225ddafea6f01f40b4b0f1de175766322004c826ca99842d19a7cd"}, - {file = "python_frontmatter-1.0.0-py3-none-any.whl", hash = "sha256:766ae75f1b301ffc5fe3494339147e0fd80bc3deff3d7590a93991978b579b08"}, -] [package.dependencies] PyYAML = "*" [package.extras] docs = ["sphinx"] -test = ["pyaml", "pytest", "toml"] +test = ["pytest", "toml", "pyaml"] [[package]] name = "pyyaml" @@ -1690,7 +833,977 @@ description = "YAML parser and emitter for Python" category = "main" optional = false python-versions = ">=3.6" -files = [ + +[[package]] +name = "rapidfuzz" +version = "2.13.2" +description = "rapid fuzzy string matching" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +full = ["numpy"] + +[[package]] +name = "redis" +version = "4.3.5" +description = 
"Python client for Redis database and key-value store" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +async-timeout = ">=4.0.2" +packaging = ">=20.4" + +[package.extras] +hiredis = ["hiredis (>=1.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] + +[[package]] +name = "regex" +version = "2022.10.31" +description = "Alternative regular expression module, to replace re." +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "requests" +version = "2.28.2" +description = "Python HTTP for Humans." +category = "main" +optional = false +python-versions = ">=3.7, <4" + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-file" +version = "1.5.1" +description = "File transport adapter for Requests" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +requests = ">=1.0.0" +six = "*" + +[[package]] +name = "sentry-sdk" +version = "1.11.1" +description = "Python client for Sentry (https://sentry.io)" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +certifi = "*" +urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} + +[package.extras] +aiohttp = ["aiohttp (>=3.5)"] +beam = ["apache-beam (>=2.12)"] +bottle = ["bottle (>=0.12.13)"] +celery = ["celery (>=3)"] +chalice = ["chalice (>=1.16.0)"] +django = ["django (>=1.8)"] +falcon = ["falcon (>=1.4)"] +fastapi = ["fastapi (>=0.79.0)"] +flask = ["flask (>=0.11)", "blinker (>=1.1)"] +httpx = ["httpx (>=0.16.0)"] +pure_eval = ["pure-eval", "executing", "asttokens"] +pymongo = ["pymongo (>=3.1)"] +pyspark = ["pyspark (>=2.4.4)"] +quart = ["quart (>=0.16.1)", "blinker (>=1.1)"] +rq = ["rq (>=0.6)"] +sanic = ["sanic (>=0.8)"] +sqlalchemy = ["sqlalchemy (>=1.2)"] +starlette = ["starlette (>=0.19.1)"] +tornado = ["tornado (>=5)"] + +[[package]] +name = "sgmllib3k" +version = "1.0.0" +description = "Py3k port of sgmllib." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "sortedcontainers" +version = "2.4.0" +description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "soupsieve" +version = "2.4" +description = "A modern CSS selector implementation for Beautiful Soup." +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "statsd" +version = "4.0.1" +description = "A simple statsd client." 
+category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "taskipy" +version = "1.10.3" +description = "tasks runner for python projects" +category = "dev" +optional = false +python-versions = ">=3.6,<4.0" + +[package.dependencies] +colorama = ">=0.4.4,<0.5.0" +mslex = {version = ">=0.3.0,<0.4.0", markers = "sys_platform == \"win32\""} +psutil = ">=5.7.2,<6.0.0" +tomli = {version = ">=2.0.1,<3.0.0", markers = "python_version >= \"3.7\" and python_version < \"4.0\""} + +[[package]] +name = "tldextract" +version = "3.4.0" +description = "Accurately separates a URL's subdomain, domain, and public suffix, using the Public Suffix List (PSL). By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +filelock = ">=3.0.8" +idna = "*" +requests = ">=2.1.0" +requests-file = ">=1.4" + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "dev" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "typing-extensions" +version = "4.5.0" +description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "urllib3" +version = "1.26.14" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" + +[package.extras] +brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "virtualenv" +version = "20.20.0" +description = "Virtual Python Environment builder" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +distlib = ">=0.3.6,<1" +filelock = ">=3.4.1,<4" +platformdirs = ">=2.4,<4" + +[package.extras] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx-argparse (>=0.4)", "sphinx (>=6.1.3)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] +test = ["covdefaults (>=2.2.2)", "coverage-enable-subprocess (>=1)", "coverage (>=7.1)", "flaky (>=3.7)", "packaging (>=23)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "pytest (>=7.2.1)"] + +[[package]] +name = "wcwidth" +version = "0.2.6" +description = "Measures the displayed width of unicode strings in a terminal" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "yarl" +version = "1.8.2" +description = "Yet another URL library" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[metadata] +lock-version = "1.1" +python-versions = "3.10.*" +content-hash = "7e51e1e6e1b060e332aaec2c14bdefc6dcb572320daafccd46881c227c58f519" + +[metadata.files] +aiodns = [ + {file = "aiodns-3.0.0-py3-none-any.whl", hash = "sha256:2b19bc5f97e5c936638d28e665923c093d8af2bf3aa88d35c43417fa25d136a2"}, + {file = "aiodns-3.0.0.tar.gz", hash 
= "sha256:946bdfabe743fceeeb093c8a010f5d1645f708a241be849e17edfb0e49e08cd6"}, +] +aiohttp = [ + {file = "aiohttp-3.8.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ba71c9b4dcbb16212f334126cc3d8beb6af377f6703d9dc2d9fb3874fd667ee9"}, + {file = "aiohttp-3.8.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d24b8bb40d5c61ef2d9b6a8f4528c2f17f1c5d2d31fed62ec860f6006142e83e"}, + {file = "aiohttp-3.8.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f88df3a83cf9df566f171adba39d5bd52814ac0b94778d2448652fc77f9eb491"}, + {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b97decbb3372d4b69e4d4c8117f44632551c692bb1361b356a02b97b69e18a62"}, + {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:309aa21c1d54b8ef0723181d430347d7452daaff93e8e2363db8e75c72c2fb2d"}, + {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad5383a67514e8e76906a06741febd9126fc7c7ff0f599d6fcce3e82b80d026f"}, + {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20acae4f268317bb975671e375493dbdbc67cddb5f6c71eebdb85b34444ac46b"}, + {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05a3c31c6d7cd08c149e50dc7aa2568317f5844acd745621983380597f027a18"}, + {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d6f76310355e9fae637c3162936e9504b4767d5c52ca268331e2756e54fd4ca5"}, + {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:256deb4b29fe5e47893fa32e1de2d73c3afe7407738bd3c63829874661d4822d"}, + {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5c59fcd80b9049b49acd29bd3598cada4afc8d8d69bd4160cd613246912535d7"}, + {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:059a91e88f2c00fe40aed9031b3606c3f311414f86a90d696dd982e7aec48142"}, + {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2feebbb6074cdbd1ac276dbd737b40e890a1361b3cc30b74ac2f5e24aab41f7b"}, + {file = "aiohttp-3.8.3-cp310-cp310-win32.whl", hash = "sha256:5bf651afd22d5f0c4be16cf39d0482ea494f5c88f03e75e5fef3a85177fecdeb"}, + {file = "aiohttp-3.8.3-cp310-cp310-win_amd64.whl", hash = "sha256:653acc3880459f82a65e27bd6526e47ddf19e643457d36a2250b85b41a564715"}, + {file = "aiohttp-3.8.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:86fc24e58ecb32aee09f864cb11bb91bc4c1086615001647dbfc4dc8c32f4008"}, + {file = "aiohttp-3.8.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75e14eac916f024305db517e00a9252714fce0abcb10ad327fb6dcdc0d060f1d"}, + {file = "aiohttp-3.8.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d1fde0f44029e02d02d3993ad55ce93ead9bb9b15c6b7ccd580f90bd7e3de476"}, + {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ab94426ddb1ecc6a0b601d832d5d9d421820989b8caa929114811369673235c"}, + {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89d2e02167fa95172c017732ed7725bc8523c598757f08d13c5acca308e1a061"}, + {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:02f9a2c72fc95d59b881cf38a4b2be9381b9527f9d328771e90f72ac76f31ad8"}, + {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c7149272fb5834fc186328e2c1fa01dda3e1fa940ce18fded6d412e8f2cf76d"}, + {file = 
"aiohttp-3.8.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:512bd5ab136b8dc0ffe3fdf2dfb0c4b4f49c8577f6cae55dca862cd37a4564e2"}, + {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7018ecc5fe97027214556afbc7c502fbd718d0740e87eb1217b17efd05b3d276"}, + {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:88c70ed9da9963d5496d38320160e8eb7e5f1886f9290475a881db12f351ab5d"}, + {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:da22885266bbfb3f78218dc40205fed2671909fbd0720aedba39b4515c038091"}, + {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:e65bc19919c910127c06759a63747ebe14f386cda573d95bcc62b427ca1afc73"}, + {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:08c78317e950e0762c2983f4dd58dc5e6c9ff75c8a0efeae299d363d439c8e34"}, + {file = "aiohttp-3.8.3-cp311-cp311-win32.whl", hash = "sha256:45d88b016c849d74ebc6f2b6e8bc17cabf26e7e40c0661ddd8fae4c00f015697"}, + {file = "aiohttp-3.8.3-cp311-cp311-win_amd64.whl", hash = "sha256:96372fc29471646b9b106ee918c8eeb4cca423fcbf9a34daa1b93767a88a2290"}, + {file = "aiohttp-3.8.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c971bf3786b5fad82ce5ad570dc6ee420f5b12527157929e830f51c55dc8af77"}, + {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff25f48fc8e623d95eca0670b8cc1469a83783c924a602e0fbd47363bb54aaca"}, + {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e381581b37db1db7597b62a2e6b8b57c3deec95d93b6d6407c5b61ddc98aca6d"}, + {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:db19d60d846283ee275d0416e2a23493f4e6b6028825b51290ac05afc87a6f97"}, + {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25892c92bee6d9449ffac82c2fe257f3a6f297792cdb18ad784737d61e7a9a85"}, + {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:398701865e7a9565d49189f6c90868efaca21be65c725fc87fc305906be915da"}, + {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:4a4fbc769ea9b6bd97f4ad0b430a6807f92f0e5eb020f1e42ece59f3ecfc4585"}, + {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:b29bfd650ed8e148f9c515474a6ef0ba1090b7a8faeee26b74a8ff3b33617502"}, + {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:1e56b9cafcd6531bab5d9b2e890bb4937f4165109fe98e2b98ef0dcfcb06ee9d"}, + {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ec40170327d4a404b0d91855d41bfe1fe4b699222b2b93e3d833a27330a87a6d"}, + {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:2df5f139233060578d8c2c975128fb231a89ca0a462b35d4b5fcf7c501ebdbe1"}, + {file = "aiohttp-3.8.3-cp36-cp36m-win32.whl", hash = "sha256:f973157ffeab5459eefe7b97a804987876dd0a55570b8fa56b4e1954bf11329b"}, + {file = "aiohttp-3.8.3-cp36-cp36m-win_amd64.whl", hash = "sha256:437399385f2abcd634865705bdc180c8314124b98299d54fe1d4c8990f2f9494"}, + {file = "aiohttp-3.8.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:09e28f572b21642128ef31f4e8372adb6888846f32fecb288c8b0457597ba61a"}, + {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f3553510abdbec67c043ca85727396ceed1272eef029b050677046d3387be8d"}, + {file = 
"aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e168a7560b7c61342ae0412997b069753f27ac4862ec7867eff74f0fe4ea2ad9"}, + {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:db4c979b0b3e0fa7e9e69ecd11b2b3174c6963cebadeecfb7ad24532ffcdd11a"}, + {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e164e0a98e92d06da343d17d4e9c4da4654f4a4588a20d6c73548a29f176abe2"}, + {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8a78079d9a39ca9ca99a8b0ac2fdc0c4d25fc80c8a8a82e5c8211509c523363"}, + {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:21b30885a63c3f4ff5b77a5d6caf008b037cb521a5f33eab445dc566f6d092cc"}, + {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4b0f30372cef3fdc262f33d06e7b411cd59058ce9174ef159ad938c4a34a89da"}, + {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:8135fa153a20d82ffb64f70a1b5c2738684afa197839b34cc3e3c72fa88d302c"}, + {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:ad61a9639792fd790523ba072c0555cd6be5a0baf03a49a5dd8cfcf20d56df48"}, + {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:978b046ca728073070e9abc074b6299ebf3501e8dee5e26efacb13cec2b2dea0"}, + {file = "aiohttp-3.8.3-cp37-cp37m-win32.whl", hash = "sha256:0d2c6d8c6872df4a6ec37d2ede71eff62395b9e337b4e18efd2177de883a5033"}, + {file = "aiohttp-3.8.3-cp37-cp37m-win_amd64.whl", hash = "sha256:21d69797eb951f155026651f7e9362877334508d39c2fc37bd04ff55b2007091"}, + {file = "aiohttp-3.8.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ca9af5f8f5812d475c5259393f52d712f6d5f0d7fdad9acdb1107dd9e3cb7eb"}, + {file = "aiohttp-3.8.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d90043c1882067f1bd26196d5d2db9aa6d268def3293ed5fb317e13c9413ea4"}, + {file = "aiohttp-3.8.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d737fc67b9a970f3234754974531dc9afeea11c70791dcb7db53b0cf81b79784"}, + {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebf909ea0a3fc9596e40d55d8000702a85e27fd578ff41a5500f68f20fd32e6c"}, + {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5835f258ca9f7c455493a57ee707b76d2d9634d84d5d7f62e77be984ea80b849"}, + {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da37dcfbf4b7f45d80ee386a5f81122501ec75672f475da34784196690762f4b"}, + {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87f44875f2804bc0511a69ce44a9595d5944837a62caecc8490bbdb0e18b1342"}, + {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:527b3b87b24844ea7865284aabfab08eb0faf599b385b03c2aa91fc6edd6e4b6"}, + {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d5ba88df9aa5e2f806650fcbeedbe4f6e8736e92fc0e73b0400538fd25a4dd96"}, + {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e7b8813be97cab8cb52b1375f41f8e6804f6507fe4660152e8ca5c48f0436017"}, + {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:2dea10edfa1a54098703cb7acaa665c07b4e7568472a47f4e64e6319d3821ccf"}, + {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:713d22cd9643ba9025d33c4af43943c7a1eb8547729228de18d3e02e278472b6"}, + {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2d252771fc85e0cf8da0b823157962d70639e63cb9b578b1dec9868dd1f4f937"}, + {file = "aiohttp-3.8.3-cp38-cp38-win32.whl", hash = "sha256:66bd5f950344fb2b3dbdd421aaa4e84f4411a1a13fca3aeb2bcbe667f80c9f76"}, + {file = "aiohttp-3.8.3-cp38-cp38-win_amd64.whl", hash = "sha256:84b14f36e85295fe69c6b9789b51a0903b774046d5f7df538176516c3e422446"}, + {file = "aiohttp-3.8.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16c121ba0b1ec2b44b73e3a8a171c4f999b33929cd2397124a8c7fcfc8cd9e06"}, + {file = "aiohttp-3.8.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8d6aaa4e7155afaf994d7924eb290abbe81a6905b303d8cb61310a2aba1c68ba"}, + {file = "aiohttp-3.8.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43046a319664a04b146f81b40e1545d4c8ac7b7dd04c47e40bf09f65f2437346"}, + {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:599418aaaf88a6d02a8c515e656f6faf3d10618d3dd95866eb4436520096c84b"}, + {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92a2964319d359f494f16011e23434f6f8ef0434acd3cf154a6b7bec511e2fb7"}, + {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73a4131962e6d91109bca6536416aa067cf6c4efb871975df734f8d2fd821b37"}, + {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598adde339d2cf7d67beaccda3f2ce7c57b3b412702f29c946708f69cf8222aa"}, + {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:75880ed07be39beff1881d81e4a907cafb802f306efd6d2d15f2b3c69935f6fb"}, + {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0239da9fbafd9ff82fd67c16704a7d1bccf0d107a300e790587ad05547681c8"}, + {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4e3a23ec214e95c9fe85a58470b660efe6534b83e6cbe38b3ed52b053d7cb6ad"}, + {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:47841407cc89a4b80b0c52276f3cc8138bbbfba4b179ee3acbd7d77ae33f7ac4"}, + {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:54d107c89a3ebcd13228278d68f1436d3f33f2dd2af5415e3feaeb1156e1a62c"}, + {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c37c5cce780349d4d51739ae682dec63573847a2a8dcb44381b174c3d9c8d403"}, + {file = "aiohttp-3.8.3-cp39-cp39-win32.whl", hash = "sha256:f178d2aadf0166be4df834c4953da2d7eef24719e8aec9a65289483eeea9d618"}, + {file = "aiohttp-3.8.3-cp39-cp39-win_amd64.whl", hash = "sha256:88e5be56c231981428f4f506c68b6a46fa25c4123a2e86d156c58a8369d31ab7"}, + {file = "aiohttp-3.8.3.tar.gz", hash = "sha256:3828fb41b7203176b82fe5d699e0d845435f2374750a44b480ea6b930f6be269"}, +] +aiosignal = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] +arrow = [ + {file = "arrow-1.2.3-py3-none-any.whl", hash = "sha256:5a49ab92e3b7b71d96cd6bfcc4df14efefc9dfa96ea19045815914a6ab6b1fe2"}, + {file = "arrow-1.2.3.tar.gz", hash = "sha256:3934b30ca1b9f292376d9db15b19446088d12ec58629bc3f0da28fd55fb633a1"}, +] +async-rediscache = [ + {file = "async-rediscache-1.0.0rc2.tar.gz", hash = 
"sha256:65b1f67df0bd92defe37a3e645ea4c868da29eb41bfa493643a3b4ae7c0e109c"}, + {file = "async_rediscache-1.0.0rc2-py3-none-any.whl", hash = "sha256:b156cc42b3285e1bd620487c594d7238552f95e48dc07b4e5d0b1c095c3acc86"}, +] +async-timeout = [ + {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, + {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, +] +attrs = [ + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, +] +beautifulsoup4 = [ + {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, + {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, +] +certifi = [ + {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, + {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, +] +cffi = [] +cfgv = [ + {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, + {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, +] +charset-normalizer = [ + {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, + {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, +] +colorama = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] +coloredlogs = [ + {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"}, + {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"}, +] +coverage = [ + {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, + {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, + {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, + {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, + {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, + {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, + {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, + {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, + {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, + {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, + {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, + {file = 
"coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, + {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, + {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, + {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, + {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, + {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, + {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, + {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, + {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, +] +deepdiff = [ + {file = "deepdiff-6.2.1-py3-none-any.whl", hash = "sha256:8ba27c185f9197b78c316ce7bb0c743d25d14f7cdb8ec3b340437dbc93dcbff2"}, + {file = "deepdiff-6.2.1.tar.gz", hash = "sha256:3fe134dde5b3922ff8c51fc1e95a972e659c853797231b836a5ccf15532fd516"}, +] +"discord.py" = [ + {file = 
"discord.py-2.2.0-py3-none-any.whl", hash = "sha256:012e98571af6847467e81f9501bbe4c6ebfe292c842f5ef8e951908839ee1cd0"}, + {file = "discord.py-2.2.0.tar.gz", hash = "sha256:a92d69ab6f982998693d0c371ea19235fa0f9900b50068fc461086d02c33e6bb"}, +] +distlib = [ + {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"}, + {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"}, +] +emoji = [ + {file = "emoji-2.2.0.tar.gz", hash = "sha256:a2986c21e4aba6b9870df40ef487a17be863cb7778dcf1c01e25917b7cd210bb"}, +] +exceptiongroup = [ + {file = "exceptiongroup-1.1.0-py3-none-any.whl", hash = "sha256:327cbda3da756e2de031a3107b81ab7b3770a602c4d16ca618298c526f4bec1e"}, + {file = "exceptiongroup-1.1.0.tar.gz", hash = "sha256:bcb67d800a4497e1b404c2dd44fca47d3b7a5e5433dbab67f96c1a685cdfdf23"}, +] +execnet = [ + {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"}, + {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"}, +] +fakeredis = [ + {file = "fakeredis-2.0.0-py3-none-any.whl", hash = "sha256:fb3186cbbe4c549f922b0f08eb84b09c0e51ecf8efbed3572d20544254f93a97"}, + {file = "fakeredis-2.0.0.tar.gz", hash = "sha256:6d1dc2417921b7ce56a80877afa390d6335a3154146f201a86e3a14417bdc79e"}, +] +feedparser = [ + {file = "feedparser-6.0.10-py3-none-any.whl", hash = "sha256:79c257d526d13b944e965f6095700587f27388e50ea16fd245babe4dfae7024f"}, + {file = "feedparser-6.0.10.tar.gz", hash = "sha256:27da485f4637ce7163cdeab13a80312b93b7d0c1b775bef4a47629a3110bca51"}, +] +filelock = [ + {file = "filelock-3.9.0-py3-none-any.whl", hash = "sha256:f58d535af89bb9ad5cd4df046f741f8553a418c01a7856bf0d173bbc9f6bd16d"}, + {file = "filelock-3.9.0.tar.gz", hash = "sha256:7b319f24340b51f55a2bf7a12ac0755a9b03e718311dac567a0f4f7fabd2f5de"}, +] +flake8 = [ + {file = "flake8-6.0.0-py2.py3-none-any.whl", hash = "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7"}, + {file = "flake8-6.0.0.tar.gz", hash = "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"}, +] +flake8-annotations = [ + {file = "flake8-annotations-2.9.1.tar.gz", hash = "sha256:11f09efb99ae63c8f9d6b492b75fe147fbc323179fddfe00b2e56eefeca42f57"}, + {file = "flake8_annotations-2.9.1-py3-none-any.whl", hash = "sha256:a4385158a7a9fc8af1d8820a2f4c8d03387997006a83f5f8bfe5bc6085bdf88a"}, +] +flake8-bugbear = [ + {file = "flake8-bugbear-22.10.27.tar.gz", hash = "sha256:a6708608965c9e0de5fff13904fed82e0ba21ac929fe4896459226a797e11cd5"}, + {file = "flake8_bugbear-22.10.27-py3-none-any.whl", hash = "sha256:6ad0ab754507319060695e2f2be80e6d8977cfcea082293089a9226276bd825d"}, +] +flake8-docstrings = [ + {file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"}, + {file = "flake8_docstrings-1.6.0-py2.py3-none-any.whl", hash = "sha256:99cac583d6c7e32dd28bbfbef120a7c0d1b6dde4adb5a9fd441c4227a6534bde"}, +] +flake8-isort = [ + {file = "flake8-isort-5.0.3.tar.gz", hash = "sha256:0951398c343c67f4933407adbbfb495d4df7c038650c5d05753a006efcfeb390"}, + {file = "flake8_isort-5.0.3-py3-none-any.whl", hash = "sha256:8c4ab431d87780d0c8336e9614e50ef11201bc848ef64ca017532dec39d4bf49"}, +] +flake8-string-format = [ + {file = "flake8-string-format-0.3.0.tar.gz", hash = "sha256:65f3da786a1461ef77fca3780b314edb2853c377f2e35069723348c8917deaa2"}, + {file 
= "flake8_string_format-0.3.0-py2.py3-none-any.whl", hash = "sha256:812ff431f10576a74c89be4e85b8e075a705be39bc40c4b4278b5b13e2afa9af"}, +] +flake8-tidy-imports = [ + {file = "flake8-tidy-imports-4.8.0.tar.gz", hash = "sha256:df44f9c841b5dfb3a7a1f0da8546b319d772c2a816a1afefcce43e167a593d83"}, + {file = "flake8_tidy_imports-4.8.0-py3-none-any.whl", hash = "sha256:25bd9799358edefa0e010ce2c587b093c3aba942e96aeaa99b6d0500ae1bf09c"}, +] +flake8-todo = [ + {file = "flake8-todo-0.7.tar.gz", hash = "sha256:6e4c5491ff838c06fe5a771b0e95ee15fc005ca57196011011280fc834a85915"}, +] +frozenlist = [ + {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4"}, + {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0"}, + {file = "frozenlist-1.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420"}, + {file = "frozenlist-1.3.3-cp310-cp310-win32.whl", hash = "sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642"}, + {file = "frozenlist-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81"}, + {file = "frozenlist-1.3.3-cp311-cp311-win32.whl", hash = "sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8"}, + {file = "frozenlist-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32"}, + {file = "frozenlist-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb"}, + {file = 
"frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401"}, + {file = "frozenlist-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a"}, + {file = "frozenlist-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3"}, + {file = "frozenlist-1.3.3-cp38-cp38-win32.whl", hash = "sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b"}, + {file = "frozenlist-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1"}, + {file = "frozenlist-1.3.3-cp39-cp39-win32.whl", hash = "sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38"}, + {file = "frozenlist-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9"}, + {file = "frozenlist-1.3.3.tar.gz", hash = "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a"}, +] +humanfriendly = [ + {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"}, + {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, +] +identify = [ + {file = "identify-2.5.18-py2.py3-none-any.whl", hash = "sha256:93aac7ecf2f6abf879b8f29a8002d3c6de7086b8c28d88e1ad15045a15ab63f9"}, + {file = "identify-2.5.18.tar.gz", hash = "sha256:89e144fa560cc4cffb6ef2ab5e9fb18ed9f9b3cb054384bab4b95c12f6c309fe"}, +] +idna = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] +iniconfig = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] +isort = [ + {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, + {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, +] +lupa = [ + {file = "lupa-1.14.1-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:20b486cda76ff141cfb5f28df9c757224c9ed91e78c5242d402d2e9cb699d464"}, + {file = "lupa-1.14.1-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c685143b18c79a3a1fa25a4cc774a87b5a61c606f249bcf824d125d8accb6b2c"}, + {file = "lupa-1.14.1-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:3865f9dbe9a84bd6a471250e52068aaf1147f206a51905fb6d93e1db9efb00ee"}, + {file = "lupa-1.14.1-cp27-cp27m-win32.whl", hash = "sha256:2dacdddd5e28c6f5fd96a46c868ec5c34b0fad1ec7235b5bbb56f06183a37f20"}, + {file = "lupa-1.14.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e754cbc6cacc9bca6ff2b39025e9659a2098420639d214054b06b466825f4470"}, + {file = "lupa-1.14.1-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e36f3eb70705841bce9c15e12bc6fc3b2f4f68a41ba0e4af303b22fc4d8667c"}, + {file = "lupa-1.14.1-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0aac06098d46729edd2d04e80b55d9d310e902f042f27521308df77cb1ba0191"}, + {file = "lupa-1.14.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:9706a192339efa1a6b7d806389572a669dd9ae2250469ff1ce13f684085af0b4"}, + {file = "lupa-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d688a35f7fe614720ed7b820cbb739b37eff577a764c2003e229c2a752201cea"}, + {file = "lupa-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:36d888bd42589ecad21a5fb957b46bc799640d18eff2fd0c47a79ffb4a1b286c"}, + {file = "lupa-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:0423acd739cf25dbdbf1e33a0aa8026f35e1edea0573db63d156f14a082d77c8"}, + {file = "lupa-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7068ae0d6a1a35ea8718ef6e103955c1ee143181bf0684604a76acc67f69de55"}, + {file = "lupa-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5fef8b755591f0466438ad0a3e92ecb21dd6bb1f05d0215139b6ff8c87b2ce65"}, + {file = "lupa-1.14.1-cp310-cp310-win32.whl", hash = "sha256:4a44e1fd0e9f4a546fbddd2e0fd913c823c9ac58a5f3160fb4f9109f633cb027"}, + {file = "lupa-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:b83100cd7b48a7ca85dda4e9a6a5e7bc3312691e7f94c6a78d1f9a48a86a7fec"}, + {file = "lupa-1.14.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:1b8bda50c61c98ff9bb41d1f4934640c323e9f1539021810016a2eae25a66c3d"}, + {file = "lupa-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aa1449aa1ab46c557344867496dee324b47ede0c41643df8f392b00262d21b12"}, + {file = "lupa-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a17ebf91b3aa1c5c36661e34c9cf10e04bb4cc00076e8b966f86749647162050"}, + {file = "lupa-1.14.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:b1d9cfa469e7a2ad7e9a00fea7196b0022aa52f43a2043c2e0be92122e7bcfe8"}, + {file = "lupa-1.14.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bc4f5e84aee0d567aa2e116ff6844d06086ef7404d5102807e59af5ce9daf3c0"}, + {file = "lupa-1.14.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:40cf2eb90087dfe8ee002740469f2c4c5230d5e7d10ffb676602066d2f9b1ac9"}, + {file = "lupa-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:63a27c38295aa971730795941270fff2ce65576f68ec63cb3ecb90d7a4526d03"}, + {file = "lupa-1.14.1-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:457330e7a5456c4415fc6d38822036bd4cff214f9d8f7906200f6b588f1b2932"}, + {file = "lupa-1.14.1-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d61fb507a36e18dc68f2d9e9e2ea19e1114b1a5e578a36f18e9be7a17d2931d1"}, + {file = "lupa-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:f26b73d10130ad73e07d45dfe9b7c3833e3a2aa1871a4ecf5ce2dc1abeeae74d"}, + {file = "lupa-1.14.1-cp35-cp35m-win_amd64.whl", 
hash = "sha256:297d801ba8e4e882b295c25d92f1634dde5e76d07ec6c35b13882401248c485d"}, + {file = "lupa-1.14.1-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c8bddd22eaeea0ce9d302b390d8bc606f003bf6c51be68e8b007504433b91280"}, + {file = "lupa-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1661c890861cf0f7002d7a7e00f50c885577954c2d85a7173b218d3228fa3869"}, + {file = "lupa-1.14.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:2ee480d31555f00f8bf97dd949c596508bd60264cff1921a3797a03dd369e8cd"}, + {file = "lupa-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:1ff93560c2546d7627ab2f95b5e88f000705db70a3d6041ac29d050f094f2a35"}, + {file = "lupa-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:47f1459e2c98480c291ae3b70688d762f82dbb197ef121d529aa2c4e8bab1ba3"}, + {file = "lupa-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:8986dba002346505ee44c78303339c97a346b883015d5cf3aaa0d76d3b952744"}, + {file = "lupa-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8912459fddf691e70f2add799a128822bae725826cfb86f69720a38bdfa42410"}, + {file = "lupa-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:9b9d1b98391959ae531bbb8df7559ac2c408fcbd33721921b6a05fd6414161e0"}, + {file = "lupa-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:61ff409040fa3a6c358b7274c10e556ba22afeb3470f8d23cd0a6bf418fb30c9"}, + {file = "lupa-1.14.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:350ba2218eea800898854b02753dc0c9cfe83db315b30c0dc10ab17493f0321a"}, + {file = "lupa-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:46dcbc0eae63899468686bb1dfc2fe4ed21fe06f69416113f039d88aab18f5dc"}, + {file = "lupa-1.14.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:7ad96923e2092d8edbf0c1b274f9b522690b932ed47a70d9a0c1c329f169f107"}, + {file = "lupa-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:364b291bf2b55555c87b4bffb4db5a9619bcdb3c02e58aebde5319c3c59ec9b2"}, + {file = "lupa-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0ed071efc8ee231fac1fcd6b6fce44dc6da75a352b9b78403af89a48d759743c"}, + {file = "lupa-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:bce60847bebb4aa9ed3436fab3e84585e9094e15e1cb8d32e16e041c4ef65331"}, + {file = "lupa-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5fbe7f83b0007cda3b158a93726c80dfd39003a8c5c5d608f6fdf8c60c42117f"}, + {file = "lupa-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4bd789967cbb5c84470f358c7fa8fcbf7464185adbd872a6c3de9b42d29a6d26"}, + {file = "lupa-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:ca58da94a6495dda0063ba975fe2e6f722c5e84c94f09955671b279c41cfde96"}, + {file = "lupa-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:51d6965663b2be1a593beabfa10803fdbbcf0b293aa4a53ea09a23db89787d0d"}, + {file = "lupa-1.14.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d251ba009996a47231615ea6b78123c88446979ae99b5585269ec46f7a9197aa"}, + {file = "lupa-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:abe3fc103d7bd34e7028d06db557304979f13ebf9050ad0ea6c1cc3a1caea017"}, + {file = "lupa-1.14.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:4ea185c394bf7d07e9643d868e50cc94a530bb298d4bdae4915672b3809cc72b"}, + {file = "lupa-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:6aff7257b5953de620db489899406cddb22093d1124fc5b31f8900e44a9dbc2a"}, + {file = "lupa-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d6f5bfbd8fc48c27786aef8f30c84fd9197747fa0b53761e69eb968d81156cbf"}, + {file = "lupa-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:dec7580b86975bc5bdf4cc54638c93daaec10143b4acc4a6c674c0f7e27dd363"}, + {file = "lupa-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:96a201537930813b34145daf337dcd934ddfaebeba6452caf8a32a418e145e82"}, + {file = "lupa-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c0efaae8e7276f4feb82cba43c3cd45c82db820c9dab3965a8f2e0cb8b0bc30b"}, + {file = "lupa-1.14.1-cp38-cp38-win32.whl", hash = "sha256:b6953854a343abdfe11aa52a2d021fadf3d77d0cd2b288b650f149b597e0d02d"}, + {file = "lupa-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:c79ced2aaf7577e3d06933cf0d323fa968e6864c498c376b0bd475ded86f01f3"}, + {file = "lupa-1.14.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:72589a21a3776c7dd4b05374780e7ecf1b49c490056077fc91486461935eaaa3"}, + {file = "lupa-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:30d356a433653b53f1fe29477faaf5e547b61953b971b010d2185a561f4ce82a"}, + {file = "lupa-1.14.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:2116eb467797d5a134b2c997dfc7974b9a84b3aa5776c17ba8578ed4f5f41a9b"}, + {file = "lupa-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:24d6c3435d38614083d197f3e7bcfe6d3d9eb02ee393d60a4ab9c719bc000162"}, + {file = "lupa-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9144ecfa5e363f03e4d1c1e678b081cd223438be08f96604fca478591c3e3b53"}, + {file = "lupa-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:69be1d6c3f3ab9fc988c9a0e5801f23f68e2c8b5900a8fd3ae57d1d0e9c5539c"}, + {file = "lupa-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:77b587043d0bee9cc738e00c12718095cf808dd269b171f852bd82026c664c69"}, + {file = "lupa-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:62530cf0a9c749a3cd13ad92b31eaf178939d642b6176b46cfcd98f6c5006383"}, + {file = "lupa-1.14.1-cp39-cp39-win32.whl", hash = "sha256:d891b43b8810191eb4c42a0bc57c32f481098029aac42b176108e09ffe118cdc"}, + {file = "lupa-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:cf643bc48a152e2c572d8be7fc1de1c417a6a9648d337ffedebf00f57016b786"}, + {file = "lupa-1.14.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0ac862c6d2eb542ac70d294a8e960b9ae7f46297559733b4c25f9e3c945e522a"}, + {file = "lupa-1.14.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:0a15680f425b91ec220eb84b0ab59d24c4bee69d15b88245a6998a7d38c78ba6"}, + {file = "lupa-1.14.1-pp37-pypy37_pp73-win32.whl", hash = "sha256:8a064d72991ba53aeea9720d95f2055f7f8a1e2f35b32a35d92248b63a94bcd1"}, + {file = "lupa-1.14.1-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6d87d6c51e6c3b6326d18af83e81f4860ba0b287cda1101b1ab8562389d598f5"}, + {file = "lupa-1.14.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:b3efe9d887cfdf459054308ecb716e0eb11acb9a96c3022ee4e677c1f510d244"}, + {file = 
"lupa-1.14.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:723fff6fcab5e7045e0fa79014729577f98082bd1fd1050f907f83a41e4c9865"}, + {file = "lupa-1.14.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:930092a27157241d07d6d09ff01d5530a9e4c0dd515228211f2902b7e88ec1f0"}, + {file = "lupa-1.14.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:7f6bc9852bdf7b16840c984a1e9f952815f7d4b3764585d20d2e062bd1128074"}, + {file = "lupa-1.14.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:8f65d2007092a04616c215fea5ad05ba8f661bd0f45cde5265d27150f64d3dd8"}, + {file = "lupa-1.14.1.tar.gz", hash = "sha256:d0fd4e60ad149fe25c90530e2a0e032a42a6f0455f29ca0edb8170d6ec751c6e"}, +] +lxml = [] +markdownify = [ + {file = "markdownify-0.6.1-py3-none-any.whl", hash = "sha256:7489fd5c601536996a376c4afbcd1dd034db7690af807120681461e82fbc0acc"}, + {file = "markdownify-0.6.1.tar.gz", hash = "sha256:31d7c13ac2ada8bfc7535a25fee6622ca720e1b5f2d4a9cbc429d167c21f886d"}, +] +mccabe = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] +more-itertools = [ + {file = "more-itertools-9.0.0.tar.gz", hash = "sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab"}, + {file = "more_itertools-9.0.0-py3-none-any.whl", hash = "sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41"}, +] +mslex = [ + {file = "mslex-0.3.0-py2.py3-none-any.whl", hash = "sha256:380cb14abf8fabf40e56df5c8b21a6d533dc5cbdcfe42406bbf08dda8f42e42a"}, + {file = "mslex-0.3.0.tar.gz", hash = "sha256:4a1ac3f25025cad78ad2fe499dd16d42759f7a3801645399cce5c404415daa97"}, +] +multidict = [ + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, + {file = 
"multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, + {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, + {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, + {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, + {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, + {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, + {file 
= "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, + {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, + {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, + {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, + {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, + {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, + {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, + {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, +] +nodeenv = [] +ordered-set = [ + {file = "ordered-set-4.1.0.tar.gz", hash = "sha256:694a8e44c87657c59292ede72891eb91d34131f6531463aab3009191c77364a8"}, + {file = "ordered_set-4.1.0-py3-none-any.whl", hash = "sha256:046e1132c71fcf3330438a539928932caf51ddbc582496833e23de611de14562"}, +] +packaging = [ + {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, + {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, +] +pep8-naming = [ + {file = "pep8-naming-0.13.2.tar.gz", hash = "sha256:93eef62f525fd12a6f8c98f4dcc17fa70baae2f37fa1f73bec00e3e44392fa48"}, + {file = "pep8_naming-0.13.2-py3-none-any.whl", hash = 
"sha256:59e29e55c478db69cffbe14ab24b5bd2cd615c0413edf790d47d3fb7ba9a4e23"}, +] +pip-licenses = [ + {file = "pip-licenses-4.0.1.tar.gz", hash = "sha256:05a180f5610b262e2d56eea99f04e380db7080e79655abf1c916125f39fe207d"}, + {file = "pip_licenses-4.0.1-py3-none-any.whl", hash = "sha256:5896c18b7897e38fdd7be9a9ea0de02d6ff3264b7411967d6b679019ddc31878"}, +] +platformdirs = [ + {file = "platformdirs-3.0.0-py3-none-any.whl", hash = "sha256:b1d5eb14f221506f50d6604a561f4c5786d9e80355219694a1b244bcd96f4567"}, + {file = "platformdirs-3.0.0.tar.gz", hash = "sha256:8a1228abb1ef82d788f74139988b137e78692984ec7b08eaa6c65f1723af28f9"}, +] +pluggy = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] +pre-commit = [ + {file = "pre_commit-2.20.0-py2.py3-none-any.whl", hash = "sha256:51a5ba7c480ae8072ecdb6933df22d2f812dc897d5fe848778116129a681aac7"}, + {file = "pre_commit-2.20.0.tar.gz", hash = "sha256:a978dac7bc9ec0bcee55c18a277d553b0f419d259dadb4b9418ff2d00eb43959"}, +] +prettytable = [ + {file = "prettytable-3.6.0-py3-none-any.whl", hash = "sha256:3b767129491767a3a5108e6f305cbaa650f8020a7db5dfe994a2df7ef7bad0fe"}, + {file = "prettytable-3.6.0.tar.gz", hash = "sha256:2e0026af955b4ea67b22122f310b90eae890738c08cb0458693a49b6221530ac"}, +] +psutil = [ + {file = "psutil-5.9.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8"}, + {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:68908971daf802203f3d37e78d3f8831b6d1014864d7a85937941bb35f09aefe"}, + {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ff89f9b835100a825b14c2808a106b6fdcc4b15483141482a12c725e7f78549"}, + {file = "psutil-5.9.4-cp27-cp27m-win32.whl", hash = "sha256:852dd5d9f8a47169fe62fd4a971aa07859476c2ba22c2254d4a1baa4e10b95ad"}, + {file = "psutil-5.9.4-cp27-cp27m-win_amd64.whl", hash = "sha256:9120cd39dca5c5e1c54b59a41d205023d436799b1c8c4d3ff71af18535728e94"}, + {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6b92c532979bafc2df23ddc785ed116fced1f492ad90a6830cf24f4d1ea27d24"}, + {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:efeae04f9516907be44904cc7ce08defb6b665128992a56957abc9b61dca94b7"}, + {file = "psutil-5.9.4-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:54d5b184728298f2ca8567bf83c422b706200bcbbfafdc06718264f9393cfeb7"}, + {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16653106f3b59386ffe10e0bad3bb6299e169d5327d3f187614b1cb8f24cf2e1"}, + {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54c0d3d8e0078b7666984e11b12b88af2db11d11249a8ac8920dd5ef68a66e08"}, + {file = "psutil-5.9.4-cp36-abi3-win32.whl", hash = "sha256:149555f59a69b33f056ba1c4eb22bb7bf24332ce631c44a319cec09f876aaeff"}, + {file = "psutil-5.9.4-cp36-abi3-win_amd64.whl", hash = "sha256:fd8522436a6ada7b4aad6638662966de0d61d241cb821239b2ae7013d41a43d4"}, + {file = "psutil-5.9.4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6001c809253a29599bc0dfd5179d9f8a5779f9dffea1da0f13c53ee568115e1e"}, + {file = "psutil-5.9.4.tar.gz", hash = "sha256:3d7f9739eb435d4b1338944abe23f49584bde5395f27487d2ee25ad9a8774a62"}, +] +pycares = [ + {file = 
"pycares-4.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:19c9cdd3322d422931982939773e453e491dfc5c0b2e23d7266959315c7a0824"}, + {file = "pycares-4.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9e56e9cdf46a092970dc4b75bbabddea9f480be5eeadc3fcae3eb5c6807c4136"}, + {file = "pycares-4.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c75a6241c79b935048272cb77df498da64b8defc8c4b29fdf9870e43ba4cbb4"}, + {file = "pycares-4.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24d8654fac3742791b8bef59d1fbb3e19ae6a5c48876a6d98659f7c66ee546c4"}, + {file = "pycares-4.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ebf50b049a245880f1aa16a6f72c4408e0a65b49ea1d3bf13383a44a2cabd2bf"}, + {file = "pycares-4.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:84daf560962763c0359fd79c750ef480f0fda40c08b57765088dbe362e8dc452"}, + {file = "pycares-4.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:978d10da7ee74b9979c494afa8b646411119ad0186a29c7f13c72bb4295630c6"}, + {file = "pycares-4.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c5b9d7fe52eb3d243f5ead58d5c0011884226d961df8360a34618c38c7515"}, + {file = "pycares-4.3.0-cp310-cp310-win32.whl", hash = "sha256:da7c7089ae617317d2cbe38baefd3821387b3bfef7b3ee5b797b871cb1257974"}, + {file = "pycares-4.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:7106dc683db30e1d851283b7b9df7a5ea4964d6bdd000d918d91d4b1f9bed329"}, + {file = "pycares-4.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4e7a24ecef0b1933f2a3fdbf328d1b529a76cda113f8364fa0742e5b3bd76566"}, + {file = "pycares-4.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e7abccc2aa4771c06994e4d9ed596453061e2b8846f887d9c98a64ccdaf4790a"}, + {file = "pycares-4.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:531fed46c5ed798a914c3207be4ae7b297c4d09e4183d3cf8fd9ee59a55d5080"}, + {file = "pycares-4.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c9335175af0c64a1e0ba67bdd349eb62d4eea0ad02c235ccdf0d535fd20f323"}, + {file = "pycares-4.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5f0e95535027d2dcd51e780410632b0d3ed7e9e5ceb25dc0fe937f2c2960079"}, + {file = "pycares-4.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3692179ce5fb96908ba342e1e5303608d0c976f0d5d4619fa9d3d6d9d5a9a1b4"}, + {file = "pycares-4.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c4cb6cc7fe8e0606d30b60367f59fe26d1472e88555d61e202db70dea5c8edb"}, + {file = "pycares-4.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3215445396c74103e2054e6b349d9e85883ceda2006d0039fc2d58c9b11818a2"}, + {file = "pycares-4.3.0-cp311-cp311-win32.whl", hash = "sha256:6a0c0c3a0adf490bba9dbb37dbd07ec81e4a6584f095036ac34f06a633710ffe"}, + {file = "pycares-4.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:995cb37cc39bd40ca87bb16555a0f7724f3be30d9f9059a4caab2fde45b1b903"}, + {file = "pycares-4.3.0-cp36-cp36m-win32.whl", hash = "sha256:4c9187be72449c975c11daa1d94d7ddcc494f8a4c37a6c18f977cd7024a531d9"}, + {file = "pycares-4.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d7405ba10a2903a58b8b0faedcb54994c9ee002ad01963587fabf93e7e479783"}, + {file = "pycares-4.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:40aaa12081495f879f11f4cfc95edfec1ea14711188563102f9e33fe98728fac"}, + {file = 
"pycares-4.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4972cac24b66c5997f3a3e2cb608e408066d80103d443e36d626a88a287b9ae7"}, + {file = "pycares-4.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35886dba7aa5b73affca8729aeb5a1f5e94d3d9a764adb1b7e75bafca44eeca5"}, + {file = "pycares-4.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5cea6e1f3be016f155d60f27f16c1074d58b4d6e123228fdbc3326d076016af8"}, + {file = "pycares-4.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3a9fd2665b053afb39226ac6f8137a60910ca7729358456df2fb94866f4297de"}, + {file = "pycares-4.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e8e9195f869120e44e0aa0a6098bb5c19947f4753054365891f592e6f9eab3ef"}, + {file = "pycares-4.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:674486ecf2afb25ee219171b07cdaba481a1aaa2dabb155779c7be9ded03eaa9"}, + {file = "pycares-4.3.0-cp37-cp37m-win32.whl", hash = "sha256:1b6cd3161851499b6894d1e23bfd633e7b775472f5af35ae35409c4a47a2d45e"}, + {file = "pycares-4.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:710120c97b9afdba443564350c3f5f72fd9aae74d95b73dc062ca8ac3d7f36d7"}, + {file = "pycares-4.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9103649bd29d84bc6bcfaf09def9c0592bbc766018fad19d76d09989608b915d"}, + {file = "pycares-4.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c072dbaf73cb5434279578dc35322867d8d5df053e14fdcdcc589994ba4804ae"}, + {file = "pycares-4.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008531733f9c7a976b59c7760a3672b191159fd69ae76c01ca051f20b5e44164"}, + {file = "pycares-4.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2aae02d97d77dcff840ab55f86cb8b99bf644acbca17e1edb7048408b9782088"}, + {file = "pycares-4.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:257953ae6d400a934fd9193aeb20990ac84a78648bdf5978e998bd007a4045cd"}, + {file = "pycares-4.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c28d481efae26936ec08cb6beea305f4b145503b152cf2c4dc68cc4ad9644f0e"}, + {file = "pycares-4.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:976249b39037dbfb709ccf7e1c40d2785905a0065536385d501b94570cfed96d"}, + {file = "pycares-4.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:98568c30cfab6b327d94ae1acdf85bbba4cffd415980804985d34ca07e6f4791"}, + {file = "pycares-4.3.0-cp38-cp38-win32.whl", hash = "sha256:a2f3c4f49f43162f7e684419d9834c2c8ec165e54cb8dc47aa9dc0c2132701c0"}, + {file = "pycares-4.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:1730ef93e33e4682fbbf0e7fb19df2ed9822779d17de8ea6e20d5b0d71c1d2be"}, + {file = "pycares-4.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5a26b3f1684557025da26ce65d076619890c82b95e38cc7284ce51c3539a1ce8"}, + {file = "pycares-4.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:86112cce01655b9f63c5e53b74722084e88e784a7a8ad138d373440337c591c9"}, + {file = "pycares-4.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c01465a191dc78e923884bb45cd63c7e012623e520cf7ed67e542413ee334804"}, + {file = "pycares-4.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9fd5d6012f3ee8c8038cbfe16e988bbd17b2f21eea86650874bf63757ee6161"}, + {file = "pycares-4.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:aa36b8ea91eae20b5c7205f3e6654423f066af24a1df02b274770a96cbcafaa7"}, + {file = "pycares-4.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:61019151130557c1788cae52e4f2f388a7520c9d92574f3a0d61c974c6740db0"}, + {file = "pycares-4.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:231962bb46274c52632469a1e686fab065dbd106dbef586de4f7fb101e297587"}, + {file = "pycares-4.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6c979512fa51c7ccef5204fe10ed4e5c44c2bce5f335fe98a3e423f1672bd7d4"}, + {file = "pycares-4.3.0-cp39-cp39-win32.whl", hash = "sha256:655cf0df862ce3847a60e1a106dafa2ba2c14e6636bac49e874347acdc7312dc"}, + {file = "pycares-4.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:36f2251ad0f99a5ce13df45c94c3161d9734c9e9fa2b9b4cc163b853ca170dc5"}, + {file = "pycares-4.3.0.tar.gz", hash = "sha256:c542696f6dac978e9d99192384745a65f80a7d9450501151e4a7563e06010d45"}, +] +pycodestyle = [ + {file = "pycodestyle-2.10.0-py2.py3-none-any.whl", hash = "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"}, + {file = "pycodestyle-2.10.0.tar.gz", hash = "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053"}, +] +pycparser = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] +pydantic = [ + {file = "pydantic-1.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb6ad4489af1bac6955d38ebcb95079a836af31e4c4f74aba1ca05bb9f6027bd"}, + {file = "pydantic-1.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1f5a63a6dfe19d719b1b6e6106561869d2efaca6167f84f5ab9347887d78b98"}, + {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:352aedb1d71b8b0736c6d56ad2bd34c6982720644b0624462059ab29bd6e5912"}, + {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19b3b9ccf97af2b7519c42032441a891a5e05c68368f40865a90eb88833c2559"}, + {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9069e1b01525a96e6ff49e25876d90d5a563bc31c658289a8772ae186552236"}, + {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:355639d9afc76bcb9b0c3000ddcd08472ae75318a6eb67a15866b87e2efa168c"}, + {file = "pydantic-1.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:ae544c47bec47a86bc7d350f965d8b15540e27e5aa4f55170ac6a75e5f73b644"}, + {file = "pydantic-1.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a4c805731c33a8db4b6ace45ce440c4ef5336e712508b4d9e1aafa617dc9907f"}, + {file = "pydantic-1.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d49f3db871575e0426b12e2f32fdb25e579dea16486a26e5a0474af87cb1ab0a"}, + {file = "pydantic-1.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37c90345ec7dd2f1bcef82ce49b6235b40f282b94d3eec47e801baf864d15525"}, + {file = "pydantic-1.10.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b5ba54d026c2bd2cb769d3468885f23f43710f651688e91f5fb1edcf0ee9283"}, + {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05e00dbebbe810b33c7a7362f231893183bcc4251f3f2ff991c31d5c08240c42"}, + {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2d0567e60eb01bccda3a4df01df677adf6b437958d35c12a3ac3e0f078b0ee52"}, + {file = 
"pydantic-1.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:c6f981882aea41e021f72779ce2a4e87267458cc4d39ea990729e21ef18f0f8c"}, + {file = "pydantic-1.10.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4aac8e7103bf598373208f6299fa9a5cfd1fc571f2d40bf1dd1955a63d6eeb5"}, + {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a7b66c3f499108b448f3f004801fcd7d7165fb4200acb03f1c2402da73ce4c"}, + {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bedf309630209e78582ffacda64a21f96f3ed2e51fbf3962d4d488e503420254"}, + {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9300fcbebf85f6339a02c6994b2eb3ff1b9c8c14f502058b5bf349d42447dcf5"}, + {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:216f3bcbf19c726b1cc22b099dd409aa371f55c08800bcea4c44c8f74b73478d"}, + {file = "pydantic-1.10.2-cp37-cp37m-win_amd64.whl", hash = "sha256:dd3f9a40c16daf323cf913593083698caee97df2804aa36c4b3175d5ac1b92a2"}, + {file = "pydantic-1.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b97890e56a694486f772d36efd2ba31612739bc6f3caeee50e9e7e3ebd2fdd13"}, + {file = "pydantic-1.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9cabf4a7f05a776e7793e72793cd92cc865ea0e83a819f9ae4ecccb1b8aa6116"}, + {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06094d18dd5e6f2bbf93efa54991c3240964bb663b87729ac340eb5014310624"}, + {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc78cc83110d2f275ec1970e7a831f4e371ee92405332ebfe9860a715f8336e1"}, + {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ee433e274268a4b0c8fde7ad9d58ecba12b069a033ecc4645bb6303c062d2e9"}, + {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7c2abc4393dea97a4ccbb4ec7d8658d4e22c4765b7b9b9445588f16c71ad9965"}, + {file = "pydantic-1.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:0b959f4d8211fc964772b595ebb25f7652da3f22322c007b6fed26846a40685e"}, + {file = "pydantic-1.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c33602f93bfb67779f9c507e4d69451664524389546bacfe1bee13cae6dc7488"}, + {file = "pydantic-1.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5760e164b807a48a8f25f8aa1a6d857e6ce62e7ec83ea5d5c5a802eac81bad41"}, + {file = "pydantic-1.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6eb843dcc411b6a2237a694f5e1d649fc66c6064d02b204a7e9d194dff81eb4b"}, + {file = "pydantic-1.10.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b8795290deaae348c4eba0cebb196e1c6b98bdbe7f50b2d0d9a4a99716342fe"}, + {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e0bedafe4bc165ad0a56ac0bd7695df25c50f76961da29c050712596cf092d6d"}, + {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e05aed07fa02231dbf03d0adb1be1d79cabb09025dd45aa094aa8b4e7b9dcda"}, + {file = "pydantic-1.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:c1ba1afb396148bbc70e9eaa8c06c1716fdddabaf86e7027c5988bae2a829ab6"}, + {file = "pydantic-1.10.2-py3-none-any.whl", hash = "sha256:1b6ee725bd6e83ec78b1aa32c5b1fa67a3a65badddde3976bca5fe4568f27709"}, + {file = "pydantic-1.10.2.tar.gz", hash = "sha256:91b8e218852ef6007c2b98cd861601c6a09f1aa32bbbb74fab5b1c33d4a1e410"}, +] +pydis-core = [ + {file = 
"pydis_core-9.5.0-py3-none-any.whl", hash = "sha256:35834274a80b86a5426f27cb546b3fada8a5711bbf01bbcf1b0a8860a2afee94"}, + {file = "pydis_core-9.5.0.tar.gz", hash = "sha256:1cf9c223af9b5377e08cc0eb046a12130518e4743afbdd4f052d5556b7dae805"}, +] +pydocstyle = [ + {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, + {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, +] +pyflakes = [ + {file = "pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"}, + {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"}, +] +pyreadline3 = [ + {file = "pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb"}, + {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"}, +] +pytest = [ + {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, + {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, +] +pytest-cov = [ + {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, + {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"}, +] +pytest-subtests = [ + {file = "pytest-subtests-0.9.0.tar.gz", hash = "sha256:c0317cd5f6a5eb3e957e89dbe4fc3322a9afddba2db8414355ed2a2cb91a844e"}, + {file = "pytest_subtests-0.9.0-py3-none-any.whl", hash = "sha256:f5f616b92c13405909d210569d6d3914db6fe156333ff5426534f97d5b447861"}, +] +pytest-xdist = [ + {file = "pytest-xdist-3.0.2.tar.gz", hash = "sha256:688da9b814370e891ba5de650c9327d1a9d861721a524eb917e620eec3e90291"}, + {file = "pytest_xdist-3.0.2-py3-none-any.whl", hash = "sha256:9feb9a18e1790696ea23e1434fa73b325ed4998b0e9fcb221f16fd1945e6df1b"}, +] +python-dateutil = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] +python-dotenv = [ + {file = "python-dotenv-0.21.0.tar.gz", hash = "sha256:b77d08274639e3d34145dfa6c7008e66df0f04b7be7a75fd0d5292c191d79045"}, + {file = "python_dotenv-0.21.0-py3-none-any.whl", hash = "sha256:1684eb44636dd462b66c3ee016599815514527ad99965de77f43e0944634a7e5"}, +] +python-frontmatter = [ + {file = "python-frontmatter-1.0.0.tar.gz", hash = "sha256:e98152e977225ddafea6f01f40b4b0f1de175766322004c826ca99842d19a7cd"}, + {file = "python_frontmatter-1.0.0-py3-none-any.whl", hash = "sha256:766ae75f1b301ffc5fe3494339147e0fd80bc3deff3d7590a93991978b579b08"}, +] +pyyaml = [ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, @@ -1698,13 +1811,6 @@ files = [ {file = 
"PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, - {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, - {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, @@ -1732,15 +1838,7 @@ files = [ {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] - -[[package]] -name = "rapidfuzz" -version = "2.13.2" -description = "rapid fuzzy string matching" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ +rapidfuzz = [ {file = "rapidfuzz-2.13.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91c049f7591d9e9f8bcc3c556c0c4b448223f564ad04511a8719d28f5d38daed"}, {file = "rapidfuzz-2.13.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:26e4b7f7941b92546a9b06ed75b40b5d7ceace8f3074d06cb3369349388d700d"}, {file = "rapidfuzz-2.13.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ba2a8fbd21079093118c40e8e80068750c1619a5988e54220ea0929de48e7d65"}, @@ -1831,38 +1929,11 @@ files = [ {file = "rapidfuzz-2.13.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ac6a8a34f858f3862798383f51012788df6be823e2874fa426667a4da94ded7e"}, {file = "rapidfuzz-2.13.2.tar.gz", hash = "sha256:1c67007161655c59e13bba130a2db29d7c9e5c81bcecb8846a3dd7386065eb24"}, ] - -[package.extras] -full = ["numpy"] - -[[package]] -name = "redis" -version = "4.3.5" -description = "Python client for Redis database and key-value store" -category = "main" -optional = false -python-versions = ">=3.6" -files = [ +redis = [ {file = "redis-4.3.5-py3-none-any.whl", hash = "sha256:46652271dc7525cd5a9667e5b0ca983c848c75b2b8f7425403395bb8379dcf25"}, {file = 
"redis-4.3.5.tar.gz", hash = "sha256:30c07511627a4c5c4d970e060000772f323174f75e745a26938319817ead7a12"}, ] - -[package.dependencies] -async-timeout = ">=4.0.2" -packaging = ">=20.4" - -[package.extras] -hiredis = ["hiredis (>=1.0.0)"] -ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] - -[[package]] -name = "regex" -version = "2022.10.31" -description = "Alternative regular expression module, to replace re." -category = "main" -optional = false -python-versions = ">=3.6" -files = [ +regex = [ {file = "regex-2022.10.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a8ff454ef0bb061e37df03557afda9d785c905dab15584860f982e88be73015f"}, {file = "regex-2022.10.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1eba476b1b242620c266edf6325b443a2e22b633217a9835a52d8da2b5c051f9"}, {file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0e5af9a9effb88535a472e19169e09ce750c3d442fb222254a276d77808620b"}, @@ -1952,366 +2023,146 @@ files = [ {file = "regex-2022.10.31-cp39-cp39-win_amd64.whl", hash = "sha256:957403a978e10fb3ca42572a23e6f7badff39aa1ce2f4ade68ee452dc6807692"}, {file = "regex-2022.10.31.tar.gz", hash = "sha256:a3a98921da9a1bf8457aeee6a551948a83601689e5ecdd736894ea9bbec77e83"}, ] - -[[package]] -name = "requests" -version = "2.28.1" -description = "Python HTTP for Humans." -category = "main" -optional = false -python-versions = ">=3.7, <4" -files = [ - {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, - {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, +requests = [ + {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, + {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, ] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "requests-file" -version = "1.5.1" -description = "File transport adapter for Requests" -category = "main" -optional = false -python-versions = "*" -files = [ +requests-file = [ {file = "requests-file-1.5.1.tar.gz", hash = "sha256:07d74208d3389d01c38ab89ef403af0cfec63957d53a0081d8eca738d0247d8e"}, {file = "requests_file-1.5.1-py2.py3-none-any.whl", hash = "sha256:dfe5dae75c12481f68ba353183c53a65e6044c923e64c24b2209f6c7570ca953"}, ] - -[package.dependencies] -requests = ">=1.0.0" -six = "*" - -[[package]] -name = "sentry-sdk" -version = "1.11.1" -description = "Python client for Sentry (https://sentry.io)" -category = "main" -optional = false -python-versions = "*" -files = [ +sentry-sdk = [ {file = "sentry-sdk-1.11.1.tar.gz", hash = "sha256:675f6279b6bb1fea09fd61751061f9a90dca3b5929ef631dd50dc8b3aeb245e9"}, {file = "sentry_sdk-1.11.1-py2.py3-none-any.whl", hash = "sha256:8b4ff696c0bdcceb3f70bbb87a57ba84fd3168b1332d493fcd16c137f709578c"}, ] - -[package.dependencies] -certifi = "*" -urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} - -[package.extras] -aiohttp = ["aiohttp (>=3.5)"] -beam = ["apache-beam (>=2.12)"] -bottle = ["bottle (>=0.12.13)"] -celery = ["celery (>=3)"] -chalice = ["chalice (>=1.16.0)"] -django = ["django (>=1.8)"] -falcon = ["falcon (>=1.4)"] -fastapi = ["fastapi 
(>=0.79.0)"] -flask = ["blinker (>=1.1)", "flask (>=0.11)"] -httpx = ["httpx (>=0.16.0)"] -pure-eval = ["asttokens", "executing", "pure-eval"] -pymongo = ["pymongo (>=3.1)"] -pyspark = ["pyspark (>=2.4.4)"] -quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] -rq = ["rq (>=0.6)"] -sanic = ["sanic (>=0.8)"] -sqlalchemy = ["sqlalchemy (>=1.2)"] -starlette = ["starlette (>=0.19.1)"] -tornado = ["tornado (>=5)"] - -[[package]] -name = "setuptools" -version = "65.6.3" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, - {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "sgmllib3k" -version = "1.0.0" -description = "Py3k port of sgmllib." -category = "main" -optional = false -python-versions = "*" -files = [ +sgmllib3k = [ {file = "sgmllib3k-1.0.0.tar.gz", hash = "sha256:7868fb1c8bfa764c1ac563d3cf369c381d1325d36124933a726f29fcdaa812e9"}, ] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ +six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] - -[[package]] -name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
-category = "dev" -optional = false -python-versions = "*" -files = [ +snowballstemmer = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] - -[[package]] -name = "sortedcontainers" -version = "2.4.0" -description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" -category = "main" -optional = false -python-versions = "*" -files = [ +sortedcontainers = [ {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, ] - -[[package]] -name = "soupsieve" -version = "2.3.2.post1" -description = "A modern CSS selector implementation for Beautiful Soup." -category = "main" -optional = false -python-versions = ">=3.6" -files = [ - {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, - {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, +soupsieve = [ + {file = "soupsieve-2.4-py3-none-any.whl", hash = "sha256:49e5368c2cda80ee7e84da9dbe3e110b70a4575f196efb74e51b94549d921955"}, + {file = "soupsieve-2.4.tar.gz", hash = "sha256:e28dba9ca6c7c00173e34e4ba57448f0688bb681b7c5e8bf4971daafc093d69a"}, ] - -[[package]] -name = "statsd" -version = "4.0.1" -description = "A simple statsd client." -category = "main" -optional = false -python-versions = "*" -files = [ +statsd = [ {file = "statsd-4.0.1-py2.py3-none-any.whl", hash = "sha256:c2676519927f7afade3723aca9ca8ea986ef5b059556a980a867721ca69df093"}, {file = "statsd-4.0.1.tar.gz", hash = "sha256:99763da81bfea8daf6b3d22d11aaccb01a8d0f52ea521daab37e758a4ca7d128"}, ] - -[[package]] -name = "taskipy" -version = "1.10.3" -description = "tasks runner for python projects" -category = "dev" -optional = false -python-versions = ">=3.6,<4.0" -files = [ +taskipy = [ {file = "taskipy-1.10.3-py3-none-any.whl", hash = "sha256:4c0070ca53868d97989f7ab5c6f237525d52ee184f9b967576e8fe427ed9d0b8"}, {file = "taskipy-1.10.3.tar.gz", hash = "sha256:112beaf21e3d5569950b99162a1de003fa885fabee9e450757a6b874be914877"}, ] - -[package.dependencies] -colorama = ">=0.4.4,<0.5.0" -mslex = {version = ">=0.3.0,<0.4.0", markers = "sys_platform == \"win32\""} -psutil = ">=5.7.2,<6.0.0" -tomli = {version = ">=2.0.1,<3.0.0", markers = "python_version >= \"3.7\" and python_version < \"4.0\""} - -[[package]] -name = "tldextract" -version = "3.4.0" -description = "Accurately separates a URL's subdomain, domain, and public suffix, using the Public Suffix List (PSL). By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well." 
-category = "main" -optional = false -python-versions = ">=3.7" -files = [ +tldextract = [ {file = "tldextract-3.4.0-py3-none-any.whl", hash = "sha256:47aa4d8f1a4da79a44529c9a2ddc518663b25d371b805194ec5ce2a5f615ccd2"}, {file = "tldextract-3.4.0.tar.gz", hash = "sha256:78aef13ac1459d519b457a03f1f74c1bf1c2808122a6bcc0e6840f81ba55ad73"}, ] - -[package.dependencies] -filelock = ">=3.0.8" -idna = "*" -requests = ">=2.1.0" -requests-file = ">=1.4" - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ +toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +tomli = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] - -[[package]] -name = "typing-extensions" -version = "4.4.0" -description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, - {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, +typing-extensions = [ + {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, + {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, +] +urllib3 = [ + {file = "urllib3-1.26.14-py2.py3-none-any.whl", hash = "sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1"}, + {file = "urllib3-1.26.14.tar.gz", hash = "sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72"}, +] +virtualenv = [ + {file = "virtualenv-20.20.0-py3-none-any.whl", hash = "sha256:3c22fa5a7c7aa106ced59934d2c20a2ecb7f49b4130b8bf444178a16b880fa45"}, + {file = "virtualenv-20.20.0.tar.gz", hash = "sha256:a8a4b8ca1e28f864b7514a253f98c1d62b64e31e77325ba279248c65fb4fcef4"}, +] +wcwidth = [ + {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"}, + {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, +] +yarl = [ + {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bb81f753c815f6b8e2ddd2eef3c855cf7da193b82396ac013c661aaa6cc6b0a5"}, + {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:47d49ac96156f0928f002e2424299b2c91d9db73e08c4cd6742923a086f1c863"}, + {file = "yarl-1.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3fc056e35fa6fba63248d93ff6e672c096f95f7836938241ebc8260e062832fe"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58a3c13d1c3005dbbac5c9f0d3210b60220a65a999b1833aa46bd6677c69b08e"}, + {file = 
"yarl-1.8.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10b08293cda921157f1e7c2790999d903b3fd28cd5c208cf8826b3b508026996"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de986979bbd87272fe557e0a8fcb66fd40ae2ddfe28a8b1ce4eae22681728fef"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c4fcfa71e2c6a3cb568cf81aadc12768b9995323186a10827beccf5fa23d4f8"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae4d7ff1049f36accde9e1ef7301912a751e5bae0a9d142459646114c70ecba6"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bf071f797aec5b96abfc735ab97da9fd8f8768b43ce2abd85356a3127909d146"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:74dece2bfc60f0f70907c34b857ee98f2c6dd0f75185db133770cd67300d505f"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:df60a94d332158b444301c7f569659c926168e4d4aad2cfbf4bce0e8fb8be826"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:63243b21c6e28ec2375f932a10ce7eda65139b5b854c0f6b82ed945ba526bff3"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cfa2bbca929aa742b5084fd4663dd4b87c191c844326fcb21c3afd2d11497f80"}, + {file = "yarl-1.8.2-cp310-cp310-win32.whl", hash = "sha256:b05df9ea7496df11b710081bd90ecc3a3db6adb4fee36f6a411e7bc91a18aa42"}, + {file = "yarl-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:24ad1d10c9db1953291f56b5fe76203977f1ed05f82d09ec97acb623a7976574"}, + {file = "yarl-1.8.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2a1fca9588f360036242f379bfea2b8b44cae2721859b1c56d033adfd5893634"}, + {file = "yarl-1.8.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f37db05c6051eff17bc832914fe46869f8849de5b92dc4a3466cd63095d23dfd"}, + {file = "yarl-1.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77e913b846a6b9c5f767b14dc1e759e5aff05502fe73079f6f4176359d832581"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0978f29222e649c351b173da2b9b4665ad1feb8d1daa9d971eb90df08702668a"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:388a45dc77198b2460eac0aca1efd6a7c09e976ee768b0d5109173e521a19daf"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2305517e332a862ef75be8fad3606ea10108662bc6fe08509d5ca99503ac2aee"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42430ff511571940d51e75cf42f1e4dbdded477e71c1b7a17f4da76c1da8ea76"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3150078118f62371375e1e69b13b48288e44f6691c1069340081c3fd12c94d5b"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c15163b6125db87c8f53c98baa5e785782078fbd2dbeaa04c6141935eb6dab7a"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d04acba75c72e6eb90745447d69f84e6c9056390f7a9724605ca9c56b4afcc6"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e7fd20d6576c10306dea2d6a5765f46f0ac5d6f53436217913e952d19237efc4"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:75c16b2a900b3536dfc7014905a128a2bea8fb01f9ee26d2d7d8db0a08e7cb2c"}, + {file = 
"yarl-1.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6d88056a04860a98341a0cf53e950e3ac9f4e51d1b6f61a53b0609df342cc8b2"}, + {file = "yarl-1.8.2-cp311-cp311-win32.whl", hash = "sha256:fb742dcdd5eec9f26b61224c23baea46c9055cf16f62475e11b9b15dfd5c117b"}, + {file = "yarl-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:8c46d3d89902c393a1d1e243ac847e0442d0196bbd81aecc94fcebbc2fd5857c"}, + {file = "yarl-1.8.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ceff9722e0df2e0a9e8a79c610842004fa54e5b309fe6d218e47cd52f791d7ef"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f6b4aca43b602ba0f1459de647af954769919c4714706be36af670a5f44c9c1"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1684a9bd9077e922300ecd48003ddae7a7474e0412bea38d4631443a91d61077"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ebb78745273e51b9832ef90c0898501006670d6e059f2cdb0e999494eb1450c2"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3adeef150d528ded2a8e734ebf9ae2e658f4c49bf413f5f157a470e17a4a2e89"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57a7c87927a468e5a1dc60c17caf9597161d66457a34273ab1760219953f7f4c"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:efff27bd8cbe1f9bd127e7894942ccc20c857aa8b5a0327874f30201e5ce83d0"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a783cd344113cb88c5ff7ca32f1f16532a6f2142185147822187913eb989f739"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:705227dccbe96ab02c7cb2c43e1228e2826e7ead880bb19ec94ef279e9555b5b"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:34c09b43bd538bf6c4b891ecce94b6fa4f1f10663a8d4ca589a079a5018f6ed7"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a48f4f7fea9a51098b02209d90297ac324241bf37ff6be6d2b0149ab2bd51b37"}, + {file = "yarl-1.8.2-cp37-cp37m-win32.whl", hash = "sha256:0414fd91ce0b763d4eadb4456795b307a71524dbacd015c657bb2a39db2eab89"}, + {file = "yarl-1.8.2-cp37-cp37m-win_amd64.whl", hash = "sha256:d881d152ae0007809c2c02e22aa534e702f12071e6b285e90945aa3c376463c5"}, + {file = "yarl-1.8.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5df5e3d04101c1e5c3b1d69710b0574171cc02fddc4b23d1b2813e75f35a30b1"}, + {file = "yarl-1.8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7a66c506ec67eb3159eea5096acd05f5e788ceec7b96087d30c7d2865a243918"}, + {file = "yarl-1.8.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2b4fa2606adf392051d990c3b3877d768771adc3faf2e117b9de7eb977741229"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e21fb44e1eff06dd6ef971d4bdc611807d6bd3691223d9c01a18cec3677939e"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93202666046d9edadfe9f2e7bf5e0782ea0d497b6d63da322e541665d65a044e"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc77086ce244453e074e445104f0ecb27530d6fd3a46698e33f6c38951d5a0f1"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dd68a92cab699a233641f5929a40f02a4ede8c009068ca8aa1fe87b8c20ae3"}, + {file = 
"yarl-1.8.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b372aad2b5f81db66ee7ec085cbad72c4da660d994e8e590c997e9b01e44901"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e6f3515aafe0209dd17fb9bdd3b4e892963370b3de781f53e1746a521fb39fc0"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dfef7350ee369197106805e193d420b75467b6cceac646ea5ed3049fcc950a05"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:728be34f70a190566d20aa13dc1f01dc44b6aa74580e10a3fb159691bc76909d"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ff205b58dc2929191f68162633d5e10e8044398d7a45265f90a0f1d51f85f72c"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baf211dcad448a87a0d9047dc8282d7de59473ade7d7fdf22150b1d23859f946"}, + {file = "yarl-1.8.2-cp38-cp38-win32.whl", hash = "sha256:272b4f1599f1b621bf2aabe4e5b54f39a933971f4e7c9aa311d6d7dc06965165"}, + {file = "yarl-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:326dd1d3caf910cd26a26ccbfb84c03b608ba32499b5d6eeb09252c920bcbe4f"}, + {file = "yarl-1.8.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f8ca8ad414c85bbc50f49c0a106f951613dfa5f948ab69c10ce9b128d368baf8"}, + {file = "yarl-1.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:418857f837347e8aaef682679f41e36c24250097f9e2f315d39bae3a99a34cbf"}, + {file = "yarl-1.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ae0eec05ab49e91a78700761777f284c2df119376e391db42c38ab46fd662b77"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:009a028127e0a1755c38b03244c0bea9d5565630db9c4cf9572496e947137a87"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3edac5d74bb3209c418805bda77f973117836e1de7c000e9755e572c1f7850d0"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da65c3f263729e47351261351b8679c6429151ef9649bba08ef2528ff2c423b2"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ef8fb25e52663a1c85d608f6dd72e19bd390e2ecaf29c17fb08f730226e3a08"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcd7bb1e5c45274af9a1dd7494d3c52b2be5e6bd8d7e49c612705fd45420b12d"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44ceac0450e648de86da8e42674f9b7077d763ea80c8ceb9d1c3e41f0f0a9951"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:97209cc91189b48e7cfe777237c04af8e7cc51eb369004e061809bcdf4e55220"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:48dd18adcf98ea9cd721a25313aef49d70d413a999d7d89df44f469edfb38a06"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e59399dda559688461762800d7fb34d9e8a6a7444fd76ec33220a926c8be1516"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d617c241c8c3ad5c4e78a08429fa49e4b04bedfc507b34b4d8dceb83b4af3588"}, + {file = "yarl-1.8.2-cp39-cp39-win32.whl", hash = "sha256:cb6d48d80a41f68de41212f3dfd1a9d9898d7841c8f7ce6696cf2fd9cb57ef83"}, + {file = "yarl-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:6604711362f2dbf7160df21c416f81fac0de6dbcf0b5445a2ef25478ecc4c778"}, + {file = "yarl-1.8.2.tar.gz", hash = "sha256:49d43402c6e3013ad0978602bf6bf5328535c48d192304b91b97a3c6790b1562"}, ] - -[[package]] -name = "urllib3" -version = 
"1.26.13" -description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"}, - {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] - -[[package]] -name = "virtualenv" -version = "20.16.7" -description = "Virtual Python Environment builder" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "virtualenv-20.16.7-py3-none-any.whl", hash = "sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29"}, - {file = "virtualenv-20.16.7.tar.gz", hash = "sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e"}, -] - -[package.dependencies] -distlib = ">=0.3.6,<1" -filelock = ">=3.4.1,<4" -platformdirs = ">=2.4,<3" - -[package.extras] -docs = ["proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-argparse (>=0.3.2)", "sphinx-rtd-theme (>=1)", "towncrier (>=22.8)"] -testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"] - -[[package]] -name = "wcwidth" -version = "0.2.5" -description = "Measures the displayed width of unicode strings in a terminal" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, - {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, -] - -[[package]] -name = "yarl" -version = "1.8.1" -description = "Yet another URL library" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "yarl-1.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:abc06b97407868ef38f3d172762f4069323de52f2b70d133d096a48d72215d28"}, - {file = "yarl-1.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:07b21e274de4c637f3e3b7104694e53260b5fc10d51fb3ec5fed1da8e0f754e3"}, - {file = "yarl-1.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9de955d98e02fab288c7718662afb33aab64212ecb368c5dc866d9a57bf48880"}, - {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ec362167e2c9fd178f82f252b6d97669d7245695dc057ee182118042026da40"}, - {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:20df6ff4089bc86e4a66e3b1380460f864df3dd9dccaf88d6b3385d24405893b"}, - {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5999c4662631cb798496535afbd837a102859568adc67d75d2045e31ec3ac497"}, - {file = "yarl-1.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed19b74e81b10b592084a5ad1e70f845f0aacb57577018d31de064e71ffa267a"}, - {file = "yarl-1.8.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1e4808f996ca39a6463f45182e2af2fae55e2560be586d447ce8016f389f626f"}, - {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2d800b9c2eaf0684c08be5f50e52bfa2aa920e7163c2ea43f4f431e829b4f0fd"}, - {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6628d750041550c5d9da50bb40b5cf28a2e63b9388bac10fedd4f19236ef4957"}, - {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f5af52738e225fcc526ae64071b7e5342abe03f42e0e8918227b38c9aa711e28"}, - {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:76577f13333b4fe345c3704811ac7509b31499132ff0181f25ee26619de2c843"}, - {file = "yarl-1.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c03f456522d1ec815893d85fccb5def01ffaa74c1b16ff30f8aaa03eb21e453"}, - {file = "yarl-1.8.1-cp310-cp310-win32.whl", hash = "sha256:ea30a42dc94d42f2ba4d0f7c0ffb4f4f9baa1b23045910c0c32df9c9902cb272"}, - {file = "yarl-1.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:9130ddf1ae9978abe63808b6b60a897e41fccb834408cde79522feb37fb72fb0"}, - {file = "yarl-1.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0ab5a138211c1c366404d912824bdcf5545ccba5b3ff52c42c4af4cbdc2c5035"}, - {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0fb2cb4204ddb456a8e32381f9a90000429489a25f64e817e6ff94879d432fc"}, - {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85cba594433915d5c9a0d14b24cfba0339f57a2fff203a5d4fd070e593307d0b"}, - {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ca7e596c55bd675432b11320b4eacc62310c2145d6801a1f8e9ad160685a231"}, - {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0f77539733e0ec2475ddcd4e26777d08996f8cd55d2aef82ec4d3896687abda"}, - {file = "yarl-1.8.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29e256649f42771829974e742061c3501cc50cf16e63f91ed8d1bf98242e5507"}, - {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7fce6cbc6c170ede0221cc8c91b285f7f3c8b9fe28283b51885ff621bbe0f8ee"}, - {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:59ddd85a1214862ce7c7c66457f05543b6a275b70a65de366030d56159a979f0"}, - {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:12768232751689c1a89b0376a96a32bc7633c08da45ad985d0c49ede691f5c0d"}, - {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:b19255dde4b4f4c32e012038f2c169bb72e7f081552bea4641cab4d88bc409dd"}, - {file = "yarl-1.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6c8148e0b52bf9535c40c48faebb00cb294ee577ca069d21bd5c48d302a83780"}, - {file = "yarl-1.8.1-cp37-cp37m-win32.whl", hash = "sha256:de839c3a1826a909fdbfe05f6fe2167c4ab033f1133757b5936efe2f84904c07"}, - {file = "yarl-1.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:dd032e8422a52e5a4860e062eb84ac94ea08861d334a4bcaf142a63ce8ad4802"}, - {file = "yarl-1.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:19cd801d6f983918a3f3a39f3a45b553c015c5aac92ccd1fac619bd74beece4a"}, - {file = "yarl-1.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6347f1a58e658b97b0a0d1ff7658a03cb79bdbda0331603bed24dd7054a6dea1"}, - {file = "yarl-1.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c0da7e44d0c9108d8b98469338705e07f4bb7dab96dbd8fa4e91b337db42548"}, - {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5587bba41399854703212b87071c6d8638fa6e61656385875f8c6dff92b2e461"}, - {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31a9a04ecccd6b03e2b0e12e82131f1488dea5555a13a4d32f064e22a6003cfe"}, - {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:205904cffd69ae972a1707a1bd3ea7cded594b1d773a0ce66714edf17833cdae"}, - {file = "yarl-1.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea513a25976d21733bff523e0ca836ef1679630ef4ad22d46987d04b372d57fc"}, - {file = "yarl-1.8.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0b51530877d3ad7a8d47b2fff0c8df3b8f3b8deddf057379ba50b13df2a5eae"}, - {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d2b8f245dad9e331540c350285910b20dd913dc86d4ee410c11d48523c4fd546"}, - {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ab2a60d57ca88e1d4ca34a10e9fb4ab2ac5ad315543351de3a612bbb0560bead"}, - {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:449c957ffc6bc2309e1fbe67ab7d2c1efca89d3f4912baeb8ead207bb3cc1cd4"}, - {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a165442348c211b5dea67c0206fc61366212d7082ba8118c8c5c1c853ea4d82e"}, - {file = "yarl-1.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b3ded839a5c5608eec8b6f9ae9a62cb22cd037ea97c627f38ae0841a48f09eae"}, - {file = "yarl-1.8.1-cp38-cp38-win32.whl", hash = "sha256:c1445a0c562ed561d06d8cbc5c8916c6008a31c60bc3655cdd2de1d3bf5174a0"}, - {file = "yarl-1.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:56c11efb0a89700987d05597b08a1efcd78d74c52febe530126785e1b1a285f4"}, - {file = "yarl-1.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e80ed5a9939ceb6fda42811542f31c8602be336b1fb977bccb012e83da7e4936"}, - {file = "yarl-1.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6afb336e23a793cd3b6476c30f030a0d4c7539cd81649683b5e0c1b0ab0bf350"}, - {file = "yarl-1.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4c322cbaa4ed78a8aac89b2174a6df398faf50e5fc12c4c191c40c59d5e28357"}, - {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fae37373155f5ef9b403ab48af5136ae9851151f7aacd9926251ab26b953118b"}, - {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5395da939ffa959974577eff2cbfc24b004a2fb6c346918f39966a5786874e54"}, - {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:076eede537ab978b605f41db79a56cad2e7efeea2aa6e0fa8f05a26c24a034fb"}, - {file = "yarl-1.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d1a50e461615747dd93c099f297c1994d472b0f4d2db8a64e55b1edf704ec1c"}, - {file = "yarl-1.8.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7de89c8456525650ffa2bb56a3eee6af891e98f498babd43ae307bd42dca98f6"}, - {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4a88510731cd8d4befaba5fbd734a7dd914de5ab8132a5b3dde0bbd6c9476c64"}, - {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2d93a049d29df172f48bcb09acf9226318e712ce67374f893b460b42cc1380ae"}, - {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:21ac44b763e0eec15746a3d440f5e09ad2ecc8b5f6dcd3ea8cb4773d6d4703e3"}, - {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:d0272228fabe78ce00a3365ffffd6f643f57a91043e119c289aaba202f4095b0"}, - {file = "yarl-1.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:99449cd5366fe4608e7226c6cae80873296dfa0cde45d9b498fefa1de315a09e"}, - {file = "yarl-1.8.1-cp39-cp39-win32.whl", hash = "sha256:8b0af1cf36b93cee99a31a545fe91d08223e64390c5ecc5e94c39511832a4bb6"}, - {file = "yarl-1.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:de49d77e968de6626ba7ef4472323f9d2e5a56c1d85b7c0e2a190b2173d3b9be"}, - {file = "yarl-1.8.1.tar.gz", hash = "sha256:af887845b8c2e060eb5605ff72b6f2dd2aab7a761379373fd89d314f4752abbf"}, -] - -[package.dependencies] -idna = ">=2.0" -multidict = ">=4.0" - -[metadata] -lock-version = "2.0" -python-versions = "3.10.*" -content-hash = "95d9251abd0a6125c99cc0c290fd8f97e9003cfe95cde3666ccc1817f751e814" diff --git a/pyproject.toml b/pyproject.toml index 13efe1f34..9e1d1d5d7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,8 +26,8 @@ lxml = "4.9.1" # Must be kept on this version unless doc command output is fixed # See https://github.com/python-discord/bot/pull/2156 markdownify = "0.6.1" - -more-itertools = "9.0.0" +more_itertools = "9.0.0" +pydantic = "1.10.2" python-dateutil = "2.8.2" python-frontmatter = "1.0.0" pyyaml = "6.0" @@ -35,7 +35,6 @@ rapidfuzz = "2.13.2" regex = "2022.10.31" sentry-sdk = "1.11.1" tldextract = "3.4.0" -pydantic = "1.10.2" [tool.poetry.dev-dependencies] coverage = "6.5.0" diff --git a/tests/bot/exts/filtering/__init__.py b/tests/bot/exts/filtering/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/tests/bot/exts/filtering/__init__.py diff --git a/tests/bot/exts/filtering/test_discord_token_filter.py b/tests/bot/exts/filtering/test_discord_token_filter.py new file mode 100644 index 000000000..ef124e6ff --- /dev/null +++ b/tests/bot/exts/filtering/test_discord_token_filter.py @@ -0,0 +1,276 @@ +import unittest +from re import Match +from unittest import mock +from unittest.mock import MagicMock, patch + +import arrow + +from bot.exts.filtering._filter_context import Event, FilterContext +from bot.exts.filtering._filters.unique import discord_token +from bot.exts.filtering._filters.unique.discord_token import DiscordTokenFilter, Token +from tests.helpers import MockBot, MockMember, MockMessage, MockTextChannel, autospec + + +class DiscordTokenFilterTests(unittest.IsolatedAsyncioTestCase): + """Tests the DiscordTokenFilter class.""" + + def setUp(self): + """Adds the filter, a bot, and a message to the instance for usage in tests.""" + now = arrow.utcnow().timestamp() + self.filter = DiscordTokenFilter({ + "id": 1, + "content": "discord_token", + "description": None, + "settings": {}, + "additional_field": "{}", # noqa: P103 + "created_at": now, + "updated_at": now + }) + + self.msg = MockMessage(id=555, content="hello world") + self.msg.author.__str__ = MagicMock(return_value=self.msg.author.name) + + member = MockMember(id=123) + channel = MockTextChannel(id=345) + self.ctx = FilterContext(Event.MESSAGE, member, channel, "", self.msg) + + def test_extract_user_id_valid(self): + """Should consider user IDs valid if they decode into an integer ID.""" + id_pairs = ( + ("NDcyMjY1OTQzMDYyNDEzMzMy", 472265943062413332), + ("NDc1MDczNjI5Mzk5NTQ3OTA0", 475073629399547904), + ("NDY3MjIzMjMwNjUwNzc3NjQx", 467223230650777641), + ) + + for token_id, user_id in id_pairs: + with self.subTest(token_id=token_id): + result = DiscordTokenFilter.extract_user_id(token_id) + self.assertEqual(result, user_id) + + def test_extract_user_id_invalid(self): + 
"""Should consider non-digit and non-ASCII IDs invalid.""" + ids = ( + ("SGVsbG8gd29ybGQ", "non-digit ASCII"), + ("0J_RgNC40LLQtdGCINC80LjRgA", "cyrillic text"), + ("4pO14p6L4p6C4pG34p264pGl8J-EiOKSj-KCieKBsA", "Unicode digits"), + ("4oaA4oaB4oWh4oWi4Lyz4Lyq4Lyr4LG9", "Unicode numerals"), + ("8J2fjvCdn5nwnZ-k8J2fr_Cdn7rgravvvJngr6c", "Unicode decimals"), + ("{hello}[world]&(bye!)", "ASCII invalid Base64"), + ("Þíß-ï§-ňøẗ-våłìÐ", "Unicode invalid Base64"), + ) + + for user_id, msg in ids: + with self.subTest(msg=msg): + result = DiscordTokenFilter.extract_user_id(user_id) + self.assertIsNone(result) + + def test_is_valid_timestamp_valid(self): + """Should consider timestamps valid if they're greater than the Discord epoch.""" + timestamps = ( + "XsyRkw", + "Xrim9Q", + "XsyR-w", + "XsySD_", + "Dn9r_A", + ) + + for timestamp in timestamps: + with self.subTest(timestamp=timestamp): + result = DiscordTokenFilter.is_valid_timestamp(timestamp) + self.assertTrue(result) + + def test_is_valid_timestamp_invalid(self): + """Should consider timestamps invalid if they're before Discord epoch or can't be parsed.""" + timestamps = ( + ("B4Yffw", "DISCORD_EPOCH - TOKEN_EPOCH - 1"), + ("ew", "123"), + ("AoIKgA", "42076800"), + ("{hello}[world]&(bye!)", "ASCII invalid Base64"), + ("Þíß-ï§-ňøẗ-våłìÐ", "Unicode invalid Base64"), + ) + + for timestamp, msg in timestamps: + with self.subTest(msg=msg): + result = DiscordTokenFilter.is_valid_timestamp(timestamp) + self.assertFalse(result) + + def test_is_valid_hmac_valid(self): + """Should consider an HMAC valid if it has at least 3 unique characters.""" + valid_hmacs = ( + "VXmErH7j511turNpfURmb0rVNm8", + "Ysnu2wacjaKs7qnoo46S8Dm2us8", + "sJf6omBPORBPju3WJEIAcwW9Zds", + "s45jqDV_Iisn-symw0yDRrk_jf4", + ) + + for hmac in valid_hmacs: + with self.subTest(msg=hmac): + result = DiscordTokenFilter.is_maybe_valid_hmac(hmac) + self.assertTrue(result) + + def test_is_invalid_hmac_invalid(self): + """Should consider an HMAC invalid if has fewer than 3 unique characters.""" + invalid_hmacs = ( + ("xxxxxxxxxxxxxxxxxx", "Single character"), + ("XxXxXxXxXxXxXxXxXx", "Single character alternating case"), + ("ASFasfASFasfASFASsf", "Three characters alternating-case"), + ("asdasdasdasdasdasdasd", "Three characters one case"), + ) + + for hmac, msg in invalid_hmacs: + with self.subTest(msg=msg): + result = DiscordTokenFilter.is_maybe_valid_hmac(hmac) + self.assertFalse(result) + + async def test_no_trigger_when_no_token(self): + """False should be returned if the message doesn't contain a Discord token.""" + return_value = await self.filter.triggered_on(self.ctx) + + self.assertFalse(return_value) + + @autospec(DiscordTokenFilter, "extract_user_id", "is_valid_timestamp", "is_maybe_valid_hmac") + @autospec("bot.exts.filtering._filters.unique.discord_token", "Token") + @autospec("bot.exts.filtering._filters.unique.discord_token", "TOKEN_RE") + def test_find_token_valid_match( + self, + token_re, + token_cls, + extract_user_id, + is_valid_timestamp, + is_maybe_valid_hmac, + ): + """The first match with a valid user ID, timestamp, and HMAC should be returned as a `Token`.""" + matches = [ + mock.create_autospec(Match, spec_set=True, instance=True), + mock.create_autospec(Match, spec_set=True, instance=True), + ] + tokens = [ + mock.create_autospec(Token, spec_set=True, instance=True), + mock.create_autospec(Token, spec_set=True, instance=True), + ] + + token_re.finditer.return_value = matches + token_cls.side_effect = tokens + extract_user_id.side_effect = (None, True) # The 1st 
match will be invalid, 2nd one valid. + is_valid_timestamp.return_value = True + is_maybe_valid_hmac.return_value = True + + return_value = DiscordTokenFilter.find_token_in_message(self.msg) + + self.assertEqual(tokens[1], return_value) + + @autospec(DiscordTokenFilter, "extract_user_id", "is_valid_timestamp", "is_maybe_valid_hmac") + @autospec("bot.exts.filtering._filters.unique.discord_token", "Token") + @autospec("bot.exts.filtering._filters.unique.discord_token", "TOKEN_RE") + def test_find_token_invalid_matches( + self, + token_re, + token_cls, + extract_user_id, + is_valid_timestamp, + is_maybe_valid_hmac, + ): + """None should be returned if no matches have valid user IDs, HMACs, and timestamps.""" + token_re.finditer.return_value = [mock.create_autospec(Match, spec_set=True, instance=True)] + token_cls.return_value = mock.create_autospec(Token, spec_set=True, instance=True) + extract_user_id.return_value = None + is_valid_timestamp.return_value = False + is_maybe_valid_hmac.return_value = False + + return_value = DiscordTokenFilter.find_token_in_message(self.msg) + + self.assertIsNone(return_value) + + def test_regex_invalid_tokens(self): + """Messages without anything looking like a token are not matched.""" + tokens = ( + "", + "lemon wins", + "..", + "x.y", + "x.y.", + ".y.z", + ".y.", + "..z", + "x..z", + " . . ", + "\n.\n.\n", + "hellö.world.bye", + "base64.nötbåse64.morebase64", + "19jd3J.dfkm3d.€víł§tüff", + ) + + for token in tokens: + with self.subTest(token=token): + results = discord_token.TOKEN_RE.findall(token) + self.assertEqual(len(results), 0) + + def test_regex_valid_tokens(self): + """Messages that look like tokens should be matched.""" + # Don't worry, these tokens have been invalidated. + tokens = ( + "NDcyMjY1OTQzMDYy_DEzMz-y.XsyRkw.VXmErH7j511turNpfURmb0rVNm8", + "NDcyMjY1OTQzMDYyNDEzMzMy.Xrim9Q.Ysnu2wacjaKs7qnoo46S8Dm2us8", + "NDc1MDczNjI5Mzk5NTQ3OTA0.XsyR-w.sJf6omBPORBPju3WJEIAcwW9Zds", + "NDY3MjIzMjMwNjUwNzc3NjQx.XsySD_.s45jqDV_Iisn-symw0yDRrk_jf4", + ) + + for token in tokens: + with self.subTest(token=token): + results = discord_token.TOKEN_RE.fullmatch(token) + self.assertIsNotNone(results, f"{token} was not matched by the regex") + + def test_regex_matches_multiple_valid(self): + """Should support multiple matches in the middle of a string.""" + token_1 = "NDY3MjIzMjMwNjUwNzc3NjQx.XsyWGg.uFNEQPCc4ePwGh7egG8UicQssz8" + token_2 = "NDcyMjY1OTQzMDYyNDEzMzMy.XsyWMw.l8XPnDqb0lp-EiQ2g_0xVFT1pyc" + message = f"garbage {token_1} hello {token_2} world" + + results = discord_token.TOKEN_RE.finditer(message) + results = [match[0] for match in results] + self.assertCountEqual((token_1, token_2), results) + + @autospec("bot.exts.filtering._filters.unique.discord_token", "LOG_MESSAGE") + def test_format_log_message(self, log_message): + """Should correctly format the log message with info from the message and token.""" + token = Token("NDcyMjY1OTQzMDYyNDEzMzMy", "XsySD_", "s45jqDV_Iisn-symw0yDRrk_jf4") + log_message.format.return_value = "Howdy" + + return_value = DiscordTokenFilter.format_log_message(self.msg.author, self.msg.channel, token) + + self.assertEqual(return_value, log_message.format.return_value) + + @patch("bot.instance", MockBot()) + @autospec("bot.exts.filtering._filters.unique.discord_token", "UNKNOWN_USER_LOG_MESSAGE") + @autospec("bot.exts.filtering._filters.unique.discord_token", "get_or_fetch_member") + async def test_format_userid_log_message_unknown(self, get_or_fetch_member, unknown_user_log_message): + """Should correctly format the user ID 
portion when the actual user it belongs to is unknown.""" + token = Token("NDcyMjY1OTQzMDYyNDEzMzMy", "XsySD_", "s45jqDV_Iisn-symw0yDRrk_jf4") + unknown_user_log_message.format.return_value = " Partner" + get_or_fetch_member.return_value = None + + return_value = await DiscordTokenFilter.format_userid_log_message(token) + + self.assertEqual(return_value, (unknown_user_log_message.format.return_value, False)) + + @patch("bot.instance", MockBot()) + @autospec("bot.exts.filtering._filters.unique.discord_token", "KNOWN_USER_LOG_MESSAGE") + async def test_format_userid_log_message_bot(self, known_user_log_message): + """Should correctly format the user ID portion when the ID belongs to a known bot.""" + token = Token("NDcyMjY1OTQzMDYyNDEzMzMy", "XsySD_", "s45jqDV_Iisn-symw0yDRrk_jf4") + known_user_log_message.format.return_value = " Partner" + + return_value = await DiscordTokenFilter.format_userid_log_message(token) + + self.assertEqual(return_value, (known_user_log_message.format.return_value, True)) + + @patch("bot.instance", MockBot()) + @autospec("bot.exts.filtering._filters.unique.discord_token", "KNOWN_USER_LOG_MESSAGE") + async def test_format_log_message_user_token_user(self, user_token_message): + """Should correctly format the user ID portion when the ID belongs to a known user.""" + token = Token("NDY3MjIzMjMwNjUwNzc3NjQx", "XsySD_", "s45jqDV_Iisn-symw0yDRrk_jf4") + user_token_message.format.return_value = "Partner" + + return_value = await DiscordTokenFilter.format_userid_log_message(token) + + self.assertEqual(return_value, (user_token_message.format.return_value, True)) diff --git a/tests/bot/exts/filtering/test_extension_filter.py b/tests/bot/exts/filtering/test_extension_filter.py new file mode 100644 index 000000000..0ad41116d --- /dev/null +++ b/tests/bot/exts/filtering/test_extension_filter.py @@ -0,0 +1,139 @@ +import unittest +from unittest.mock import MagicMock, patch + +import arrow + +from bot.constants import Channels +from bot.exts.filtering._filter_context import Event, FilterContext +from bot.exts.filtering._filter_lists import extension +from bot.exts.filtering._filter_lists.extension import ExtensionsList +from bot.exts.filtering._filter_lists.filter_list import ListType +from tests.helpers import MockAttachment, MockBot, MockMember, MockMessage, MockTextChannel + +BOT = MockBot() + + +class ExtensionsListTests(unittest.IsolatedAsyncioTestCase): + """Test the ExtensionsList class.""" + + def setUp(self): + """Sets up fresh objects for each test.""" + self.filter_list = ExtensionsList(MagicMock()) + now = arrow.utcnow().timestamp() + filters = [] + self.whitelist = [".first", ".second", ".third"] + for i, filter_content in enumerate(self.whitelist, start=1): + filters.append({ + "id": i, "content": filter_content, "description": None, "settings": {}, + "additional_field": "{}", "created_at": now, "updated_at": now # noqa: P103 + }) + self.filter_list.add_list({ + "id": 1, + "list_type": 1, + "created_at": now, + "updated_at": now, + "settings": {}, + "filters": filters + }) + + self.message = MockMessage() + member = MockMember(id=123) + channel = MockTextChannel(id=345) + self.ctx = FilterContext(Event.MESSAGE, member, channel, "", self.message) + + @patch("bot.instance", BOT) + async def test_message_with_allowed_attachment(self): + """Messages with allowed extensions should trigger the whitelist and result in no actions or messages.""" + attachment = MockAttachment(filename="python.first") + self.message.attachments = [attachment] + + result = await 
self.filter_list.actions_for(self.ctx) + + self.assertEqual(result, (None, [], {ListType.ALLOW: [self.filter_list[ListType.ALLOW].filters[1]]})) + + @patch("bot.instance", BOT) + async def test_message_without_attachment(self): + """Messages without attachments should return no triggers, messages, or actions.""" + result = await self.filter_list.actions_for(self.ctx) + + self.assertEqual(result, (None, [], {})) + + @patch("bot.instance", BOT) + async def test_message_with_illegal_extension(self): + """A message with an illegal extension shouldn't trigger the whitelist, and should return an action and a message.""" + attachment = MockAttachment(filename="python.disallowed") + self.message.attachments = [attachment] + + result = await self.filter_list.actions_for(self.ctx) + + self.assertEqual(result, ({}, ["`.disallowed`"], {ListType.ALLOW: []})) + + @patch("bot.instance", BOT) + async def test_python_file_redirect_embed_description(self): + """A message containing a .py file should result in an embed redirecting the user to our paste site.""" + attachment = MockAttachment(filename="python.py") + self.message.attachments = [attachment] + + await self.filter_list.actions_for(self.ctx) + + self.assertEqual(self.ctx.dm_embed, extension.PY_EMBED_DESCRIPTION) + + @patch("bot.instance", BOT) + async def test_txt_file_redirect_embed_description(self): + """A message containing a .txt/.json/.csv file should result in the correct embed.""" + test_values = ( + ("text", ".txt"), + ("json", ".json"), + ("csv", ".csv"), + ) + + for file_name, disallowed_extension in test_values: + with self.subTest(file_name=file_name, disallowed_extension=disallowed_extension): + + attachment = MockAttachment(filename=f"{file_name}{disallowed_extension}") + self.message.attachments = [attachment] + + await self.filter_list.actions_for(self.ctx) + + self.assertEqual( + self.ctx.dm_embed, + extension.TXT_EMBED_DESCRIPTION.format( + blocked_extension=disallowed_extension, + ) + ) + + @patch("bot.instance", BOT) + async def test_other_disallowed_extension_embed_description(self): + """Test the description for a non .py/.txt/.json/.csv disallowed extension.""" + attachment = MockAttachment(filename="python.disallowed") + self.message.attachments = [attachment] + + await self.filter_list.actions_for(self.ctx) + meta_channel = BOT.get_channel(Channels.meta) + + self.assertEqual( + self.ctx.dm_embed, + extension.DISALLOWED_EMBED_DESCRIPTION.format( + joined_whitelist=", ".join(self.whitelist), + blocked_extensions_str=".disallowed", + meta_channel_mention=meta_channel.mention + ) + ) + + @patch("bot.instance", BOT) + async def test_get_disallowed_extensions(self): + """The return value should include all non-whitelisted extensions.""" + test_values = ( + ([], []), + (self.whitelist, []), + ([".first"], []), + ([".first", ".disallowed"], ["`.disallowed`"]), + ([".disallowed"], ["`.disallowed`"]), + ([".disallowed", ".illegal"], ["`.disallowed`", "`.illegal`"]), + ) + + for extensions, expected_disallowed_extensions in test_values: + with self.subTest(extensions=extensions, expected_disallowed_extensions=expected_disallowed_extensions): + self.message.attachments = [MockAttachment(filename=f"filename{ext}") for ext in extensions] + result = await self.filter_list.actions_for(self.ctx) + self.assertCountEqual(result[1], expected_disallowed_extensions) diff --git a/tests/bot/exts/filtering/test_settings.py b/tests/bot/exts/filtering/test_settings.py new file mode 100644 index 000000000..5a289c1cf --- /dev/null +++
b/tests/bot/exts/filtering/test_settings.py @@ -0,0 +1,20 @@ +import unittest + +import bot.exts.filtering._settings +from bot.exts.filtering._settings import create_settings + + +class FilterTests(unittest.TestCase): + """Test functionality of the Settings class and its subclasses.""" + + def test_create_settings_returns_none_for_empty_data(self): + """`create_settings` should return a tuple of two Nones when passed an empty dict.""" + result = create_settings({}) + + self.assertEqual(result, (None, None)) + + def test_unrecognized_entry_makes_a_warning(self): + """When an unrecognized entry name is passed to `create_settings`, it should be added to `_already_warned`.""" + create_settings({"abcd": {}}) + + self.assertIn("abcd", bot.exts.filtering._settings._already_warned) diff --git a/tests/bot/exts/filtering/test_settings_entries.py b/tests/bot/exts/filtering/test_settings_entries.py new file mode 100644 index 000000000..5a1eb6fe6 --- /dev/null +++ b/tests/bot/exts/filtering/test_settings_entries.py @@ -0,0 +1,216 @@ +import unittest + +from bot.exts.filtering._filter_context import Event, FilterContext +from bot.exts.filtering._settings_types.actions.infraction_and_notification import Infraction, InfractionAndNotification +from bot.exts.filtering._settings_types.validations.bypass_roles import RoleBypass +from bot.exts.filtering._settings_types.validations.channel_scope import ChannelScope +from bot.exts.filtering._settings_types.validations.filter_dm import FilterDM +from tests.helpers import MockCategoryChannel, MockDMChannel, MockMember, MockMessage, MockRole, MockTextChannel + + +class FilterTests(unittest.TestCase): + """Test functionality of the Settings class and its subclasses.""" + + def setUp(self) -> None: + member = MockMember(id=123) + channel = MockTextChannel(id=345) + message = MockMessage(author=member, channel=channel) + self.ctx = FilterContext(Event.MESSAGE, member, channel, "", message) + + def test_role_bypass_is_off_for_user_without_roles(self): + """The role bypass should trigger when a user has no roles.""" + member = MockMember() + self.ctx.author = member + bypass_entry = RoleBypass(bypass_roles=["123"]) + + result = bypass_entry.triggers_on(self.ctx) + + self.assertTrue(result) + + def test_role_bypass_is_on_for_a_user_with_the_right_role(self): + """The role bypass should not trigger when the user has one of its roles.""" + cases = ( + ([123], ["123"]), + ([123, 234], ["123"]), + ([123], ["123", "234"]), + ([123, 234], ["123", "234"]) + ) + + for user_role_ids, bypasses in cases: + with self.subTest(user_role_ids=user_role_ids, bypasses=bypasses): + user_roles = [MockRole(id=role_id) for role_id in user_role_ids] + member = MockMember(roles=user_roles) + self.ctx.author = member + bypass_entry = RoleBypass(bypass_roles=bypasses) + + result = bypass_entry.triggers_on(self.ctx) + + self.assertFalse(result) + + def test_context_doesnt_trigger_for_empty_channel_scope(self): + """A filter is enabled for all channels by default.""" + channel = MockTextChannel() + scope = ChannelScope( + disabled_channels=None, disabled_categories=None, enabled_channels=None, enabled_categories=None + ) + self.ctx.channel = channel + + result = scope.triggers_on(self.ctx) + + self.assertTrue(result) + + def test_context_doesnt_trigger_for_disabled_channel(self): + """A filter shouldn't trigger if it's been disabled in the channel.""" + channel = MockTextChannel(id=123) + scope = ChannelScope( + disabled_channels=["123"], disabled_categories=None, enabled_channels=None, 
enabled_categories=None + ) + self.ctx.channel = channel + + result = scope.triggers_on(self.ctx) + + self.assertFalse(result) + + def test_context_doesnt_trigger_in_disabled_category(self): + """A filter shouldn't trigger if it's been disabled in the category.""" + channel = MockTextChannel(category=MockCategoryChannel(id=456)) + scope = ChannelScope( + disabled_channels=None, disabled_categories=["456"], enabled_channels=None, enabled_categories=None + ) + self.ctx.channel = channel + + result = scope.triggers_on(self.ctx) + + self.assertFalse(result) + + def test_context_triggers_in_enabled_channel_in_disabled_category(self): + """A filter should trigger in an enabled channel even if it's been disabled in the category.""" + channel = MockTextChannel(id=123, category=MockCategoryChannel(id=234)) + scope = ChannelScope( + disabled_channels=None, disabled_categories=["234"], enabled_channels=["123"], enabled_categories=None + ) + self.ctx.channel = channel + + result = scope.triggers_on(self.ctx) + + self.assertTrue(result) + + def test_context_triggers_inside_enabled_category(self): + """A filter should trigger inside an enabled category.""" + channel = MockTextChannel(id=123, category=MockCategoryChannel(id=234)) + scope = ChannelScope( + disabled_channels=None, disabled_categories=None, enabled_channels=None, enabled_categories=["234"] + ) + self.ctx.channel = channel + + result = scope.triggers_on(self.ctx) + + self.assertTrue(result) + + def test_context_doesnt_trigger_outside_enabled_category(self): + """A filter shouldn't trigger outside enabled categories, if there are any.""" + channel = MockTextChannel(id=123, category=MockCategoryChannel(id=234)) + scope = ChannelScope( + disabled_channels=None, disabled_categories=None, enabled_channels=None, enabled_categories=["789"] + ) + self.ctx.channel = channel + + result = scope.triggers_on(self.ctx) + + self.assertFalse(result) + + def test_context_doesnt_trigger_inside_disabled_channel_in_enabled_category(self): + """A filter shouldn't trigger in a disabled channel, even if the channel's category is enabled.""" + channel = MockTextChannel(id=123, category=MockCategoryChannel(id=234)) + scope = ChannelScope( + disabled_channels=["123"], disabled_categories=None, enabled_channels=None, enabled_categories=["234"] + ) + self.ctx.channel = channel + + result = scope.triggers_on(self.ctx) + + self.assertFalse(result) + + def test_filtering_dms_when_necessary(self): + """A filter correctly ignores or triggers in a channel depending on the value of FilterDM.""" + cases = ( + (True, MockDMChannel(), True), + (False, MockDMChannel(), False), + (True, MockTextChannel(), True), + (False, MockTextChannel(), True) + ) + + for apply_in_dms, channel, expected in cases: + with self.subTest(apply_in_dms=apply_in_dms, channel=channel): + filter_dms = FilterDM(filter_dm=apply_in_dms) + self.ctx.channel = channel + + result = filter_dms.triggers_on(self.ctx) + + self.assertEqual(expected, result) + + def test_infraction_merge_of_same_infraction_type(self): + """When both infractions are of the same type, the one with the longer duration wins.""" + infraction1 = InfractionAndNotification( + infraction_type="MUTE", + infraction_reason="hi", + infraction_duration=10, + dm_content="how", + dm_embed="what is", + infraction_channel=0 + ) + infraction2 = InfractionAndNotification( + infraction_type="MUTE", + infraction_reason="there", + infraction_duration=20, + dm_content="are you", + dm_embed="your name", + infraction_channel=0 + ) + + result = infraction1 |
infraction2 + + self.assertDictEqual( + result.dict(), + { + "infraction_type": Infraction.MUTE, + "infraction_reason": "there", + "infraction_duration": 20.0, + "dm_content": "are you", + "dm_embed": "your name", + "infraction_channel": 0 + } + ) + + def test_infraction_merge_of_different_infraction_types(self): + """If there are two different infraction types, the one higher up the hierarchy should be picked.""" + infraction1 = InfractionAndNotification( + infraction_type="MUTE", + infraction_reason="hi", + infraction_duration=20, + dm_content="", + dm_embed="", + infraction_channel=0 + ) + infraction2 = InfractionAndNotification( + infraction_type="BAN", + infraction_reason="", + infraction_duration=10, + dm_content="there", + dm_embed="", + infraction_channel=0 + ) + + result = infraction1 | infraction2 + + self.assertDictEqual( + result.dict(), + { + "infraction_type": Infraction.BAN, + "infraction_reason": "", + "infraction_duration": 10.0, + "dm_content": "there", + "dm_embed": "", + "infraction_channel": 0 + } + ) diff --git a/tests/bot/exts/filtering/test_token_filter.py b/tests/bot/exts/filtering/test_token_filter.py new file mode 100644 index 000000000..0dfc8ae9f --- /dev/null +++ b/tests/bot/exts/filtering/test_token_filter.py @@ -0,0 +1,49 @@ +import unittest + +import arrow + +from bot.exts.filtering._filter_context import Event, FilterContext +from bot.exts.filtering._filters.token import TokenFilter +from tests.helpers import MockMember, MockMessage, MockTextChannel + + +class TokenFilterTests(unittest.IsolatedAsyncioTestCase): + """Test functionality of the token filter.""" + + def setUp(self) -> None: + member = MockMember(id=123) + channel = MockTextChannel(id=345) + message = MockMessage(author=member, channel=channel) + self.ctx = FilterContext(Event.MESSAGE, member, channel, "", message) + + async def test_token_filter_triggers(self): + """The filter should evaluate to True only if its token is found in the context content.""" + test_cases = ( + (r"hi", "oh hi there", True), + (r"hi", "goodbye", False), + (r"bla\d{2,4}", "bla18", True), + (r"bla\d{2,4}", "bla1", False), + # See advisory https://github.com/python-discord/bot/security/advisories/GHSA-j8c3-8x46-8pp6 + (r"TOKEN", "https://google.com TOKEN", True), + (r"TOKEN", "https://google.com something else", False) + ) + now = arrow.utcnow().timestamp() + + for pattern, content, expected in test_cases: + with self.subTest( + pattern=pattern, + content=content, + expected=expected, + ): + filter_ = TokenFilter({ + "id": 1, + "content": pattern, + "description": None, + "settings": {}, + "additional_field": "{}", # noqa: P103 + "created_at": now, + "updated_at": now + }) + self.ctx.content = content + result = await filter_.triggered_on(self.ctx) + self.assertEqual(result, expected) diff --git a/tests/bot/exts/filters/test_antimalware.py b/tests/bot/exts/filters/test_antimalware.py deleted file mode 100644 index 7282334e2..000000000 --- a/tests/bot/exts/filters/test_antimalware.py +++ /dev/null @@ -1,202 +0,0 @@ -import unittest -from unittest.mock import AsyncMock, Mock - -from discord import NotFound - -from bot.constants import Channels, STAFF_ROLES -from bot.exts.filters import antimalware -from tests.helpers import MockAttachment, MockBot, MockMessage, MockRole - - -class AntiMalwareCogTests(unittest.IsolatedAsyncioTestCase): - """Test the AntiMalware cog.""" - - def setUp(self): - """Sets up fresh objects for each test.""" - self.bot = MockBot() - self.bot.filter_list_cache = { - "FILE_FORMAT.True": { - ".first": 
{}, - ".second": {}, - ".third": {}, - } - } - self.cog = antimalware.AntiMalware(self.bot) - self.message = MockMessage() - self.message.webhook_id = None - self.message.author.bot = None - self.whitelist = [".first", ".second", ".third"] - - async def test_message_with_allowed_attachment(self): - """Messages with allowed extensions should not be deleted""" - attachment = MockAttachment(filename="python.first") - self.message.attachments = [attachment] - - await self.cog.on_message(self.message) - self.message.delete.assert_not_called() - - async def test_message_without_attachment(self): - """Messages without attachments should result in no action.""" - await self.cog.on_message(self.message) - self.message.delete.assert_not_called() - - async def test_direct_message_with_attachment(self): - """Direct messages should have no action taken.""" - attachment = MockAttachment(filename="python.disallowed") - self.message.attachments = [attachment] - self.message.guild = None - - await self.cog.on_message(self.message) - - self.message.delete.assert_not_called() - - async def test_webhook_message_with_illegal_extension(self): - """A webhook message containing an illegal extension should be ignored.""" - attachment = MockAttachment(filename="python.disallowed") - self.message.webhook_id = 697140105563078727 - self.message.attachments = [attachment] - - await self.cog.on_message(self.message) - - self.message.delete.assert_not_called() - - async def test_bot_message_with_illegal_extension(self): - """A bot message containing an illegal extension should be ignored.""" - attachment = MockAttachment(filename="python.disallowed") - self.message.author.bot = 409107086526644234 - self.message.attachments = [attachment] - - await self.cog.on_message(self.message) - - self.message.delete.assert_not_called() - - async def test_message_with_illegal_extension_gets_deleted(self): - """A message containing an illegal extension should send an embed.""" - attachment = MockAttachment(filename="python.disallowed") - self.message.attachments = [attachment] - - await self.cog.on_message(self.message) - - self.message.delete.assert_called_once() - - async def test_message_send_by_staff(self): - """A message send by a member of staff should be ignored.""" - staff_role = MockRole(id=STAFF_ROLES[0]) - self.message.author.roles.append(staff_role) - attachment = MockAttachment(filename="python.disallowed") - self.message.attachments = [attachment] - - await self.cog.on_message(self.message) - - self.message.delete.assert_not_called() - - async def test_python_file_redirect_embed_description(self): - """A message containing a .py file should result in an embed redirecting the user to our paste site""" - attachment = MockAttachment(filename="python.py") - self.message.attachments = [attachment] - self.message.channel.send = AsyncMock() - - await self.cog.on_message(self.message) - self.message.channel.send.assert_called_once() - args, kwargs = self.message.channel.send.call_args - embed = kwargs.pop("embed") - - self.assertEqual(embed.description, antimalware.PY_EMBED_DESCRIPTION) - - async def test_txt_file_redirect_embed_description(self): - """A message containing a .txt/.json/.csv file should result in the correct embed.""" - test_values = ( - ("text", ".txt"), - ("json", ".json"), - ("csv", ".csv"), - ) - - for file_name, disallowed_extension in test_values: - with self.subTest(file_name=file_name, disallowed_extension=disallowed_extension): - - attachment = 
MockAttachment(filename=f"{file_name}{disallowed_extension}") - self.message.attachments = [attachment] - self.message.channel.send = AsyncMock() - antimalware.TXT_EMBED_DESCRIPTION = Mock() - antimalware.TXT_EMBED_DESCRIPTION.format.return_value = "test" - - await self.cog.on_message(self.message) - self.message.channel.send.assert_called_once() - args, kwargs = self.message.channel.send.call_args - embed = kwargs.pop("embed") - cmd_channel = self.bot.get_channel(Channels.bot_commands) - - self.assertEqual( - embed.description, - antimalware.TXT_EMBED_DESCRIPTION.format.return_value - ) - antimalware.TXT_EMBED_DESCRIPTION.format.assert_called_with( - blocked_extension=disallowed_extension, - cmd_channel_mention=cmd_channel.mention - ) - - async def test_other_disallowed_extension_embed_description(self): - """Test the description for a non .py/.txt/.json/.csv disallowed extension.""" - attachment = MockAttachment(filename="python.disallowed") - self.message.attachments = [attachment] - self.message.channel.send = AsyncMock() - antimalware.DISALLOWED_EMBED_DESCRIPTION = Mock() - antimalware.DISALLOWED_EMBED_DESCRIPTION.format.return_value = "test" - - await self.cog.on_message(self.message) - self.message.channel.send.assert_called_once() - args, kwargs = self.message.channel.send.call_args - embed = kwargs.pop("embed") - meta_channel = self.bot.get_channel(Channels.meta) - - self.assertEqual(embed.description, antimalware.DISALLOWED_EMBED_DESCRIPTION.format.return_value) - antimalware.DISALLOWED_EMBED_DESCRIPTION.format.assert_called_with( - joined_whitelist=", ".join(self.whitelist), - blocked_extensions_str=".disallowed", - meta_channel_mention=meta_channel.mention - ) - - async def test_removing_deleted_message_logs(self): - """Removing an already deleted message logs the correct message""" - attachment = MockAttachment(filename="python.disallowed") - self.message.attachments = [attachment] - self.message.delete = AsyncMock(side_effect=NotFound(response=Mock(status=""), message="")) - - with self.assertLogs(logger=antimalware.log, level="INFO"): - await self.cog.on_message(self.message) - self.message.delete.assert_called_once() - - async def test_message_with_illegal_attachment_logs(self): - """Deleting a message with an illegal attachment should result in a log.""" - attachment = MockAttachment(filename="python.disallowed") - self.message.attachments = [attachment] - - with self.assertLogs(logger=antimalware.log, level="INFO"): - await self.cog.on_message(self.message) - - async def test_get_disallowed_extensions(self): - """The return value should include all non-whitelisted extensions.""" - test_values = ( - ([], []), - (self.whitelist, []), - ([".first"], []), - ([".first", ".disallowed"], [".disallowed"]), - ([".disallowed"], [".disallowed"]), - ([".disallowed", ".illegal"], [".disallowed", ".illegal"]), - ) - - for extensions, expected_disallowed_extensions in test_values: - with self.subTest(extensions=extensions, expected_disallowed_extensions=expected_disallowed_extensions): - self.message.attachments = [MockAttachment(filename=f"filename{extension}") for extension in extensions] - disallowed_extensions = self.cog._get_disallowed_extensions(self.message) - self.assertCountEqual(disallowed_extensions, expected_disallowed_extensions) - - -class AntiMalwareSetupTests(unittest.IsolatedAsyncioTestCase): - """Tests setup of the `AntiMalware` cog.""" - - async def test_setup(self): - """Setup of the extension should call add_cog.""" - bot = MockBot() - await antimalware.setup(bot) - 
bot.add_cog.assert_awaited_once() diff --git a/tests/bot/exts/filters/test_antispam.py b/tests/bot/exts/filters/test_antispam.py deleted file mode 100644 index 6a0e4fded..000000000 --- a/tests/bot/exts/filters/test_antispam.py +++ /dev/null @@ -1,35 +0,0 @@ -import unittest - -from bot.exts.filters import antispam - - -class AntispamConfigurationValidationTests(unittest.TestCase): - """Tests validation of the antispam cog configuration.""" - - def test_default_antispam_config_is_valid(self): - """The default antispam configuration is valid.""" - validation_errors = antispam.validate_config() - self.assertEqual(validation_errors, {}) - - def test_unknown_rule_returns_error(self): - """Configuring an unknown rule returns an error.""" - self.assertEqual( - antispam.validate_config({'invalid-rule': {}}), - {'invalid-rule': "`invalid-rule` is not recognized as an antispam rule."} - ) - - def test_missing_keys_returns_error(self): - """Not configuring required keys returns an error.""" - keys = (('interval', 'max'), ('max', 'interval')) - for configured_key, unconfigured_key in keys: - with self.subTest( - configured_key=configured_key, - unconfigured_key=unconfigured_key - ): - config = {'burst': {configured_key: 10}} - error = f"Key `{unconfigured_key}` is required but not set for rule `burst`" - - self.assertEqual( - antispam.validate_config(config), - {'burst': error} - ) diff --git a/tests/bot/exts/filters/test_filtering.py b/tests/bot/exts/filters/test_filtering.py deleted file mode 100644 index e47cf627b..000000000 --- a/tests/bot/exts/filters/test_filtering.py +++ /dev/null @@ -1,40 +0,0 @@ -import unittest -from unittest.mock import patch - -from bot.exts.filters import filtering -from tests.helpers import MockBot, autospec - - -class FilteringCogTests(unittest.IsolatedAsyncioTestCase): - """Tests the `Filtering` cog.""" - - def setUp(self): - """Instantiate the bot and cog.""" - self.bot = MockBot() - with patch("pydis_core.utils.scheduling.create_task", new=lambda task, **_: task.close()): - self.cog = filtering.Filtering(self.bot) - - @autospec(filtering.Filtering, "_get_filterlist_items", pass_mocks=False, return_value=["TOKEN"]) - async def test_token_filter(self): - """Ensure that a filter token is correctly detected in a message.""" - messages = { - "": False, - "no matches": False, - "TOKEN": True, - - # See advisory https://github.com/python-discord/bot/security/advisories/GHSA-j8c3-8x46-8pp6 - "https://google.com TOKEN": True, - "https://google.com something else": False, - } - - for message, match in messages.items(): - with self.subTest(input=message, match=match): - result, _ = await self.cog._has_watch_regex_match(message) - - self.assertEqual( - match, - bool(result), - msg=f"Hit was {'expected' if match else 'not expected'} for this input." 
- ) - if result: - self.assertEqual("TOKEN", result.group()) diff --git a/tests/bot/exts/filters/test_token_remover.py b/tests/bot/exts/filters/test_token_remover.py deleted file mode 100644 index c1f3762ac..000000000 --- a/tests/bot/exts/filters/test_token_remover.py +++ /dev/null @@ -1,409 +0,0 @@ -import unittest -from re import Match -from unittest import mock -from unittest.mock import MagicMock - -from discord import Colour, NotFound - -from bot import constants -from bot.exts.filters import token_remover -from bot.exts.filters.token_remover import Token, TokenRemover -from bot.exts.moderation.modlog import ModLog -from bot.utils.messages import format_user -from tests.helpers import MockBot, MockMessage, autospec - - -class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): - """Tests the `TokenRemover` cog.""" - - def setUp(self): - """Adds the cog, a bot, and a message to the instance for usage in tests.""" - self.bot = MockBot() - self.cog = TokenRemover(bot=self.bot) - - self.msg = MockMessage(id=555, content="hello world") - self.msg.channel.mention = "#lemonade-stand" - self.msg.guild.get_member.return_value.bot = False - self.msg.guild.get_member.return_value.__str__.return_value = "Woody" - self.msg.author.__str__ = MagicMock(return_value=self.msg.author.name) - self.msg.author.display_avatar.url = "picture-lemon.png" - - def test_extract_user_id_valid(self): - """Should consider user IDs valid if they decode into an integer ID.""" - id_pairs = ( - ("NDcyMjY1OTQzMDYyNDEzMzMy", 472265943062413332), - ("NDc1MDczNjI5Mzk5NTQ3OTA0", 475073629399547904), - ("NDY3MjIzMjMwNjUwNzc3NjQx", 467223230650777641), - ) - - for token_id, user_id in id_pairs: - with self.subTest(token_id=token_id): - result = TokenRemover.extract_user_id(token_id) - self.assertEqual(result, user_id) - - def test_extract_user_id_invalid(self): - """Should consider non-digit and non-ASCII IDs invalid.""" - ids = ( - ("SGVsbG8gd29ybGQ", "non-digit ASCII"), - ("0J_RgNC40LLQtdGCINC80LjRgA", "cyrillic text"), - ("4pO14p6L4p6C4pG34p264pGl8J-EiOKSj-KCieKBsA", "Unicode digits"), - ("4oaA4oaB4oWh4oWi4Lyz4Lyq4Lyr4LG9", "Unicode numerals"), - ("8J2fjvCdn5nwnZ-k8J2fr_Cdn7rgravvvJngr6c", "Unicode decimals"), - ("{hello}[world]&(bye!)", "ASCII invalid Base64"), - ("Þíß-ï§-ňøẗ-våłìÐ", "Unicode invalid Base64"), - ) - - for user_id, msg in ids: - with self.subTest(msg=msg): - result = TokenRemover.extract_user_id(user_id) - self.assertIsNone(result) - - def test_is_valid_timestamp_valid(self): - """Should consider timestamps valid if they're greater than the Discord epoch.""" - timestamps = ( - "XsyRkw", - "Xrim9Q", - "XsyR-w", - "XsySD_", - "Dn9r_A", - ) - - for timestamp in timestamps: - with self.subTest(timestamp=timestamp): - result = TokenRemover.is_valid_timestamp(timestamp) - self.assertTrue(result) - - def test_is_valid_timestamp_invalid(self): - """Should consider timestamps invalid if they're before Discord epoch or can't be parsed.""" - timestamps = ( - ("B4Yffw", "DISCORD_EPOCH - TOKEN_EPOCH - 1"), - ("ew", "123"), - ("AoIKgA", "42076800"), - ("{hello}[world]&(bye!)", "ASCII invalid Base64"), - ("Þíß-ï§-ňøẗ-våłìÐ", "Unicode invalid Base64"), - ) - - for timestamp, msg in timestamps: - with self.subTest(msg=msg): - result = TokenRemover.is_valid_timestamp(timestamp) - self.assertFalse(result) - - def test_is_valid_hmac_valid(self): - """Should consider an HMAC valid if it has at least 3 unique characters.""" - valid_hmacs = ( - "VXmErH7j511turNpfURmb0rVNm8", - "Ysnu2wacjaKs7qnoo46S8Dm2us8", - 
"sJf6omBPORBPju3WJEIAcwW9Zds", - "s45jqDV_Iisn-symw0yDRrk_jf4", - ) - - for hmac in valid_hmacs: - with self.subTest(msg=hmac): - result = TokenRemover.is_maybe_valid_hmac(hmac) - self.assertTrue(result) - - def test_is_invalid_hmac_invalid(self): - """Should consider an HMAC invalid if has fewer than 3 unique characters.""" - invalid_hmacs = ( - ("xxxxxxxxxxxxxxxxxx", "Single character"), - ("XxXxXxXxXxXxXxXxXx", "Single character alternating case"), - ("ASFasfASFasfASFASsf", "Three characters alternating-case"), - ("asdasdasdasdasdasdasd", "Three characters one case"), - ) - - for hmac, msg in invalid_hmacs: - with self.subTest(msg=msg): - result = TokenRemover.is_maybe_valid_hmac(hmac) - self.assertFalse(result) - - def test_mod_log_property(self): - """The `mod_log` property should ask the bot to return the `ModLog` cog.""" - self.bot.get_cog.return_value = 'lemon' - self.assertEqual(self.cog.mod_log, self.bot.get_cog.return_value) - self.bot.get_cog.assert_called_once_with('ModLog') - - async def test_on_message_edit_uses_on_message(self): - """The edit listener should delegate handling of the message to the normal listener.""" - self.cog.on_message = mock.create_autospec(self.cog.on_message, spec_set=True) - - await self.cog.on_message_edit(MockMessage(), self.msg) - self.cog.on_message.assert_awaited_once_with(self.msg) - - @autospec(TokenRemover, "find_token_in_message", "take_action") - async def test_on_message_takes_action(self, find_token_in_message, take_action): - """Should take action if a valid token is found when a message is sent.""" - cog = TokenRemover(self.bot) - found_token = "foobar" - find_token_in_message.return_value = found_token - - await cog.on_message(self.msg) - - find_token_in_message.assert_called_once_with(self.msg) - take_action.assert_awaited_once_with(cog, self.msg, found_token) - - @autospec(TokenRemover, "find_token_in_message", "take_action") - async def test_on_message_skips_missing_token(self, find_token_in_message, take_action): - """Shouldn't take action if a valid token isn't found when a message is sent.""" - cog = TokenRemover(self.bot) - find_token_in_message.return_value = False - - await cog.on_message(self.msg) - - find_token_in_message.assert_called_once_with(self.msg) - take_action.assert_not_awaited() - - @autospec(TokenRemover, "find_token_in_message") - async def test_on_message_ignores_dms_bots(self, find_token_in_message): - """Shouldn't parse a message if it is a DM or authored by a bot.""" - cog = TokenRemover(self.bot) - dm_msg = MockMessage(guild=None) - bot_msg = MockMessage(author=MagicMock(bot=True)) - - for msg in (dm_msg, bot_msg): - await cog.on_message(msg) - find_token_in_message.assert_not_called() - - @autospec("bot.exts.filters.token_remover", "TOKEN_RE") - def test_find_token_no_matches(self, token_re): - """None should be returned if the regex matches no tokens in a message.""" - token_re.finditer.return_value = () - - return_value = TokenRemover.find_token_in_message(self.msg) - - self.assertIsNone(return_value) - token_re.finditer.assert_called_once_with(self.msg.content) - - @autospec(TokenRemover, "extract_user_id", "is_valid_timestamp", "is_maybe_valid_hmac") - @autospec("bot.exts.filters.token_remover", "Token") - @autospec("bot.exts.filters.token_remover", "TOKEN_RE") - def test_find_token_valid_match( - self, - token_re, - token_cls, - extract_user_id, - is_valid_timestamp, - is_maybe_valid_hmac, - ): - """The first match with a valid user ID, timestamp, and HMAC should be returned as a `Token`.""" - matches 
= [ - mock.create_autospec(Match, spec_set=True, instance=True), - mock.create_autospec(Match, spec_set=True, instance=True), - ] - tokens = [ - mock.create_autospec(Token, spec_set=True, instance=True), - mock.create_autospec(Token, spec_set=True, instance=True), - ] - - token_re.finditer.return_value = matches - token_cls.side_effect = tokens - extract_user_id.side_effect = (None, True) # The 1st match will be invalid, 2nd one valid. - is_valid_timestamp.return_value = True - is_maybe_valid_hmac.return_value = True - - return_value = TokenRemover.find_token_in_message(self.msg) - - self.assertEqual(tokens[1], return_value) - token_re.finditer.assert_called_once_with(self.msg.content) - - @autospec(TokenRemover, "extract_user_id", "is_valid_timestamp", "is_maybe_valid_hmac") - @autospec("bot.exts.filters.token_remover", "Token") - @autospec("bot.exts.filters.token_remover", "TOKEN_RE") - def test_find_token_invalid_matches( - self, - token_re, - token_cls, - extract_user_id, - is_valid_timestamp, - is_maybe_valid_hmac, - ): - """None should be returned if no matches have valid user IDs, HMACs, and timestamps.""" - token_re.finditer.return_value = [mock.create_autospec(Match, spec_set=True, instance=True)] - token_cls.return_value = mock.create_autospec(Token, spec_set=True, instance=True) - extract_user_id.return_value = None - is_valid_timestamp.return_value = False - is_maybe_valid_hmac.return_value = False - - return_value = TokenRemover.find_token_in_message(self.msg) - - self.assertIsNone(return_value) - token_re.finditer.assert_called_once_with(self.msg.content) - - def test_regex_invalid_tokens(self): - """Messages without anything looking like a token are not matched.""" - tokens = ( - "", - "lemon wins", - "..", - "x.y", - "x.y.", - ".y.z", - ".y.", - "..z", - "x..z", - " . . ", - "\n.\n.\n", - "hellö.world.bye", - "base64.nötbåse64.morebase64", - "19jd3J.dfkm3d.€víł§tüff", - ) - - for token in tokens: - with self.subTest(token=token): - results = token_remover.TOKEN_RE.findall(token) - self.assertEqual(len(results), 0) - - def test_regex_valid_tokens(self): - """Messages that look like tokens should be matched.""" - # Don't worry, these tokens have been invalidated. 
- tokens = ( - "NDcyMjY1OTQzMDYy_DEzMz-y.XsyRkw.VXmErH7j511turNpfURmb0rVNm8", - "NDcyMjY1OTQzMDYyNDEzMzMy.Xrim9Q.Ysnu2wacjaKs7qnoo46S8Dm2us8", - "NDc1MDczNjI5Mzk5NTQ3OTA0.XsyR-w.sJf6omBPORBPju3WJEIAcwW9Zds", - "NDY3MjIzMjMwNjUwNzc3NjQx.XsySD_.s45jqDV_Iisn-symw0yDRrk_jf4", - ) - - for token in tokens: - with self.subTest(token=token): - results = token_remover.TOKEN_RE.fullmatch(token) - self.assertIsNotNone(results, f"{token} was not matched by the regex") - - def test_regex_matches_multiple_valid(self): - """Should support multiple matches in the middle of a string.""" - token_1 = "NDY3MjIzMjMwNjUwNzc3NjQx.XsyWGg.uFNEQPCc4ePwGh7egG8UicQssz8" - token_2 = "NDcyMjY1OTQzMDYyNDEzMzMy.XsyWMw.l8XPnDqb0lp-EiQ2g_0xVFT1pyc" - message = f"garbage {token_1} hello {token_2} world" - - results = token_remover.TOKEN_RE.finditer(message) - results = [match[0] for match in results] - self.assertCountEqual((token_1, token_2), results) - - @autospec("bot.exts.filters.token_remover", "LOG_MESSAGE") - def test_format_log_message(self, log_message): - """Should correctly format the log message with info from the message and token.""" - token = Token("NDcyMjY1OTQzMDYyNDEzMzMy", "XsySD_", "s45jqDV_Iisn-symw0yDRrk_jf4") - log_message.format.return_value = "Howdy" - - return_value = TokenRemover.format_log_message(self.msg, token) - - self.assertEqual(return_value, log_message.format.return_value) - log_message.format.assert_called_once_with( - author=format_user(self.msg.author), - channel=self.msg.channel.mention, - user_id=token.user_id, - timestamp=token.timestamp, - hmac="xxxxxxxxxxxxxxxxxxxxxxxxjf4", - ) - - @autospec("bot.exts.filters.token_remover", "UNKNOWN_USER_LOG_MESSAGE") - async def test_format_userid_log_message_unknown(self, unknown_user_log_message,): - """Should correctly format the user ID portion when the actual user it belongs to is unknown.""" - token = Token("NDcyMjY1OTQzMDYyNDEzMzMy", "XsySD_", "s45jqDV_Iisn-symw0yDRrk_jf4") - unknown_user_log_message.format.return_value = " Partner" - msg = MockMessage(id=555, content="hello world") - msg.guild.get_member.return_value = None - msg.guild.fetch_member.side_effect = NotFound(mock.Mock(status=404), "Not found") - - return_value = await TokenRemover.format_userid_log_message(msg, token) - - self.assertEqual(return_value, (unknown_user_log_message.format.return_value, False)) - unknown_user_log_message.format.assert_called_once_with(user_id=472265943062413332) - - @autospec("bot.exts.filters.token_remover", "KNOWN_USER_LOG_MESSAGE") - async def test_format_userid_log_message_bot(self, known_user_log_message): - """Should correctly format the user ID portion when the ID belongs to a known bot.""" - token = Token("NDcyMjY1OTQzMDYyNDEzMzMy", "XsySD_", "s45jqDV_Iisn-symw0yDRrk_jf4") - known_user_log_message.format.return_value = " Partner" - msg = MockMessage(id=555, content="hello world") - msg.guild.get_member.return_value.__str__.return_value = "Sam" - msg.guild.get_member.return_value.bot = True - - return_value = await TokenRemover.format_userid_log_message(msg, token) - - self.assertEqual(return_value, (known_user_log_message.format.return_value, True)) - - known_user_log_message.format.assert_called_once_with( - user_id=472265943062413332, - user_name="Sam", - kind="BOT", - ) - - @autospec("bot.exts.filters.token_remover", "KNOWN_USER_LOG_MESSAGE") - async def test_format_log_message_user_token_user(self, user_token_message): - """Should correctly format the user ID portion when the ID belongs to a known user.""" - token = 
Token("NDY3MjIzMjMwNjUwNzc3NjQx", "XsySD_", "s45jqDV_Iisn-symw0yDRrk_jf4") - user_token_message.format.return_value = "Partner" - - return_value = await TokenRemover.format_userid_log_message(self.msg, token) - - self.assertEqual(return_value, (user_token_message.format.return_value, True)) - user_token_message.format.assert_called_once_with( - user_id=467223230650777641, - user_name="Woody", - kind="USER", - ) - - @mock.patch.object(TokenRemover, "mod_log", new_callable=mock.PropertyMock) - @autospec("bot.exts.filters.token_remover", "log") - @autospec(TokenRemover, "format_log_message", "format_userid_log_message") - async def test_take_action(self, format_log_message, format_userid_log_message, logger, mod_log_property): - """Should delete the message and send a mod log.""" - cog = TokenRemover(self.bot) - mod_log = mock.create_autospec(ModLog, spec_set=True, instance=True) - token = mock.create_autospec(Token, spec_set=True, instance=True) - token.user_id = "no-id" - log_msg = "testing123" - userid_log_message = "userid-log-message" - - mod_log_property.return_value = mod_log - format_log_message.return_value = log_msg - format_userid_log_message.return_value = (userid_log_message, True) - - await cog.take_action(self.msg, token) - - self.msg.delete.assert_called_once_with() - self.msg.channel.send.assert_called_once_with( - token_remover.DELETION_MESSAGE_TEMPLATE.format(mention=self.msg.author.mention) - ) - - format_log_message.assert_called_once_with(self.msg, token) - format_userid_log_message.assert_called_once_with(self.msg, token) - logger.debug.assert_called_with(log_msg) - self.bot.stats.incr.assert_called_once_with("tokens.removed_tokens") - - mod_log.ignore.assert_called_once_with(constants.Event.message_delete, self.msg.id) - mod_log.send_log_message.assert_called_once_with( - icon_url=constants.Icons.token_removed, - colour=Colour(constants.Colours.soft_red), - title="Token removed!", - text=log_msg + "\n" + userid_log_message, - thumbnail=self.msg.author.display_avatar.url, - channel_id=constants.Channels.mod_alerts, - ping_everyone=True, - ) - - @mock.patch.object(TokenRemover, "mod_log", new_callable=mock.PropertyMock) - async def test_take_action_delete_failure(self, mod_log_property): - """Shouldn't send any messages if the token message can't be deleted.""" - cog = TokenRemover(self.bot) - mod_log_property.return_value = mock.create_autospec(ModLog, spec_set=True, instance=True) - self.msg.delete.side_effect = NotFound(MagicMock(), MagicMock()) - - token = mock.create_autospec(Token, spec_set=True, instance=True) - await cog.take_action(self.msg, token) - - self.msg.delete.assert_called_once_with() - self.msg.channel.send.assert_not_awaited() - - -class TokenRemoverExtensionTests(unittest.IsolatedAsyncioTestCase): - """Tests for the token_remover extension.""" - - @autospec("bot.exts.filters.token_remover", "TokenRemover") - async def test_extension_setup(self, cog): - """The TokenRemover cog should be added.""" - bot = MockBot() - await token_remover.setup(bot) - - cog.assert_called_once_with(bot) - bot.add_cog.assert_awaited_once() - self.assertTrue(isinstance(bot.add_cog.call_args.args[0], TokenRemover)) diff --git a/tests/bot/rules/__init__.py b/tests/bot/rules/__init__.py deleted file mode 100644 index 0d570f5a3..000000000 --- a/tests/bot/rules/__init__.py +++ /dev/null @@ -1,76 +0,0 @@ -import unittest -from abc import ABCMeta, abstractmethod -from typing import Callable, Dict, Iterable, List, NamedTuple, Tuple - -from tests.helpers import MockMessage - - 
-class DisallowedCase(NamedTuple): - """Encapsulation for test cases expected to fail.""" - recent_messages: List[MockMessage] - culprits: Iterable[str] - n_violations: int - - -class RuleTest(unittest.IsolatedAsyncioTestCase, metaclass=ABCMeta): - """ - Abstract class for antispam rule test cases. - - Tests for specific rules should inherit from `RuleTest` and implement - `relevant_messages` and `get_report`. Each instance should also set the - `apply` and `config` attributes as necessary. - - The execution of test cases can then be delegated to the `run_allowed` - and `run_disallowed` methods. - """ - - apply: Callable # The tested rule's apply function - config: Dict[str, int] - - async def run_allowed(self, cases: Tuple[List[MockMessage], ...]) -> None: - """Run all `cases` against `self.apply` expecting them to pass.""" - for recent_messages in cases: - last_message = recent_messages[0] - - with self.subTest( - last_message=last_message, - recent_messages=recent_messages, - config=self.config, - ): - self.assertIsNone( - await self.apply(last_message, recent_messages, self.config) - ) - - async def run_disallowed(self, cases: Tuple[DisallowedCase, ...]) -> None: - """Run all `cases` against `self.apply` expecting them to fail.""" - for case in cases: - recent_messages, culprits, n_violations = case - last_message = recent_messages[0] - relevant_messages = self.relevant_messages(case) - desired_output = ( - self.get_report(case), - culprits, - relevant_messages, - ) - - with self.subTest( - last_message=last_message, - recent_messages=recent_messages, - relevant_messages=relevant_messages, - n_violations=n_violations, - config=self.config, - ): - self.assertTupleEqual( - await self.apply(last_message, recent_messages, self.config), - desired_output, - ) - - @abstractmethod - def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]: - """Give expected relevant messages for `case`.""" - raise NotImplementedError # pragma: no cover - - @abstractmethod - def get_report(self, case: DisallowedCase) -> str: - """Give expected error report for `case`.""" - raise NotImplementedError # pragma: no cover diff --git a/tests/bot/rules/test_attachments.py b/tests/bot/rules/test_attachments.py deleted file mode 100644 index d7e779221..000000000 --- a/tests/bot/rules/test_attachments.py +++ /dev/null @@ -1,69 +0,0 @@ -from typing import Iterable - -from bot.rules import attachments -from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage - - -def make_msg(author: str, total_attachments: int) -> MockMessage: - """Builds a message with `total_attachments` attachments.""" - return MockMessage(author=author, attachments=list(range(total_attachments))) - - -class AttachmentRuleTests(RuleTest): - """Tests applying the `attachments` antispam rule.""" - - def setUp(self): - self.apply = attachments.apply - self.config = {"max": 5, "interval": 10} - - async def test_allows_messages_without_too_many_attachments(self): - """Messages without too many attachments are allowed as-is.""" - cases = ( - [make_msg("bob", 0), make_msg("bob", 0), make_msg("bob", 0)], - [make_msg("bob", 2), make_msg("bob", 2)], - [make_msg("bob", 2), make_msg("alice", 2), make_msg("bob", 2)], - ) - - await self.run_allowed(cases) - - async def test_disallows_messages_with_too_many_attachments(self): - """Messages with too many attachments trigger the rule.""" - cases = ( - DisallowedCase( - [make_msg("bob", 4), make_msg("bob", 0), make_msg("bob", 6)], - ("bob",), - 10, - ), - 
DisallowedCase( - [make_msg("bob", 4), make_msg("alice", 6), make_msg("bob", 2)], - ("bob",), - 6, - ), - DisallowedCase( - [make_msg("alice", 6)], - ("alice",), - 6, - ), - DisallowedCase( - [make_msg("alice", 1) for _ in range(6)], - ("alice",), - 6, - ), - ) - - await self.run_disallowed(cases) - - def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]: - last_message = case.recent_messages[0] - return tuple( - msg - for msg in case.recent_messages - if ( - msg.author == last_message.author - and len(msg.attachments) > 0 - ) - ) - - def get_report(self, case: DisallowedCase) -> str: - return f"sent {case.n_violations} attachments in {self.config['interval']}s" diff --git a/tests/bot/rules/test_burst.py b/tests/bot/rules/test_burst.py deleted file mode 100644 index 03682966b..000000000 --- a/tests/bot/rules/test_burst.py +++ /dev/null @@ -1,54 +0,0 @@ -from typing import Iterable - -from bot.rules import burst -from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage - - -def make_msg(author: str) -> MockMessage: - """ - Init a MockMessage instance with author set to `author`. - - This serves as a shorthand / alias to keep the test cases visually clean. - """ - return MockMessage(author=author) - - -class BurstRuleTests(RuleTest): - """Tests the `burst` antispam rule.""" - - def setUp(self): - self.apply = burst.apply - self.config = {"max": 2, "interval": 10} - - async def test_allows_messages_within_limit(self): - """Cases which do not violate the rule.""" - cases = ( - [make_msg("bob"), make_msg("bob")], - [make_msg("bob"), make_msg("alice"), make_msg("bob")], - ) - - await self.run_allowed(cases) - - async def test_disallows_messages_beyond_limit(self): - """Cases where the amount of messages exceeds the limit, triggering the rule.""" - cases = ( - DisallowedCase( - [make_msg("bob"), make_msg("bob"), make_msg("bob")], - ("bob",), - 3, - ), - DisallowedCase( - [make_msg("bob"), make_msg("bob"), make_msg("alice"), make_msg("bob")], - ("bob",), - 3, - ), - ) - - await self.run_disallowed(cases) - - def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]: - return tuple(msg for msg in case.recent_messages if msg.author in case.culprits) - - def get_report(self, case: DisallowedCase) -> str: - return f"sent {case.n_violations} messages in {self.config['interval']}s" diff --git a/tests/bot/rules/test_burst_shared.py b/tests/bot/rules/test_burst_shared.py deleted file mode 100644 index 3275143d5..000000000 --- a/tests/bot/rules/test_burst_shared.py +++ /dev/null @@ -1,57 +0,0 @@ -from typing import Iterable - -from bot.rules import burst_shared -from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage - - -def make_msg(author: str) -> MockMessage: - """ - Init a MockMessage instance with the passed arg. - - This serves as a shorthand / alias to keep the test cases visually clean. - """ - return MockMessage(author=author) - - -class BurstSharedRuleTests(RuleTest): - """Tests the `burst_shared` antispam rule.""" - - def setUp(self): - self.apply = burst_shared.apply - self.config = {"max": 2, "interval": 10} - - async def test_allows_messages_within_limit(self): - """ - Cases that do not violate the rule. - - There really isn't more to test here than a single case. 
- """ - cases = ( - [make_msg("spongebob"), make_msg("patrick")], - ) - - await self.run_allowed(cases) - - async def test_disallows_messages_beyond_limit(self): - """Cases where the amount of messages exceeds the limit, triggering the rule.""" - cases = ( - DisallowedCase( - [make_msg("bob"), make_msg("bob"), make_msg("bob")], - {"bob"}, - 3, - ), - DisallowedCase( - [make_msg("bob"), make_msg("bob"), make_msg("alice"), make_msg("bob")], - {"bob", "alice"}, - 4, - ), - ) - - await self.run_disallowed(cases) - - def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]: - return case.recent_messages - - def get_report(self, case: DisallowedCase) -> str: - return f"sent {case.n_violations} messages in {self.config['interval']}s" diff --git a/tests/bot/rules/test_chars.py b/tests/bot/rules/test_chars.py deleted file mode 100644 index f1e3c76a7..000000000 --- a/tests/bot/rules/test_chars.py +++ /dev/null @@ -1,64 +0,0 @@ -from typing import Iterable - -from bot.rules import chars -from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage - - -def make_msg(author: str, n_chars: int) -> MockMessage: - """Build a message with arbitrary content of `n_chars` length.""" - return MockMessage(author=author, content="A" * n_chars) - - -class CharsRuleTests(RuleTest): - """Tests the `chars` antispam rule.""" - - def setUp(self): - self.apply = chars.apply - self.config = { - "max": 20, # Max allowed sum of chars per user - "interval": 10, - } - - async def test_allows_messages_within_limit(self): - """Cases with a total amount of chars within limit.""" - cases = ( - [make_msg("bob", 0)], - [make_msg("bob", 20)], - [make_msg("bob", 15), make_msg("alice", 15)], - ) - - await self.run_allowed(cases) - - async def test_disallows_messages_beyond_limit(self): - """Cases where the total amount of chars exceeds the limit, triggering the rule.""" - cases = ( - DisallowedCase( - [make_msg("bob", 21)], - ("bob",), - 21, - ), - DisallowedCase( - [make_msg("bob", 15), make_msg("bob", 15)], - ("bob",), - 30, - ), - DisallowedCase( - [make_msg("alice", 15), make_msg("bob", 20), make_msg("alice", 15)], - ("alice",), - 30, - ), - ) - - await self.run_disallowed(cases) - - def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]: - last_message = case.recent_messages[0] - return tuple( - msg - for msg in case.recent_messages - if msg.author == last_message.author - ) - - def get_report(self, case: DisallowedCase) -> str: - return f"sent {case.n_violations} characters in {self.config['interval']}s" diff --git a/tests/bot/rules/test_discord_emojis.py b/tests/bot/rules/test_discord_emojis.py deleted file mode 100644 index 66c2d9f92..000000000 --- a/tests/bot/rules/test_discord_emojis.py +++ /dev/null @@ -1,73 +0,0 @@ -from typing import Iterable - -from bot.rules import discord_emojis -from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage - -discord_emoji = "<:abcd:1234>" # Discord emojis follow the format <:name:id> -unicode_emoji = "🧪" - - -def make_msg(author: str, n_emojis: int, emoji: str = discord_emoji) -> MockMessage: - """Build a MockMessage instance with content containing `n_emojis` arbitrary emojis.""" - return MockMessage(author=author, content=emoji * n_emojis) - - -class DiscordEmojisRuleTests(RuleTest): - """Tests for the `discord_emojis` antispam rule.""" - - def setUp(self): - self.apply = discord_emojis.apply - self.config = {"max": 2, "interval": 10} - - async def 
test_allows_messages_within_limit(self): - """Cases with a total amount of discord and unicode emojis within limit.""" - cases = ( - [make_msg("bob", 2)], - [make_msg("alice", 1), make_msg("bob", 2), make_msg("alice", 1)], - [make_msg("bob", 2, unicode_emoji)], - [ - make_msg("alice", 1, unicode_emoji), - make_msg("bob", 2, unicode_emoji), - make_msg("alice", 1, unicode_emoji) - ], - ) - - await self.run_allowed(cases) - - async def test_disallows_messages_beyond_limit(self): - """Cases with more than the allowed amount of discord and unicode emojis.""" - cases = ( - DisallowedCase( - [make_msg("bob", 3)], - ("bob",), - 3, - ), - DisallowedCase( - [make_msg("alice", 2), make_msg("bob", 2), make_msg("alice", 2)], - ("alice",), - 4, - ), - DisallowedCase( - [make_msg("bob", 3, unicode_emoji)], - ("bob",), - 3, - ), - DisallowedCase( - [ - make_msg("alice", 2, unicode_emoji), - make_msg("bob", 2, unicode_emoji), - make_msg("alice", 2, unicode_emoji) - ], - ("alice",), - 4 - ) - ) - - await self.run_disallowed(cases) - - def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]: - return tuple(msg for msg in case.recent_messages if msg.author in case.culprits) - - def get_report(self, case: DisallowedCase) -> str: - return f"sent {case.n_violations} emojis in {self.config['interval']}s" diff --git a/tests/bot/rules/test_duplicates.py b/tests/bot/rules/test_duplicates.py deleted file mode 100644 index 9bd886a77..000000000 --- a/tests/bot/rules/test_duplicates.py +++ /dev/null @@ -1,64 +0,0 @@ -from typing import Iterable - -from bot.rules import duplicates -from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage - - -def make_msg(author: str, content: str) -> MockMessage: - """Give a MockMessage instance with `author` and `content` attrs.""" - return MockMessage(author=author, content=content) - - -class DuplicatesRuleTests(RuleTest): - """Tests the `duplicates` antispam rule.""" - - def setUp(self): - self.apply = duplicates.apply - self.config = {"max": 2, "interval": 10} - - async def test_allows_messages_within_limit(self): - """Cases which do not violate the rule.""" - cases = ( - [make_msg("alice", "A"), make_msg("alice", "A")], - [make_msg("alice", "A"), make_msg("alice", "B"), make_msg("alice", "C")], # Non-duplicate - [make_msg("alice", "A"), make_msg("bob", "A"), make_msg("alice", "A")], # Different author - ) - - await self.run_allowed(cases) - - async def test_disallows_messages_beyond_limit(self): - """Cases with too many duplicate messages from the same author.""" - cases = ( - DisallowedCase( - [make_msg("alice", "A"), make_msg("alice", "A"), make_msg("alice", "A")], - ("alice",), - 3, - ), - DisallowedCase( - [make_msg("bob", "A"), make_msg("alice", "A"), make_msg("bob", "A"), make_msg("bob", "A")], - ("bob",), - 3, # 4 duplicate messages, but only 3 from bob - ), - DisallowedCase( - [make_msg("bob", "A"), make_msg("bob", "B"), make_msg("bob", "A"), make_msg("bob", "A")], - ("bob",), - 3, # 4 messages from bob, but only 3 duplicates - ), - ) - - await self.run_disallowed(cases) - - def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]: - last_message = case.recent_messages[0] - return tuple( - msg - for msg in case.recent_messages - if ( - msg.author == last_message.author - and msg.content == last_message.content - ) - ) - - def get_report(self, case: DisallowedCase) -> str: - return f"sent {case.n_violations} duplicated messages in {self.config['interval']}s" diff --git a/tests/bot/rules/test_links.py 
b/tests/bot/rules/test_links.py deleted file mode 100644 index b091bd9d7..000000000 --- a/tests/bot/rules/test_links.py +++ /dev/null @@ -1,67 +0,0 @@ -from typing import Iterable - -from bot.rules import links -from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage - - -def make_msg(author: str, total_links: int) -> MockMessage: - """Makes a message with `total_links` links.""" - content = " ".join(["https://pydis.com"] * total_links) - return MockMessage(author=author, content=content) - - -class LinksTests(RuleTest): - """Tests applying the `links` rule.""" - - def setUp(self): - self.apply = links.apply - self.config = { - "max": 2, - "interval": 10 - } - - async def test_links_within_limit(self): - """Messages with an allowed amount of links.""" - cases = ( - [make_msg("bob", 0)], - [make_msg("bob", 2)], - [make_msg("bob", 3)], # Filter only applies if len(messages_with_links) > 1 - [make_msg("bob", 1), make_msg("bob", 1)], - [make_msg("bob", 2), make_msg("alice", 2)] # Only messages from latest author count - ) - - await self.run_allowed(cases) - - async def test_links_exceeding_limit(self): - """Messages with a higher than allowed amount of links.""" - cases = ( - DisallowedCase( - [make_msg("bob", 1), make_msg("bob", 2)], - ("bob",), - 3 - ), - DisallowedCase( - [make_msg("alice", 1), make_msg("alice", 1), make_msg("alice", 1)], - ("alice",), - 3 - ), - DisallowedCase( - [make_msg("alice", 2), make_msg("bob", 3), make_msg("alice", 1)], - ("alice",), - 3 - ) - ) - - await self.run_disallowed(cases) - - def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]: - last_message = case.recent_messages[0] - return tuple( - msg - for msg in case.recent_messages - if msg.author == last_message.author - ) - - def get_report(self, case: DisallowedCase) -> str: - return f"sent {case.n_violations} links in {self.config['interval']}s" diff --git a/tests/bot/rules/test_mentions.py b/tests/bot/rules/test_mentions.py deleted file mode 100644 index e1f904917..000000000 --- a/tests/bot/rules/test_mentions.py +++ /dev/null @@ -1,131 +0,0 @@ -from typing import Iterable, Optional - -import discord - -from bot.rules import mentions -from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMember, MockMessage, MockMessageReference - - -def make_msg( - author: str, - total_user_mentions: int, - total_bot_mentions: int = 0, - *, - reference: Optional[MockMessageReference] = None -) -> MockMessage: - """Makes a message from `author` with `total_user_mentions` user mentions and `total_bot_mentions` bot mentions.""" - user_mentions = [MockMember() for _ in range(total_user_mentions)] - bot_mentions = [MockMember(bot=True) for _ in range(total_bot_mentions)] - - mentions = user_mentions + bot_mentions - if reference is not None: - # For the sake of these tests we assume that all references are mentions. 
- mentions.append(reference.resolved.author) - msg_type = discord.MessageType.reply - else: - msg_type = discord.MessageType.default - - return MockMessage(author=author, mentions=mentions, reference=reference, type=msg_type) - - -class TestMentions(RuleTest): - """Tests applying the `mentions` antispam rule.""" - - def setUp(self): - self.apply = mentions.apply - self.config = { - "max": 2, - "interval": 10, - } - - async def test_mentions_within_limit(self): - """Messages with an allowed amount of mentions.""" - cases = ( - [make_msg("bob", 0)], - [make_msg("bob", 2)], - [make_msg("bob", 1), make_msg("bob", 1)], - [make_msg("bob", 1), make_msg("alice", 2)], - ) - - await self.run_allowed(cases) - - async def test_mentions_exceeding_limit(self): - """Messages with a higher than allowed amount of mentions.""" - cases = ( - DisallowedCase( - [make_msg("bob", 3)], - ("bob",), - 3, - ), - DisallowedCase( - [make_msg("alice", 2), make_msg("alice", 0), make_msg("alice", 1)], - ("alice",), - 3, - ), - DisallowedCase( - [make_msg("bob", 2), make_msg("alice", 3), make_msg("bob", 2)], - ("bob",), - 4, - ), - DisallowedCase( - [make_msg("bob", 3, 1)], - ("bob",), - 3, - ), - DisallowedCase( - [make_msg("bob", 3, reference=MockMessageReference())], - ("bob",), - 3, - ), - DisallowedCase( - [make_msg("bob", 3, reference=MockMessageReference(reference_author_is_bot=True))], - ("bob",), - 3 - ) - ) - - await self.run_disallowed(cases) - - async def test_ignore_bot_mentions(self): - """Messages with an allowed amount of mentions, also containing bot mentions.""" - cases = ( - [make_msg("bob", 0, 3)], - [make_msg("bob", 2, 1)], - [make_msg("bob", 1, 2), make_msg("bob", 1, 2)], - [make_msg("bob", 1, 5), make_msg("alice", 2, 5)] - ) - - await self.run_allowed(cases) - - async def test_ignore_reply_mentions(self): - """Messages with an allowed amount of mentions in the content, also containing reply mentions.""" - cases = ( - [ - make_msg("bob", 2, reference=MockMessageReference()) - ], - [ - make_msg("bob", 2, reference=MockMessageReference(reference_author_is_bot=True)) - ], - [ - make_msg("bob", 2, reference=MockMessageReference()), - make_msg("bob", 0, reference=MockMessageReference()) - ], - [ - make_msg("bob", 2, reference=MockMessageReference(reference_author_is_bot=True)), - make_msg("bob", 0, reference=MockMessageReference(reference_author_is_bot=True)) - ] - ) - - await self.run_allowed(cases) - - def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]: - last_message = case.recent_messages[0] - return tuple( - msg - for msg in case.recent_messages - if msg.author == last_message.author - ) - - def get_report(self, case: DisallowedCase) -> str: - return f"sent {case.n_violations} mentions in {self.config['interval']}s" diff --git a/tests/bot/rules/test_newlines.py b/tests/bot/rules/test_newlines.py deleted file mode 100644 index e35377773..000000000 --- a/tests/bot/rules/test_newlines.py +++ /dev/null @@ -1,102 +0,0 @@ -from typing import Iterable, List - -from bot.rules import newlines -from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage - - -def make_msg(author: str, newline_groups: List[int]) -> MockMessage: - """Init a MockMessage instance with `author` and content configured by `newline_groups`.
- - Example: - newline_groups=[3, 1, 2] -> content="\n\n\n \n \n\n" - """ - content = " ".join("\n" * n for n in newline_groups) - return MockMessage(author=author, content=content) - - -class TotalNewlinesRuleTests(RuleTest): - """Tests the `newlines` antispam rule against allowed cases and total newline count violations.""" - - def setUp(self): - self.apply = newlines.apply - self.config = { - "max": 5, # Max sum of newlines in relevant messages - "max_consecutive": 3, # Max newlines in one group, in one message - "interval": 10, - } - - async def test_allows_messages_within_limit(self): - """Cases which do not violate the rule.""" - cases = ( - [make_msg("alice", [])], # Single message with no newlines - [make_msg("alice", [1, 2]), make_msg("alice", [1, 1])], # 5 newlines in 2 messages - [make_msg("alice", [2, 2, 1]), make_msg("bob", [2, 3])], # 5 newlines from each author - [make_msg("bob", [1]), make_msg("alice", [5])], # Alice breaks the rule, but only bob is relevant - ) - - await self.run_allowed(cases) - - async def test_disallows_messages_total(self): - """Cases which violate the rule by having too many newlines in total.""" - cases = ( - DisallowedCase( # Alice sends a total of 6 newlines (disallowed) - [make_msg("alice", [2, 2]), make_msg("alice", [2])], - ("alice",), - 6, - ), - DisallowedCase( # Here we test that only alice's newlines count in the sum - [make_msg("alice", [2, 2]), make_msg("bob", [3]), make_msg("alice", [3])], - ("alice",), - 7, - ), - ) - - await self.run_disallowed(cases) - - def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]: - last_author = case.recent_messages[0].author - return tuple(msg for msg in case.recent_messages if msg.author == last_author) - - def get_report(self, case: DisallowedCase) -> str: - return f"sent {case.n_violations} newlines in {self.config['interval']}s" - - -class GroupNewlinesRuleTests(RuleTest): - """ - Tests the `newlines` antispam rule against max consecutive newline violations. - - As these violations yield a different error report, they require a different - `get_report` implementation. 
- """ - - def setUp(self): - self.apply = newlines.apply - self.config = {"max": 5, "max_consecutive": 3, "interval": 10} - - async def test_disallows_messages_consecutive(self): - """Cases which violate the rule due to having too many consecutive newlines.""" - cases = ( - DisallowedCase( # Bob sends a group of newlines too large - [make_msg("bob", [4])], - ("bob",), - 4, - ), - DisallowedCase( # Alice sends 5 in total (allowed), but 4 in one group (disallowed) - [make_msg("alice", [1]), make_msg("alice", [4])], - ("alice",), - 4, - ), - ) - - await self.run_disallowed(cases) - - def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]: - last_author = case.recent_messages[0].author - return tuple(msg for msg in case.recent_messages if msg.author == last_author) - - def get_report(self, case: DisallowedCase) -> str: - return f"sent {case.n_violations} consecutive newlines in {self.config['interval']}s" diff --git a/tests/bot/rules/test_role_mentions.py b/tests/bot/rules/test_role_mentions.py deleted file mode 100644 index 26c05d527..000000000 --- a/tests/bot/rules/test_role_mentions.py +++ /dev/null @@ -1,55 +0,0 @@ -from typing import Iterable - -from bot.rules import role_mentions -from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage - - -def make_msg(author: str, n_mentions: int) -> MockMessage: - """Build a MockMessage instance with `n_mentions` role mentions.""" - return MockMessage(author=author, role_mentions=[None] * n_mentions) - - -class RoleMentionsRuleTests(RuleTest): - """Tests for the `role_mentions` antispam rule.""" - - def setUp(self): - self.apply = role_mentions.apply - self.config = {"max": 2, "interval": 10} - - async def test_allows_messages_within_limit(self): - """Cases with a total amount of role mentions within limit.""" - cases = ( - [make_msg("bob", 2)], - [make_msg("bob", 1), make_msg("alice", 1), make_msg("bob", 1)], - ) - - await self.run_allowed(cases) - - async def test_disallows_messages_beyond_limit(self): - """Cases with more than the allowed amount of role mentions.""" - cases = ( - DisallowedCase( - [make_msg("bob", 3)], - ("bob",), - 3, - ), - DisallowedCase( - [make_msg("alice", 2), make_msg("bob", 2), make_msg("alice", 2)], - ("alice",), - 4, - ), - ) - - await self.run_disallowed(cases) - - def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]: - last_message = case.recent_messages[0] - return tuple( - msg - for msg in case.recent_messages - if msg.author == last_message.author - ) - - def get_report(self, case: DisallowedCase) -> str: - return f"sent {case.n_violations} role mentions in {self.config['interval']}s" diff --git a/tests/helpers.py b/tests/helpers.py index 1a71f210a..020f1aee5 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -393,15 +393,15 @@ dm_channel_instance = discord.DMChannel(me=me, state=state, data=dm_channel_data class MockDMChannel(CustomMockMixin, unittest.mock.Mock, HashableMixin): """ - A MagicMock subclass to mock TextChannel objects. + A MagicMock subclass to mock DMChannel objects. - Instances of this class will follow the specifications of `discord.TextChannel` instances. For + Instances of this class will follow the specifications of `discord.DMChannel` instances. For more information, see the `MockGuild` docstring. 
""" spec_set = dm_channel_instance def __init__(self, **kwargs) -> None: - default_kwargs = {'id': next(self.discord_id), 'recipient': MockUser(), "me": MockUser()} + default_kwargs = {'id': next(self.discord_id), 'recipient': MockUser(), "me": MockUser(), 'guild': None} super().__init__(**collections.ChainMap(kwargs, default_kwargs)) @@ -423,7 +423,7 @@ category_channel_instance = discord.CategoryChannel( class MockCategoryChannel(CustomMockMixin, unittest.mock.Mock, HashableMixin): def __init__(self, **kwargs) -> None: default_kwargs = {'id': next(self.discord_id)} - super().__init__(**collections.ChainMap(default_kwargs, kwargs)) + super().__init__(**collections.ChainMap(kwargs, default_kwargs)) # Create a Message instance to get a realistic MagicMock of `discord.Message` |