author     2022-03-03 15:43:00 -0500
committer  2022-03-03 15:43:00 -0500
commit     38872a74513827c5482b4df8e769f45608da6dc9
tree       109fcf024e1d570859c4c64e9e139c76f05f9f7c
parent     Add typehint for DayDuration convertor
parent     Merge pull request #2107 from an-dyy/patch-1
Merge branch 'main' into fix/modpings
59 files changed, 1827 insertions(+), 1013 deletions(-)
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 6dfe7e859..ea69f7677 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -4,10 +4,10 @@ **/bot/exts/moderation/*silence.py @MarkKoz bot/exts/info/codeblock/** @MarkKoz bot/exts/utils/extensions.py @MarkKoz -bot/exts/utils/snekbox.py @MarkKoz @Akarys42 @jb3 -bot/exts/help_channels/** @MarkKoz @Akarys42 -bot/exts/moderation/** @Akarys42 @mbaruh @Den4200 @ks129 @jb3 -bot/exts/info/** @Akarys42 @Den4200 @jb3 +bot/exts/utils/snekbox.py @MarkKoz @jb3 +bot/exts/help_channels/** @MarkKoz +bot/exts/moderation/** @mbaruh @Den4200 @ks129 @jb3 +bot/exts/info/** @Den4200 @jb3 bot/exts/info/information.py @mbaruh @jb3 bot/exts/filters/** @mbaruh @jb3 bot/exts/fun/** @ks129 @@ -21,22 +21,16 @@ bot/rules/** @mbaruh bot/utils/extensions.py @MarkKoz bot/utils/function.py @MarkKoz bot/utils/lock.py @MarkKoz -bot/utils/regex.py @Akarys42 bot/utils/scheduling.py @MarkKoz # Tests tests/_autospec.py @MarkKoz tests/bot/exts/test_cogs.py @MarkKoz -tests/** @Akarys42 # CI & Docker -.github/workflows/** @MarkKoz @Akarys42 @SebastiaanZ @Den4200 @jb3 -Dockerfile @MarkKoz @Akarys42 @Den4200 @jb3 -docker-compose.yml @MarkKoz @Akarys42 @Den4200 @jb3 - -# Tools -poetry.lock @Akarys42 -pyproject.toml @Akarys42 +.github/workflows/** @MarkKoz @SebastiaanZ @Den4200 @jb3 +Dockerfile @MarkKoz @Den4200 @jb3 +docker-compose.yml @MarkKoz @Den4200 @jb3 # Statistics bot/async_stats.py @jb3 diff --git a/.github/workflows/lint-test.yml b/.github/workflows/lint-test.yml index f2c9dfb6c..57cc544d9 100644 --- a/.github/workflows/lint-test.yml +++ b/.github/workflows/lint-test.yml @@ -46,6 +46,10 @@ jobs: PYTHONUSERBASE: ${{ github.workspace }}/.cache/py-user-base PRE_COMMIT_HOME: ${{ github.workspace }}/.cache/pre-commit-cache + # See https://github.com/pre-commit/pre-commit/issues/2178#issuecomment-1002163763 + # for why we set this. + SETUPTOOLS_USE_DISTUTILS: stdlib + steps: - name: Add custom PYTHONUSERBASE to PATH run: echo '${{ env.PYTHONUSERBASE }}/bin/' >> $GITHUB_PATH @@ -4,7 +4,6 @@ [![Lint & Test][1]][2] [![Build][3]][4] [![Deploy][5]][6] -[](https://coveralls.io/github/python-discord/bot) [](LICENSE) This project is a Discord bot specifically for use with the Python Discord server. 
It provides numerous utilities diff --git a/bot/constants.py b/bot/constants.py index 078ab6912..b775848fb 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -432,7 +432,7 @@ class Channels(metaclass=YAMLGetter): black_formatter: int bot_commands: int - discord_py: int + discord_bots: int esoteric: int voice_gate: int code_jam_planning: int @@ -445,6 +445,7 @@ class Channels(metaclass=YAMLGetter): incidents_archive: int mod_alerts: int mod_meta: int + mods: int nominations: int nomination_voting: int organisation: int @@ -619,10 +620,12 @@ class HelpChannels(metaclass=YAMLGetter): max_available: int max_total_channels: int name_prefix: str - notify: bool notify_channel: int notify_minutes: int - notify_roles: List[int] + notify_none_remaining: bool + notify_none_remaining_roles: List[int] + notify_running_low: bool + notify_running_low_threshold: int class RedirectOutput(metaclass=YAMLGetter): diff --git a/bot/converters.py b/bot/converters.py index bad72ab9c..b31ef76eb 100644 --- a/bot/converters.py +++ b/bot/converters.py @@ -6,9 +6,9 @@ from datetime import datetime, timezone from ssl import CertificateError import dateutil.parser -import dateutil.tz import discord from aiohttp import ClientConnectorError +from botcore.regex import DISCORD_INVITE from dateutil.relativedelta import relativedelta from discord.ext.commands import BadArgument, Bot, Context, Converter, IDConverter, MemberConverter, UserConverter from discord.utils import escape_markdown, snowflake_time @@ -20,9 +20,8 @@ from bot.errors import InvalidInfraction from bot.exts.info.doc import _inventory_parser from bot.exts.info.tags import TagIdentifier from bot.log import get_logger +from bot.utils import time from bot.utils.extensions import EXTENSIONS, unqualify -from bot.utils.regex import INVITE_RE -from bot.utils.time import parse_duration_string if t.TYPE_CHECKING: from bot.exts.info.source import SourceType @@ -72,7 +71,7 @@ class ValidDiscordServerInvite(Converter): async def convert(self, ctx: Context, server_invite: str) -> dict: """Check whether the string is a valid Discord server invite.""" - invite_code = INVITE_RE.match(server_invite) + invite_code = DISCORD_INVITE.match(server_invite) if invite_code: response = await ctx.bot.http_session.get( f"{URLs.discord_invite_api}/{invite_code.group('invite')}" @@ -338,7 +337,7 @@ class DurationDelta(Converter): The units need to be provided in descending order of magnitude. """ - if not (delta := parse_duration_string(duration)): + if not (delta := time.parse_duration_string(duration)): raise BadArgument(f"`{duration}` is not a valid duration string.") return delta @@ -454,9 +453,9 @@ class ISODateTime(Converter): raise BadArgument(f"`{datetime_string}` is not a valid ISO-8601 datetime string") if dt.tzinfo: - dt = dt.astimezone(dateutil.tz.UTC) + dt = dt.astimezone(timezone.utc) else: # Without a timezone, assume it represents UTC. 
- dt = dt.replace(tzinfo=dateutil.tz.UTC) + dt = dt.replace(tzinfo=timezone.utc) return dt diff --git a/bot/decorators.py b/bot/decorators.py index 048a2a09a..f4331264f 100644 --- a/bot/decorators.py +++ b/bot/decorators.py @@ -188,7 +188,7 @@ def respect_role_hierarchy(member_arg: function.Argument) -> t.Callable: """ def decorator(func: types.FunctionType) -> types.FunctionType: @command_wraps(func) - async def wrapper(*args, **kwargs) -> None: + async def wrapper(*args, **kwargs) -> t.Any: log.trace(f"{func.__name__}: respect role hierarchy decorator called") bound_args = function.get_bound_args(func, args, kwargs) @@ -196,8 +196,7 @@ def respect_role_hierarchy(member_arg: function.Argument) -> t.Callable: if not isinstance(target, Member): log.trace("The target is not a discord.Member; skipping role hierarchy check.") - await func(*args, **kwargs) - return + return await func(*args, **kwargs) ctx = function.get_arg_value(1, bound_args) cmd = ctx.command.name @@ -214,7 +213,7 @@ def respect_role_hierarchy(member_arg: function.Argument) -> t.Callable: ) else: log.trace(f"{func.__name__}: {target.top_role=} < {actor.top_role=}; calling func") - await func(*args, **kwargs) + return await func(*args, **kwargs) return wrapper return decorator diff --git a/bot/exts/filters/antimalware.py b/bot/exts/filters/antimalware.py index d727f7940..6cccf3680 100644 --- a/bot/exts/filters/antimalware.py +++ b/bot/exts/filters/antimalware.py @@ -18,14 +18,8 @@ PY_EMBED_DESCRIPTION = ( TXT_LIKE_FILES = {".txt", ".csv", ".json"} TXT_EMBED_DESCRIPTION = ( - "**Uh-oh!** It looks like your message got zapped by our spam filter. " - "We currently don't allow `{blocked_extension}` attachments, " - "so here are some tips to help you travel safely: \n\n" - "• If you attempted to send a message longer than 2000 characters, try shortening your message " - "to fit within the character limit or use a pasting service (see below) \n\n" - "• If you tried to show someone your code, you can use codeblocks \n(run `!code-blocks` in " - "{cmd_channel_mention} for more information) or use a pasting service like: " - f"\n\n{URLs.site_schema}{URLs.site_paste}" + "You either uploaded a `{blocked_extension}` file or entered a message that was too long. " + f"Please use our [paste bin]({URLs.site_schema}{URLs.site_paste}) instead." ) DISALLOWED_EMBED_DESCRIPTION = ( diff --git a/bot/exts/filters/antispam.py b/bot/exts/filters/antispam.py index ddfd11231..bcd845a43 100644 --- a/bot/exts/filters/antispam.py +++ b/bot/exts/filters/antispam.py @@ -103,6 +103,7 @@ class DeletionContext: mod_alert_message += content await modlog.send_log_message( + content=", ".join(str(m.id) for m in self.members), # quality-of-life improvement for mobile moderators icon_url=Icons.filtering, colour=Colour(Colours.soft_red), title="Spam detected!", diff --git a/bot/exts/filters/filter_lists.py b/bot/exts/filters/filter_lists.py index ee5bd89f3..a883ddf54 100644 --- a/bot/exts/filters/filter_lists.py +++ b/bot/exts/filters/filter_lists.py @@ -1,3 +1,4 @@ +import re from typing import Optional from discord import Colour, Embed @@ -72,6 +73,18 @@ class FilterLists(Cog): elif list_type == "FILE_FORMAT" and not content.startswith("."): content = f".{content}" + # If it's a filter token, validate the passed regex + elif list_type == "FILTER_TOKEN": + try: + re.compile(content) + except re.error as e: + await ctx.message.add_reaction("❌") + await ctx.send( + f"{ctx.author.mention} that's not a valid regex! " + f"Regex error message: {e.msg}." 
+ ) + return + # Try to add the item to the database log.trace(f"Trying to add the {content} item to the {list_type} {allow_type}") payload = { diff --git a/bot/exts/filters/filtering.py b/bot/exts/filters/filtering.py index 8accc61f8..f44b28125 100644 --- a/bot/exts/filters/filtering.py +++ b/bot/exts/filters/filtering.py @@ -8,7 +8,9 @@ import arrow import dateutil.parser import discord.errors import regex +import tldextract from async_rediscache import RedisCache +from botcore.regex import DISCORD_INVITE from dateutil.relativedelta import relativedelta from discord import Colour, HTTPException, Member, Message, NotFound, TextChannel from discord.ext.commands import Cog @@ -22,7 +24,6 @@ from bot.exts.moderation.modlog import ModLog from bot.log import get_logger from bot.utils import scheduling from bot.utils.messages import format_user -from bot.utils.regex import INVITE_RE log = get_logger(__name__) @@ -255,6 +256,7 @@ class Filtering(Cog): ) await self.mod_log.send_log_message( + content=str(member.id), # quality-of-life improvement for mobile moderators icon_url=Icons.token_removed, colour=Colours.soft_red, title="Username filtering alert", @@ -422,9 +424,12 @@ class Filtering(Cog): # Allow specific filters to override ping_everyone ping_everyone = Filter.ping_everyone and _filter.get("ping_everyone", True) - # If we are going to autoban, we don't want to ping + content = str(msg.author.id) # quality-of-life improvement for mobile moderators + + # If we are going to autoban, we don't want to ping and don't need the user ID if reason and "[autoban]" in reason: ping_everyone = False + content = None eval_msg = "using !eval " if is_eval else "" footer = f"Reason: {reason}" if reason else None @@ -438,6 +443,7 @@ class Filtering(Cog): # Send pretty mod log embed to mod-alerts await self.mod_log.send_log_message( + content=content, icon_url=Icons.filtering, colour=Colour(Colours.soft_red), title=f"{_filter['type'].title()} triggered!", @@ -535,7 +541,10 @@ class Filtering(Cog): for match in URL_RE.finditer(text): for url in domain_blacklist: if url.lower() in match.group(1).lower(): - return True, self._get_filterlist_value("domain_name", url, allowed=False)["comment"] + blacklisted_parsed = tldextract.extract(url.lower()) + url_parsed = tldextract.extract(match.group(1).lower()) + if blacklisted_parsed.registered_domain == url_parsed.registered_domain: + return True, self._get_filterlist_value("domain_name", url, allowed=False)["comment"] return False, None @staticmethod @@ -562,7 +571,7 @@ class Filtering(Cog): # discord\.gg/gdudes-pony-farm text = text.replace("\\", "") - invites = [m.group("invite") for m in INVITE_RE.finditer(text)] + invites = [m.group("invite") for m in DISCORD_INVITE.finditer(text)] invite_data = dict() for invite in invites: if invite in invite_data: diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py index e43c1e789..d9cebf215 100644 --- a/bot/exts/help_channels/_channel.py +++ b/bot/exts/help_channels/_channel.py @@ -1,3 +1,4 @@ +import re import typing as t from datetime import timedelta from enum import Enum @@ -16,13 +17,14 @@ log = get_logger(__name__) MAX_CHANNELS_PER_CATEGORY = 50 EXCLUDED_CHANNELS = (constants.Channels.cooldown,) +CLAIMED_BY_RE = re.compile(r"Channel claimed by <@!?(?P<user_id>\d{17,20})>\.$") class ClosingReason(Enum): """All possible closing reasons for help channels.""" COMMAND = "command" - LATEST_MESSSAGE = "auto.latest_message" + LATEST_MESSAGE = "auto.latest_message" CLAIMANT_TIMEOUT = 
"auto.claimant_timeout" OTHER_TIMEOUT = "auto.other_timeout" DELETED = "auto.deleted" @@ -75,7 +77,7 @@ async def get_closing_time(channel: discord.TextChannel, init_done: bool) -> t.T # Use the greatest offset to avoid the possibility of prematurely closing the channel. time = Arrow.fromdatetime(msg.created_at) + timedelta(minutes=idle_minutes_claimant) - reason = ClosingReason.DELETED if is_empty else ClosingReason.LATEST_MESSSAGE + reason = ClosingReason.DELETED if is_empty else ClosingReason.LATEST_MESSAGE return time, reason claimant_time = Arrow.utcfromtimestamp(claimant_time) @@ -157,3 +159,36 @@ async def move_to_bottom(channel: discord.TextChannel, category_id: int, **optio # Now that the channel is moved, we can edit the other attributes if options: await channel.edit(**options) + + +async def ensure_cached_claimant(channel: discord.TextChannel) -> None: + """ + Ensure there is a claimant cached for each help channel. + + Check the redis cache first, return early if there is already a claimant cached. + If there isn't an entry in redis, search for the "Claimed by X." embed in channel history. + Stopping early if we discover a dormant message first. + + If a claimant could not be found, send a warning to #helpers and set the claimant to the bot. + """ + if await _caches.claimants.get(channel.id): + return + + async for message in channel.history(limit=1000): + if message.author.id != bot.instance.user.id: + # We only care about bot messages + continue + if message.embeds: + if _message._match_bot_embed(message, _message.DORMANT_MSG): + log.info("Hit the dormant message embed before finding a claimant in %s (%d).", channel, channel.id) + break + # Only set the claimant if the first embed matches the claimed channel embed regex + if match := CLAIMED_BY_RE.match(message.embeds[0].description): + await _caches.claimants.set(channel.id, int(match.group("user_id"))) + return + + await bot.instance.get_channel(constants.Channels.helpers).send( + f"I couldn't find a claimant for {channel.mention} in that last 1000 messages. " + "Please use your helper powers to close the channel if/when appropriate." + ) + await _caches.claimants.set(channel.id, bot.instance.user.id) diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index 60209ba6e..a93acffb6 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -78,7 +78,10 @@ class HelpChannels(commands.Cog): self.channel_queue: asyncio.Queue[discord.TextChannel] = None self.name_queue: t.Deque[str] = None - self.last_notification: t.Optional[arrow.Arrow] = None + # Notifications + # Using a very old date so that we don't have to use Optional typing. + self.last_none_remaining_notification = arrow.get('1815-12-10T18:00:00.00000+00:00') + self.last_running_low_notification = arrow.get('1815-12-10T18:00:00.00000+00:00') self.dynamic_message: t.Optional[int] = None self.available_help_channels: t.Set[discord.TextChannel] = set() @@ -105,11 +108,36 @@ class HelpChannels(commands.Cog): """ Claim the channel in which the question `message` was sent. - Move the channel to the In Use category and pin the `message`. Add a cooldown to the - claimant to prevent them from asking another question. Lastly, make a new channel available. + Send an embed stating the claimant, move the channel to the In Use category, and pin the `message`. + Add a cooldown to the claimant to prevent them from asking another question. + Lastly, make a new channel available. 
""" log.info(f"Channel #{message.channel} was claimed by `{message.author.id}`.") - await self.move_to_in_use(message.channel) + + try: + await self.move_to_in_use(message.channel) + except discord.DiscordServerError: + try: + await message.channel.send( + "The bot encountered a Discord API error while trying to move this channel, please try again later." + ) + except Exception as e: + log.warning("Error occurred while sending fail claim message:", exc_info=e) + log.info( + "500 error from Discord when moving #%s (%d) to in-use for %s (%d). Cancelling claim.", + message.channel.name, + message.channel.id, + message.author.name, + message.author.id, + ) + self.bot.stats.incr("help.failed_claims.500_on_move") + return + + embed = discord.Embed( + description=f"Channel claimed by {message.author.mention}.", + color=constants.Colours.bright_green, + ) + await message.channel.send(embed=embed) # Handle odd edge case of `message.author` not being a `discord.Member` (see bot#1839) if not isinstance(message.author, discord.Member): @@ -227,13 +255,21 @@ class HelpChannels(commands.Cog): if not channel: log.info("Couldn't create a candidate channel; waiting to get one from the queue.") - notify_channel = self.bot.get_channel(constants.HelpChannels.notify_channel) - last_notification = await _message.notify(notify_channel, self.last_notification) + last_notification = await _message.notify_none_remaining(self.last_none_remaining_notification) + if last_notification: - self.last_notification = last_notification - self.bot.stats.incr("help.out_of_channel_alerts") + self.last_none_remaining_notification = last_notification - channel = await self.wait_for_dormant_channel() + channel = await self.wait_for_dormant_channel() # Blocks until a new channel is available + + else: + last_notification = await _message.notify_running_low( + self.channel_queue.qsize(), + self.last_running_low_notification + ) + + if last_notification: + self.last_running_low_notification = last_notification return channel @@ -301,6 +337,7 @@ class HelpChannels(commands.Cog): log.trace("Moving or rescheduling in-use channels.") for channel in _channel.get_category_channels(self.in_use_category): + await _channel.ensure_cached_claimant(channel) await self.move_idle_channel(channel, has_task=False) # Prevent the command from being used until ready. @@ -426,18 +463,21 @@ class HelpChannels(commands.Cog): async def _unclaim_channel( self, channel: discord.TextChannel, - claimant_id: int, + claimant_id: t.Optional[int], closed_on: _channel.ClosingReason ) -> None: """Actual implementation of `unclaim_channel`. See that for full documentation.""" await _caches.claimants.delete(channel.id) await _caches.session_participants.delete(channel.id) - claimant = await members.get_or_fetch_member(self.guild, claimant_id) - if claimant is None: - log.info(f"{claimant_id} left the guild during their help session; the cooldown role won't be removed") + if not claimant_id: + log.info("No claimant given when un-claiming %s (%d). 
Skipping role removal.", channel, channel.id) else: - await members.handle_role_change(claimant, claimant.remove_roles, self.cooldown_role) + claimant = await members.get_or_fetch_member(self.guild, claimant_id) + if claimant is None: + log.info(f"{claimant_id} left the guild during their help session; the cooldown role won't be removed") + else: + await members.handle_role_change(claimant, claimant.remove_roles, self.cooldown_role) await _message.unpin(channel) await _stats.report_complete_session(channel.id, closed_on) diff --git a/bot/exts/help_channels/_message.py b/bot/exts/help_channels/_message.py index 241dd606c..7ceed9b4d 100644 --- a/bot/exts/help_channels/_message.py +++ b/bot/exts/help_channels/_message.py @@ -124,52 +124,93 @@ async def dm_on_open(message: discord.Message) -> None: ) -async def notify(channel: discord.TextChannel, last_notification: t.Optional[Arrow]) -> t.Optional[Arrow]: +async def notify_none_remaining(last_notification: Arrow) -> t.Optional[Arrow]: """ - Send a message in `channel` notifying about a lack of available help channels. + Send a pinging message in `channel` notifying about there being no dormant channels remaining. If a notification was sent, return the time at which the message was sent. Otherwise, return None. Configuration: - - * `HelpChannels.notify` - toggle notifications - * `HelpChannels.notify_minutes` - minimum interval between notifications - * `HelpChannels.notify_roles` - roles mentioned in notifications + * `HelpChannels.notify_minutes` - minimum interval between notifications + * `HelpChannels.notify_none_remaining` - toggle none_remaining notifications + * `HelpChannels.notify_none_remaining_roles` - roles mentioned in notifications """ - if not constants.HelpChannels.notify: - return + if not constants.HelpChannels.notify_none_remaining: + return None + + if (arrow.utcnow() - last_notification).total_seconds() < (constants.HelpChannels.notify_minutes * 60): + log.trace("Did not send none_remaining notification as it hasn't been enough time since the last one.") + return None log.trace("Notifying about lack of channels.") - if last_notification: - elapsed = (arrow.utcnow() - last_notification).seconds - minimum_interval = constants.HelpChannels.notify_minutes * 60 - should_send = elapsed >= minimum_interval - else: - should_send = True + mentions = " ".join(f"<@&{role}>" for role in constants.HelpChannels.notify_none_remaining_roles) + allowed_roles = [discord.Object(id_) for id_ in constants.HelpChannels.notify_none_remaining_roles] - if not should_send: - log.trace("Notification not sent because it's too recent since the previous one.") - return + channel = bot.instance.get_channel(constants.HelpChannels.notify_channel) + if channel is None: + log.trace("Did not send none_remaining notification as the notification channel couldn't be gathered.") try: - log.trace("Sending notification message.") - - mentions = " ".join(f"<@&{role}>" for role in constants.HelpChannels.notify_roles) - allowed_roles = [discord.Object(id_) for id_ in constants.HelpChannels.notify_roles] - - message = await channel.send( + await channel.send( f"{mentions} A new available help channel is needed but there " - f"are no more dormant ones. Consider freeing up some in-use channels manually by " + "are no more dormant ones. 
Consider freeing up some in-use channels manually by " f"using the `{constants.Bot.prefix}dormant` command within the channels.", allowed_mentions=discord.AllowedMentions(everyone=False, roles=allowed_roles) ) - - return Arrow.fromdatetime(message.created_at) except Exception: # Handle it here cause this feature isn't critical for the functionality of the system. log.exception("Failed to send notification about lack of dormant channels!") + else: + bot.instance.stats.incr("help.out_of_channel_alerts") + return arrow.utcnow() + + +async def notify_running_low(number_of_channels_left: int, last_notification: Arrow) -> t.Optional[Arrow]: + """ + Send a non-pinging message in `channel` notifying about there being a low amount of dormant channels. + + This will include the number of dormant channels left `number_of_channels_left` + + If a notification was sent, return the time at which the message was sent. + Otherwise, return None. + + Configuration: + * `HelpChannels.notify_minutes` - minimum interval between notifications + * `HelpChannels.notify_running_low` - toggle running_low notifications + * `HelpChannels.notify_running_low_threshold` - minimum amount of channels to trigger running_low notifications + """ + if not constants.HelpChannels.notify_running_low: + return None + + if number_of_channels_left > constants.HelpChannels.notify_running_low_threshold: + log.trace("Did not send notify_running_low notification as the threshold was not met.") + return None + + if (arrow.utcnow() - last_notification).total_seconds() < (constants.HelpChannels.notify_minutes * 60): + log.trace("Did not send notify_running_low notification as it hasn't been enough time since the last one.") + return None + + log.trace("Notifying about getting close to no dormant channels.") + + channel = bot.instance.get_channel(constants.HelpChannels.notify_channel) + if channel is None: + log.trace("Did not send notify_running notification as the notification channel couldn't be gathered.") + + try: + if number_of_channels_left == 1: + message = f"There is only {number_of_channels_left} dormant channel left. " + else: + message = f"There are only {number_of_channels_left} dormant channels left. " + message += "Consider participating in some help channels so that we don't run out." + await channel.send(message) + except Exception: + # Handle it here cause this feature isn't critical for the functionality of the system. + log.exception("Failed to send notification about running low of dormant channels!") + else: + bot.instance.stats.incr("help.running_low_alerts") + return arrow.utcnow() async def pin(message: discord.Message) -> None: diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index ebf5f5932..4dc5276d9 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -464,7 +464,7 @@ class DocCog(commands.Cog): ) -> None: """Clear the persistent redis cache for `package`.""" if await doc_cache.delete(package_name): - await self.item_fetcher.stale_inventory_notifier.symbol_counter.delete() + await self.item_fetcher.stale_inventory_notifier.symbol_counter.delete(package_name) await ctx.send(f"Successfully cleared the cache for `{package_name}`.") else: await ctx.send("No keys matching the package found.") diff --git a/bot/exts/info/help.py b/bot/exts/info/help.py index 06799fb71..864e7edd2 100644 --- a/bot/exts/info/help.py +++ b/bot/exts/info/help.py @@ -113,12 +113,28 @@ class CommandView(ui.View): If the command has a parent, a button is added to the view to show that parent's help embed. 
""" - def __init__(self, help_command: CustomHelpCommand, command: Command): + def __init__(self, help_command: CustomHelpCommand, command: Command, context: Context): + self.context = context super().__init__() if command.parent: self.children.append(GroupButton(help_command, command, emoji="↩️")) + async def interaction_check(self, interaction: Interaction) -> bool: + """ + Ensures that the button only works for the user who spawned the help command. + + Also allows moderators to access buttons even when not the author of message. + """ + if interaction.user is not None: + if any(role.id in constants.MODERATION_ROLES for role in interaction.user.roles): + return True + + elif interaction.user.id == self.context.author.id: + return True + + return False + class GroupView(CommandView): """ @@ -130,8 +146,8 @@ class GroupView(CommandView): MAX_BUTTONS_IN_ROW = 5 MAX_ROWS = 5 - def __init__(self, help_command: CustomHelpCommand, group: Group, subcommands: list[Command]): - super().__init__(help_command, group) + def __init__(self, help_command: CustomHelpCommand, group: Group, subcommands: list[Command], context: Context): + super().__init__(help_command, group, context) # Don't add buttons if only a portion of the subcommands can be shown. if len(subcommands) + len(self.children) > self.MAX_ROWS * self.MAX_BUTTONS_IN_ROW: log.trace(f"Attempted to add navigation buttons for `{group.qualified_name}`, but there was no space.") @@ -302,7 +318,7 @@ class CustomHelpCommand(HelpCommand): embed.description = command_details # If the help is invoked in the context of an error, don't show subcommand navigation. - view = CommandView(self, command) if not self.context.command_failed else None + view = CommandView(self, command, self.context) if not self.context.command_failed else None return embed, view async def send_command_help(self, command: Command) -> None: @@ -347,7 +363,7 @@ class CustomHelpCommand(HelpCommand): embed.description += f"\n**Subcommands:**\n{command_details}" # If the help is invoked in the context of an error, don't show subcommand navigation. 
- view = GroupView(self, group, commands_) if not self.context.command_failed else None + view = GroupView(self, group, commands_, self.context) if not self.context.command_failed else None return embed, view async def send_group_help(self, group: Group) -> None: diff --git a/bot/exts/info/information.py b/bot/exts/info/information.py index fa22a4fe9..e616b9208 100644 --- a/bot/exts/info/information.py +++ b/bot/exts/info/information.py @@ -18,10 +18,10 @@ from bot.decorators import in_whitelist from bot.errors import NonExistentRoleError from bot.log import get_logger from bot.pagination import LinePaginator +from bot.utils import time from bot.utils.channel import is_mod_channel, is_staff_channel from bot.utils.checks import cooldown_with_role_bypass, has_no_roles_check, in_whitelist_check from bot.utils.members import get_or_fetch_member -from bot.utils.time import TimestampFormats, discord_timestamp, humanize_delta log = get_logger(__name__) @@ -83,7 +83,7 @@ class Information(Cog): defcon_info = "" if cog := self.bot.get_cog("Defcon"): - threshold = humanize_delta(cog.threshold) if cog.threshold else "-" + threshold = time.humanize_delta(cog.threshold) if cog.threshold else "-" defcon_info = f"Defcon threshold: {threshold}\n" verification = f"Verification level: {ctx.guild.verification_level.name}\n" @@ -173,16 +173,13 @@ class Information(Cog): """Returns an embed full of server information.""" embed = Embed(colour=Colour.og_blurple(), title="Server Information") - created = discord_timestamp(ctx.guild.created_at, TimestampFormats.RELATIVE) - region = ctx.guild.region + created = time.format_relative(ctx.guild.created_at) num_roles = len(ctx.guild.roles) - 1 # Exclude @everyone # Server Features are only useful in certain channels if ctx.channel.id in ( *constants.MODERATION_CHANNELS, constants.Channels.dev_core, - constants.Channels.dev_contrib, - constants.Channels.bot_commands ): features = f"\nFeatures: {', '.join(ctx.guild.features)}" else: @@ -199,7 +196,6 @@ class Information(Cog): embed.description = ( f"Created: {created}" - f"\nVoice region: {region}" f"{features}" f"\nRoles: {num_roles}" f"\nMember status: {member_status}" @@ -229,7 +225,7 @@ class Information(Cog): @command(name="user", aliases=["user_info", "member", "member_info", "u"]) async def user_info(self, ctx: Context, user_or_message: Union[MemberOrUser, Message] = None) -> None: """Returns info about a user.""" - if isinstance(user_or_message, Message): + if passed_as_message := isinstance(user_or_message, Message): user = user_or_message.author else: user = user_or_message @@ -244,20 +240,23 @@ class Information(Cog): # Will redirect to #bot-commands if it fails. 
if in_whitelist_check(ctx, roles=constants.STAFF_PARTNERS_COMMUNITY_ROLES): - embed = await self.create_user_embed(ctx, user) + embed = await self.create_user_embed(ctx, user, passed_as_message) await ctx.send(embed=embed) - async def create_user_embed(self, ctx: Context, user: MemberOrUser) -> Embed: + async def create_user_embed(self, ctx: Context, user: MemberOrUser, passed_as_message: bool) -> Embed: """Creates an embed containing information on the `user`.""" on_server = bool(await get_or_fetch_member(ctx.guild, user.id)) - created = discord_timestamp(user.created_at, TimestampFormats.RELATIVE) + created = time.format_relative(user.created_at) name = str(user) if on_server and user.nick: name = f"{user.nick} ({name})" name = escape_markdown(name) + if passed_as_message: + name += " - From Message" + if user.public_flags.verified_bot: name += f" {constants.Emojis.verified_bot}" elif user.bot: @@ -271,7 +270,7 @@ class Information(Cog): if on_server: if user.joined_at: - joined = discord_timestamp(user.joined_at, TimestampFormats.RELATIVE) + joined = time.format_relative(user.joined_at) else: joined = "Unable to get join date" @@ -284,7 +283,6 @@ class Information(Cog): membership = textwrap.dedent("\n".join([f"{key}: {value}" for key, value in membership.items()])) else: - roles = None membership = "The user is not a member of the server" fields = [ @@ -300,11 +298,11 @@ class Information(Cog): "Member information", membership ), + await self.user_messages(user), ] # Show more verbose output in moderation channels for infractions and nominations if is_mod_channel(ctx.channel): - fields.append(await self.user_messages(user)) fields.append(await self.expanded_user_infraction_counts(user)) fields.append(await self.user_nomination_counts(user)) else: @@ -422,13 +420,8 @@ class Information(Cog): if e.status == 404: activity_output = "No activity" else: - activity_output.append(user_activity["total_messages"] or "No messages") - - if (activity_blocks := user_activity.get("activity_blocks")) is not None: - # activity_blocks is not included in the response if the user has a lot of messages - activity_output.append(activity_blocks or "No activity") # Special case when activity_blocks is 0. - else: - activity_output.append("Too many to count!") + activity_output.append(f"{user_activity['total_messages']:,}" or "No messages") + activity_output.append(f"{user_activity['activity_blocks']:,}" or "No activity") activity_output = "\n".join( f"{name}: {metric}" for name, metric in zip(["Messages", "Activity blocks"], activity_output) @@ -477,7 +470,7 @@ class Information(Cog): If `json` is True, send the information in a copy-pasteable Python format. 
""" - if ctx.author not in message.channel.members: + if not message.channel.permissions_for(ctx.author).read_messages: await ctx.send(":x: You do not have permissions to see the channel this message is in.") return diff --git a/bot/exts/info/subscribe.py b/bot/exts/info/subscribe.py index 1299d5d59..eff0c13b8 100644 --- a/bot/exts/info/subscribe.py +++ b/bot/exts/info/subscribe.py @@ -171,7 +171,7 @@ class Subscribe(commands.Cog): self.assignable_roles.sort(key=operator.methodcaller("is_currently_available"), reverse=True) @commands.cooldown(1, 10, commands.BucketType.member) - @commands.command(name="subscribe") + @commands.command(name="subscribe", aliases=("unsubscribe",)) @redirect_output( destination_channel=constants.Channels.bot_commands, bypass_roles=constants.STAFF_PARTNERS_COMMUNITY_ROLES, diff --git a/bot/exts/info/tags.py b/bot/exts/info/tags.py index e5930a433..f66237c8e 100644 --- a/bot/exts/info/tags.py +++ b/bot/exts/info/tags.py @@ -275,6 +275,11 @@ class Tags(Cog): ] tag = self.tags.get(tag_identifier) + + if tag is None and tag_identifier.group is not None: + # Try exact match with only the name + tag = self.tags.get(TagIdentifier(None, tag_identifier.group)) + if tag is None and len(filtered_tags) == 1: tag_identifier = filtered_tags[0][0] tag = filtered_tags[0][1] diff --git a/bot/exts/moderation/clean.py b/bot/exts/moderation/clean.py index 826265aa3..cb6836258 100644 --- a/bot/exts/moderation/clean.py +++ b/bot/exts/moderation/clean.py @@ -1,12 +1,11 @@ import contextlib -import logging import re import time from collections import defaultdict from contextlib import suppress from datetime import datetime -from itertools import islice -from typing import Any, Callable, Iterable, Literal, Optional, TYPE_CHECKING, Union +from itertools import takewhile +from typing import Callable, Iterable, Literal, Optional, TYPE_CHECKING, Union from discord import Colour, Message, NotFound, TextChannel, User, errors from discord.ext.commands import Cog, Context, Converter, Greedy, group, has_any_role @@ -17,12 +16,11 @@ from bot.bot import Bot from bot.constants import Channels, CleanMessages, Colours, Emojis, Event, Icons, MODERATION_ROLES from bot.converters import Age, ISODateTime from bot.exts.moderation.modlog import ModLog +from bot.log import get_logger from bot.utils.channel import is_mod_channel -log = logging.getLogger(__name__) +log = get_logger(__name__) -# Default number of messages to look at in each channel. -DEFAULT_TRAVERSE = 10 # Number of seconds before command invocations and responses are deleted in non-moderation channels. 
MESSAGE_DELETE_DELAY = 5 @@ -33,12 +31,12 @@ CleanLimit = Union[Message, Age, ISODateTime] class CleanChannels(Converter): - """A converter that turns the given string to a list of channels to clean, or the literal `*` for all channels.""" + """A converter to turn the string into a list of channels to clean, or the literal `*` for all public channels.""" _channel_converter = TextChannelConverter() async def convert(self, ctx: Context, argument: str) -> Union[Literal["*"], list[TextChannel]]: - """Converts a string to a list of channels to clean, or the literal `*` for all channels.""" + """Converts a string to a list of channels to clean, or the literal `*` for all public channels.""" if argument == "*": return "*" return [await self._channel_converter.convert(ctx, channel) for channel in argument.split()] @@ -87,7 +85,6 @@ class Clean(Cog): @staticmethod def _validate_input( - traverse: int, channels: Optional[CleanChannels], bots_only: bool, users: Optional[list[User]], @@ -95,9 +92,9 @@ class Clean(Cog): second_limit: Optional[CleanLimit], ) -> None: """Raise errors if an argument value or a combination of values is invalid.""" - # Is this an acceptable amount of messages to traverse? - if traverse > CleanMessages.message_limit: - raise BadArgument(f"Cannot traverse more than {CleanMessages.message_limit} messages.") + if first_limit is None: + # This is an optional argument for the sake of the master command, but it's actually required. + raise BadArgument("Missing cleaning limit.") if (isinstance(first_limit, Message) or isinstance(second_limit, Message)) and channels: raise BadArgument("Both a message limit and channels specified.") @@ -110,10 +107,6 @@ class Clean(Cog): if users and bots_only: raise BadArgument("Marked as bots only, but users were specified.") - # This is an implementation error rather than user error. - if second_limit and not first_limit: - raise ValueError("Second limit specified without the first.") - @staticmethod async def _send_expiring_message(ctx: Context, content: str) -> None: """Send `content` to the context channel. Automatically delete if it's not a mod channel.""" @@ -121,12 +114,39 @@ class Clean(Cog): await ctx.send(content, delete_after=delete_after) @staticmethod + def _channels_set( + channels: CleanChannels, ctx: Context, first_limit: CleanLimit, second_limit: CleanLimit + ) -> set[TextChannel]: + """Standardize the input `channels` argument to a usable set of text channels.""" + # Default to using the invoking context's channel or the channel of the message limit(s). + if not channels: + # Input was validated - if first_limit is a message, second_limit won't point at a different channel. + if isinstance(first_limit, Message): + channels = {first_limit.channel} + elif isinstance(second_limit, Message): + channels = {second_limit.channel} + else: + channels = {ctx.channel} + else: + if channels == "*": + channels = { + channel for channel in ctx.guild.channels + if isinstance(channel, TextChannel) + # Assume that non-public channels are not needed to optimize for speed. 
+ and channel.permissions_for(ctx.guild.default_role).view_channel + } + else: + channels = set(channels) + + return channels + + @staticmethod def _build_predicate( + first_limit: datetime, + second_limit: Optional[datetime] = None, bots_only: bool = False, users: Optional[list[User]] = None, regex: Optional[re.Pattern] = None, - first_limit: Optional[datetime] = None, - second_limit: Optional[datetime] = None, ) -> Predicate: """Return the predicate that decides whether to delete a given message.""" def predicate_bots_only(message: Message) -> bool: @@ -167,20 +187,18 @@ class Clean(Cog): predicates = [] # Set up the correct predicate + if second_limit: + predicates.append(predicate_range) # Delete messages in the specified age range + else: + predicates.append(predicate_after) # Delete messages older than the specified age + if bots_only: predicates.append(predicate_bots_only) # Delete messages from bots if users: predicates.append(predicate_specific_users) # Delete messages from specific user if regex: predicates.append(predicate_regex) # Delete messages that match regex - # Add up to one of the following: - if second_limit: - predicates.append(predicate_range) # Delete messages in the specified age range - elif first_limit: - predicates.append(predicate_after) # Delete messages older than specific message - if not predicates: - return lambda m: True if len(predicates) == 1: return predicates[0] return lambda m: all(pred(m) for pred in predicates) @@ -195,16 +213,25 @@ class Clean(Cog): # Invocation message has already been deleted log.info("Tried to delete invocation message, but it was already deleted.") - def _get_messages_from_cache(self, traverse: int, to_delete: Predicate) -> tuple[defaultdict[Any, list], list[int]]: + def _use_cache(self, limit: datetime) -> bool: + """Tell whether all messages to be cleaned can be found in the cache.""" + return self.bot.cached_messages[0].created_at <= limit + + def _get_messages_from_cache( + self, + channels: set[TextChannel], + to_delete: Predicate, + lower_limit: datetime + ) -> tuple[defaultdict[TextChannel, list], list[int]]: """Helper function for getting messages from the cache.""" message_mappings = defaultdict(list) message_ids = [] - for message in islice(self.bot.cached_messages, traverse): + for message in takewhile(lambda m: m.created_at > lower_limit, reversed(self.bot.cached_messages)): if not self.cleaning: # Cleaning was canceled return message_mappings, message_ids - if to_delete(message): + if message.channel in channels and to_delete(message): message_mappings[message.channel].append(message) message_ids.append(message.id) @@ -212,17 +239,16 @@ class Clean(Cog): async def _get_messages_from_channels( self, - traverse: int, channels: Iterable[TextChannel], to_delete: Predicate, - before: Optional[datetime] = None, + before: datetime, after: Optional[datetime] = None - ) -> tuple[defaultdict[Any, list], list]: + ) -> tuple[defaultdict[TextChannel, list], list]: message_mappings = defaultdict(list) message_ids = [] for channel in channels: - async for message in channel.history(limit=traverse, before=before, after=after): + async for message in channel.history(limit=CleanMessages.message_limit, before=before, after=after): if not self.cleaning: # Cleaning was canceled, return empty containers. @@ -305,12 +331,17 @@ class Clean(Cog): return deleted - async def _modlog_cleaned_messages(self, messages: list[Message], channels: CleanChannels, ctx: Context) -> bool: - """Log the deleted messages to the modlog. 
Return True if logging was successful.""" + async def _modlog_cleaned_messages( + self, + messages: list[Message], + channels: CleanChannels, + ctx: Context + ) -> Optional[str]: + """Log the deleted messages to the modlog, returning the log url if logging was successful.""" if not messages: # Can't build an embed, nothing to clean! await self._send_expiring_message(ctx, ":x: No matching messages could be found.") - return False + return None # Reverse the list to have reverse chronological order log_messages = reversed(messages) @@ -318,7 +349,7 @@ class Clean(Cog): # Build the embed and send it if channels == "*": - target_channels = "all channels" + target_channels = "all public channels" else: target_channels = ", ".join(channel.mention for channel in channels) @@ -336,42 +367,33 @@ class Clean(Cog): channel_id=Channels.mod_log, ) - return True + return log_url # endregion async def _clean_messages( self, ctx: Context, - traverse: int, channels: Optional[CleanChannels], bots_only: bool = False, users: Optional[list[User]] = None, regex: Optional[re.Pattern] = None, first_limit: Optional[CleanLimit] = None, second_limit: Optional[CleanLimit] = None, - use_cache: Optional[bool] = True - ) -> None: - """A helper function that does the actual message cleaning.""" - self._validate_input(traverse, channels, bots_only, users, first_limit, second_limit) + attempt_delete_invocation: bool = True, + ) -> Optional[str]: + """A helper function that does the actual message cleaning, returns the log url if logging was successful.""" + self._validate_input(channels, bots_only, users, first_limit, second_limit) # Are we already performing a clean? if self.cleaning: await self._send_expiring_message( ctx, ":x: Please wait for the currently ongoing clean operation to complete." ) - return + return None self.cleaning = True - # Default to using the invoking context's channel or the channel of the message limit(s). - if not channels: - # Input was validated - if first_limit is a message, second_limit won't point at a different channel. - if isinstance(first_limit, Message): - channels = [first_limit.channel] - elif isinstance(second_limit, Message): - channels = [second_limit.channel] - else: - channels = [ctx.channel] + deletion_channels = self._channels_set(channels, ctx, first_limit, second_limit) if isinstance(first_limit, Message): first_limit = first_limit.created_at @@ -381,19 +403,20 @@ class Clean(Cog): first_limit, second_limit = sorted([first_limit, second_limit]) # Needs to be called after standardizing the input. 
- predicate = self._build_predicate(bots_only, users, regex, first_limit, second_limit) + predicate = self._build_predicate(first_limit, second_limit, bots_only, users, regex) - # Delete the invocation first - await self._delete_invocation(ctx) + if attempt_delete_invocation: + # Delete the invocation first + await self._delete_invocation(ctx) - if channels == "*" and use_cache: - message_mappings, message_ids = self._get_messages_from_cache(traverse=traverse, to_delete=predicate) + if self._use_cache(first_limit): + log.trace(f"Messages for cleaning by {ctx.author.id} will be searched in the cache.") + message_mappings, message_ids = self._get_messages_from_cache( + channels=deletion_channels, to_delete=predicate, lower_limit=first_limit + ) else: - deletion_channels = channels - if channels == "*": - deletion_channels = [channel for channel in ctx.guild.channels if isinstance(channel, TextChannel)] + log.trace(f"Messages for cleaning by {ctx.author.id} will be searched in channel histories.") message_mappings, message_ids = await self._get_messages_from_channels( - traverse=traverse, channels=deletion_channels, to_delete=predicate, before=second_limit, @@ -402,18 +425,27 @@ class Clean(Cog): if not self.cleaning: # Means that the cleaning was canceled - return + return None # Now let's delete the actual messages with purge. self.mod_log.ignore(Event.message_delete, *message_ids) deleted_messages = await self._delete_found(message_mappings) self.cleaning = False - logged = await self._modlog_cleaned_messages(deleted_messages, channels, ctx) + if not channels: + channels = deletion_channels + log_url = await self._modlog_cleaned_messages(deleted_messages, channels, ctx) - if logged and is_mod_channel(ctx.channel): - with suppress(NotFound): # Can happen if the invoker deleted their own messages. - await ctx.message.add_reaction(Emojis.check_mark) + success_message = ( + f"{Emojis.ok_hand} Deleted {len(deleted_messages)} messages. " + f"A log of the deleted messages can be found here {log_url}." + ) + if log_url and is_mod_channel(ctx.channel): + await ctx.reply(success_message) + elif log_url: + if mods := self.bot.get_channel(Channels.mods): + await mods.send(f"{ctx.author.mention} {success_message}") + return log_url # region: Commands @@ -422,12 +454,10 @@ class Clean(Cog): self, ctx: Context, users: Greedy[User] = None, - traverse: Optional[int] = None, first_limit: Optional[CleanLimit] = None, second_limit: Optional[CleanLimit] = None, - use_cache: Optional[bool] = None, - bots_only: Optional[bool] = False, regex: Optional[Regex] = None, + bots_only: Optional[bool] = False, *, channels: CleanChannels = None # "Optional" with discord.py silently ignores incorrect input. ) -> None: @@ -437,91 +467,74 @@ class Clean(Cog): If arguments are provided, will act as a master command from which all subcommands can be derived. \u2003• `users`: A series of user mentions, ID's, or names. - \u2003• `traverse`: The number of messages to look at in each channel. If using the cache, will look at the - first `traverse` messages in the cache. \u2003• `first_limit` and `second_limit`: A message, a duration delta, or an ISO datetime. + At least one limit is required. If a message is provided, cleaning will happen in that channel, and channels cannot be provided. - If a limit is provided, multiple channels cannot be provided. If only one of them is provided, acts as `clean until`. If both are provided, acts as `clean between`. - \u2003• `use_cache`: Whether to use the message cache. 
- If not provided, will default to False unless an asterisk is used for the channels. - \u2003• `bots_only`: Whether to delete only bots. If specified, users cannot be specified. \u2003• `regex`: A regex pattern the message must contain to be deleted. The pattern must be provided enclosed in backticks. If the pattern contains spaces, it still needs to be enclosed in double quotes on top of that. - \u2003• `channels`: A series of channels to delete in, or an asterisk to delete from all channels. + \u2003• `bots_only`: Whether to delete only bots. If specified, users cannot be specified. + \u2003• `channels`: A series of channels to delete in, or an asterisk to delete from all public channels. """ - if not any([traverse, users, first_limit, second_limit, regex, channels]): + if not any([users, first_limit, second_limit, regex, channels]): await ctx.send_help(ctx.command) return - if not traverse: - if first_limit: - traverse = CleanMessages.message_limit - else: - traverse = DEFAULT_TRAVERSE - if use_cache is None: - use_cache = channels == "*" - - await self._clean_messages( - ctx, traverse, channels, bots_only, users, regex, first_limit, second_limit, use_cache - ) + await self._clean_messages(ctx, channels, bots_only, users, regex, first_limit, second_limit) @clean_group.command(name="user", aliases=["users"]) async def clean_user( self, ctx: Context, user: User, - traverse: Optional[int] = DEFAULT_TRAVERSE, - use_cache: Optional[bool] = True, + message_or_time: CleanLimit, *, channels: CleanChannels = None ) -> None: - """Delete messages posted by the provided user, stop cleaning after traversing `traverse` messages.""" - await self._clean_messages(ctx, traverse, users=[user], channels=channels, use_cache=use_cache) + """ + Delete messages posted by the provided user, stop cleaning after reaching `message_or_time`. - @clean_group.command(name="all", aliases=["everything"]) - async def clean_all( - self, - ctx: Context, - traverse: Optional[int] = DEFAULT_TRAVERSE, - use_cache: Optional[bool] = True, - *, - channels: CleanChannels = None - ) -> None: - """Delete all messages, regardless of poster, stop cleaning after traversing `traverse` messages.""" - await self._clean_messages(ctx, traverse, channels=channels, use_cache=use_cache) + `message_or_time` can be either a message to stop at (exclusive), a timedelta for max message age, or an ISO + datetime. + + If a message is specified, `channels` cannot be specified. + """ + await self._clean_messages(ctx, users=[user], channels=channels, first_limit=message_or_time) @clean_group.command(name="bots", aliases=["bot"]) - async def clean_bots( - self, - ctx: Context, - traverse: Optional[int] = DEFAULT_TRAVERSE, - use_cache: Optional[bool] = True, - *, - channels: CleanChannels = None - ) -> None: - """Delete all messages posted by a bot, stop cleaning after traversing `traverse` messages.""" - await self._clean_messages(ctx, traverse, bots_only=True, channels=channels, use_cache=use_cache) + async def clean_bots(self, ctx: Context, message_or_time: CleanLimit, *, channels: CleanChannels = None) -> None: + """ + Delete all messages posted by a bot, stop cleaning after traversing `traverse` messages. + + `message_or_time` can be either a message to stop at (exclusive), a timedelta for max message age, or an ISO + datetime. + + If a message is specified, `channels` cannot be specified. 
+ """ + await self._clean_messages(ctx, bots_only=True, channels=channels, first_limit=message_or_time) @clean_group.command(name="regex", aliases=["word", "expression", "pattern"]) async def clean_regex( self, ctx: Context, regex: Regex, - traverse: Optional[int] = DEFAULT_TRAVERSE, - use_cache: Optional[bool] = True, + message_or_time: CleanLimit, *, channels: CleanChannels = None ) -> None: """ - Delete all messages that match a certain regex, stop cleaning after traversing `traverse` messages. + Delete all messages that match a certain regex, stop cleaning after reaching `message_or_time`. + + `message_or_time` can be either a message to stop at (exclusive), a timedelta for max message age, or an ISO + datetime. + If a message is specified, `channels` cannot be specified. The pattern must be provided enclosed in backticks. If the pattern contains spaces, it still needs to be enclosed in double quotes on top of that. For example: `[0-9]` """ - await self._clean_messages(ctx, traverse, regex=regex, channels=channels, use_cache=use_cache) + await self._clean_messages(ctx, regex=regex, channels=channels, first_limit=message_or_time) @clean_group.command(name="until") async def clean_until( @@ -538,7 +551,6 @@ class Clean(Cog): """ await self._clean_messages( ctx, - CleanMessages.message_limit, channels=[channel] if channel else None, first_limit=until, ) @@ -562,7 +574,6 @@ class Clean(Cog): """ await self._clean_messages( ctx, - CleanMessages.message_limit, channels=[channel] if channel else None, first_limit=first_limit, second_limit=second_limit, diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index 14db37367..178be734d 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -17,12 +17,9 @@ from bot.constants import Channels, Colours, Emojis, Event, Icons, MODERATION_RO from bot.converters import DurationDelta, Expiry from bot.exts.moderation.modlog import ModLog from bot.log import get_logger -from bot.utils import scheduling +from bot.utils import scheduling, time from bot.utils.messages import format_user from bot.utils.scheduling import Scheduler -from bot.utils.time import ( - TimestampFormats, discord_timestamp, humanize_delta, parse_duration_string, relativedelta_to_timedelta -) log = get_logger(__name__) @@ -88,7 +85,7 @@ class Defcon(Cog): try: settings = await self.defcon_settings.to_dict() - self.threshold = parse_duration_string(settings["threshold"]) if settings.get("threshold") else None + self.threshold = time.parse_duration_string(settings["threshold"]) if settings.get("threshold") else None self.expiry = datetime.fromisoformat(settings["expiry"]) if settings.get("expiry") else None except RedisError: log.exception("Unable to get DEFCON settings!") @@ -102,7 +99,7 @@ class Defcon(Cog): self.scheduler.schedule_at(self.expiry, 0, self._remove_threshold()) self._update_notifier() - log.info(f"DEFCON synchronized: {humanize_delta(self.threshold) if self.threshold else '-'}") + log.info(f"DEFCON synchronized: {time.humanize_delta(self.threshold) if self.threshold else '-'}") self._update_channel_topic() @@ -112,7 +109,7 @@ class Defcon(Cog): if self.threshold: now = arrow.utcnow() - if now - member.created_at < relativedelta_to_timedelta(self.threshold): + if now - member.created_at < time.relativedelta_to_timedelta(self.threshold): log.info(f"Rejecting user {member}: Account is too new") message_sent = False @@ -151,11 +148,12 @@ class Defcon(Cog): @has_any_role(*MODERATION_ROLES) async def status(self, ctx: Context) -> 
None: """Check the current status of DEFCON mode.""" + expiry = time.format_relative(self.expiry) if self.expiry else "-" embed = Embed( colour=Colour.og_blurple(), title="DEFCON Status", description=f""" - **Threshold:** {humanize_delta(self.threshold) if self.threshold else "-"} - **Expires:** {discord_timestamp(self.expiry, TimestampFormats.RELATIVE) if self.expiry else "-"} + **Threshold:** {time.humanize_delta(self.threshold) if self.threshold else "-"} + **Expires:** {expiry} **Verification level:** {ctx.guild.verification_level.name} """ ) @@ -213,7 +211,8 @@ class Defcon(Cog): def _update_channel_topic(self) -> None: """Update the #defcon channel topic with the current DEFCON status.""" - new_topic = f"{BASE_CHANNEL_TOPIC}\n(Threshold: {humanize_delta(self.threshold) if self.threshold else '-'})" + threshold = time.humanize_delta(self.threshold) if self.threshold else '-' + new_topic = f"{BASE_CHANNEL_TOPIC}\n(Threshold: {threshold})" self.mod_log.ignore(Event.guild_channel_update, Channels.defcon) scheduling.create_task(self.channel.edit(topic=new_topic)) @@ -255,12 +254,12 @@ class Defcon(Cog): expiry_message = "" if expiry: - activity_duration = relativedelta(expiry, arrow.utcnow().datetime) - expiry_message = f" for the next {humanize_delta(activity_duration, max_units=2)}" + formatted_expiry = time.humanize_delta(expiry, max_units=2) + expiry_message = f" for the next {formatted_expiry}" if self.threshold: channel_message = ( - f"updated; accounts must be {humanize_delta(self.threshold)} " + f"updated; accounts must be {time.humanize_delta(self.threshold)} " f"old to join the server{expiry_message}" ) else: @@ -290,7 +289,7 @@ class Defcon(Cog): def _log_threshold_stat(self, threshold: relativedelta) -> None: """Adds the threshold to the bot stats in days.""" - threshold_days = relativedelta_to_timedelta(threshold).total_seconds() / SECONDS_IN_DAY + threshold_days = time.relativedelta_to_timedelta(threshold).total_seconds() / SECONDS_IN_DAY self.bot.stats.gauge("defcon.threshold", threshold_days) async def _send_defcon_log(self, action: Action, actor: User) -> None: @@ -298,7 +297,7 @@ class Defcon(Cog): info = action.value log_msg: str = ( f"**Staffer:** {actor.mention} {actor} (`{actor.id}`)\n" - f"{info.template.format(threshold=(humanize_delta(self.threshold) if self.threshold else '-'))}" + f"{info.template.format(threshold=(time.humanize_delta(self.threshold) if self.threshold else '-'))}" ) status_msg = f"DEFCON {action.name.lower()}" @@ -317,7 +316,7 @@ class Defcon(Cog): @tasks.loop(hours=1) async def defcon_notifier(self) -> None: """Routinely notify moderators that DEFCON is active.""" - await self.channel.send(f"Defcon is on and is set to {humanize_delta(self.threshold)}.") + await self.channel.send(f"Defcon is on and is set to {time.humanize_delta(self.threshold)}.") def cog_unload(self) -> None: """Cancel the notifer and threshold removal tasks when the cog unloads.""" diff --git a/bot/exts/moderation/incidents.py b/bot/exts/moderation/incidents.py index 77dfad255..b579416a6 100644 --- a/bot/exts/moderation/incidents.py +++ b/bot/exts/moderation/incidents.py @@ -229,6 +229,7 @@ async def make_message_link_embed(ctx: Context, message_link: str) -> Optional[d ), timestamp=message.created_at ) + embed.set_author(name=message.author, icon_url=message.author.display_avatar.url) embed.add_field( name="Content", value=shorten_text(message.content) if message.content else "[No Message Content]" diff --git a/bot/exts/moderation/infraction/_scheduler.py 
b/bot/exts/moderation/infraction/_scheduler.py index 762eb6afa..47b639421 100644 --- a/bot/exts/moderation/infraction/_scheduler.py +++ b/bot/exts/moderation/infraction/_scheduler.py @@ -136,7 +136,7 @@ class InfractionScheduler: infr_type = infraction["type"] icon = _utils.INFRACTION_ICONS[infr_type][0] reason = infraction["reason"] - expiry = time.format_infraction_with_duration(infraction["expires_at"]) + expiry = time.format_with_duration(infraction["expires_at"]) id_ = infraction['id'] if user_reason is None: @@ -170,7 +170,9 @@ class InfractionScheduler: dm_log_text = "\nDM: **Failed**" # Accordingly update whether the user was successfully notified via DM. - if await _utils.notify_infraction(user, infr_type.replace("_", " ").title(), expiry, user_reason, icon): + if await _utils.notify_infraction( + self.bot, user, infraction["id"], infr_type.replace("_", " ").title(), expiry, user_reason, icon + ): dm_result = ":incoming_envelope: " dm_log_text = "\nDM: Sent" @@ -221,7 +223,7 @@ class InfractionScheduler: failed = True if failed: - log.trace(f"Deleted infraction {infraction['id']} from database because applying infraction failed.") + log.trace(f"Trying to delete infraction {id_} from database because applying infraction failed.") try: await self.bot.api_client.delete(f"bot/infractions/{id_}") except ResponseCodeError as e: @@ -238,7 +240,9 @@ class InfractionScheduler: dm_log_text = "\nDM: **Failed**" # Accordingly update whether the user was successfully notified via DM. - if await _utils.notify_infraction(user, infr_type.replace("_", " ").title(), expiry, user_reason, icon): + if await _utils.notify_infraction( + self.bot, user, infraction["id"], infr_type.replace("_", " ").title(), expiry, user_reason, icon + ): dm_result = ":incoming_envelope: " dm_log_text = "\nDM: Sent" @@ -261,7 +265,7 @@ class InfractionScheduler: {additional_info} """), content=log_content, - footer=f"ID {infraction['id']}" + footer=f"ID: {id_}" ) log.info(f"Applied {purge}{infr_type} infraction #{id_} to {user}.") @@ -377,20 +381,15 @@ class InfractionScheduler: actor = infraction["actor"] type_ = infraction["type"] id_ = infraction["id"] - inserted_at = infraction["inserted_at"] - expiry = infraction["expires_at"] log.info(f"Marking infraction #{id_} as inactive (expired).") - expiry = dateutil.parser.isoparse(expiry) if expiry else None - created = time.format_infraction_with_duration(inserted_at, expiry) - log_content = None log_text = { "Member": f"<@{user_id}>", "Actor": f"<@{actor}>", "Reason": infraction["reason"], - "Created": created, + "Created": time.format_with_duration(infraction["inserted_at"], infraction["expires_at"]), } try: diff --git a/bot/exts/moderation/infraction/_utils.py b/bot/exts/moderation/infraction/_utils.py index bb3cc5380..4df833ffb 100644 --- a/bot/exts/moderation/infraction/_utils.py +++ b/bot/exts/moderation/infraction/_utils.py @@ -5,6 +5,7 @@ import discord from discord.ext.commands import Context from bot.api import ResponseCodeError +from bot.bot import Bot from bot.constants import Colours, Icons from bot.converters import MemberOrUser from bot.errors import InvalidInfractedUserError @@ -20,7 +21,7 @@ INFRACTION_ICONS = { "note": (Icons.user_warn, None), "superstar": (Icons.superstarify, Icons.unsuperstarify), "warning": (Icons.user_warn, None), - "voice_ban": (Icons.voice_state_red, Icons.voice_state_green), + "voice_mute": (Icons.voice_state_red, Icons.voice_state_green), } RULES_URL = "https://pythondiscord.com/pages/rules" @@ -78,7 +79,8 @@ async def 
post_infraction( reason: str, expires_at: datetime = None, hidden: bool = False, - active: bool = True + active: bool = True, + dm_sent: bool = False, ) -> t.Optional[dict]: """Posts an infraction to the API.""" if isinstance(user, (discord.Member, discord.User)) and user.bot: @@ -93,7 +95,8 @@ async def post_infraction( "reason": reason, "type": infr_type, "user": user.id, - "active": active + "active": active, + "dm_sent": dm_sent } if expires_at: payload['expires_at'] = expires_at.isoformat() @@ -156,7 +159,9 @@ async def send_active_infraction_message(ctx: Context, infraction: Infraction) - async def notify_infraction( + bot: Bot, user: MemberOrUser, + infr_id: int, infr_type: str, expires_at: t.Optional[str] = None, reason: t.Optional[str] = None, @@ -186,7 +191,15 @@ async def notify_infraction( embed.title = INFRACTION_TITLE embed.url = RULES_URL - return await send_private_embed(user, embed) + dm_sent = await send_private_embed(user, embed) + if dm_sent: + await bot.api_client.patch( + f"bot/infractions/{infr_id}", + json={"dm_sent": True} + ) + log.debug(f"Updated infraction #{infr_id} dm_sent field to True.") + + return dm_sent async def notify_pardon( diff --git a/bot/exts/moderation/infraction/infractions.py b/bot/exts/moderation/infraction/infractions.py index e495a94b3..af42ab1b8 100644 --- a/bot/exts/moderation/infraction/infractions.py +++ b/bot/exts/moderation/infraction/infractions.py @@ -9,7 +9,7 @@ from discord.ext.commands import Context, command from bot import constants from bot.bot import Bot from bot.constants import Event -from bot.converters import Duration, Expiry, MemberOrUser, UnambiguousMemberOrUser +from bot.converters import Age, Duration, Expiry, MemberOrUser, UnambiguousMemberOrUser from bot.decorators import respect_role_hierarchy from bot.exts.moderation.infraction import _utils from bot.exts.moderation.infraction._scheduler import InfractionScheduler @@ -19,6 +19,11 @@ from bot.utils.messages import format_user log = get_logger(__name__) +if t.TYPE_CHECKING: + from bot.exts.moderation.clean import Clean + from bot.exts.moderation.infraction.management import ModManagement + from bot.exts.moderation.watchchannels.bigbrother import BigBrother + class Infractions(InfractionScheduler, commands.Cog): """Apply and pardon infractions on users for moderation purposes.""" @@ -27,7 +32,7 @@ class Infractions(InfractionScheduler, commands.Cog): category_description = "Server moderation tools." def __init__(self, bot: Bot): - super().__init__(bot, supported_infractions={"ban", "kick", "mute", "note", "warning", "voice_ban"}) + super().__init__(bot, supported_infractions={"ban", "kick", "mute", "note", "warning", "voice_mute"}) self.category = "Moderation" self._muted_role = discord.Object(constants.Roles.muted) @@ -91,8 +96,8 @@ class Infractions(InfractionScheduler, commands.Cog): """ await self.apply_ban(ctx, user, reason, expires_at=duration) - @command(aliases=('pban',)) - async def purgeban( + @command(aliases=("cban", "purgeban", "pban")) + async def cleanban( self, ctx: Context, user: UnambiguousMemberOrUser, @@ -101,14 +106,62 @@ class Infractions(InfractionScheduler, commands.Cog): reason: t.Optional[str] = None ) -> None: """ - Same as ban but removes all their messages of the last 24 hours. + Same as ban, but also cleans all their messages from the last hour. If duration is specified, it temporarily bans that user for the given duration. 
""" - await self.apply_ban(ctx, user, reason, 1, expires_at=duration) + clean_cog: t.Optional[Clean] = self.bot.get_cog("Clean") + if clean_cog is None: + # If we can't get the clean cog, fall back to native purgeban. + await self.apply_ban(ctx, user, reason, purge_days=1, expires_at=duration) + return + + infraction = await self.apply_ban(ctx, user, reason, expires_at=duration) + if not infraction or not infraction.get("id"): + # Ban was unsuccessful, quit early. + await ctx.send(":x: Failed to apply ban.") + log.error("Failed to apply ban to user %d", user.id) + return + + # Calling commands directly skips Discord.py's convertors, so we need to convert args manually. + clean_time = await Age().convert(ctx, "1h") + + log_url = await clean_cog._clean_messages( + ctx, + users=[user], + channels="*", + first_limit=clean_time, + attempt_delete_invocation=False, + ) + if not log_url: + # Cleaning failed, or there were no messages to clean, exit early. + return + + infr_manage_cog: t.Optional[ModManagement] = self.bot.get_cog("ModManagement") + if infr_manage_cog is None: + # If we can't get the mod management cog, don't bother appending the log. + return + + # Overwrite the context's send function so infraction append + # doesn't output the update infraction confirmation message. + async def send(*args, **kwargs) -> None: + pass + ctx.send = send + await infr_manage_cog.infraction_append(ctx, infraction, None, reason=f"[Clean log]({log_url})") - @command(aliases=('vban',)) - async def voiceban( + @command(aliases=("vban",)) + async def voiceban(self, ctx: Context) -> None: + """ + NOT IMPLEMENTED. + + Permanently ban a user from joining voice channels. + + If duration is specified, it temporarily voice bans that user for the given duration. + """ + await ctx.send(":x: This command is not yet implemented. Maybe you meant to use `voicemute`?") + + @command(aliases=("vmute",)) + async def voicemute( self, ctx: Context, user: UnambiguousMemberOrUser, @@ -117,11 +170,11 @@ class Infractions(InfractionScheduler, commands.Cog): reason: t.Optional[str] ) -> None: """ - Permanently ban user from using voice channels. + Permanently mute user in voice channels. - If duration is specified, it temporarily voice bans that user for the given duration. + If duration is specified, it temporarily voice mutes that user for the given duration. """ - await self.apply_voice_ban(ctx, user, reason, expires_at=duration) + await self.apply_voice_mute(ctx, user, reason, expires_at=duration) # endregion # region: Temporary infractions @@ -186,16 +239,25 @@ class Infractions(InfractionScheduler, commands.Cog): await self.apply_ban(ctx, user, reason, expires_at=duration) @command(aliases=("tempvban", "tvban")) - async def tempvoiceban( - self, - ctx: Context, - user: UnambiguousMemberOrUser, - duration: Expiry, - *, - reason: t.Optional[str] + async def tempvoiceban(self, ctx: Context) -> None: + """ + NOT IMPLEMENTED. + + Temporarily voice bans that user for the given duration. + """ + await ctx.send(":x: This command is not yet implemented. Maybe you meant to use `tempvoicemute`?") + + @command(aliases=("tempvmute", "tvmute")) + async def tempvoicemute( + self, + ctx: Context, + user: UnambiguousMemberOrUser, + duration: Expiry, + *, + reason: t.Optional[str] ) -> None: """ - Temporarily voice ban a user for the given reason and duration. + Temporarily voice mute a user for the given reason and duration. A unit of time should be appended to the duration. 
Units (∗case-sensitive): @@ -209,7 +271,7 @@ class Infractions(InfractionScheduler, commands.Cog): Alternatively, an ISO 8601 timestamp can be provided for the duration. """ - await self.apply_voice_ban(ctx, user, reason, expires_at=duration) + await self.apply_voice_mute(ctx, user, reason, expires_at=duration) # endregion # region: Permanent shadow infractions @@ -271,9 +333,18 @@ class Infractions(InfractionScheduler, commands.Cog): await self.pardon_infraction(ctx, "ban", user) @command(aliases=("uvban",)) - async def unvoiceban(self, ctx: Context, user: UnambiguousMemberOrUser) -> None: - """Prematurely end the active voice ban infraction for the user.""" - await self.pardon_infraction(ctx, "voice_ban", user) + async def unvoiceban(self, ctx: Context) -> None: + """ + NOT IMPLEMENTED. + + Prematurely end the active voice ban infraction for the user. + """ + await ctx.send(":x: This command is not yet implemented. Maybe you meant to use `unvoicemute`?") + + @command(aliases=("uvmute",)) + async def unvoicemute(self, ctx: Context, user: UnambiguousMemberOrUser) -> None: + """Prematurely end the active voice mute infraction for the user.""" + await self.pardon_infraction(ctx, "voice_mute", user) # endregion # region: Base apply functions @@ -339,7 +410,7 @@ class Infractions(InfractionScheduler, commands.Cog): reason: t.Optional[str], purge_days: t.Optional[int] = 0, **kwargs - ) -> None: + ) -> t.Optional[dict]: """ Apply a ban infraction with kwargs passed to `post_infraction`. @@ -347,7 +418,7 @@ class Infractions(InfractionScheduler, commands.Cog): """ if isinstance(user, Member) and user.top_role >= ctx.me.top_role: await ctx.send(":x: I can't ban users above or equal to me in the role hierarchy.") - return + return None # In the case of a permanent ban, we don't need get_active_infractions to tell us if one is active is_temporary = kwargs.get("expires_at") is not None @@ -356,19 +427,19 @@ class Infractions(InfractionScheduler, commands.Cog): if active_infraction: if is_temporary: log.trace("Tempban ignored as it cannot overwrite an active ban.") - return + return None if active_infraction.get('expires_at') is None: log.trace("Permaban already exists, notify.") await ctx.send(f":x: User is already permanently banned (#{active_infraction['id']}).") - return + return None log.trace("Old tempban is being replaced by new permaban.") await self.pardon_infraction(ctx, "ban", user, send_msg=is_temporary) infraction = await _utils.post_infraction(ctx, user, "ban", reason, active=True, **kwargs) if infraction is None: - return + return None infraction["purge"] = "purge " if purge_days else "" @@ -380,27 +451,25 @@ class Infractions(InfractionScheduler, commands.Cog): action = ctx.guild.ban(user, reason=reason, delete_message_days=purge_days) await self.apply_infraction(ctx, infraction, user, action) + bb_cog: t.Optional[BigBrother] = self.bot.get_cog("Big Brother") if infraction.get('expires_at') is not None: log.trace(f"Ban isn't permanent; user {user} won't be unwatched by Big Brother.") - return - - bb_cog = self.bot.get_cog("Big Brother") - if not bb_cog: + elif not bb_cog: log.error(f"Big Brother cog not loaded; perma-banned user {user} won't be unwatched.") - return - - log.trace(f"Big Brother cog loaded; attempting to unwatch perma-banned user {user}.") + else: + log.trace(f"Big Brother cog loaded; attempting to unwatch perma-banned user {user}.") + bb_reason = "User has been permanently banned from the server. Automatically removed." 
+ await bb_cog.apply_unwatch(ctx, user, bb_reason, send_message=False) - bb_reason = "User has been permanently banned from the server. Automatically removed." - await bb_cog.apply_unwatch(ctx, user, bb_reason, send_message=False) + return infraction @respect_role_hierarchy(member_arg=2) - async def apply_voice_ban(self, ctx: Context, user: MemberOrUser, reason: t.Optional[str], **kwargs) -> None: - """Apply a voice ban infraction with kwargs passed to `post_infraction`.""" - if await _utils.get_active_infraction(ctx, user, "voice_ban"): + async def apply_voice_mute(self, ctx: Context, user: MemberOrUser, reason: t.Optional[str], **kwargs) -> None: + """Apply a voice mute infraction with kwargs passed to `post_infraction`.""" + if await _utils.get_active_infraction(ctx, user, "voice_mute"): return - infraction = await _utils.post_infraction(ctx, user, "voice_ban", reason, active=True, **kwargs) + infraction = await _utils.post_infraction(ctx, user, "voice_mute", reason, active=True, **kwargs) if infraction is None: return @@ -414,7 +483,7 @@ class Infractions(InfractionScheduler, commands.Cog): if not isinstance(user, Member): return - await user.move_to(None, reason="Disconnected from voice to apply voiceban.") + await user.move_to(None, reason="Disconnected from voice to apply voice mute.") await user.remove_roles(self._voice_verified_role, reason=reason) await self.apply_infraction(ctx, infraction, user, action()) @@ -471,7 +540,7 @@ class Infractions(InfractionScheduler, commands.Cog): return log_text - async def pardon_voice_ban( + async def pardon_voice_mute( self, user_id: int, guild: discord.Guild, @@ -487,9 +556,9 @@ class Infractions(InfractionScheduler, commands.Cog): # DM user about infraction expiration notified = await _utils.notify_pardon( user=user, - title="Voice ban ended", - content="You have been unbanned and can verify yourself again in the server.", - icon_url=_utils.INFRACTION_ICONS["voice_ban"][1] + title="Voice mute ended", + content="You have been unmuted and can verify yourself again in the server.", + icon_url=_utils.INFRACTION_ICONS["voice_mute"][1] ) log_text["DM"] = "Sent" if notified else "**Failed**" @@ -514,8 +583,8 @@ class Infractions(InfractionScheduler, commands.Cog): return await self.pardon_mute(user_id, guild, reason, notify=notify) elif infraction["type"] == "ban": return await self.pardon_ban(user_id, guild, reason) - elif infraction["type"] == "voice_ban": - return await self.pardon_voice_ban(user_id, guild, notify=notify) + elif infraction["type"] == "voice_mute": + return await self.pardon_voice_mute(user_id, guild, notify=notify) # endregion diff --git a/bot/exts/moderation/infraction/management.py b/bot/exts/moderation/infraction/management.py index a833eb227..dda3fadae 100644 --- a/bot/exts/moderation/infraction/management.py +++ b/bot/exts/moderation/infraction/management.py @@ -1,10 +1,7 @@ import textwrap import typing as t -from datetime import datetime, timezone -import dateutil.parser import discord -from dateutil.relativedelta import relativedelta from discord.ext import commands from discord.ext.commands import Context from discord.utils import escape_markdown @@ -20,7 +17,6 @@ from bot.pagination import LinePaginator from bot.utils import messages, time from bot.utils.channel import is_mod_channel from bot.utils.members import get_or_fetch_member -from bot.utils.time import humanize_delta, until_expiration log = get_logger(__name__) @@ -151,7 +147,7 @@ class ModManagement(commands.Cog): confirm_messages.append("marked as permanent") 
elif duration is not None: request_data['expires_at'] = duration.isoformat() - expiry = time.format_infraction_with_duration(request_data['expires_at']) + expiry = time.format_with_duration(duration) confirm_messages.append(f"set to expire on {expiry}") else: confirm_messages.append("expiry unchanged") @@ -176,15 +172,15 @@ class ModManagement(commands.Cog): if 'expires_at' in request_data: # A scheduled task should only exist if the old infraction wasn't permanent if infraction['expires_at']: - self.infractions_cog.scheduler.cancel(new_infraction['id']) + self.infractions_cog.scheduler.cancel(infraction_id) # If the infraction was not marked as permanent, schedule a new expiration task if request_data['expires_at']: self.infractions_cog.schedule_expiration(new_infraction) log_text += f""" - Previous expiry: {until_expiration(infraction['expires_at']) or "Permanent"} - New expiry: {until_expiration(new_infraction['expires_at']) or "Permanent"} + Previous expiry: {time.until_expiration(infraction['expires_at'])} + New expiry: {time.until_expiration(new_infraction['expires_at'])} """.rstrip() changes = ' & '.join(confirm_messages) @@ -210,7 +206,8 @@ class ModManagement(commands.Cog): Member: {user_text} Actor: <@{new_infraction['actor']}> Edited by: {ctx.message.author.mention}{log_text} - """) + """), + footer=f"ID: {infraction_id}" ) # endregion @@ -351,7 +348,9 @@ class ModManagement(commands.Cog): active = infraction["active"] user = infraction["user"] expires_at = infraction["expires_at"] - created = time.format_infraction(infraction["inserted_at"]) + inserted_at = infraction["inserted_at"] + created = time.discord_timestamp(inserted_at) + dm_sent = infraction["dm_sent"] # Format the user string. if user_obj := self.bot.get_user(user["id"]): @@ -363,25 +362,27 @@ class ModManagement(commands.Cog): user_str = f"<@{user['id']}> ({name}#{user['discriminator']:04})" if active: - remaining = time.until_expiration(expires_at) or "Expired" + remaining = time.until_expiration(expires_at) else: remaining = "Inactive" if expires_at is None: duration = "*Permanent*" else: - date_from = datetime.fromtimestamp( - float(time.DISCORD_TIMESTAMP_REGEX.match(created).group(1)), - timezone.utc - ) - date_to = dateutil.parser.isoparse(expires_at) - duration = humanize_delta(relativedelta(date_to, date_from)) + duration = time.humanize_delta(inserted_at, expires_at) + + # Format `dm_sent` + if dm_sent is None: + dm_sent_text = "N/A" + else: + dm_sent_text = "Yes" if dm_sent else "No" lines = textwrap.dedent(f""" {"**===============**" if active else "==============="} Status: {"__**Active**__" if active else "Inactive"} User: {user_str} Type: **{infraction["type"]}** + DM Sent: {dm_sent_text} Shadow: {infraction["hidden"]} Created: {created} Expires: {remaining} diff --git a/bot/exts/moderation/infraction/superstarify.py b/bot/exts/moderation/infraction/superstarify.py index 08c92b8f3..3f1bffd76 100644 --- a/bot/exts/moderation/infraction/superstarify.py +++ b/bot/exts/moderation/infraction/superstarify.py @@ -14,9 +14,9 @@ from bot.converters import Duration, Expiry from bot.exts.moderation.infraction import _utils from bot.exts.moderation.infraction._scheduler import InfractionScheduler from bot.log import get_logger +from bot.utils import time from bot.utils.members import get_or_fetch_member from bot.utils.messages import format_user -from bot.utils.time import format_infraction log = get_logger(__name__) NICKNAME_POLICY_URL = "https://pythondiscord.com/pages/rules/#nickname-policy" @@ -57,7 +57,9 @@ 
class Superstarify(InfractionScheduler, Cog): return infraction = active_superstarifies[0] - forced_nick = self.get_nick(infraction["id"], before.id) + infr_id = infraction["id"] + + forced_nick = self.get_nick(infr_id, before.id) if after.display_name == forced_nick: return # Nick change was triggered by this event. Ignore. @@ -67,13 +69,15 @@ class Superstarify(InfractionScheduler, Cog): ) await after.edit( nick=forced_nick, - reason=f"Superstarified member tried to escape the prison: {infraction['id']}" + reason=f"Superstarified member tried to escape the prison: {infr_id}" ) notified = await _utils.notify_infraction( + bot=self.bot, user=after, + infr_id=infr_id, infr_type="Superstarify", - expires_at=format_infraction(infraction["expires_at"]), + expires_at=time.discord_timestamp(infraction["expires_at"]), reason=( "You have tried to change your nickname on the **Python Discord** server " f"from **{before.display_name}** to **{after.display_name}**, but as you " @@ -150,7 +154,7 @@ class Superstarify(InfractionScheduler, Cog): id_ = infraction["id"] forced_nick = self.get_nick(id_, member.id) - expiry_str = format_infraction(infraction["expires_at"]) + expiry_str = time.discord_timestamp(infraction["expires_at"]) # Apply the infraction async def action() -> None: diff --git a/bot/exts/moderation/modlog.py b/bot/exts/moderation/modlog.py index 91709e5e5..32ea0dc6a 100644 --- a/bot/exts/moderation/modlog.py +++ b/bot/exts/moderation/modlog.py @@ -16,8 +16,8 @@ from discord.utils import escape_markdown from bot.bot import Bot from bot.constants import Categories, Channels, Colours, Emojis, Event, Guild as GuildConstant, Icons, Roles, URLs from bot.log import get_logger +from bot.utils import time from bot.utils.messages import format_user -from bot.utils.time import humanize_delta log = get_logger(__name__) @@ -96,6 +96,7 @@ class ModLog(Cog, name="ModLog"): footer: t.Optional[str] = None, ) -> Context: """Generate log embed and send to logging channel.""" + await self.bot.wait_until_guild_available() # Truncate string directly here to avoid removing newlines embed = discord.Embed( description=text[:4093] + "..." if len(text) > 4096 else text @@ -115,7 +116,7 @@ class ModLog(Cog, name="ModLog"): if ping_everyone: if content: - content = f"<@&{Roles.moderators}>\n{content}" + content = f"<@&{Roles.moderators}> {content}" else: content = f"<@&{Roles.moderators}>" @@ -406,7 +407,7 @@ class ModLog(Cog, name="ModLog"): now = datetime.now(timezone.utc) difference = abs(relativedelta(now, member.created_at)) - message = format_user(member) + "\n\n**Account age:** " + humanize_delta(difference) + message = format_user(member) + "\n\n**Account age:** " + time.humanize_delta(difference) if difference.days < 1 and difference.months < 1 and difference.years < 1: # New user account! message = f"{Emojis.new} {message}" @@ -614,6 +615,7 @@ class ModLog(Cog, name="ModLog"): This is called when a message absent from the cache is deleted. Hence, the message contents aren't logged. 
""" + await self.bot.wait_until_guild_available() if self.is_channel_ignored(event.channel_id): return @@ -711,7 +713,7 @@ class ModLog(Cog, name="ModLog"): # datetime as the baseline and create a human-readable delta between this edit event # and the last time the message was edited timestamp = msg_before.edited_at - delta = humanize_delta(relativedelta(msg_after.edited_at, msg_before.edited_at)) + delta = time.humanize_delta(msg_after.edited_at, msg_before.edited_at) footer = f"Last edited {delta} ago" else: # Message was not previously edited, use the created_at datetime as the baseline, no @@ -727,6 +729,10 @@ class ModLog(Cog, name="ModLog"): @Cog.listener() async def on_raw_message_edit(self, event: discord.RawMessageUpdateEvent) -> None: """Log raw message edit event to message change log.""" + if event.guild_id is None: + return # ignore DM edits + + await self.bot.wait_until_guild_available() try: channel = self.bot.get_channel(int(event.data["channel_id"])) message = await channel.fetch_message(event.message_id) diff --git a/bot/exts/moderation/modpings.py b/bot/exts/moderation/modpings.py index c3f3dc0b1..f8044ea7a 100644 --- a/bot/exts/moderation/modpings.py +++ b/bot/exts/moderation/modpings.py @@ -11,10 +11,9 @@ from bot.bot import Bot from bot.constants import Colours, Emojis, Guild, Icons, MODERATION_ROLES, Roles from bot.converters import DayDuration, Expiry from bot.log import get_logger -from bot.utils import scheduling +from bot.utils import scheduling, time from bot.utils.members import get_or_fetch_member from bot.utils.scheduling import Scheduler -from bot.utils.time import TimestampFormats, discord_timestamp log = get_logger(__name__) @@ -263,8 +262,8 @@ class ModPings(Cog): await ctx.send( f"{Emojis.ok_hand} {ctx.author.mention} Scheduled mod pings from " - f"{discord_timestamp(start, TimestampFormats.TIME)} to " - f"{discord_timestamp(end, TimestampFormats.TIME)}!" + f"{time.discord_timestamp(start, time.TimestampFormats.TIME)} to " + f"{time.discord_timestamp(end, time.TimestampFormats.TIME)}!" 
) @schedule_modpings.command(name='delete', aliases=('del', 'd')) diff --git a/bot/exts/moderation/slowmode.py b/bot/exts/moderation/slowmode.py index 9583597e0..b6a771441 100644 --- a/bot/exts/moderation/slowmode.py +++ b/bot/exts/moderation/slowmode.py @@ -16,7 +16,7 @@ SLOWMODE_MAX_DELAY = 21600 # seconds COMMONLY_SLOWMODED_CHANNELS = { Channels.python_general: "python_general", - Channels.discord_py: "discordpy", + Channels.discord_bots: "discord_bots", Channels.off_topic_0: "ot0", } @@ -39,8 +39,7 @@ class Slowmode(Cog): if channel is None: channel = ctx.channel - delay = relativedelta(seconds=channel.slowmode_delay) - humanized_delay = time.humanize_delta(delay) + humanized_delay = time.humanize_delta(seconds=channel.slowmode_delay) await ctx.send(f'The slowmode delay for {channel.mention} is {humanized_delay}.') diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index 99bbd8721..985cc6eb1 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -14,9 +14,8 @@ from bot.constants import ( from bot.converters import Expiry from bot.log import get_logger from bot.pagination import LinePaginator -from bot.utils import scheduling +from bot.utils import scheduling, time from bot.utils.members import get_or_fetch_member -from bot.utils.time import discord_timestamp, format_infraction_with_duration log = get_logger(__name__) @@ -131,11 +130,15 @@ class Stream(commands.Cog): await member.add_roles(discord.Object(Roles.video), reason="Temporary streaming access granted") - await ctx.send(f"{Emojis.check_mark} {member.mention} can now stream until {discord_timestamp(duration)}.") + await ctx.send(f"{Emojis.check_mark} {member.mention} can now stream until {time.discord_timestamp(duration)}.") # Convert here for nicer logging - revoke_time = format_infraction_with_duration(str(duration)) - log.debug(f"Successfully gave {member} ({member.id}) permission to stream until {revoke_time}.") + humanized_duration = time.humanize_delta(duration, arrow.utcnow(), max_units=2) + end_time = duration.strftime("%Y-%m-%d %H:%M:%S") + log.debug( + f"Successfully gave {member} ({member.id}) permission " + f"to stream for {humanized_duration} (until {end_time})." + ) @commands.command(aliases=("pstream",)) @commands.has_any_role(*MODERATION_ROLES) diff --git a/bot/exts/moderation/voice_gate.py b/bot/exts/moderation/voice_gate.py index ae55a03a0..fa66b00dd 100644 --- a/bot/exts/moderation/voice_gate.py +++ b/bot/exts/moderation/voice_gate.py @@ -30,7 +30,7 @@ FAILED_MESSAGE = ( MESSAGE_FIELD_MAP = { "joined_at": f"have been on the server for less than {GateConf.minimum_days_member} days", - "voice_banned": "have an active voice ban infraction", + "voice_gate_blocked": "have an active voice infraction", "total_messages": f"have sent less than {GateConf.minimum_messages} messages", "activity_blocks": f"have been active for fewer than {GateConf.minimum_activity_blocks} ten-minute blocks", } @@ -170,12 +170,9 @@ class VoiceGate(Cog): ctx.author.joined_at > arrow.utcnow() - timedelta(days=GateConf.minimum_days_member) ), "total_messages": data["total_messages"] < GateConf.minimum_messages, - "voice_banned": data["voice_banned"], + "voice_gate_blocked": data["voice_gate_blocked"], + "activity_blocks": data["activity_blocks"] < GateConf.minimum_activity_blocks, } - if activity_blocks := data.get("activity_blocks"): - # activity_blocks is not included in the response if the user has a lot of messages. - # Only check if the user has enough activity blocks if it is included. 
- checks["activity_blocks"] = activity_blocks < GateConf.minimum_activity_blocks failed = any(checks.values()) failed_reasons = [MESSAGE_FIELD_MAP[key] for key, value in checks.items() if value is True] diff --git a/bot/exts/moderation/watchchannels/_watchchannel.py b/bot/exts/moderation/watchchannels/_watchchannel.py index 34d445912..ee9b6ba45 100644 --- a/bot/exts/moderation/watchchannels/_watchchannel.py +++ b/bot/exts/moderation/watchchannels/_watchchannel.py @@ -18,9 +18,8 @@ from bot.exts.filters.webhook_remover import WEBHOOK_URL_RE from bot.exts.moderation.modlog import ModLog from bot.log import CustomLogger, get_logger from bot.pagination import LinePaginator -from bot.utils import CogABCMeta, messages, scheduling +from bot.utils import CogABCMeta, messages, scheduling, time from bot.utils.members import get_or_fetch_member -from bot.utils.time import get_time_delta log = get_logger(__name__) @@ -286,7 +285,7 @@ class WatchChannel(metaclass=CogABCMeta): actor = actor.display_name if actor else self.watched_users[user_id]['actor'] inserted_at = self.watched_users[user_id]['inserted_at'] - time_delta = get_time_delta(inserted_at) + time_delta = time.format_relative(inserted_at) reason = self.watched_users[user_id]['reason'] @@ -360,7 +359,7 @@ class WatchChannel(metaclass=CogABCMeta): if member: line += f" ({member.name}#{member.discriminator})" inserted_at = user_data['inserted_at'] - line += f", added {get_time_delta(inserted_at)}" + line += f", added {time.format_relative(inserted_at)}" if not member: # Cross off users who left the server. line = f"~~{line}~~" list_data["info"][user_id] = line diff --git a/bot/exts/recruitment/talentpool/_cog.py b/bot/exts/recruitment/talentpool/_cog.py index 8fa0be5b1..0554bf37a 100644 --- a/bot/exts/recruitment/talentpool/_cog.py +++ b/bot/exts/recruitment/talentpool/_cog.py @@ -17,7 +17,6 @@ from bot.log import get_logger from bot.pagination import LinePaginator from bot.utils import scheduling, time from bot.utils.members import get_or_fetch_member -from bot.utils.time import get_time_delta AUTOREVIEW_ENABLED_KEY = "autoreview_enabled" REASON_MAX_CHARS = 1000 @@ -181,7 +180,7 @@ class TalentPool(Cog, name="Talentpool"): if member: line += f" ({member.name}#{member.discriminator})" inserted_at = user_data['inserted_at'] - line += f", added {get_time_delta(inserted_at)}" + line += f", added {time.format_relative(inserted_at)}" if not member: # Cross off users who left the server. 
line = f"~~{line}~~" if user_data['reviewed']: @@ -260,7 +259,7 @@ class TalentPool(Cog, name="Talentpool"): return if len(reason) > REASON_MAX_CHARS: - await ctx.send(f":x: Maxiumum allowed characters for the reason is {REASON_MAX_CHARS}.") + await ctx.send(f":x: Maximum allowed characters for the reason is {REASON_MAX_CHARS}.") return # Manual request with `raise_for_status` as False because we want the actual response @@ -445,7 +444,7 @@ class TalentPool(Cog, name="Talentpool"): async def edit_end_reason_command(self, ctx: Context, nomination_id: int, *, reason: str) -> None: """Edits the unnominate reason for the nomination with the given `id`.""" if len(reason) > REASON_MAX_CHARS: - await ctx.send(f":x: Maxiumum allowed characters for the end reason is {REASON_MAX_CHARS}.") + await ctx.send(f":x: Maximum allowed characters for the end reason is {REASON_MAX_CHARS}.") return try: @@ -562,7 +561,7 @@ class TalentPool(Cog, name="Talentpool"): actor = await get_or_fetch_member(guild, actor_id) reason = site_entry["reason"] or "*None*" - created = time.format_infraction(site_entry["inserted_at"]) + created = time.discord_timestamp(site_entry["inserted_at"]) entries.append( f"Actor: {actor.mention if actor else actor_id}\nCreated: {created}\nReason: {reason}" ) @@ -571,7 +570,7 @@ class TalentPool(Cog, name="Talentpool"): active = nomination_object["active"] - start_date = time.format_infraction(nomination_object["inserted_at"]) + start_date = time.discord_timestamp(nomination_object["inserted_at"]) if active: lines = textwrap.dedent( f""" @@ -585,7 +584,7 @@ class TalentPool(Cog, name="Talentpool"): """ ) else: - end_date = time.format_infraction(nomination_object["ended_at"]) + end_date = time.discord_timestamp(nomination_object["ended_at"]) lines = textwrap.dedent( f""" =============== diff --git a/bot/exts/recruitment/talentpool/_review.py b/bot/exts/recruitment/talentpool/_review.py index 0e7194892..b4d177622 100644 --- a/bot/exts/recruitment/talentpool/_review.py +++ b/bot/exts/recruitment/talentpool/_review.py @@ -17,10 +17,10 @@ from bot.api import ResponseCodeError from bot.bot import Bot from bot.constants import Channels, Colours, Emojis, Guild, Roles from bot.log import get_logger +from bot.utils import time from bot.utils.members import get_or_fetch_member from bot.utils.messages import count_unique_users_reaction, pin_no_system_message from bot.utils.scheduling import Scheduler -from bot.utils.time import get_time_delta, time_since if typing.TYPE_CHECKING: from bot.exts.recruitment.talentpool._cog import TalentPool @@ -273,7 +273,7 @@ class Reviewer: last_channel = user_activity["top_channel_activity"][-1] channels += f", and {last_channel[1]} in {last_channel[0]}" - joined_at_formatted = time_since(member.joined_at) + joined_at_formatted = time.format_relative(member.joined_at) review = ( f"{member.name} joined the server **{joined_at_formatted}**" f" and has **{messages} messages**{channels}." @@ -321,7 +321,7 @@ class Reviewer: infractions += ", with the last infraction issued " # Infractions were ordered by time since insertion descending. - infractions += get_time_delta(infraction_list[0]['inserted_at']) + infractions += time.format_relative(infraction_list[0]['inserted_at']) return f"They have {infractions}." 
@@ -365,7 +365,7 @@ class Reviewer: nomination_times = f"{num_entries} times" if num_entries > 1 else "once" rejection_times = f"{len(history)} times" if len(history) > 1 else "once" - end_time = time_since(isoparse(history[0]['ended_at'])) + end_time = time.format_relative(history[0]['ended_at']) review = ( f"They were nominated **{nomination_times}** before" diff --git a/bot/exts/utils/bot.py b/bot/exts/utils/bot.py index 788692777..8f0094bc9 100644 --- a/bot/exts/utils/bot.py +++ b/bot/exts/utils/bot.py @@ -1,7 +1,6 @@ -from contextlib import suppress from typing import Optional -from discord import Embed, Forbidden, TextChannel, Thread +from discord import Embed, TextChannel from discord.ext.commands import Cog, Context, command, group, has_any_role from bot.bot import Bot @@ -17,20 +16,6 @@ class BotCog(Cog, name="Bot"): def __init__(self, bot: Bot): self.bot = bot - @Cog.listener() - async def on_thread_join(self, thread: Thread) -> None: - """ - Try to join newly created threads. - - Despite the event name being misleading, this is dispatched when new threads are created. - """ - if thread.me: - # We have already joined this thread - return - - with suppress(Forbidden): - await thread.join() - @group(invoke_without_command=True, name="bot", hidden=True) async def botinfo_group(self, ctx: Context) -> None: """Bot informational commands.""" diff --git a/bot/exts/utils/reminders.py b/bot/exts/utils/reminders.py index 90677b2dd..ad82d49c9 100644 --- a/bot/exts/utils/reminders.py +++ b/bot/exts/utils/reminders.py @@ -13,13 +13,12 @@ from bot.constants import Guild, Icons, MODERATION_ROLES, POSITIVE_REPLIES, Role from bot.converters import Duration, UnambiguousUser from bot.log import get_logger from bot.pagination import LinePaginator -from bot.utils import scheduling +from bot.utils import scheduling, time from bot.utils.checks import has_any_role_check, has_no_roles_check from bot.utils.lock import lock_arg from bot.utils.members import get_or_fetch_member from bot.utils.messages import send_denial from bot.utils.scheduling import Scheduler -from bot.utils.time import TimestampFormats, discord_timestamp log = get_logger(__name__) @@ -67,20 +66,19 @@ class Reminders(Cog): else: self.schedule_reminder(reminder) - def ensure_valid_reminder(self, reminder: dict) -> t.Tuple[bool, discord.User, discord.TextChannel]: - """Ensure reminder author and channel can be fetched otherwise delete the reminder.""" - user = self.bot.get_user(reminder['author']) + def ensure_valid_reminder(self, reminder: dict) -> t.Tuple[bool, discord.TextChannel]: + """Ensure reminder channel can be fetched otherwise delete the reminder.""" channel = self.bot.get_channel(reminder['channel_id']) is_valid = True - if not user or not channel: + if not channel: is_valid = False log.info( f"Reminder {reminder['id']} invalid: " - f"User {reminder['author']}={user}, Channel {reminder['channel_id']}={channel}." + f"Channel {reminder['channel_id']}={channel}." 
) scheduling.create_task(self.bot.api_client.delete(f"bot/reminders/{reminder['id']}")) - return is_valid, user, channel + return is_valid, channel @staticmethod async def _send_confirmation( @@ -169,9 +167,9 @@ class Reminders(Cog): self.schedule_reminder(reminder) @lock_arg(LOCK_NAMESPACE, "reminder", itemgetter("id"), raise_error=True) - async def send_reminder(self, reminder: dict, expected_time: datetime = None) -> None: + async def send_reminder(self, reminder: dict, expected_time: t.Optional[time.Timestamp] = None) -> None: """Send the reminder.""" - is_valid, user, channel = self.ensure_valid_reminder(reminder) + is_valid, channel = self.ensure_valid_reminder(reminder) if not is_valid: # No need to cancel the task too; it'll simply be done once this coroutine returns. return @@ -207,7 +205,7 @@ class Reminders(Cog): f"There was an error when trying to reply to a reminder invocation message, {e}, " "fall back to using jump_url" ) - await channel.send(content=f"{user.mention} {additional_mentions}", embed=embed) + await channel.send(content=f"<@{reminder['author']}> {additional_mentions}", embed=embed) log.debug(f"Deleting reminder #{reminder['id']} (the user has been reminded).") await self.bot.api_client.delete(f"bot/reminders/{reminder['id']}") @@ -310,7 +308,8 @@ class Reminders(Cog): } ) - mention_string = f"Your reminder will arrive on {discord_timestamp(expiration, TimestampFormats.DAY_TIME)}" + formatted_time = time.discord_timestamp(expiration, time.TimestampFormats.DAY_TIME) + mention_string = f"Your reminder will arrive on {formatted_time}" if mentions: mention_string += f" and will mention {len(mentions)} other(s)" @@ -347,8 +346,7 @@ class Reminders(Cog): for content, remind_at, id_, mentions in reminders: # Parse and humanize the time, make it pretty :D - remind_datetime = isoparse(remind_at) - time = discord_timestamp(remind_datetime, TimestampFormats.RELATIVE) + expiry = time.format_relative(remind_at) mentions = ", ".join([ # Both Role and User objects have the `name` attribute @@ -357,7 +355,7 @@ class Reminders(Cog): mention_string = f"\n**Mentions:** {mentions}" if mentions else "" text = textwrap.dedent(f""" - **Reminder #{id_}:** *expires {time}* (ID: {id_}){mention_string} + **Reminder #{id_}:** *expires {expiry}* (ID: {id_}){mention_string} {content} """).strip() diff --git a/bot/exts/utils/snekbox.py b/bot/exts/utils/snekbox.py index fbfc58d0b..cc3a2e1d7 100644 --- a/bot/exts/utils/snekbox.py +++ b/bot/exts/utils/snekbox.py @@ -7,7 +7,8 @@ from functools import partial from signal import Signals from typing import Optional, Tuple -from discord import HTTPException, Message, NotFound, Reaction, User +from botcore.regex import FORMATTED_CODE_REGEX, RAW_CODE_REGEX +from discord import AllowedMentions, HTTPException, Message, NotFound, Reaction, User from discord.ext.commands import Cog, Context, command, guild_only from bot.bot import Bot @@ -20,21 +21,6 @@ from bot.utils.messages import wait_for_deletion log = get_logger(__name__) ESCAPE_REGEX = re.compile("[`\u202E\u200B]{3,}") -FORMATTED_CODE_REGEX = re.compile( - r"(?P<delim>(?P<block>```)|``?)" # code delimiter: 1-3 backticks; (?P=block) only matches if it's a block - r"(?(block)(?:(?P<lang>[a-z]+)\n)?)" # if we're in a block, match optional language (only letters plus newline) - r"(?:[ \t]*\n)*" # any blank (empty or tabs/spaces only) lines before the code - r"(?P<code>.*?)" # extract all code inside the markup - r"\s*" # any more whitespace before the end of the code markup - r"(?P=delim)", # match the 
exact same delimiter from the start again - re.DOTALL | re.IGNORECASE # "." also matches newlines, case insensitive -) -RAW_CODE_REGEX = re.compile( - r"^(?:[ \t]*\n)*" # any blank (empty or tabs/spaces only) lines before the code - r"(?P<code>.*?)" # extract all the rest as code - r"\s*$", # any trailing whitespace until the end of the string - re.DOTALL # "." also matches newlines -) MAX_PASTE_LEN = 10000 @@ -218,7 +204,8 @@ class Snekbox(Cog): if filter_triggered: response = await ctx.send("Attempt to circumvent filter detected. Moderator team has been alerted.") else: - response = await ctx.send(msg) + allowed_mentions = AllowedMentions(everyone=False, roles=False, users=[ctx.author]) + response = await ctx.send(msg, allowed_mentions=allowed_mentions) scheduling.create_task(wait_for_deletion(response, (ctx.author.id,)), event_loop=self.bot.loop) log.info(f"{ctx.author}'s job had a return code of {results['returncode']}") diff --git a/bot/exts/utils/thread_bumper.py b/bot/exts/utils/thread_bumper.py new file mode 100644 index 000000000..35057f1fe --- /dev/null +++ b/bot/exts/utils/thread_bumper.py @@ -0,0 +1,147 @@ +import typing as t + +import discord +from async_rediscache import RedisCache +from discord.ext import commands + +from bot import constants +from bot.bot import Bot +from bot.log import get_logger +from bot.pagination import LinePaginator +from bot.utils import channel, scheduling + +log = get_logger(__name__) + + +class ThreadBumper(commands.Cog): + """Cog that allows users to add the current thread to a list of threads that get reopened on archive.""" + + # RedisCache[discord.Thread.id, "sentinel"] + threads_to_bump = RedisCache() + + def __init__(self, bot: Bot): + self.bot = bot + self.init_task = scheduling.create_task(self.ensure_bumped_threads_are_active(), event_loop=self.bot.loop) + + async def unarchive_threads_not_manually_archived(self, threads: list[discord.Thread]) -> None: + """ + Iterate through and unarchive any threads that weren't manually archived recently. + + This is done by extracting the manually archived threads from the audit log. + + Only the last 200 thread_update logs are checked, + as this is assumed to be more than enough to cover bot downtime. + """ + guild = self.bot.get_guild(constants.Guild.id) + + recent_manually_archived_thread_ids = [] + async for thread_update in guild.audit_logs(limit=200, action=discord.AuditLogAction.thread_update): + if getattr(thread_update.after, "archived", False): + recent_manually_archived_thread_ids.append(thread_update.target.id) + + for thread in threads: + if thread.id in recent_manually_archived_thread_ids: + log.info( + "#%s (%d) was manually archived. 
Leaving archived, and removing from bumped threads.", + thread.name, + thread.id + ) + await self.threads_to_bump.delete(thread.id) + else: + await thread.edit(archived=False) + + async def ensure_bumped_threads_are_active(self) -> None: + """Ensure bumped threads are active, since threads could have been archived while the bot was down.""" + await self.bot.wait_until_guild_available() + + threads_to_maybe_bump = [] + for thread_id, _ in await self.threads_to_bump.items(): + try: + thread = await channel.get_or_fetch_channel(thread_id) + except discord.NotFound: + log.info("Thread %d has been deleted, removing from bumped threads.", thread_id) + await self.threads_to_bump.delete(thread_id) + continue + + if thread.archived: + threads_to_maybe_bump.append(thread) + + await self.unarchive_threads_not_manually_archived(threads_to_maybe_bump) + + @commands.group(name="bump") + async def thread_bump_group(self, ctx: commands.Context) -> None: + """A group of commands to manage the bumping of threads.""" + if not ctx.invoked_subcommand: + await ctx.send_help(ctx.command) + + @thread_bump_group.command(name="add", aliases=("a",)) + async def add_thread_to_bump_list(self, ctx: commands.Context, thread: t.Optional[discord.Thread]) -> None: + """Add a thread to the bump list.""" + await self.init_task + + if not thread: + if isinstance(ctx.channel, discord.Thread): + thread = ctx.channel + else: + raise commands.BadArgument("You must provide a thread, or run this command within a thread.") + + if await self.threads_to_bump.contains(thread.id): + raise commands.BadArgument("This thread is already in the bump list.") + + await self.threads_to_bump.set(thread.id, "sentinel") + await ctx.send(f":ok_hand:{thread.mention} has been added to the bump list.") + + @thread_bump_group.command(name="remove", aliases=("r", "rem", "d", "del", "delete")) + async def remove_thread_from_bump_list(self, ctx: commands.Context, thread: t.Optional[discord.Thread]) -> None: + """Remove a thread from the bump list.""" + await self.init_task + + if not thread: + if isinstance(ctx.channel, discord.Thread): + thread = ctx.channel + else: + raise commands.BadArgument("You must provide a thread, or run this command within a thread.") + + if not await self.threads_to_bump.contains(thread.id): + raise commands.BadArgument("This thread is not in the bump list.") + + await self.threads_to_bump.delete(thread.id) + await ctx.send(f":ok_hand: {thread.mention} has been removed from the bump list.") + + @thread_bump_group.command(name="list", aliases=("get",)) + async def list_all_threads_in_bump_list(self, ctx: commands.Context) -> None: + """List all the threads in the bump list.""" + await self.init_task + + lines = [f"<#{k}>" for k, _ in await self.threads_to_bump.items()] + embed = discord.Embed( + title="Threads in the bump list", + colour=constants.Colours.blue + ) + await LinePaginator.paginate(lines, ctx, embed) + + @commands.Cog.listener() + async def on_thread_update(self, _: discord.Thread, after: discord.Thread) -> None: + """ + Listen for thread updates and check if the thread has been archived. + + If the thread has been archived, and is in the bump list, un-archive it. 
+ """ + await self.init_task + + if not after.archived: + return + + if await self.threads_to_bump.contains(after.id): + await self.unarchive_threads_not_manually_archived([after]) + + async def cog_check(self, ctx: commands.Context) -> bool: + """Only allow staff & partner roles to invoke the commands in this cog.""" + return await commands.has_any_role( + *constants.STAFF_PARTNERS_COMMUNITY_ROLES + ).predicate(ctx) + + +def setup(bot: Bot) -> None: + """Load the ThreadBumper cog.""" + bot.add_cog(ThreadBumper(bot)) diff --git a/bot/exts/utils/utils.py b/bot/exts/utils/utils.py index 821cebd8c..2a074788e 100644 --- a/bot/exts/utils/utils.py +++ b/bot/exts/utils/utils.py @@ -13,8 +13,7 @@ from bot.converters import Snowflake from bot.decorators import in_whitelist from bot.log import get_logger from bot.pagination import LinePaginator -from bot.utils import messages -from bot.utils.time import time_since +from bot.utils import messages, time log = get_logger(__name__) @@ -49,7 +48,7 @@ class Utils(Cog): self.bot = bot @command() - @in_whitelist(channels=(Channels.bot_commands, Channels.discord_py), roles=STAFF_PARTNERS_COMMUNITY_ROLES) + @in_whitelist(channels=(Channels.bot_commands, Channels.discord_bots), roles=STAFF_PARTNERS_COMMUNITY_ROLES) async def charinfo(self, ctx: Context, *, characters: str) -> None: """Shows you information on up to 50 unicode characters.""" match = re.match(r"<(a?):(\w+):(\d+)>", characters) @@ -173,7 +172,7 @@ class Utils(Cog): lines = [] for snowflake in snowflakes: created_at = snowflake_time(snowflake) - lines.append(f"**{snowflake}**\nCreated at {created_at} ({time_since(created_at)}).") + lines.append(f"**{snowflake}**\nCreated at {created_at} ({time.format_relative(created_at)}).") await LinePaginator.paginate( lines, diff --git a/bot/monkey_patches.py b/bot/monkey_patches.py index b5c0de8d9..4840fa454 100644 --- a/bot/monkey_patches.py +++ b/bot/monkey_patches.py @@ -1,3 +1,4 @@ +import re from datetime import timedelta import arrow @@ -5,9 +6,9 @@ from discord import Forbidden, http from discord.ext import commands from bot.log import get_logger -from bot.utils.regex import MESSAGE_ID_RE log = get_logger(__name__) +MESSAGE_ID_RE = re.compile(r'(?P<message_id>[0-9]{15,20})$') class Command(commands.Command): diff --git a/bot/resources/tags/contribute.md b/bot/resources/tags/contribute.md index 070975646..50c5cd11f 100644 --- a/bot/resources/tags/contribute.md +++ b/bot/resources/tags/contribute.md @@ -7,6 +7,6 @@ Looking to contribute to Open Source Projects for the first time? Want to add a • [Site](https://github.com/python-discord/site) - resources, guides, and more **Where to start** -1. Read our [contributing guidelines](https://pythondiscord.com/pages/guides/pydis-guides/contributing/) +1. Read our [contribution guide](https://pythondiscord.com/pages/guides/pydis-guides/contributing/) 2. Chat with us in <#635950537262759947> if you're ready to jump in or have any questions 3. Open an issue or ask to be assigned to an issue to work on diff --git a/bot/resources/tags/off-topic-names.md b/bot/resources/tags/off-topic-names.md new file mode 100644 index 000000000..5d0614aaa --- /dev/null +++ b/bot/resources/tags/off-topic-names.md @@ -0,0 +1,10 @@ +**Off-topic channels** + +There are three off-topic channels: +• <#291284109232308226> +• <#463035241142026251> +• <#463035268514185226> + +The channel names change every night at midnight UTC and are often fun meta references to jokes or conversations that happened on the server. 
+ +See our [off-topic etiquette](https://pythondiscord.com/pages/resources/guides/off-topic-etiquette/) page for more guidance on how the channels should be used. diff --git a/bot/resources/tags/off-topic.md b/bot/resources/tags/off-topic.md deleted file mode 100644 index 287224d7f..000000000 --- a/bot/resources/tags/off-topic.md +++ /dev/null @@ -1,10 +0,0 @@ -**Off-topic channels** - -There are three off-topic channels: -• <#463035268514185226> -• <#463035241142026251> -• <#291284109232308226> - -Their names change randomly every 24 hours, but you can always find them under the `OFF-TOPIC/GENERAL` category in the channel list. - -Please read our [off-topic etiquette](https://pythondiscord.com/pages/resources/guides/off-topic-etiquette/) before participating in conversations. diff --git a/bot/resources/tags/ot.md b/bot/resources/tags/ot.md new file mode 100644 index 000000000..636e59110 --- /dev/null +++ b/bot/resources/tags/ot.md @@ -0,0 +1,3 @@ +**Off-topic channel:** <#463035268514185226> + +Please read our [off-topic etiquette](https://pythondiscord.com/pages/resources/guides/off-topic-etiquette/) before participating in conversations. diff --git a/bot/resources/tags/strip-gotcha.md b/bot/resources/tags/strip-gotcha.md new file mode 100644 index 000000000..9ad495cd2 --- /dev/null +++ b/bot/resources/tags/strip-gotcha.md @@ -0,0 +1,17 @@ +When working with `strip`, `lstrip`, or `rstrip`, you might think that this would be the case: +```py +>>> "Monty Python".rstrip(" Python") +"Monty" +``` +While this seems intuitive, it would actually result in: +```py +"M" +``` +as Python interprets the argument to these functions as a set of characters rather than a substring. + +If you want to remove a prefix/suffix from a string, `str.removeprefix` and `str.removesuffix` are recommended and were added in 3.9. +```py +>>> "Monty Python".removesuffix(" Python") +"Monty" +``` +See the documentation of [str.removeprefix](https://docs.python.org/3.10/library/stdtypes.html#str.removeprefix) and [str.removesuffix](https://docs.python.org/3.10/library/stdtypes.html#str.removesuffix) for more information. diff --git a/bot/resources/tags/traceback.md b/bot/resources/tags/traceback.md index 321737aac..e21fa6c6e 100644 --- a/bot/resources/tags/traceback.md +++ b/bot/resources/tags/traceback.md @@ -1,18 +1,15 @@ Please provide the full traceback for your exception in order to help us identify your issue. +While the last line of the error message tells us what kind of error you got, +the full traceback will tell us which line, and other critical information to solve your problem. +Please avoid screenshots so we can copy and paste parts of the message. A full traceback could look like: ```py Traceback (most recent call last): - File "tiny", line 3, in - do_something() - File "tiny", line 2, in do_something - a = 6 / b -ZeroDivisionError: division by zero + File "my_file.py", line 5, in <module> + add_three("6") + File "my_file.py", line 2, in add_three + a = num + 3 +TypeError: can only concatenate str (not "int") to str ``` -The best way to read your traceback is bottom to top. - -• Identify the exception raised (in this case `ZeroDivisionError`) -• Make note of the line number (in this case `2`), and navigate there in your program. -• Try to understand why the error occurred (in this case because `b` is `0`). 
- -To read more about exceptions and errors, please refer to the [PyDis Wiki](https://pythondiscord.com/pages/guides/pydis-guides/asking-good-questions/#examining-tracebacks) or the [official Python tutorial](https://docs.python.org/3.7/tutorial/errors.html). +If the traceback is long, use [our pastebin](https://paste.pythondiscord.com/). diff --git a/bot/utils/regex.py b/bot/utils/regex.py deleted file mode 100644 index 9dc1eba9d..000000000 --- a/bot/utils/regex.py +++ /dev/null @@ -1,15 +0,0 @@ -import re - -INVITE_RE = re.compile( - r"(discord([\.,]|dot)gg|" # Could be discord.gg/ - r"discord([\.,]|dot)com(\/|slash)invite|" # or discord.com/invite/ - r"discordapp([\.,]|dot)com(\/|slash)invite|" # or discordapp.com/invite/ - r"discord([\.,]|dot)me|" # or discord.me - r"discord([\.,]|dot)li|" # or discord.li - r"discord([\.,]|dot)io|" # or discord.io. - r"((?<!\w)([\.,]|dot))gg" # or .gg/ - r")([\/]|slash)" # / or 'slash' - r"(?P<invite>[a-zA-Z0-9\-]+)", # the invite code itself - flags=re.IGNORECASE -) -MESSAGE_ID_RE = re.compile(r'(?P<message_id>[0-9]{15,20})$') diff --git a/bot/utils/scheduling.py b/bot/utils/scheduling.py index 7b4c8e2de..23acacf74 100644 --- a/bot/utils/scheduling.py +++ b/bot/utils/scheduling.py @@ -5,6 +5,8 @@ import typing as t from datetime import datetime from functools import partial +from arrow import Arrow + from bot.log import get_logger @@ -58,7 +60,7 @@ class Scheduler: self._scheduled_tasks[task_id] = task self._log.debug(f"Scheduled task #{task_id} {id(task)}.") - def schedule_at(self, time: datetime, task_id: t.Hashable, coroutine: t.Coroutine) -> None: + def schedule_at(self, time: t.Union[datetime, Arrow], task_id: t.Hashable, coroutine: t.Coroutine) -> None: """ Schedule `coroutine` to be executed at the given `time`. diff --git a/bot/utils/time.py b/bot/utils/time.py index eaa9b72e9..a0379c3ef 100644 --- a/bot/utils/time.py +++ b/bot/utils/time.py @@ -1,15 +1,12 @@ import datetime import re from enum import Enum -from typing import Optional, Union +from time import struct_time +from typing import Literal, Optional, Union, overload import arrow -import dateutil.parser from dateutil.relativedelta import relativedelta -RFC1123_FORMAT = "%a, %d %b %Y %H:%M:%S GMT" -DISCORD_TIMESTAMP_REGEX = re.compile(r"<t:(\d+):f>") - _DURATION_REGEX = re.compile( r"((?P<years>\d+?) ?(years|year|Y|y) ?)?" r"((?P<months>\d+?) ?(months|month|m) ?)?" @@ -20,8 +17,19 @@ _DURATION_REGEX = re.compile( r"((?P<seconds>\d+?) ?(seconds|second|S|s))?" ) - -ValidTimestamp = Union[int, datetime.datetime, datetime.date, datetime.timedelta, relativedelta] +# All supported types for the single-argument overload of arrow.get(). tzinfo is excluded because +# it's too implicit of a way for the caller to specify that they want the current time. +Timestamp = Union[ + arrow.Arrow, + datetime.datetime, + datetime.date, + struct_time, + int, # POSIX timestamp + float, # POSIX timestamp + str, # ISO 8601-formatted string + tuple[int, int, int], # ISO calendar tuple +] +_Precision = Literal["years", "months", "days", "hours", "minutes", "seconds"] class TimestampFormats(Enum): @@ -42,7 +50,7 @@ class TimestampFormats(Enum): def _stringify_time_unit(value: int, unit: str) -> str: """ - Returns a string to represent a value and time unit, ensuring that it uses the right plural form of the unit. + Return a string to represent a value and time unit, ensuring the unit's correct plural form is used. 
>>> _stringify_time_unit(1, "seconds") "1 second" @@ -61,33 +69,140 @@ def _stringify_time_unit(value: int, unit: str) -> str: return f"{value} {unit}" -def discord_timestamp(timestamp: ValidTimestamp, format: TimestampFormats = TimestampFormats.DATE_TIME) -> str: - """Create and format a Discord flavored markdown timestamp.""" - if format not in TimestampFormats: - raise ValueError(f"Format can only be one of {', '.join(TimestampFormats.args)}, not {format}.") +def discord_timestamp(timestamp: Timestamp, format: TimestampFormats = TimestampFormats.DATE_TIME) -> str: + """ + Format a timestamp as a Discord-flavored Markdown timestamp. + + `timestamp` can be any type supported by the single-arg `arrow.get()`, except for a `tzinfo`. + """ + timestamp = int(arrow.get(timestamp).timestamp()) + return f"<t:{timestamp}:{format.value}>" + + +# region humanize_delta overloads +@overload +def humanize_delta( + arg1: Union[relativedelta, Timestamp], + /, + *, + precision: _Precision = "seconds", + max_units: int = 6, + absolute: bool = True, +) -> str: + ... + + +@overload +def humanize_delta( + end: Timestamp, + start: Timestamp, + /, + *, + precision: _Precision = "seconds", + max_units: int = 6, + absolute: bool = True, +) -> str: + ... + + +@overload +def humanize_delta( + *, + years: int = 0, + months: int = 0, + weeks: float = 0, + days: float = 0, + hours: float = 0, + minutes: float = 0, + seconds: float = 0, + precision: _Precision = "seconds", + max_units: int = 6, + absolute: bool = True, +) -> str: + ... +# endregion + + +def humanize_delta( + *args, + precision: _Precision = "seconds", + max_units: int = 6, + absolute: bool = True, + **kwargs, +) -> str: + """ + Return a human-readable version of a time duration. + + `precision` is the smallest unit of time to include (e.g. "seconds", "minutes"). - # Convert each possible timestamp class to an integer. - if isinstance(timestamp, datetime.datetime): - timestamp = (timestamp - arrow.get(0)).total_seconds() - elif isinstance(timestamp, datetime.date): - timestamp = (timestamp - arrow.get(0)).total_seconds() - elif isinstance(timestamp, datetime.timedelta): - timestamp = timestamp.total_seconds() - elif isinstance(timestamp, relativedelta): - timestamp = timestamp.seconds + `max_units` is the maximum number of units of time to include. + Count units from largest to smallest (e.g. count days before months). - return f"<t:{int(timestamp)}:{format.value}>" + Use the absolute value of the duration if `absolute` is True. + Usage: -def humanize_delta(delta: relativedelta, precision: str = "seconds", max_units: int = 6) -> str: - """ - Returns a human-readable version of the relativedelta. + Keyword arguments specifying values for time units, to construct a `relativedelta` and humanize + the duration represented by it: + + >>> humanize_delta(days=2, hours=16, seconds=23) + '2 days, 16 hours and 23 seconds' + + **One** `relativedelta` object, to humanize the duration represented by it: + + >>> humanize_delta(relativedelta(years=12, months=6)) + '12 years and 6 months' + + Note that `leapdays` and absolute info (singular names) will be ignored during humanization. 
+ + **One** timestamp of a type supported by the single-arg `arrow.get()`, except for `tzinfo`, + to humanize the duration between it and the current time: + + >>> humanize_delta('2021-08-06T12:43:01Z', absolute=True) # now = 2021-08-06T12:33:33Z + '9 minutes and 28 seconds' + + >>> humanize_delta('2021-08-06T12:43:01Z', absolute=False) # now = 2021-08-06T12:33:33Z + '-9 minutes and -28 seconds' + + **Two** timestamps, each of a type supported by the single-arg `arrow.get()`, except for + `tzinfo`, to humanize the duration between them: + + >>> humanize_delta(datetime.datetime(2020, 1, 1), '2021-01-01T12:00:00Z', absolute=False) + '1 year and 12 hours' + + >>> humanize_delta('2021-01-01T12:00:00Z', datetime.datetime(2020, 1, 1), absolute=False) + '-1 years and -12 hours' + + Note that order of the arguments can result in a different output even if `absolute` is True: + + >>> x = datetime.datetime(3000, 11, 1) + >>> y = datetime.datetime(3000, 9, 2) + >>> humanize_delta(y, x, absolute=True), humanize_delta(x, y, absolute=True) + ('1 month and 30 days', '1 month and 29 days') - precision specifies the smallest unit of time to include (e.g. "seconds", "minutes"). - max_units specifies the maximum number of units of time to include (e.g. 1 may include days but not hours). + This is due to the nature of `relativedelta`; it does not represent a fixed period of time. + Instead, it's relative to the `datetime` to which it's added to get the other `datetime`. + In the example, the difference arises because all months don't have the same number of days. """ + if args and kwargs: + raise ValueError("Unsupported combination of positional and keyword arguments.") + + if len(args) == 0: + delta = relativedelta(**kwargs) + elif len(args) == 1 and isinstance(args[0], relativedelta): + delta = args[0] + elif len(args) <= 2: + end = arrow.get(args[0]) + start = arrow.get(args[1]) if len(args) == 2 else arrow.utcnow() + + delta = relativedelta(end.datetime, start.datetime) + if absolute: + delta = abs(delta) + else: + raise ValueError(f"Received {len(args)} positional arguments, but expected 1 or 2.") + if max_units <= 0: - raise ValueError("max_units must be positive") + raise ValueError("max_units must be positive.") units = ( ("years", delta.years), @@ -98,7 +213,7 @@ def humanize_delta(delta: relativedelta, precision: str = "seconds", max_units: ("seconds", delta.seconds), ) - # Add the time units that are >0, but stop at accuracy or max_units. + # Add the time units that are >0, but stop at precision or max_units. time_strings = [] unit_count = 0 for unit, value in units: @@ -109,7 +224,7 @@ def humanize_delta(delta: relativedelta, precision: str = "seconds", max_units: if unit == precision or unit_count >= max_units: break - # Add the 'and' between the last two units, if necessary + # Add the 'and' between the last two units, if necessary. if len(time_strings) > 1: time_strings[-1] = f"{time_strings[-2]} and {time_strings[-1]}" del time_strings[-2] @@ -123,19 +238,12 @@ def humanize_delta(delta: relativedelta, precision: str = "seconds", max_units: return humanized -def get_time_delta(time_string: str) -> str: - """Returns the time in human-readable time delta format.""" - date_time = dateutil.parser.isoparse(time_string) - time_delta = time_since(date_time) - - return time_delta - - def parse_duration_string(duration: str) -> Optional[relativedelta]: """ - Converts a `duration` string to a relativedelta object. + Convert a `duration` string to a relativedelta object. 
+ + The following symbols are supported for each unit of time: - The function supports the following symbols for each unit of time: - years: `Y`, `y`, `year`, `years` - months: `m`, `month`, `months` - weeks: `w`, `W`, `week`, `weeks` @@ -143,8 +251,9 @@ def parse_duration_string(duration: str) -> Optional[relativedelta]: - hours: `H`, `h`, `hour`, `hours` - minutes: `M`, `minute`, `minutes` - seconds: `S`, `s`, `second`, `seconds` + The units need to be provided in descending order of magnitude. - If the string does represent a durationdelta object, it will return None. + Return None if the `duration` string cannot be parsed according to the symbols above. """ match = _DURATION_REGEX.fullmatch(duration) if not match: @@ -157,76 +266,63 @@ def parse_duration_string(duration: str) -> Optional[relativedelta]: def relativedelta_to_timedelta(delta: relativedelta) -> datetime.timedelta: - """Converts a relativedelta object to a timedelta object.""" + """Convert a relativedelta object to a timedelta object.""" utcnow = arrow.utcnow() return utcnow + delta - utcnow -def time_since(past_datetime: datetime.datetime) -> str: - """Takes a datetime and returns a discord timestamp that describes how long ago that datetime was.""" - return discord_timestamp(past_datetime, TimestampFormats.RELATIVE) - - -def parse_rfc1123(stamp: str) -> datetime.datetime: - """Parse RFC1123 time string into datetime.""" - return datetime.datetime.strptime(stamp, RFC1123_FORMAT).replace(tzinfo=datetime.timezone.utc) +def format_relative(timestamp: Timestamp) -> str: + """ + Format `timestamp` as a relative Discord timestamp. + A relative timestamp describes how much time has elapsed since `timestamp` or how much time + remains until `timestamp` is reached. -def format_infraction(timestamp: str) -> str: - """Format an infraction timestamp to a discord timestamp.""" - return discord_timestamp(dateutil.parser.isoparse(timestamp)) + `timestamp` can be any type supported by the single-arg `arrow.get()`, except for a `tzinfo`. + """ + return discord_timestamp(timestamp, TimestampFormats.RELATIVE) -def format_infraction_with_duration( - date_to: Optional[str], - date_from: Optional[datetime.datetime] = None, +def format_with_duration( + timestamp: Optional[Timestamp], + other_timestamp: Optional[Timestamp] = None, max_units: int = 2, - absolute: bool = True ) -> Optional[str]: """ - Return `date_to` formatted as a discord timestamp with the timestamp duration since `date_from`. + Return `timestamp` formatted as a discord timestamp with the timestamp duration since `other_timestamp`. + + `timestamp` and `other_timestamp` can be any type supported by the single-arg `arrow.get()`, + except for a `tzinfo`. Use the current time if `other_timestamp` is None or unspecified. - `max_units` specifies the maximum number of units of time to include in the duration. For - example, a value of 1 may include days but not hours. + `max_units` is forwarded to `time.humanize_delta`. See its documentation for more information. - If `absolute` is True, the absolute value of the duration delta is used. This prevents negative - values in the case that `date_to` is in the past relative to `date_from`. + Return None if `timestamp` is None. 
""" - if not date_to: + if timestamp is None: return None - date_to_formatted = format_infraction(date_to) - - date_from = date_from or datetime.datetime.now(datetime.timezone.utc) - date_to = dateutil.parser.isoparse(date_to).replace(microsecond=0) + if other_timestamp is None: + other_timestamp = arrow.utcnow() - delta = relativedelta(date_to, date_from) - if absolute: - delta = abs(delta) + formatted_timestamp = discord_timestamp(timestamp) + duration = humanize_delta(timestamp, other_timestamp, max_units=max_units) - duration = humanize_delta(delta, max_units=max_units) - duration_formatted = f" ({duration})" if duration else "" + return f"{formatted_timestamp} ({duration})" - return f"{date_to_formatted}{duration_formatted}" - -def until_expiration( - expiry: Optional[str] -) -> Optional[str]: +def until_expiration(expiry: Optional[Timestamp]) -> str: """ - Get the remaining time until infraction's expiration, in a discord timestamp. + Get the remaining time until an infraction's expiration as a Discord timestamp. - Returns a human-readable version of the remaining duration between arrow.utcnow() and an expiry. - Similar to time_since, except that this function doesn't error on a null input - and return null if the expiry is in the paste - """ - if not expiry: - return None + `expiry` can be any type supported by the single-arg `arrow.get()`, except for a `tzinfo`. - now = arrow.utcnow() - since = dateutil.parser.isoparse(expiry).replace(microsecond=0) + Return "Permanent" if `expiry` is None. Return "Expired" if `expiry` is in the past. + """ + if expiry is None: + return "Permanent" - if since < now: - return None + expiry = arrow.get(expiry) + if expiry < arrow.utcnow(): + return "Expired" - return discord_timestamp(since, TimestampFormats.RELATIVE) + return format_relative(expiry) diff --git a/config-default.yml b/config-default.yml index 1e04f5844..dae923158 100644 --- a/config-default.yml +++ b/config-default.yml @@ -174,7 +174,7 @@ guild: how_to_get_help: 704250143020417084 # Topical - discord_py: 343944376055103488 + discord_bots: 343944376055103488 # Logs attachment_log: &ATTACH_LOG 649243850006855680 @@ -513,19 +513,16 @@ help_channels: # Prefix for help channel names name_prefix: 'help-' - # Notify if more available channels are needed but there are no more dormant ones - notify: true + notify_channel: *HELPERS # Channel in which to send notifications messages + notify_minutes: 15 # Minimum interval between none_remaining or running_low notifications - # Channel in which to send notifications - notify_channel: *HELPERS - - # Minimum interval between helper notifications - notify_minutes: 15 - - # Mention these roles in notifications - notify_roles: + notify_none_remaining: true # Pinging notification for the Helper role when no dormant channels remain + notify_none_remaining_roles: # Mention these roles in the none_remaining notification - *HELPERS_ROLE + notify_running_low: true # Non-pinging notification which is triggered when the channel count is equal or less than the threshold + notify_running_low_threshold: 4 # The amount of channels at which a running_low notification will be sent + redirect_output: delete_delay: 15 diff --git a/docker-compose.yml b/docker-compose.yml index 869d9acb6..ce78f65aa 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -38,6 +38,7 @@ services: metricity: << : *logging + << : *restart_policy restart: on-failure # USE_METRICITY=false will stop the container, so this ensures it only restarts on error depends_on: postgres: diff --git 
a/poetry.lock b/poetry.lock index d91941d45..6d3bd44bb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,10 @@ [[package]] name = "aio-pika" -version = "6.8.0" +version = "6.8.1" description = "Wrapper for the aiormq for asyncio and humans." category = "main" optional = false -python-versions = ">3.5.*, <4" +python-versions = ">=3.5, <4" [package.dependencies] aiormq = ">=3.2.3,<4" @@ -114,29 +114,17 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "attrs" -version = "21.2.0" +version = "21.4.0" description = "Classes Without Boilerplate" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] - -[[package]] -name = "backports.entry-points-selectable" -version = "1.1.0" -description = "Compatibility shim providing selectable entry points for older implementations" -category = "dev" -optional = false -python-versions = ">=2.7" - -[package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-flake8", "pytest-cov", "pytest-black (>=0.3.7)", "pytest-mypy", "pytest-checkdocs (>=2.4)", "pytest-enabler (>=1.0.1)"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] [[package]] name = "beautifulsoup4" @@ -154,6 +142,21 @@ html5lib = ["html5lib"] lxml = ["lxml"] [[package]] +name = "bot-core" +version = "1.2.0" +description = "Bot-Core provides the core functionality and utilities for the bots of the Python Discord community." +category = "main" +optional = false +python-versions = "3.9.*" + +[package.dependencies] +"discord.py" = {url = "https://github.com/Rapptz/discord.py/archive/45d498c1b76deaf3b394d17ccf56112fa691d160.zip"} + +[package.source] +type = "url" +url = "https://github.com/python-discord/bot-core/archive/511bcba1b0196cd498c707a525ea56921bd971db.zip" + +[[package]] name = "certifi" version = "2021.10.8" description = "Python package for providing Mozilla's CA Bundle." @@ -190,9 +193,9 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "charset-normalizer" -version = "2.0.7" +version = "2.0.10" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-category = "dev" +category = "main" optional = false python-versions = ">=3.5.0" @@ -233,34 +236,32 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" toml = ["toml"] [[package]] -name = "coveralls" -version = "2.2.0" -description = "Show coverage stats online via coveralls.io" -category = "dev" +name = "deepdiff" +version = "4.3.2" +description = "Deep Difference and Search of any Python object/data." +category = "main" optional = false -python-versions = ">= 3.5" +python-versions = ">=3.5" [package.dependencies] -coverage = ">=4.1,<6.0" -docopt = ">=0.6.1" -requests = ">=1.0.0" +ordered-set = ">=3.1.1" [package.extras] -yaml = ["PyYAML (>=3.10)"] +murmur = ["mmh3"] [[package]] -name = "deepdiff" -version = "4.3.2" -description = "Deep Difference and Search of any Python object/data." +name = "deprecated" +version = "1.2.13" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.dependencies] -ordered-set = ">=3.1.1" +wrapt = ">=1.10,<2" [package.extras] -murmur = ["mmh3"] +dev = ["tox", "bump2version (<1)", "sphinx (<2)", "importlib-metadata (<3)", "importlib-resources (<4)", "configparser (<5)", "sphinxcontrib-websupport (<2)", "zipp (<2)", "PyTest (<5)", "PyTest-Cov (<2.6)", "pytest", "pytest-cov"] [[package]] name = "discord.py" @@ -281,23 +282,16 @@ voice = ["PyNaCl (>=1.3.0,<1.5)"] [package.source] type = "url" url = "https://github.com/Rapptz/discord.py/archive/45d498c1b76deaf3b394d17ccf56112fa691d160.zip" + [[package]] name = "distlib" -version = "0.3.3" +version = "0.3.4" description = "Distribution utilities" category = "dev" optional = false python-versions = "*" [[package]] -name = "docopt" -version = "0.6.2" -description = "Pythonic argument parser, that will make you smile" -category = "dev" -optional = false -python-versions = "*" - -[[package]] name = "emoji" version = "0.6.0" description = "Emoji for Python" @@ -321,7 +315,7 @@ testing = ["pre-commit"] [[package]] name = "fakeredis" -version = "1.6.1" +version = "1.7.0" description = "Fake implementation of redis API for testing purposes." category = "main" optional = false @@ -329,7 +323,7 @@ python-versions = ">=3.5" [package.dependencies] packaging = "*" -redis = "<3.6.0" +redis = "<4.1.0" six = ">=1.12" sortedcontainers = "*" @@ -350,11 +344,11 @@ sgmllib3k = "*" [[package]] name = "filelock" -version = "3.3.1" +version = "3.4.2" description = "A platform independent file lock." -category = "dev" +category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.extras] docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"] @@ -492,14 +486,14 @@ pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_ve [[package]] name = "identify" -version = "2.3.0" +version = "2.4.2" description = "File identification library for Python" category = "dev" optional = false python-versions = ">=3.6.1" [package.extras] -license = ["editdistance-s"] +license = ["ukkonen"] [[package]] name = "idna" @@ -519,7 +513,7 @@ python-versions = "*" [[package]] name = "isort" -version = "5.9.3" +version = "5.10.1" description = "A Python utility / library to sort Python imports." 
category = "dev" optional = false @@ -533,7 +527,7 @@ plugins = ["setuptools"] [[package]] name = "lxml" -version = "4.6.3" +version = "4.7.1" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." category = "main" optional = false @@ -567,7 +561,7 @@ python-versions = "*" [[package]] name = "more-itertools" -version = "8.10.0" +version = "8.12.0" description = "More routines for operating on iterables, beyond itertools" category = "main" optional = false @@ -607,14 +601,14 @@ python-versions = ">=3.5" [[package]] name = "packaging" -version = "21.0" +version = "21.3" description = "Core utilities for Python packages" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -pyparsing = ">=2.0.2" +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" [[package]] name = "pamqp" @@ -655,11 +649,11 @@ test = ["docutils", "pytest-cov", "pytest-pycodestyle", "pytest-runner"] [[package]] name = "platformdirs" -version = "2.4.0" +version = "2.4.1" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.extras] docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] @@ -679,7 +673,7 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "2.15.0" +version = "2.16.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." category = "dev" optional = false @@ -695,7 +689,7 @@ virtualenv = ">=20.0.8" [[package]] name = "psutil" -version = "5.8.0" +version = "5.9.0" description = "Cross-platform lib for process and system monitoring in Python." 
category = "dev" optional = false @@ -714,11 +708,11 @@ python-versions = "*" [[package]] name = "py" -version = "1.10.0" +version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "pycares" @@ -744,7 +738,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pycparser" -version = "2.20" +version = "2.21" description = "C parser in Python" category = "main" optional = false @@ -774,11 +768,14 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pyparsing" -version = "2.4.7" +version = "3.0.6" description = "Python parsing module" category = "main" optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = ">=3.6" + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pyreadline3" @@ -827,11 +824,11 @@ testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtuale [[package]] name = "pytest-forked" -version = "1.3.0" +version = "1.4.0" description = "run tests in isolated forked subprocesses" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.6" [package.dependencies] py = "*" @@ -902,7 +899,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [[package]] name = "rapidfuzz" -version = "1.8.0" +version = "1.9.1" description = "rapid fuzzy string matching" category = "main" optional = false @@ -913,14 +910,17 @@ full = ["numpy"] [[package]] name = "redis" -version = "3.5.3" -description = "Python client for Redis key-value store" +version = "4.0.2" +description = "Python client for Redis database and key-value store" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.6" + +[package.dependencies] +deprecated = "*" [package.extras] -hiredis = ["hiredis (>=0.1.3)"] +hiredis = ["hiredis (>=1.0.0)"] [[package]] name = "regex" @@ -932,9 +932,9 @@ python-versions = "*" [[package]] name = "requests" -version = "2.26.0" +version = "2.27.1" description = "Python HTTP for Humans." -category = "dev" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" @@ -949,8 +949,20 @@ socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] [[package]] +name = "requests-file" +version = "1.5.1" +description = "File transport adapter for Requests" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +requests = ">=1.0.0" +six = "*" + +[[package]] name = "sentry-sdk" -version = "1.4.3" +version = "1.5.1" description = "Python client for Sentry (https://sentry.io)" category = "main" optional = false @@ -995,7 +1007,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "snowballstemmer" -version = "2.1.0" +version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." category = "dev" optional = false @@ -1011,7 +1023,7 @@ python-versions = "*" [[package]] name = "soupsieve" -version = "2.2.1" +version = "2.3.1" description = "A modern CSS selector implementation for Beautiful Soup." 
category = "main" optional = false @@ -1052,6 +1064,20 @@ docs = ["sphinx", "zope.component", "sybil", "twisted", "mock", "django (<2)", " test = ["pytest (>=3.6)", "pytest-cov", "pytest-django", "zope.component", "sybil", "twisted", "mock", "django (<2)", "django"] [[package]] +name = "tldextract" +version = "3.1.2" +description = "Accurately separate the TLD from the registered domain and subdomains of a URL, using the Public Suffix List. By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +filelock = ">=3.0.8" +idna = "*" +requests = ">=2.1.0" +requests-file = ">=1.4" + +[[package]] name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" @@ -1061,15 +1087,15 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "typing-extensions" -version = "3.10.0.2" -description = "Backported and Experimental Type Hints for Python 3.5+" +version = "4.0.1" +description = "Backported and Experimental Type Hints for Python 3.6+" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" [[package]] name = "urllib3" -version = "1.26.7" +version = "1.26.8" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false @@ -1082,26 +1108,33 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "virtualenv" -version = "20.8.1" +version = "20.13.0" description = "Virtual Python Environment builder" category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [package.dependencies] -"backports.entry-points-selectable" = ">=1.0.4" distlib = ">=0.3.1,<1" -filelock = ">=3.0.0,<4" +filelock = ">=3.2,<4" platformdirs = ">=2,<3" six = ">=1.9.0,<2" [package.extras] -docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=19.9.0rc1)"] +docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"] testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"] [[package]] +name = "wrapt" +version = "1.13.3" +description = "Module for decorators, wrappers and monkey patching." 
+category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[[package]] name = "yarl" -version = "1.7.0" +version = "1.7.2" description = "Yet another URL library" category = "main" optional = false @@ -1114,12 +1147,12 @@ multidict = ">=4.0" [metadata] lock-version = "1.1" python-versions = "3.9.*" -content-hash = "da321f13297501e62dd1eb362eccb586ea1a9c21ddb395e11a91b93a2f92e9d4" +content-hash = "0248fc7488c79af0cdb3a6db9528f4c3129db50b3a8d1dd3ba57dbc31b381c31" [metadata.files] aio-pika = [ - {file = "aio-pika-6.8.0.tar.gz", hash = "sha256:1d4305a5f78af3857310b4fe48348cdcf6c097e0e275ea88c2cd08570531a369"}, - {file = "aio_pika-6.8.0-py3-none-any.whl", hash = "sha256:e69afef8695f47c5d107bbdba21bdb845d5c249acb3be53ef5c2d497b02657c0"}, + {file = "aio-pika-6.8.1.tar.gz", hash = "sha256:c2b2b46949a34252ff0e64c3bc208eef1893e5791b51aeefabf1676788d56b66"}, + {file = "aio_pika-6.8.1-py3-none-any.whl", hash = "sha256:059ab8ecc03d73997f64ed28df7269105984232174d0e6406389c4e8ed30941c"}, ] aiodns = [ {file = "aiodns-2.0.0-py2.py3-none-any.whl", hash = "sha256:aaa5ac584f40fe778013df0aa6544bf157799bd3f608364b451840ed2c8688de"}, @@ -1189,17 +1222,14 @@ atomicwrites = [ {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, ] attrs = [ - {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, - {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, -] -"backports.entry-points-selectable" = [ - {file = "backports.entry_points_selectable-1.1.0-py2.py3-none-any.whl", hash = "sha256:a6d9a871cde5e15b4c4a53e3d43ba890cc6861ec1332c9c2428c92f977192acc"}, - {file = "backports.entry_points_selectable-1.1.0.tar.gz", hash = "sha256:988468260ec1c196dab6ae1149260e2f5472c9110334e5d51adcb77867361f6a"}, + {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, + {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, ] beautifulsoup4 = [ {file = "beautifulsoup4-4.10.0-py3-none-any.whl", hash = "sha256:9a315ce70049920ea4572a4055bc4bd700c940521d36fc858205ad4fcde149bf"}, {file = "beautifulsoup4-4.10.0.tar.gz", hash = "sha256:c23ad23c521d818955a4151a67d81580319d4bf548d3d49f4223ae041ff98891"}, ] +bot-core = [] certifi = [ {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, @@ -1265,8 +1295,8 @@ chardet = [ {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, ] charset-normalizer = [ - {file = "charset-normalizer-2.0.7.tar.gz", hash = "sha256:e019de665e2bcf9c2b64e2e5aa025fa991da8720daa3c1138cadd2fd1856aed0"}, - {file = "charset_normalizer-2.0.7-py3-none-any.whl", hash = "sha256:f7af805c321bfa1ce6714c51f254e0d5bb5e5834039bc17db7ebe3a4cec9492b"}, + {file = "charset-normalizer-2.0.10.tar.gz", hash = "sha256:876d180e9d7432c5d1dfd4c5d26b72f099d503e8fcc0feb7532c9289be60fcbd"}, + {file = "charset_normalizer-2.0.10-py3-none-any.whl", hash = "sha256:cb957888737fc0bbcd78e3df769addb41fd1ff8cf950dc9e7ad7793f1bf44455"}, ] colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = 
"sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, @@ -1330,21 +1360,18 @@ coverage = [ {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, ] -coveralls = [ - {file = "coveralls-2.2.0-py2.py3-none-any.whl", hash = "sha256:2301a19500b06649d2ec4f2858f9c69638d7699a4c63027c5d53daba666147cc"}, - {file = "coveralls-2.2.0.tar.gz", hash = "sha256:b990ba1f7bc4288e63340be0433698c1efe8217f78c689d254c2540af3d38617"}, -] deepdiff = [ {file = "deepdiff-4.3.2-py3-none-any.whl", hash = "sha256:59fc1e3e7a28dd0147b0f2b00e3e27181f0f0ef4286b251d5f214a5bcd9a9bc4"}, {file = "deepdiff-4.3.2.tar.gz", hash = "sha256:91360be1d9d93b1d9c13ae9c5048fa83d9cff17a88eb30afaa0d7ff2d0fee17d"}, ] +deprecated = [ + {file = "Deprecated-1.2.13-py2.py3-none-any.whl", hash = "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"}, + {file = "Deprecated-1.2.13.tar.gz", hash = "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"}, +] "discord.py" = [] distlib = [ - {file = "distlib-0.3.3-py2.py3-none-any.whl", hash = "sha256:c8b54e8454e5bf6237cc84c20e8264c3e991e824ef27e8f1e81049867d861e31"}, - {file = "distlib-0.3.3.zip", hash = "sha256:d982d0751ff6eaaab5e2ec8e691d949ee80eddf01a62eaa96ddb11531fe16b05"}, -] -docopt = [ - {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, + {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"}, + {file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"}, ] emoji = [ {file = "emoji-0.6.0.tar.gz", hash = "sha256:e42da4f8d648f8ef10691bc246f682a1ec6b18373abfd9be10ec0b398823bd11"}, @@ -1354,16 +1381,16 @@ execnet = [ {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"}, ] fakeredis = [ - {file = "fakeredis-1.6.1-py3-none-any.whl", hash = "sha256:5eb1516f1fe1813e9da8f6c482178fc067af09f53de587ae03887ef5d9d13024"}, - {file = "fakeredis-1.6.1.tar.gz", hash = "sha256:0d06a9384fb79da9f2164ce96e34eb9d4e2ea46215070805ea6fd3c174590b47"}, + {file = "fakeredis-1.7.0-py3-none-any.whl", hash = "sha256:6f1e04f64557ad3b6835bdc6e5a8d022cbace4bdc24a47ad58f6a72e0fbff760"}, + {file = "fakeredis-1.7.0.tar.gz", hash = "sha256:c9bd12e430336cbd3e189fae0e91eb99997b93e76dbfdd6ed67fa352dc684c71"}, ] feedparser = [ {file = "feedparser-6.0.8-py3-none-any.whl", hash = "sha256:1b7f57841d9cf85074deb316ed2c795091a238adb79846bc46dccdaf80f9c59a"}, {file = "feedparser-6.0.8.tar.gz", hash = "sha256:5ce0410a05ab248c8c7cfca3a0ea2203968ee9ff4486067379af4827a59f9661"}, ] filelock = [ - {file = "filelock-3.3.1-py3-none-any.whl", hash = "sha256:2b5eb3589e7fdda14599e7eb1a50e09b4cc14f34ed98b8ba56d33bfaafcbef2f"}, - {file = "filelock-3.3.1.tar.gz", hash = "sha256:34a9f35f95c441e7b38209775d6e0337f9a3759f3565f6c5798f19618527c76f"}, + {file = "filelock-3.4.2-py3-none-any.whl", hash = "sha256:cf0fc6a2f8d26bd900f19bf33915ca70ba4dd8c56903eeb14e1e7a2fd7590146"}, + {file = "filelock-3.4.2.tar.gz", hash = "sha256:38b4f4c989f9d06d44524df1b24bd19e167d851f19b50bf3e3559952dddc5b80"}, ] flake8 = [ {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, @@ -1448,8 +1475,8 @@ humanfriendly = [ {file = 
"humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, ] identify = [ - {file = "identify-2.3.0-py2.py3-none-any.whl", hash = "sha256:d1e82c83d063571bb88087676f81261a4eae913c492dafde184067c584bc7c05"}, - {file = "identify-2.3.0.tar.gz", hash = "sha256:fd08c97f23ceee72784081f1ce5125c8f53a02d3f2716dde79a6ab8f1039fea5"}, + {file = "identify-2.4.2-py2.py3-none-any.whl", hash = "sha256:67c1e66225870dce721228176637a8ef965e8dd58450bcc7592249d0dfc4da6c"}, + {file = "identify-2.4.2.tar.gz", hash = "sha256:93e8ec965e888f2212aa5c24b2b662f4832c39acb1d7196a70ea45acb626a05e"}, ] idna = [ {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, @@ -1460,58 +1487,70 @@ iniconfig = [ {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] isort = [ - {file = "isort-5.9.3-py3-none-any.whl", hash = "sha256:e17d6e2b81095c9db0a03a8025a957f334d6ea30b26f9ec70805411e5c7c81f2"}, - {file = "isort-5.9.3.tar.gz", hash = "sha256:9c2ea1e62d871267b78307fe511c0838ba0da28698c5732d54e2790bf3ba9899"}, + {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, + {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, ] lxml = [ - {file = "lxml-4.6.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:df7c53783a46febb0e70f6b05df2ba104610f2fb0d27023409734a3ecbb78fb2"}, - {file = "lxml-4.6.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:1b7584d421d254ab86d4f0b13ec662a9014397678a7c4265a02a6d7c2b18a75f"}, - {file = "lxml-4.6.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:079f3ae844f38982d156efce585bc540c16a926d4436712cf4baee0cce487a3d"}, - {file = "lxml-4.6.3-cp27-cp27m-win32.whl", hash = "sha256:bc4313cbeb0e7a416a488d72f9680fffffc645f8a838bd2193809881c67dd106"}, - {file = "lxml-4.6.3-cp27-cp27m-win_amd64.whl", hash = "sha256:8157dadbb09a34a6bd95a50690595e1fa0af1a99445e2744110e3dca7831c4ee"}, - {file = "lxml-4.6.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7728e05c35412ba36d3e9795ae8995e3c86958179c9770e65558ec3fdfd3724f"}, - {file = "lxml-4.6.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:4bff24dfeea62f2e56f5bab929b4428ae6caba2d1eea0c2d6eb618e30a71e6d4"}, - {file = "lxml-4.6.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:64812391546a18896adaa86c77c59a4998f33c24788cadc35789e55b727a37f4"}, - {file = "lxml-4.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c1a40c06fd5ba37ad39caa0b3144eb3772e813b5fb5b084198a985431c2f1e8d"}, - {file = "lxml-4.6.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:74f7d8d439b18fa4c385f3f5dfd11144bb87c1da034a466c5b5577d23a1d9b51"}, - {file = "lxml-4.6.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f90ba11136bfdd25cae3951af8da2e95121c9b9b93727b1b896e3fa105b2f586"}, - {file = "lxml-4.6.3-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:4c61b3a0db43a1607d6264166b230438f85bfed02e8cff20c22e564d0faff354"}, - {file = "lxml-4.6.3-cp35-cp35m-manylinux2014_x86_64.whl", hash = "sha256:5c8c163396cc0df3fd151b927e74f6e4acd67160d6c33304e805b84293351d16"}, - {file = "lxml-4.6.3-cp35-cp35m-win32.whl", hash = "sha256:f2380a6376dfa090227b663f9678150ef27543483055cc327555fb592c5967e2"}, - {file = "lxml-4.6.3-cp35-cp35m-win_amd64.whl", hash = 
"sha256:c4f05c5a7c49d2fb70223d0d5bcfbe474cf928310ac9fa6a7c6dddc831d0b1d4"}, - {file = "lxml-4.6.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d2e35d7bf1c1ac8c538f88d26b396e73dd81440d59c1ef8522e1ea77b345ede4"}, - {file = "lxml-4.6.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:289e9ca1a9287f08daaf796d96e06cb2bc2958891d7911ac7cae1c5f9e1e0ee3"}, - {file = "lxml-4.6.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:bccbfc27563652de7dc9bdc595cb25e90b59c5f8e23e806ed0fd623755b6565d"}, - {file = "lxml-4.6.3-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d916d31fd85b2f78c76400d625076d9124de3e4bda8b016d25a050cc7d603f24"}, - {file = "lxml-4.6.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:820628b7b3135403540202e60551e741f9b6d3304371712521be939470b454ec"}, - {file = "lxml-4.6.3-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:c47ff7e0a36d4efac9fd692cfa33fbd0636674c102e9e8d9b26e1b93a94e7617"}, - {file = "lxml-4.6.3-cp36-cp36m-win32.whl", hash = "sha256:5a0a14e264069c03e46f926be0d8919f4105c1623d620e7ec0e612a2e9bf1c04"}, - {file = "lxml-4.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:92e821e43ad382332eade6812e298dc9701c75fe289f2a2d39c7960b43d1e92a"}, - {file = "lxml-4.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:efd7a09678fd8b53117f6bae4fa3825e0a22b03ef0a932e070c0bdbb3a35e654"}, - {file = "lxml-4.6.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:efac139c3f0bf4f0939f9375af4b02c5ad83a622de52d6dfa8e438e8e01d0eb0"}, - {file = "lxml-4.6.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:0fbcf5565ac01dff87cbfc0ff323515c823081c5777a9fc7703ff58388c258c3"}, - {file = "lxml-4.6.3-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:36108c73739985979bf302006527cf8a20515ce444ba916281d1c43938b8bb96"}, - {file = "lxml-4.6.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:122fba10466c7bd4178b07dba427aa516286b846b2cbd6f6169141917283aae2"}, - {file = "lxml-4.6.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:cdaf11d2bd275bf391b5308f86731e5194a21af45fbaaaf1d9e8147b9160ea92"}, - {file = "lxml-4.6.3-cp37-cp37m-win32.whl", hash = "sha256:3439c71103ef0e904ea0a1901611863e51f50b5cd5e8654a151740fde5e1cade"}, - {file = "lxml-4.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:4289728b5e2000a4ad4ab8da6e1db2e093c63c08bdc0414799ee776a3f78da4b"}, - {file = "lxml-4.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b007cbb845b28db4fb8b6a5cdcbf65bacb16a8bd328b53cbc0698688a68e1caa"}, - {file = "lxml-4.6.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:76fa7b1362d19f8fbd3e75fe2fb7c79359b0af8747e6f7141c338f0bee2f871a"}, - {file = "lxml-4.6.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:26e761ab5b07adf5f555ee82fb4bfc35bf93750499c6c7614bd64d12aaa67927"}, - {file = "lxml-4.6.3-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:e1cbd3f19a61e27e011e02f9600837b921ac661f0c40560eefb366e4e4fb275e"}, - {file = "lxml-4.6.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:66e575c62792c3f9ca47cb8b6fab9e35bab91360c783d1606f758761810c9791"}, - {file = "lxml-4.6.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:1b38116b6e628118dea5b2186ee6820ab138dbb1e24a13e478490c7db2f326ae"}, - {file = "lxml-4.6.3-cp38-cp38-win32.whl", hash = "sha256:89b8b22a5ff72d89d48d0e62abb14340d9e99fd637d046c27b8b257a01ffbe28"}, - {file = "lxml-4.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:2a9d50e69aac3ebee695424f7dbd7b8c6d6eb7de2a2eb6b0f6c7db6aa41e02b7"}, - {file = "lxml-4.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ce256aaa50f6cc9a649c51be3cd4ff142d67295bfc4f490c9134d0f9f6d58ef0"}, - {file = 
"lxml-4.6.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:7610b8c31688f0b1be0ef882889817939490a36d0ee880ea562a4e1399c447a1"}, - {file = "lxml-4.6.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f8380c03e45cf09f8557bdaa41e1fa7c81f3ae22828e1db470ab2a6c96d8bc23"}, - {file = "lxml-4.6.3-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:3082c518be8e97324390614dacd041bb1358c882d77108ca1957ba47738d9d59"}, - {file = "lxml-4.6.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:884ab9b29feaca361f7f88d811b1eea9bfca36cf3da27768d28ad45c3ee6f969"}, - {file = "lxml-4.6.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:6f12e1427285008fd32a6025e38e977d44d6382cf28e7201ed10d6c1698d2a9a"}, - {file = "lxml-4.6.3-cp39-cp39-win32.whl", hash = "sha256:33bb934a044cf32157c12bfcfbb6649807da20aa92c062ef51903415c704704f"}, - {file = "lxml-4.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:542d454665a3e277f76954418124d67516c5f88e51a900365ed54a9806122b83"}, - {file = "lxml-4.6.3.tar.gz", hash = "sha256:39b78571b3b30645ac77b95f7c69d1bffc4cf8c3b157c435a34da72e78c82468"}, + {file = "lxml-4.7.1-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:d546431636edb1d6a608b348dd58cc9841b81f4116745857b6cb9f8dadb2725f"}, + {file = "lxml-4.7.1-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6308062534323f0d3edb4e702a0e26a76ca9e0e23ff99be5d82750772df32a9e"}, + {file = "lxml-4.7.1-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f76dbe44e31abf516114f6347a46fa4e7c2e8bceaa4b6f7ee3a0a03c8eba3c17"}, + {file = "lxml-4.7.1-cp27-cp27m-win32.whl", hash = "sha256:d5618d49de6ba63fe4510bdada62d06a8acfca0b4b5c904956c777d28382b419"}, + {file = "lxml-4.7.1-cp27-cp27m-win_amd64.whl", hash = "sha256:9393a05b126a7e187f3e38758255e0edf948a65b22c377414002d488221fdaa2"}, + {file = "lxml-4.7.1-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:50d3dba341f1e583265c1a808e897b4159208d814ab07530202b6036a4d86da5"}, + {file = "lxml-4.7.1-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:44f552e0da3c8ee3c28e2eb82b0b784200631687fc6a71277ea8ab0828780e7d"}, + {file = "lxml-4.7.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:e662c6266e3a275bdcb6bb049edc7cd77d0b0f7e119a53101d367c841afc66dc"}, + {file = "lxml-4.7.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4c093c571bc3da9ebcd484e001ba18b8452903cd428c0bc926d9b0141bcb710e"}, + {file = "lxml-4.7.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:3e26ad9bc48d610bf6cc76c506b9e5ad9360ed7a945d9be3b5b2c8535a0145e3"}, + {file = "lxml-4.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a5f623aeaa24f71fce3177d7fee875371345eb9102b355b882243e33e04b7175"}, + {file = "lxml-4.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7b5e2acefd33c259c4a2e157119c4373c8773cf6793e225006a1649672ab47a6"}, + {file = "lxml-4.7.1-cp310-cp310-win32.whl", hash = "sha256:67fa5f028e8a01e1d7944a9fb616d1d0510d5d38b0c41708310bd1bc45ae89f6"}, + {file = "lxml-4.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:b1d381f58fcc3e63fcc0ea4f0a38335163883267f77e4c6e22d7a30877218a0e"}, + {file = "lxml-4.7.1-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:38d9759733aa04fb1697d717bfabbedb21398046bd07734be7cccc3d19ea8675"}, + {file = "lxml-4.7.1-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:dfd0d464f3d86a1460683cd742306d1138b4e99b79094f4e07e1ca85ee267fe7"}, + {file = 
"lxml-4.7.1-cp35-cp35m-win32.whl", hash = "sha256:534e946bce61fd162af02bad7bfd2daec1521b71d27238869c23a672146c34a5"}, + {file = "lxml-4.7.1-cp35-cp35m-win_amd64.whl", hash = "sha256:6ec829058785d028f467be70cd195cd0aaf1a763e4d09822584ede8c9eaa4b03"}, + {file = "lxml-4.7.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:ade74f5e3a0fd17df5782896ddca7ddb998845a5f7cd4b0be771e1ffc3b9aa5b"}, + {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:41358bfd24425c1673f184d7c26c6ae91943fe51dfecc3603b5e08187b4bcc55"}, + {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6e56521538f19c4a6690f439fefed551f0b296bd785adc67c1777c348beb943d"}, + {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5b0f782f0e03555c55e37d93d7a57454efe7495dab33ba0ccd2dbe25fc50f05d"}, + {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:490712b91c65988012e866c411a40cc65b595929ececf75eeb4c79fcc3bc80a6"}, + {file = "lxml-4.7.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:34c22eb8c819d59cec4444d9eebe2e38b95d3dcdafe08965853f8799fd71161d"}, + {file = "lxml-4.7.1-cp36-cp36m-win32.whl", hash = "sha256:2a906c3890da6a63224d551c2967413b8790a6357a80bf6b257c9a7978c2c42d"}, + {file = "lxml-4.7.1-cp36-cp36m-win_amd64.whl", hash = "sha256:36b16fecb10246e599f178dd74f313cbdc9f41c56e77d52100d1361eed24f51a"}, + {file = "lxml-4.7.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:a5edc58d631170de90e50adc2cc0248083541affef82f8cd93bea458e4d96db8"}, + {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:87c1b0496e8c87ec9db5383e30042357b4839b46c2d556abd49ec770ce2ad868"}, + {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:0a5f0e4747f31cff87d1eb32a6000bde1e603107f632ef4666be0dc065889c7a"}, + {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:bf6005708fc2e2c89a083f258b97709559a95f9a7a03e59f805dd23c93bc3986"}, + {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fc15874816b9320581133ddc2096b644582ab870cf6a6ed63684433e7af4b0d3"}, + {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0b5e96e25e70917b28a5391c2ed3ffc6156513d3db0e1476c5253fcd50f7a944"}, + {file = "lxml-4.7.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ec9027d0beb785a35aa9951d14e06d48cfbf876d8ff67519403a2522b181943b"}, + {file = "lxml-4.7.1-cp37-cp37m-win32.whl", hash = "sha256:9fbc0dee7ff5f15c4428775e6fa3ed20003140560ffa22b88326669d53b3c0f4"}, + {file = "lxml-4.7.1-cp37-cp37m-win_amd64.whl", hash = "sha256:1104a8d47967a414a436007c52f533e933e5d52574cab407b1e49a4e9b5ddbd1"}, + {file = "lxml-4.7.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:fc9fb11b65e7bc49f7f75aaba1b700f7181d95d4e151cf2f24d51bfd14410b77"}, + {file = "lxml-4.7.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:317bd63870b4d875af3c1be1b19202de34c32623609ec803b81c99193a788c1e"}, + {file = "lxml-4.7.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:610807cea990fd545b1559466971649e69302c8a9472cefe1d6d48a1dee97440"}, + {file = "lxml-4.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:09b738360af8cb2da275998a8bf79517a71225b0de41ab47339c2beebfff025f"}, + {file = "lxml-4.7.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6a2ab9d089324d77bb81745b01f4aeffe4094306d939e92ba5e71e9a6b99b71e"}, + {file = "lxml-4.7.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eed394099a7792834f0cb4a8f615319152b9d801444c1c9e1b1a2c36d2239f9e"}, + {file = "lxml-4.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:735e3b4ce9c0616e85f302f109bdc6e425ba1670a73f962c9f6b98a6d51b77c9"}, + {file = "lxml-4.7.1-cp38-cp38-win32.whl", hash = "sha256:772057fba283c095db8c8ecde4634717a35c47061d24f889468dc67190327bcd"}, + {file = "lxml-4.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:13dbb5c7e8f3b6a2cf6e10b0948cacb2f4c9eb05029fe31c60592d08ac63180d"}, + {file = "lxml-4.7.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:718d7208b9c2d86aaf0294d9381a6acb0158b5ff0f3515902751404e318e02c9"}, + {file = "lxml-4.7.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:5bee1b0cbfdb87686a7fb0e46f1d8bd34d52d6932c0723a86de1cc532b1aa489"}, + {file = "lxml-4.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:e410cf3a2272d0a85526d700782a2fa92c1e304fdcc519ba74ac80b8297adf36"}, + {file = "lxml-4.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:585ea241ee4961dc18a95e2f5581dbc26285fcf330e007459688096f76be8c42"}, + {file = "lxml-4.7.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a555e06566c6dc167fbcd0ad507ff05fd9328502aefc963cb0a0547cfe7f00db"}, + {file = "lxml-4.7.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:adaab25be351fff0d8a691c4f09153647804d09a87a4e4ea2c3f9fe9e8651851"}, + {file = "lxml-4.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:82d16a64236970cb93c8d63ad18c5b9f138a704331e4b916b2737ddfad14e0c4"}, + {file = "lxml-4.7.1-cp39-cp39-win32.whl", hash = "sha256:59e7da839a1238807226f7143c68a479dee09244d1b3cf8c134f2fce777d12d0"}, + {file = "lxml-4.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:a1bbc4efa99ed1310b5009ce7f3a1784698082ed2c1ef3895332f5df9b3b92c2"}, + {file = "lxml-4.7.1-pp37-pypy37_pp73-macosx_10_14_x86_64.whl", hash = "sha256:0607ff0988ad7e173e5ddf7bf55ee65534bd18a5461183c33e8e41a59e89edf4"}, + {file = "lxml-4.7.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:6c198bfc169419c09b85ab10cb0f572744e686f40d1e7f4ed09061284fc1303f"}, + {file = "lxml-4.7.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a58d78653ae422df6837dd4ca0036610b8cb4962b5cfdbd337b7b24de9e5f98a"}, + {file = "lxml-4.7.1-pp38-pypy38_pp73-macosx_10_14_x86_64.whl", hash = "sha256:e18281a7d80d76b66a9f9e68a98cf7e1d153182772400d9a9ce855264d7d0ce7"}, + {file = "lxml-4.7.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8e54945dd2eeb50925500957c7c579df3cd07c29db7810b83cf30495d79af267"}, + {file = "lxml-4.7.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:447d5009d6b5447b2f237395d0018901dcc673f7d9f82ba26c1b9f9c3b444b60"}, + {file = "lxml-4.7.1.tar.gz", hash = "sha256:a1613838aa6b89af4ba10a0f3a972836128801ed008078f8c1244e65958f1b24"}, ] markdownify = [ {file = "markdownify-0.6.1-py3-none-any.whl", hash = "sha256:7489fd5c601536996a376c4afbcd1dd034db7690af807120681461e82fbc0acc"}, @@ -1522,8 +1561,8 @@ mccabe = [ 
{file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, ] more-itertools = [ - {file = "more-itertools-8.10.0.tar.gz", hash = "sha256:1debcabeb1df793814859d64a81ad7cb10504c24349368ccf214c664c474f41f"}, - {file = "more_itertools-8.10.0-py3-none-any.whl", hash = "sha256:56ddac45541718ba332db05f464bebfb0768110111affd27f66e0051f276fa43"}, + {file = "more-itertools-8.12.0.tar.gz", hash = "sha256:7dc6ad46f05f545f900dd59e8dfb4e84a4827b97b3cfecb175ea0c7d247f6064"}, + {file = "more_itertools-8.12.0-py3-none-any.whl", hash = "sha256:43e6dd9942dffd72661a2c4ef383ad7da1e6a3e968a927ad7a6083ab410a688b"}, ] mslex = [ {file = "mslex-0.3.0-py2.py3-none-any.whl", hash = "sha256:380cb14abf8fabf40e56df5c8b21a6d533dc5cbdcfe42406bbf08dda8f42e42a"}, @@ -1611,8 +1650,8 @@ ordered-set = [ {file = "ordered-set-4.0.2.tar.gz", hash = "sha256:ba93b2df055bca202116ec44b9bead3df33ea63a7d5827ff8e16738b97f33a95"}, ] packaging = [ - {file = "packaging-21.0-py3-none-any.whl", hash = "sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14"}, - {file = "packaging-21.0.tar.gz", hash = "sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7"}, + {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] pamqp = [ {file = "pamqp-2.3.0-py2.py3-none-any.whl", hash = "sha256:2f81b5c186f668a67f165193925b6bfd83db4363a6222f599517f29ecee60b02"}, @@ -1627,53 +1666,57 @@ pip-licenses = [ {file = "pip_licenses-3.5.3-py3-none-any.whl", hash = "sha256:59c148d6a03784bf945d232c0dc0e9de4272a3675acaa0361ad7712398ca86ba"}, ] platformdirs = [ - {file = "platformdirs-2.4.0-py3-none-any.whl", hash = "sha256:8868bbe3c3c80d42f20156f22e7131d2fb321f5bc86a2a345375c6481a67021d"}, - {file = "platformdirs-2.4.0.tar.gz", hash = "sha256:367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2"}, + {file = "platformdirs-2.4.1-py3-none-any.whl", hash = "sha256:1d7385c7db91728b83efd0ca99a5afb296cab9d0ed8313a45ed8ba17967ecfca"}, + {file = "platformdirs-2.4.1.tar.gz", hash = "sha256:440633ddfebcc36264232365d7840a970e75e1018d15b4327d11f91909045fda"}, ] pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] pre-commit = [ - {file = "pre_commit-2.15.0-py2.py3-none-any.whl", hash = "sha256:a4ed01000afcb484d9eb8d504272e642c4c4099bbad3a6b27e519bd6a3e928a6"}, - {file = "pre_commit-2.15.0.tar.gz", hash = "sha256:3c25add78dbdfb6a28a651780d5c311ac40dd17f160eb3954a0c59da40a505a7"}, + {file = "pre_commit-2.16.0-py2.py3-none-any.whl", hash = "sha256:758d1dc9b62c2ed8881585c254976d66eae0889919ab9b859064fc2fe3c7743e"}, + {file = "pre_commit-2.16.0.tar.gz", hash = "sha256:fe9897cac830aa7164dbd02a4e7b90cae49630451ce88464bca73db486ba9f65"}, ] psutil = [ - {file = "psutil-5.8.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0066a82f7b1b37d334e68697faba68e5ad5e858279fd6351c8ca6024e8d6ba64"}, - {file = "psutil-5.8.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:0ae6f386d8d297177fd288be6e8d1afc05966878704dad9847719650e44fc49c"}, - {file = "psutil-5.8.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:12d844996d6c2b1d3881cfa6fa201fd635971869a9da945cf6756105af73d2df"}, - {file = 
"psutil-5.8.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:02b8292609b1f7fcb34173b25e48d0da8667bc85f81d7476584d889c6e0f2131"}, - {file = "psutil-5.8.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6ffe81843131ee0ffa02c317186ed1e759a145267d54fdef1bc4ea5f5931ab60"}, - {file = "psutil-5.8.0-cp27-none-win32.whl", hash = "sha256:ea313bb02e5e25224e518e4352af4bf5e062755160f77e4b1767dd5ccb65f876"}, - {file = "psutil-5.8.0-cp27-none-win_amd64.whl", hash = "sha256:5da29e394bdedd9144c7331192e20c1f79283fb03b06e6abd3a8ae45ffecee65"}, - {file = "psutil-5.8.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:74fb2557d1430fff18ff0d72613c5ca30c45cdbfcddd6a5773e9fc1fe9364be8"}, - {file = "psutil-5.8.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:74f2d0be88db96ada78756cb3a3e1b107ce8ab79f65aa885f76d7664e56928f6"}, - {file = "psutil-5.8.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:99de3e8739258b3c3e8669cb9757c9a861b2a25ad0955f8e53ac662d66de61ac"}, - {file = "psutil-5.8.0-cp36-cp36m-win32.whl", hash = "sha256:36b3b6c9e2a34b7d7fbae330a85bf72c30b1c827a4366a07443fc4b6270449e2"}, - {file = "psutil-5.8.0-cp36-cp36m-win_amd64.whl", hash = "sha256:52de075468cd394ac98c66f9ca33b2f54ae1d9bff1ef6b67a212ee8f639ec06d"}, - {file = "psutil-5.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c6a5fd10ce6b6344e616cf01cc5b849fa8103fbb5ba507b6b2dee4c11e84c935"}, - {file = "psutil-5.8.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:61f05864b42fedc0771d6d8e49c35f07efd209ade09a5afe6a5059e7bb7bf83d"}, - {file = "psutil-5.8.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:0dd4465a039d343925cdc29023bb6960ccf4e74a65ad53e768403746a9207023"}, - {file = "psutil-5.8.0-cp37-cp37m-win32.whl", hash = "sha256:1bff0d07e76114ec24ee32e7f7f8d0c4b0514b3fae93e3d2aaafd65d22502394"}, - {file = "psutil-5.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:fcc01e900c1d7bee2a37e5d6e4f9194760a93597c97fee89c4ae51701de03563"}, - {file = "psutil-5.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6223d07a1ae93f86451d0198a0c361032c4c93ebd4bf6d25e2fb3edfad9571ef"}, - {file = "psutil-5.8.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d225cd8319aa1d3c85bf195c4e07d17d3cd68636b8fc97e6cf198f782f99af28"}, - {file = "psutil-5.8.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:28ff7c95293ae74bf1ca1a79e8805fcde005c18a122ca983abf676ea3466362b"}, - {file = "psutil-5.8.0-cp38-cp38-win32.whl", hash = "sha256:ce8b867423291cb65cfc6d9c4955ee9bfc1e21fe03bb50e177f2b957f1c2469d"}, - {file = "psutil-5.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:90f31c34d25b1b3ed6c40cdd34ff122b1887a825297c017e4cbd6796dd8b672d"}, - {file = "psutil-5.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6323d5d845c2785efb20aded4726636546b26d3b577aded22492908f7c1bdda7"}, - {file = "psutil-5.8.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:245b5509968ac0bd179287d91210cd3f37add77dad385ef238b275bad35fa1c4"}, - {file = "psutil-5.8.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:90d4091c2d30ddd0a03e0b97e6a33a48628469b99585e2ad6bf21f17423b112b"}, - {file = "psutil-5.8.0-cp39-cp39-win32.whl", hash = "sha256:ea372bcc129394485824ae3e3ddabe67dc0b118d262c568b4d2602a7070afdb0"}, - {file = "psutil-5.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:f4634b033faf0d968bb9220dd1c793b897ab7f1189956e1aa9eae752527127d3"}, - {file = "psutil-5.8.0.tar.gz", hash = "sha256:0c9ccb99ab76025f2f0bbecf341d4656e9c1351db8cc8a03ccd62e318ab4b5c6"}, + {file = "psutil-5.9.0-cp27-cp27m-manylinux2010_i686.whl", hash = 
"sha256:55ce319452e3d139e25d6c3f85a1acf12d1607ddedea5e35fb47a552c051161b"}, + {file = "psutil-5.9.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:7336292a13a80eb93c21f36bde4328aa748a04b68c13d01dfddd67fc13fd0618"}, + {file = "psutil-5.9.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:cb8d10461c1ceee0c25a64f2dd54872b70b89c26419e147a05a10b753ad36ec2"}, + {file = "psutil-5.9.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:7641300de73e4909e5d148e90cc3142fb890079e1525a840cf0dfd39195239fd"}, + {file = "psutil-5.9.0-cp27-none-win32.whl", hash = "sha256:ea42d747c5f71b5ccaa6897b216a7dadb9f52c72a0fe2b872ef7d3e1eacf3ba3"}, + {file = "psutil-5.9.0-cp27-none-win_amd64.whl", hash = "sha256:ef216cc9feb60634bda2f341a9559ac594e2eeaadd0ba187a4c2eb5b5d40b91c"}, + {file = "psutil-5.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90a58b9fcae2dbfe4ba852b57bd4a1dded6b990a33d6428c7614b7d48eccb492"}, + {file = "psutil-5.9.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff0d41f8b3e9ebb6b6110057e40019a432e96aae2008951121ba4e56040b84f3"}, + {file = "psutil-5.9.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:742c34fff804f34f62659279ed5c5b723bb0195e9d7bd9907591de9f8f6558e2"}, + {file = "psutil-5.9.0-cp310-cp310-win32.whl", hash = "sha256:8293942e4ce0c5689821f65ce6522ce4786d02af57f13c0195b40e1edb1db61d"}, + {file = "psutil-5.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:9b51917c1af3fa35a3f2dabd7ba96a2a4f19df3dec911da73875e1edaf22a40b"}, + {file = "psutil-5.9.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e9805fed4f2a81de98ae5fe38b75a74c6e6ad2df8a5c479594c7629a1fe35f56"}, + {file = "psutil-5.9.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c51f1af02334e4b516ec221ee26b8fdf105032418ca5a5ab9737e8c87dafe203"}, + {file = "psutil-5.9.0-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32acf55cb9a8cbfb29167cd005951df81b567099295291bcfd1027365b36591d"}, + {file = "psutil-5.9.0-cp36-cp36m-win32.whl", hash = "sha256:e5c783d0b1ad6ca8a5d3e7b680468c9c926b804be83a3a8e95141b05c39c9f64"}, + {file = "psutil-5.9.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d62a2796e08dd024b8179bd441cb714e0f81226c352c802fca0fd3f89eeacd94"}, + {file = "psutil-5.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3d00a664e31921009a84367266b35ba0aac04a2a6cad09c550a89041034d19a0"}, + {file = "psutil-5.9.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7779be4025c540d1d65a2de3f30caeacc49ae7a2152108adeaf42c7534a115ce"}, + {file = "psutil-5.9.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:072664401ae6e7c1bfb878c65d7282d4b4391f1bc9a56d5e03b5a490403271b5"}, + {file = "psutil-5.9.0-cp37-cp37m-win32.whl", hash = "sha256:df2c8bd48fb83a8408c8390b143c6a6fa10cb1a674ca664954de193fdcab36a9"}, + {file = "psutil-5.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1d7b433519b9a38192dfda962dd8f44446668c009833e1429a52424624f408b4"}, + {file = "psutil-5.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c3400cae15bdb449d518545cbd5b649117de54e3596ded84aacabfbb3297ead2"}, + {file = "psutil-5.9.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:b2237f35c4bbae932ee98902a08050a27821f8f6dfa880a47195e5993af4702d"}, + {file = "psutil-5.9.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1070a9b287846a21a5d572d6dddd369517510b68710fca56b0e9e02fd24bed9a"}, + {file = "psutil-5.9.0-cp38-cp38-win32.whl", hash = "sha256:76cebf84aac1d6da5b63df11fe0d377b46b7b500d892284068bacccf12f20666"}, + {file = "psutil-5.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:3151a58f0fbd8942ba94f7c31c7e6b310d2989f4da74fcbf28b934374e9bf841"}, + {file = "psutil-5.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:539e429da49c5d27d5a58e3563886057f8fc3868a5547b4f1876d9c0f007bccf"}, + {file = "psutil-5.9.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58c7d923dc209225600aec73aa2c4ae8ea33b1ab31bc11ef8a5933b027476f07"}, + {file = "psutil-5.9.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3611e87eea393f779a35b192b46a164b1d01167c9d323dda9b1e527ea69d697d"}, + {file = "psutil-5.9.0-cp39-cp39-win32.whl", hash = "sha256:4e2fb92e3aeae3ec3b7b66c528981fd327fb93fd906a77215200404444ec1845"}, + {file = "psutil-5.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:7d190ee2eaef7831163f254dc58f6d2e2a22e27382b936aab51c835fc080c3d3"}, + {file = "psutil-5.9.0.tar.gz", hash = "sha256:869842dbd66bb80c3217158e629d6fceaecc3a3166d3d1faee515b05dd26ca25"}, ] ptable = [ {file = "PTable-0.9.2.tar.gz", hash = "sha256:aa7fc151cb40f2dabcd2275ba6f7fd0ff8577a86be3365cd3fb297cbe09cc292"}, ] py = [ - {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, - {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] pycares = [ {file = "pycares-4.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:71b99b9e041ae3356b859822c511f286f84c8889ec9ed1fbf6ac30fb4da13e4c"}, @@ -1713,8 +1756,8 @@ pycodestyle = [ {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, ] pycparser = [ - {file = "pycparser-2.20-py2.py3-none-any.whl", hash = "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"}, - {file = "pycparser-2.20.tar.gz", hash = "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"}, + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] pydocstyle = [ {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, @@ -1725,8 +1768,8 @@ pyflakes = [ {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, ] pyparsing = [ - {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, - {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, + {file = "pyparsing-3.0.6-py3-none-any.whl", hash = 
"sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4"}, + {file = "pyparsing-3.0.6.tar.gz", hash = "sha256:d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81"}, ] pyreadline3 = [ {file = "pyreadline3-3.3-py3-none-any.whl", hash = "sha256:0003fd0079d152ecbd8111202c5a7dfa6a5569ffd65b235e45f3c2ecbee337b4"}, @@ -1741,8 +1784,8 @@ pytest-cov = [ {file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"}, ] pytest-forked = [ - {file = "pytest-forked-1.3.0.tar.gz", hash = "sha256:6aa9ac7e00ad1a539c41bec6d21011332de671e938c7637378ec9710204e37ca"}, - {file = "pytest_forked-1.3.0-py2.py3-none-any.whl", hash = "sha256:dc4147784048e70ef5d437951728825a131b81714b398d5d52f17c7c144d8815"}, + {file = "pytest-forked-1.4.0.tar.gz", hash = "sha256:8b67587c8f98cbbadfdd804539ed5455b6ed03802203485dd2f53c1422d7440e"}, + {file = "pytest_forked-1.4.0-py3-none-any.whl", hash = "sha256:bbbb6717efc886b9d64537b41fb1497cfaf3c9601276be8da2cccfea5a3c8ad8"}, ] pytest-xdist = [ {file = "pytest-xdist-2.3.0.tar.gz", hash = "sha256:e8ecde2f85d88fbcadb7d28cb33da0fa29bca5cf7d5967fa89fc0e97e5299ea5"}, @@ -1792,68 +1835,62 @@ pyyaml = [ {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, ] rapidfuzz = [ - {file = "rapidfuzz-1.8.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:91f094562c683802e6c972bce27a692dad70d6cd1114e626b29d990c3704c653"}, - {file = "rapidfuzz-1.8.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:4a20682121e245cf5ad2dbdd771360763ea11b77520632a1034c4bb9ad1e854c"}, - {file = "rapidfuzz-1.8.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:8810e75d8f9c4453bbd6209c372bf97514359b0b5efff555caf85b15f8a9d862"}, - {file = "rapidfuzz-1.8.0-cp27-cp27m-win32.whl", hash = "sha256:00cf713d843735b5958d87294f08b05c653a593ced7c4120be34f5d26d7a320a"}, - {file = "rapidfuzz-1.8.0-cp27-cp27m-win_amd64.whl", hash = "sha256:2baca64e23a623e077f57e5470de21af2765af15aa1088676eb2d475e664eed0"}, - {file = "rapidfuzz-1.8.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:9bf7a6c61bacedd84023be356e057e1d209dd6997cfaa3c1cee77aa21d642f88"}, - {file = "rapidfuzz-1.8.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:61b6434e3341ca5158ecb371b1ceb4c1f6110563a72d28bdce4eb2a084493e47"}, - {file = "rapidfuzz-1.8.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e425e690383f6cf308e8c2e8d630fa9596f67d233344efd8fae11e70a9f5635f"}, - {file = "rapidfuzz-1.8.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:93db5e693b76d616b09df27ca5c79e0dda169af7f1b8f5ab3262826d981e37e2"}, - {file = "rapidfuzz-1.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a8c4f76ed1c8a65892d98dc2913027c9acdb219d18f3a441cfa427a32861af9"}, - {file = "rapidfuzz-1.8.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71e217fd30901214cc96c0c15057278bafb7072aa9b2be4c97459c1fedf3e731"}, - {file = "rapidfuzz-1.8.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d579dd447b8e851462e79054b68f94b66b09df8b3abb2aa5ca07fe00912ef5e8"}, - {file = "rapidfuzz-1.8.0-cp310-cp310-win32.whl", hash = "sha256:5808064555273496dcd594d659bd28ee8d399149dd31575321034424455dc955"}, - {file = "rapidfuzz-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:798fef1671ca66c78b47802228e9583f7ab32b99bdfe3984ebb1f96e93e38b5f"}, - {file = "rapidfuzz-1.8.0-cp35-cp35m-macosx_10_9_x86_64.whl", hash = 
"sha256:c9e0ed210831f5c73533bf11099ea7897db491e76c3443bef281d9c1c67d7f3a"}, - {file = "rapidfuzz-1.8.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:c819bb19eb615a31ddc9cb8248a285bf04f58158b53ce096451178631f99b652"}, - {file = "rapidfuzz-1.8.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:942ee45564f28ef70320d1229f02dc998bd93e3519c1f3a80f33ce144b51039c"}, - {file = "rapidfuzz-1.8.0-cp35-cp35m-win32.whl", hash = "sha256:7e6ae2e5a3bc9acc51e118f25d32b8efcd431c5d8deb408336dd2ed0f21d087c"}, - {file = "rapidfuzz-1.8.0-cp35-cp35m-win_amd64.whl", hash = "sha256:98901fba67c89ad2506f3946642cf6eb8f489592fb7eb307ebdf8bdb0c4e97f9"}, - {file = "rapidfuzz-1.8.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:705e1686f406a0c77ef323cdb7369b7cf9e68f2abfcb83ff5f1e0a5b21f5a534"}, - {file = "rapidfuzz-1.8.0-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:da0c5fe5fdbbd74206c1778af6b8c5ff8dfbe2dd04ae12bbe96642b358acefce"}, - {file = "rapidfuzz-1.8.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:535253bc9224215131ae450aad6c9f7ef1b24f15c685045eab2b52511268bd06"}, - {file = "rapidfuzz-1.8.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:acdad83f07d886705fce164b0d1f4e3b56788a205602ed3a7fc8b10ceaf05fbf"}, - {file = "rapidfuzz-1.8.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35097f649831f8375d6c65a237deccac3aceb573aa7fae1e5d3fa942e89de1c8"}, - {file = "rapidfuzz-1.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6f4db142e5b4b44314166a90e11603220db659bd2f9c23dd5db402c13eac8eb7"}, - {file = "rapidfuzz-1.8.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:19a3f55f27411d68360540484874beda0b428b062596d5f0f141663ef0738bfd"}, - {file = "rapidfuzz-1.8.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:22b4c1a7f6fe29bd8dae49f7d5ab085dc42c3964f1a78b6dca22fdf83b5c9bfa"}, - {file = "rapidfuzz-1.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8bfb2fbc147904b78d5c510ee75dc8704b606e956df23f33a9e89abc03f45c3"}, - {file = "rapidfuzz-1.8.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6dc5111ebfed2c4f2e4d120a9b280ea13ea4fbb60b6915dd239817b4fc092ed"}, - {file = "rapidfuzz-1.8.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:db5ee2457d97cb967ffe08446a8c595c03fe747fdc2e145266713f9c516d1c4a"}, - {file = "rapidfuzz-1.8.0-cp37-cp37m-win32.whl", hash = "sha256:12c1b78cc15fc26f555a4bf66088d5afb6354b5a5aa149a123f01a15af6c411b"}, - {file = "rapidfuzz-1.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:693e9579048d8db4ff020715dd6f25aa315fd6445bc94e7400d7a94a227dad27"}, - {file = "rapidfuzz-1.8.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b4fe19df3edcf7de359448b872aec08e6592b4ca2d3df4d8ee57b5812d68bebf"}, - {file = "rapidfuzz-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f3670b9df0e1f479637cad1577afca7766a02775dc08c14837cf495c82861d7c"}, - {file = "rapidfuzz-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:61d118f36eb942649b0db344f7b7a19ad7e9b5749d831788187eb03b57ce1bfa"}, - {file = "rapidfuzz-1.8.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fce3a2c8a1d10da12aff4a0d367624e8ae9e15c1b84a5144843681d39be0c355"}, - {file = "rapidfuzz-1.8.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1577ef26e3647ccc4cc9754c34ffaa731639779f4d7779e91a761c72adac093e"}, - {file = 
"rapidfuzz-1.8.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fec9b7e60fde51990c3b48fc1aa9dba9ac3acaf78f623dbb645a6fe21a9654e"}, - {file = "rapidfuzz-1.8.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b954469d93858bc8b48129bc63fd644382a4df5f3fb1b4b290f48eac1d00a2da"}, - {file = "rapidfuzz-1.8.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:190ba709069a7e5a6b39b7c8bc413a08cfa7f1f4defec5d974c4128b510e0234"}, - {file = "rapidfuzz-1.8.0-cp38-cp38-win32.whl", hash = "sha256:97b2d13d6323649b43d1b113681e4013ba230bd6e9827cc832dcebee447d7250"}, - {file = "rapidfuzz-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:81c3091209b75f6611efe2af18834180946d4ce28f41ca8d44fce816187840d2"}, - {file = "rapidfuzz-1.8.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d610afa33e92aa0481a514ffda3ec51ca5df3c684c1c1c795307589c62025931"}, - {file = "rapidfuzz-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d976f33ca6b5fabbb095c0a662f5b86baf706184fc24c7f125d4ddb54b8bf036"}, - {file = "rapidfuzz-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0f5ca7bca2af598d4ddcf5b93b64b50654a9ff684e6f18d865f6e13fee442b3e"}, - {file = "rapidfuzz-1.8.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2aac5ea6b0306dcd28a6d1a89d35ed2c6ac426f2673ee1b92cf3f1d0fd5cd"}, - {file = "rapidfuzz-1.8.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f145c9831c0454a696a3136a6380ea4e01434e9cc2f2bc10d032864c16d1d0e5"}, - {file = "rapidfuzz-1.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4ce53291575b56c9d45add73ea013f43bafcea55eee9d5139aa759918d7685f"}, - {file = "rapidfuzz-1.8.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de5773a39c00a0f23cfc5da9e0e5fd0fb512b0ebe23dc7289a38e1f9a4b5cefc"}, - {file = "rapidfuzz-1.8.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87a802e55792bfbe192e2d557f38867dbe3671b49b3d5ecd873859c7460746ba"}, - {file = "rapidfuzz-1.8.0-cp39-cp39-win32.whl", hash = "sha256:9391abf1121df831316222f28cea37397a0f72bd7978f3be6e7da29a7821e4e5"}, - {file = "rapidfuzz-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:9eeca1b436042b5523dcf314f5822b1131597898c1d967f140d1917541a8a3d1"}, - {file = "rapidfuzz-1.8.0-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:a01f2495aca479b49d3b3a8863d6ba9bea2043447a1ced74ae5ec5270059cbc1"}, - {file = "rapidfuzz-1.8.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:b7d4b1a5d16817f8cdb34365c7b58ae22d5cf1b3207720bb2fa0b55968bdb034"}, - {file = "rapidfuzz-1.8.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c738d0d7f1744646d48d19b4c775926082bcefebd2460f45ca383a0e882f5672"}, - {file = "rapidfuzz-1.8.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0fb9c6078c17c12b52e66b7d0a2a1674f6bbbdc6a76e454c8479b95147018123"}, - {file = "rapidfuzz-1.8.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:1482b385d83670eb069577c9667f72b41eec4f005aee32f1a4ff4e71e88afde2"}, - {file = "rapidfuzz-1.8.0.tar.gz", hash = "sha256:83fff37acf0367314879231264169dcbc5e7de969a94f4b82055d06a7fddab9a"}, + {file = "rapidfuzz-1.9.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:68227a8b25291d6a2140aef049271ea30a77be5ef672a58e582a55a5cc1fce93"}, + {file = "rapidfuzz-1.9.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:c33541995b96ff40025c1456b8c74b7dd2ab9cbf91943fc35a7bb621f48940e2"}, + {file = 
"rapidfuzz-1.9.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:c2fafbbf97a4632822248f4201601b691e2eac5fdb30e5d7a96d07a6d058a7d4"}, + {file = "rapidfuzz-1.9.1-cp27-cp27m-win32.whl", hash = "sha256:364795f617a99e1dbb55ac3947ab8366588b72531cb2d6152666287d20610706"}, + {file = "rapidfuzz-1.9.1-cp27-cp27m-win_amd64.whl", hash = "sha256:f171d9e66144b0647f9b998ef10bdd919a640e4b1357250c8ef6259deb5ffe0d"}, + {file = "rapidfuzz-1.9.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:c83801a7c5209663aa120b815a4f2c39e95fe8e0b774ec58a1e0affd6a2fcfc6"}, + {file = "rapidfuzz-1.9.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:67e61c2baa6bb1848c4a33752f1781124dcc90bf3f31b18b44db1ae4e4e26634"}, + {file = "rapidfuzz-1.9.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8ab7eb003a18991347174910f11d38ff40399081185d9e3199ec277535f7828b"}, + {file = "rapidfuzz-1.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5ad450badf06ddf98a246140b5059ba895ee8445e8102a5a289908327f551f81"}, + {file = "rapidfuzz-1.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:402b2174bded62a793c5f7d9aec16bc32c661402360a934819ae72b54cfbce1e"}, + {file = "rapidfuzz-1.9.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:92066ccb054efc2e17afb4049c98b550969653cd58f71dd756cfcc8e6864630a"}, + {file = "rapidfuzz-1.9.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8dc0bf1814accee08a9c9bace6672ef06eae6b0446fce88e3e97e23dfaf3ea10"}, + {file = "rapidfuzz-1.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdbd387efb8478605951344f327dd03bf053c138d757369a43404305b99e55db"}, + {file = "rapidfuzz-1.9.1-cp310-cp310-win32.whl", hash = "sha256:b1c54807e556dbcc6caf4ce0f24446c01b195f3cc46e2a6e74b82d3a21eaa45d"}, + {file = "rapidfuzz-1.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:ac3273364cd1619cab3bf0ba731efea5405833f9eba362da7dcd70bd42073d8e"}, + {file = "rapidfuzz-1.9.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:d9faf62606c08a0a6992dd480c72b6a068733ae02688dc35f2e36ba0d44673f4"}, + {file = "rapidfuzz-1.9.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:f6a56a48be047637b1b0b2459a11cf7cd5aa7bbe16a439bd4f73b4af39e620e4"}, + {file = "rapidfuzz-1.9.1-cp35-cp35m-win32.whl", hash = "sha256:aa91609979e9d2700f0ff100df99b36e7d700b70169ee385d43d5de9e471ae97"}, + {file = "rapidfuzz-1.9.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b4cfdd0915ab4cec86c2ff6bab9f01b03454f3de0963c37f9f219df2ddf42b95"}, + {file = "rapidfuzz-1.9.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c6bfa4ad0158a093cd304f795ceefdc3861ae6942a61432b2a50858be6de88ca"}, + {file = "rapidfuzz-1.9.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:eb0ea02295d9278bd2dcd2df4760b0f2887b6c3f2f374005ec5af320d8d3a37e"}, + {file = "rapidfuzz-1.9.1-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d5187cd5cd6273e9fee07de493a42a2153134a4914df74cb1abb0744551c548a"}, + {file = "rapidfuzz-1.9.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6e5b8af63f9c05b64454460759ed84a715d581d598ec4484f4ec512f398e8b1"}, + {file = "rapidfuzz-1.9.1-cp36-cp36m-win32.whl", hash = "sha256:36137f88f2b28115af506118e64e11c816611eab2434293af7fdacd1290ffb9d"}, + {file = "rapidfuzz-1.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:fcc420cad46be7c9887110edf04cdee545f26dbf22650a443d89790fc35f7b88"}, + {file = "rapidfuzz-1.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:b06de314f426aebff8a44319016bbe2b22f7848c84e44224f80b0690b7b08b18"}, + {file = "rapidfuzz-1.9.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e5de44e719faea79e45322b037f0d4a141d750b80d2204fa68f43a42a24f0fbc"}, + {file = "rapidfuzz-1.9.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f9439df09a782afd01b67005a3b110c70bbf9e1cf06d2ac9b293ce2d02d3c549"}, + {file = "rapidfuzz-1.9.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e903d4702647465721e2d0431c95f04fd56a06577f06f41e2960c83fd63c1bad"}, + {file = "rapidfuzz-1.9.1-cp37-cp37m-win32.whl", hash = "sha256:a5298f4ac1975edcbb15583eab659a44b33aebaf3bccf172e185cfea68771c08"}, + {file = "rapidfuzz-1.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:103193a01921b54fcdad6b01cfda3a68e00aeafca236b7ecd5b1b2c2e7e96337"}, + {file = "rapidfuzz-1.9.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1d98a3187040dca855e02179a35c137f72ef83ce243783d44ea59efa86b94b3a"}, + {file = "rapidfuzz-1.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cb92bf7fc911b787055a88d9295ca3b4fe8576e3b59271f070f1b1b181eb087d"}, + {file = "rapidfuzz-1.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3f014a0f5f8159a94c6ee884fedd1c30e07fb866a5d76ff2c18091bc6363b76f"}, + {file = "rapidfuzz-1.9.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:31474074a99f72289ac325fbd77983e7d355d48860bfe7a4f6f6396fdb24410a"}, + {file = "rapidfuzz-1.9.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec67d79af5a2d7b0cf67b570a5579710e461cadda4120478e813b63491f394dd"}, + {file = "rapidfuzz-1.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ebc0d3d15ed32f98f0052cf6e3e9c9b8010fb93c04fb74d2022e3c51ec540e2"}, + {file = "rapidfuzz-1.9.1-cp38-cp38-win32.whl", hash = "sha256:477ab1a3044bab89db45caabc562b158f68765ecaa638b73ba17e92f09dfa5ff"}, + {file = "rapidfuzz-1.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:8e872763dc0367d7544aa585d2e8b27af233323b8a7cd2f9b78cafa05bae5018"}, + {file = "rapidfuzz-1.9.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8401c41e219ae36ca7a88762776a6270511650d4cc70d024ae61561e96d67e47"}, + {file = "rapidfuzz-1.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ea10bd8e0436801c3264f7084a5ea194f12ba9fe1ba898aa4a2107d276501292"}, + {file = "rapidfuzz-1.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:433737914b46c1ffa0c678eceae1c260dc6b7fb5b6cad4c725d3e3607c764b32"}, + {file = "rapidfuzz-1.9.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8c3b08e90e45acbc469d1f456681643256e952bf84ec7714f58979baba0c8a1c"}, + {file = "rapidfuzz-1.9.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bbcd265b3c86176e5db4cbba7b4364d7333c214ee80e2d259c7085929934ca9d"}, + {file = "rapidfuzz-1.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d69fabcd635783cd842e7d5ee4b77164314c5124b82df5a0c436ab3d698f8a9"}, + {file = "rapidfuzz-1.9.1-cp39-cp39-win32.whl", hash = "sha256:01f16b6f3fa5d1a26c12f5da5de0032f1e12c919d876005b57492a8ec9a5c043"}, + {file = "rapidfuzz-1.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:0bcc5bbfdbe6068cc2cf0029ab6cde08dceac498d232fa3a61dd34fbfa0b3f36"}, + {file = "rapidfuzz-1.9.1-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:de869c8f4e8edb9b2f7b8232a04896645501defcbd9d85bc0202ff3ec6285f6b"}, + {file = "rapidfuzz-1.9.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:db5978e970fb0955974d51021da4b929e2e4890fef17792989ee32658e2b159c"}, + {file = "rapidfuzz-1.9.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:33479f75f36ac3a1d8421365d4fa906e013490790730a89caba31d06e6f71738"}, + {file = "rapidfuzz-1.9.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:af991cb333ec526d894923163050931b3a870b7694bf7687aaa6154d341a98f5"}, + {file = "rapidfuzz-1.9.1.tar.gz", hash = "sha256:bd7a4fe33ba49db3417f0f57a8af02462554f1296dedcf35b026cd3525efef74"}, ] redis = [ - {file = "redis-3.5.3-py2.py3-none-any.whl", hash = "sha256:432b788c4530cfe16d8d943a09d40ca6c16149727e4afe8c2c9d5580c59d9f24"}, - {file = "redis-3.5.3.tar.gz", hash = "sha256:0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2"}, + {file = "redis-4.0.2-py3-none-any.whl", hash = "sha256:c8481cf414474e3497ec7971a1ba9b998c8efad0f0d289a009a5bbef040894f9"}, + {file = "redis-4.0.2.tar.gz", hash = "sha256:ccf692811f2c1fc7a92b466aa2599e4a6d2d73d5f736a2c70be600657c0da34a"}, ] regex = [ {file = "regex-2021.4.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000"}, @@ -1899,12 +1936,16 @@ regex = [ {file = "regex-2021.4.4.tar.gz", hash = "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb"}, ] requests = [ - {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, - {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"}, + {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, + {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, +] +requests-file = [ + {file = "requests-file-1.5.1.tar.gz", hash = "sha256:07d74208d3389d01c38ab89ef403af0cfec63957d53a0081d8eca738d0247d8e"}, + {file = "requests_file-1.5.1-py2.py3-none-any.whl", hash = "sha256:dfe5dae75c12481f68ba353183c53a65e6044c923e64c24b2209f6c7570ca953"}, ] sentry-sdk = [ - {file = "sentry-sdk-1.4.3.tar.gz", hash = "sha256:b9844751e40710e84a457c5bc29b21c383ccb2b63d76eeaad72f7f1c808c8828"}, - {file = "sentry_sdk-1.4.3-py2.py3-none-any.whl", hash = "sha256:c091cc7115ff25fe3a0e410dbecd7a996f81a3f6137d2272daef32d6c3cfa6dc"}, + {file = "sentry-sdk-1.5.1.tar.gz", hash = "sha256:2a1757d6611e4bec7d672c2b7ef45afef79fed201d064f53994753303944f5a8"}, + {file = "sentry_sdk-1.5.1-py2.py3-none-any.whl", hash = "sha256:e4cb107e305b2c1b919414775fa73a9997f996447417d22b98e7610ded1e9eb5"}, ] sgmllib3k = [ {file = "sgmllib3k-1.0.0.tar.gz", hash = "sha256:7868fb1c8bfa764c1ac563d3cf369c381d1325d36124933a726f29fcdaa812e9"}, @@ -1914,16 +1955,16 @@ six = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] snowballstemmer = [ - {file = "snowballstemmer-2.1.0-py2.py3-none-any.whl", hash = "sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2"}, - {file = "snowballstemmer-2.1.0.tar.gz", hash = "sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914"}, + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] sortedcontainers = [ {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = 
"sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, ] soupsieve = [ - {file = "soupsieve-2.2.1-py3-none-any.whl", hash = "sha256:c2c1c2d44f158cdbddab7824a9af8c4f83c76b1e23e049479aa432feb6c4c23b"}, - {file = "soupsieve-2.2.1.tar.gz", hash = "sha256:052774848f448cf19c7e959adf5566904d525f33a3f8b6ba6f6f8f26ec7de0cc"}, + {file = "soupsieve-2.3.1-py3-none-any.whl", hash = "sha256:1a3cca2617c6b38c0343ed661b1fa5de5637f257d4fe22bd9f1338010a1efefb"}, + {file = "soupsieve-2.3.1.tar.gz", hash = "sha256:b8d49b1cd4f037c7082a9683dfa1801aa2597fb11c3a1155b7a5b94829b4f1f9"}, ] statsd = [ {file = "statsd-3.3.0-py2.py3-none-any.whl", hash = "sha256:c610fb80347fca0ef62666d241bce64184bd7cc1efe582f9690e045c25535eaa"}, @@ -1937,94 +1978,150 @@ testfixtures = [ {file = "testfixtures-6.18.3-py2.py3-none-any.whl", hash = "sha256:6ddb7f56a123e1a9339f130a200359092bd0a6455e31838d6c477e8729bb7763"}, {file = "testfixtures-6.18.3.tar.gz", hash = "sha256:2600100ae96ffd082334b378e355550fef8b4a529a6fa4c34f47130905c7426d"}, ] +tldextract = [ + {file = "tldextract-3.1.2-py2.py3-none-any.whl", hash = "sha256:f55e05f6bf4cc952a87d13594386d32ad2dd265630a8bdfc3df03bd60425c6b0"}, + {file = "tldextract-3.1.2.tar.gz", hash = "sha256:d2034c3558651f7d8fdadea83fb681050b2d662dc67a00d950326dc902029444"}, +] toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] typing-extensions = [ - {file = "typing_extensions-3.10.0.2-py2-none-any.whl", hash = "sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7"}, - {file = "typing_extensions-3.10.0.2-py3-none-any.whl", hash = "sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34"}, - {file = "typing_extensions-3.10.0.2.tar.gz", hash = "sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e"}, + {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, + {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, ] urllib3 = [ - {file = "urllib3-1.26.7-py2.py3-none-any.whl", hash = "sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"}, - {file = "urllib3-1.26.7.tar.gz", hash = "sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece"}, + {file = "urllib3-1.26.8-py2.py3-none-any.whl", hash = "sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed"}, + {file = "urllib3-1.26.8.tar.gz", hash = "sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"}, ] virtualenv = [ - {file = "virtualenv-20.8.1-py2.py3-none-any.whl", hash = "sha256:10062e34c204b5e4ec5f62e6ef2473f8ba76513a9a617e873f1f8fb4a519d300"}, - {file = "virtualenv-20.8.1.tar.gz", hash = "sha256:bcc17f0b3a29670dd777d6f0755a4c04f28815395bca279cdcb213b97199a6b8"}, + {file = "virtualenv-20.13.0-py2.py3-none-any.whl", hash = "sha256:339f16c4a86b44240ba7223d0f93a7887c3ca04b5f9c8129da7958447d079b09"}, + {file = "virtualenv-20.13.0.tar.gz", hash = "sha256:d8458cf8d59d0ea495ad9b34c2599487f8a7772d796f9910858376d1600dd2dd"}, +] +wrapt = [ + {file = "wrapt-1.13.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = 
"sha256:e05e60ff3b2b0342153be4d1b597bbcfd8330890056b9619f4ad6b8d5c96a81a"}, + {file = "wrapt-1.13.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:85148f4225287b6a0665eef08a178c15097366d46b210574a658c1ff5b377489"}, + {file = "wrapt-1.13.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:2dded5496e8f1592ec27079b28b6ad2a1ef0b9296d270f77b8e4a3a796cf6909"}, + {file = "wrapt-1.13.3-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:e94b7d9deaa4cc7bac9198a58a7240aaf87fe56c6277ee25fa5b3aa1edebd229"}, + {file = "wrapt-1.13.3-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:498e6217523111d07cd67e87a791f5e9ee769f9241fcf8a379696e25806965af"}, + {file = "wrapt-1.13.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:ec7e20258ecc5174029a0f391e1b948bf2906cd64c198a9b8b281b811cbc04de"}, + {file = "wrapt-1.13.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:87883690cae293541e08ba2da22cacaae0a092e0ed56bbba8d018cc486fbafbb"}, + {file = "wrapt-1.13.3-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:f99c0489258086308aad4ae57da9e8ecf9e1f3f30fa35d5e170b4d4896554d80"}, + {file = "wrapt-1.13.3-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6a03d9917aee887690aa3f1747ce634e610f6db6f6b332b35c2dd89412912bca"}, + {file = "wrapt-1.13.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:936503cb0a6ed28dbfa87e8fcd0a56458822144e9d11a49ccee6d9a8adb2ac44"}, + {file = "wrapt-1.13.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f9c51d9af9abb899bd34ace878fbec8bf357b3194a10c4e8e0a25512826ef056"}, + {file = "wrapt-1.13.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:220a869982ea9023e163ba915077816ca439489de6d2c09089b219f4e11b6785"}, + {file = "wrapt-1.13.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0877fe981fd76b183711d767500e6b3111378ed2043c145e21816ee589d91096"}, + {file = "wrapt-1.13.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:43e69ffe47e3609a6aec0fe723001c60c65305784d964f5007d5b4fb1bc6bf33"}, + {file = "wrapt-1.13.3-cp310-cp310-win32.whl", hash = "sha256:78dea98c81915bbf510eb6a3c9c24915e4660302937b9ae05a0947164248020f"}, + {file = "wrapt-1.13.3-cp310-cp310-win_amd64.whl", hash = "sha256:ea3e746e29d4000cd98d572f3ee2a6050a4f784bb536f4ac1f035987fc1ed83e"}, + {file = "wrapt-1.13.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:8c73c1a2ec7c98d7eaded149f6d225a692caa1bd7b2401a14125446e9e90410d"}, + {file = "wrapt-1.13.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:086218a72ec7d986a3eddb7707c8c4526d677c7b35e355875a0fe2918b059179"}, + {file = "wrapt-1.13.3-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:e92d0d4fa68ea0c02d39f1e2f9cb5bc4b4a71e8c442207433d8db47ee79d7aa3"}, + {file = "wrapt-1.13.3-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:d4a5f6146cfa5c7ba0134249665acd322a70d1ea61732723c7d3e8cc0fa80755"}, + {file = "wrapt-1.13.3-cp35-cp35m-win32.whl", hash = "sha256:8aab36778fa9bba1a8f06a4919556f9f8c7b33102bd71b3ab307bb3fecb21851"}, + {file = "wrapt-1.13.3-cp35-cp35m-win_amd64.whl", hash = "sha256:944b180f61f5e36c0634d3202ba8509b986b5fbaf57db3e94df11abee244ba13"}, + {file = "wrapt-1.13.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2ebdde19cd3c8cdf8df3fc165bc7827334bc4e353465048b36f7deeae8ee0918"}, + {file = "wrapt-1.13.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:610f5f83dd1e0ad40254c306f4764fcdc846641f120c3cf424ff57a19d5f7ade"}, + {file = 
"wrapt-1.13.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5601f44a0f38fed36cc07db004f0eedeaadbdcec90e4e90509480e7e6060a5bc"}, + {file = "wrapt-1.13.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:e6906d6f48437dfd80464f7d7af1740eadc572b9f7a4301e7dd3d65db285cacf"}, + {file = "wrapt-1.13.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:766b32c762e07e26f50d8a3468e3b4228b3736c805018e4b0ec8cc01ecd88125"}, + {file = "wrapt-1.13.3-cp36-cp36m-win32.whl", hash = "sha256:5f223101f21cfd41deec8ce3889dc59f88a59b409db028c469c9b20cfeefbe36"}, + {file = "wrapt-1.13.3-cp36-cp36m-win_amd64.whl", hash = "sha256:f122ccd12fdc69628786d0c947bdd9cb2733be8f800d88b5a37c57f1f1d73c10"}, + {file = "wrapt-1.13.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:46f7f3af321a573fc0c3586612db4decb7eb37172af1bc6173d81f5b66c2e068"}, + {file = "wrapt-1.13.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:778fd096ee96890c10ce96187c76b3e99b2da44e08c9e24d5652f356873f6709"}, + {file = "wrapt-1.13.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0cb23d36ed03bf46b894cfec777eec754146d68429c30431c99ef28482b5c1df"}, + {file = "wrapt-1.13.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:96b81ae75591a795d8c90edc0bfaab44d3d41ffc1aae4d994c5aa21d9b8e19a2"}, + {file = "wrapt-1.13.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7dd215e4e8514004c8d810a73e342c536547038fb130205ec4bba9f5de35d45b"}, + {file = "wrapt-1.13.3-cp37-cp37m-win32.whl", hash = "sha256:47f0a183743e7f71f29e4e21574ad3fa95676136f45b91afcf83f6a050914829"}, + {file = "wrapt-1.13.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fd76c47f20984b43d93de9a82011bb6e5f8325df6c9ed4d8310029a55fa361ea"}, + {file = "wrapt-1.13.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b73d4b78807bd299b38e4598b8e7bd34ed55d480160d2e7fdaabd9931afa65f9"}, + {file = "wrapt-1.13.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ec9465dd69d5657b5d2fa6133b3e1e989ae27d29471a672416fd729b429eb554"}, + {file = "wrapt-1.13.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dd91006848eb55af2159375134d724032a2d1d13bcc6f81cd8d3ed9f2b8e846c"}, + {file = "wrapt-1.13.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ae9de71eb60940e58207f8e71fe113c639da42adb02fb2bcbcaccc1ccecd092b"}, + {file = "wrapt-1.13.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:51799ca950cfee9396a87f4a1240622ac38973b6df5ef7a41e7f0b98797099ce"}, + {file = "wrapt-1.13.3-cp38-cp38-win32.whl", hash = "sha256:4b9c458732450ec42578b5642ac53e312092acf8c0bfce140ada5ca1ac556f79"}, + {file = "wrapt-1.13.3-cp38-cp38-win_amd64.whl", hash = "sha256:7dde79d007cd6dfa65afe404766057c2409316135cb892be4b1c768e3f3a11cb"}, + {file = "wrapt-1.13.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:981da26722bebb9247a0601e2922cedf8bb7a600e89c852d063313102de6f2cb"}, + {file = "wrapt-1.13.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:705e2af1f7be4707e49ced9153f8d72131090e52be9278b5dbb1498c749a1e32"}, + {file = "wrapt-1.13.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:25b1b1d5df495d82be1c9d2fad408f7ce5ca8a38085e2da41bb63c914baadff7"}, + {file = "wrapt-1.13.3-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:77416e6b17926d953b5c666a3cb718d5945df63ecf922af0ee576206d7033b5e"}, + {file = "wrapt-1.13.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:865c0b50003616f05858b22174c40ffc27a38e67359fa1495605f96125f76640"}, + {file = "wrapt-1.13.3-cp39-cp39-win32.whl", hash = "sha256:0a017a667d1f7411816e4bf214646d0ad5b1da2c1ea13dec6c162736ff25a374"}, + {file = "wrapt-1.13.3-cp39-cp39-win_amd64.whl", hash = "sha256:81bd7c90d28a4b2e1df135bfbd7c23aee3050078ca6441bead44c42483f9ebfb"}, + {file = "wrapt-1.13.3.tar.gz", hash = "sha256:1fea9cd438686e6682271d36f3481a9f3636195578bab9ca3382e2f5f01fc185"}, ] yarl = [ - {file = "yarl-1.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e35d8230e4b08d86ea65c32450533b906a8267a87b873f2954adeaecede85169"}, - {file = "yarl-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eb4b3f277880c314e47720b4b6bb2c85114ab3c04c5442c9bc7006b3787904d8"}, - {file = "yarl-1.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7015dcedb91d90a138eebdc7e432aec8966e0147ab2a55f2df27b1904fa7291"}, - {file = "yarl-1.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb3e478175e15e00d659fb0354a6a8db71a7811a2a5052aed98048bc972e5d2b"}, - {file = "yarl-1.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b8c409aa3a7966647e7c1c524846b362a6bcbbe120bf8a176431f940d2b9a2e"}, - {file = "yarl-1.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b22ea41c7e98170474a01e3eded1377d46b2dfaef45888a0005c683eaaa49285"}, - {file = "yarl-1.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a7dfc46add4cfe5578013dbc4127893edc69fe19132d2836ff2f6e49edc5ecd6"}, - {file = "yarl-1.7.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:82ff6f85f67500a4f74885d81659cd270eb24dfe692fe44e622b8a2fd57e7279"}, - {file = "yarl-1.7.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f3cd2158b2ed0fb25c6811adfdcc47224efe075f2d68a750071dacc03a7a66e4"}, - {file = "yarl-1.7.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:59c0f13f9592820c51280d1cf811294d753e4a18baf90f0139d1dc93d4b6fc5f"}, - {file = "yarl-1.7.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7f7655ad83d1a8afa48435a449bf2f3009293da1604f5dd95b5ddcf5f673bd69"}, - {file = "yarl-1.7.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aa9f0d9b62d15182341b3e9816582f46182cab91c1a57b2d308b9a3c4e2c4f78"}, - {file = "yarl-1.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fdd1b90c225a653b1bd1c0cae8edf1957892b9a09c8bf7ee6321eeb8208eac0f"}, - {file = "yarl-1.7.0-cp310-cp310-win32.whl", hash = "sha256:7c8d0bb76eabc5299db203e952ec55f8f4c53f08e0df4285aac8c92bd9e12675"}, - {file = "yarl-1.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:622a36fa779efb4ff9eff5fe52730ff17521431379851a31e040958fc251670c"}, - {file = "yarl-1.7.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3d461b7a8e139b9e4b41f62eb417ffa0b98d1c46d4caf14c845e6a3b349c0bb1"}, - {file = "yarl-1.7.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81cfacdd1e40bc931b5519499342efa388d24d262c30a3d31187bfa04f4a7001"}, - {file = "yarl-1.7.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:821b978f2152be7695d4331ef0621d207aedf9bbd591ba23a63412a3efc29a01"}, - {file = "yarl-1.7.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:b64bd24c8c9a487f4a12260dc26732bf41028816dbf0c458f17864fbebdb3131"}, - {file = "yarl-1.7.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:98c9ddb92b60a83c21be42c776d3d9d5ec632a762a094c41bda37b7dfbd2cd83"}, - {file = "yarl-1.7.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a532d75ca74431c053a88a802e161fb3d651b8bf5821a3440bc3616e38754583"}, - {file = "yarl-1.7.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:053e09817eafb892e94e172d05406c1b3a22a93bc68f6eff5198363a3d764459"}, - {file = "yarl-1.7.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:98c51f02d542945d306c8e934aa2c1e66ba5e9c1c86b5bf37f3a51c8a747067e"}, - {file = "yarl-1.7.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:15ec41a5a5fdb7bace6d7b16701f9440007a82734f69127c0fbf6d87e10f4a1e"}, - {file = "yarl-1.7.0-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:a7f08819dba1e1255d6991ed37448a1bf4b1352c004bcd899b9da0c47958513d"}, - {file = "yarl-1.7.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8e3ffab21db0542ffd1887f3b9575ddd58961f2cf61429cb6458afc00c4581e0"}, - {file = "yarl-1.7.0-cp36-cp36m-win32.whl", hash = "sha256:50127634f519b2956005891507e3aa4ac345f66a7ea7bbc2d7dcba7401f41898"}, - {file = "yarl-1.7.0-cp36-cp36m-win_amd64.whl", hash = "sha256:36ec44f15193f6d5288d42ebb8e751b967ebdfb72d6830983838d45ab18edb4f"}, - {file = "yarl-1.7.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ec1b5a25a25c880c976d0bb3d107def085bb08dbb3db7f4442e0a2b980359d24"}, - {file = "yarl-1.7.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b36f5a63c891f813c6f04ef19675b382efc190fd5ce7e10ab19386d2548bca06"}, - {file = "yarl-1.7.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38173b8c3a29945e7ecade9a3f6ff39581eee8201338ee6a2c8882db5df3e806"}, - {file = "yarl-1.7.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ba402f32184f0b405fb281b93bd0d8ab7e3257735b57b62a6ed2e94cdf4fe50"}, - {file = "yarl-1.7.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:be52bc5208d767cdd8308a9e93059b3b36d1e048fecbea0e0346d0d24a76adc0"}, - {file = "yarl-1.7.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:08c2044a956f4ef30405f2f433ce77f1f57c2c773bf81ae43201917831044d5a"}, - {file = "yarl-1.7.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:484d61c047c45670ef5967653a1d0783e232c54bf9dd786a7737036828fa8d54"}, - {file = "yarl-1.7.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b7de92a4af85cfcaf4081f8aa6165b1d63ee5de150af3ee85f954145f93105a7"}, - {file = "yarl-1.7.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:376e41775aab79c5575534924a386c8e0f1a5d91db69fc6133fd27a489bcaf10"}, - {file = "yarl-1.7.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:8a8b10d0e7bac154f959b709fcea593cda527b234119311eb950096653816a86"}, - {file = "yarl-1.7.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f46cd4c43e6175030e2a56def8f1d83b64e6706eeb2bb9ab0ef4756f65eab23f"}, - {file = "yarl-1.7.0-cp37-cp37m-win32.whl", hash = "sha256:b28cfb46140efe1a6092b8c5c4994a1fe70dc83c38fbcea4992401e0c6fb9cce"}, - {file = "yarl-1.7.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9624154ec9c02a776802da1086eed7f5034bd1971977f5146233869c2ac80297"}, - {file = "yarl-1.7.0-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:69945d13e1bbf81784a9bc48824feb9cd66491e6a503d4e83f6cd7c7cc861361"}, - {file = "yarl-1.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:46a742ed9e363bd01be64160ce7520e92e11989bd4cb224403cfd31c101cc83d"}, - {file = "yarl-1.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb4ff1ac7cb4500f43581b3f4cbd627d702143aa6be1fdc1fa3ebffaf4dc1be5"}, - {file = "yarl-1.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ad51e17cd65ea3debb0e10f0120cf8dd987c741fe423ed2285087368090b33d"}, - {file = "yarl-1.7.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7e37786ea89a5d3ffbbf318ea9790926f8dfda83858544f128553c347ad143c6"}, - {file = "yarl-1.7.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c63c1e208f800daad71715786bfeb1cecdc595d87e2e9b1cd234fd6e597fd71d"}, - {file = "yarl-1.7.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:91cbe24300c11835ef186436363352b3257db7af165e0a767f4f17aa25761388"}, - {file = "yarl-1.7.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e510dbec7c59d32eaa61ffa48173d5e3d7170a67f4a03e8f5e2e9e3971aca622"}, - {file = "yarl-1.7.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3def6e681cc02397e5d8141ee97b41d02932b2bcf0fb34532ad62855eab7c60e"}, - {file = "yarl-1.7.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:263c81b94e6431942b27f6f671fa62f430a0a5c14bb255f2ab69eeb9b2b66ff7"}, - {file = "yarl-1.7.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:e78c91faefe88d601ddd16e3882918dbde20577a2438e2320f8239c8b7507b8f"}, - {file = "yarl-1.7.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:22b2430c49713bfb2f0a0dd4a8d7aab218b28476ba86fd1c78ad8899462cbcf2"}, - {file = "yarl-1.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2e7ad9db939082f5d0b9269cfd92c025cb8f2fbbb1f1b9dc5a393c639db5bd92"}, - {file = "yarl-1.7.0-cp38-cp38-win32.whl", hash = "sha256:3a31e4a8dcb1beaf167b7e7af61b88cb961b220db8d3ba1c839723630e57eef7"}, - {file = "yarl-1.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:d579957439933d752358c6a300c93110f84aae67b63dd0c19dde6ecbf4056f6b"}, - {file = "yarl-1.7.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:87721b549505a546eb003252185103b5ec8147de6d3ad3714d148a5a67b6fe53"}, - {file = "yarl-1.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1fa866fa24d9f4108f9e58ea8a2135655419885cdb443e36b39a346e1181532"}, - {file = "yarl-1.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1d3b8449dfedfe94eaff2b77954258b09b24949f6818dfa444b05dbb05ae1b7e"}, - {file = "yarl-1.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db2372e350794ce8b9f810feb094c606b7e0e4aa6807141ac4fadfe5ddd75bb0"}, - {file = "yarl-1.7.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a06d9d0b9a97fa99b84fee71d9dd11e69e21ac8a27229089f07b5e5e50e8d63c"}, - {file = "yarl-1.7.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a3455c2456d6307bcfa80bc1157b8603f7d93573291f5bdc7144489ca0df4628"}, - {file = "yarl-1.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d30d67e3486aea61bb2cbf7cf81385364c2e4f7ce7469a76ed72af76a5cdfe6b"}, - {file = "yarl-1.7.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c18a4b286e8d780c3a40c31d7b79836aa93b720f71d5743f20c08b7e049ca073"}, - {file = 
"yarl-1.7.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d54c925396e7891666cabc0199366ca55b27d003393465acef63fd29b8b7aa92"}, - {file = "yarl-1.7.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:64773840952de17851a1c7346ad7f71688c77e74248d1f0bc230e96680f84028"}, - {file = "yarl-1.7.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:acbf1756d9dc7cd0ae943d883be72e84e04396f6c2ff93a6ddeca929d562039f"}, - {file = "yarl-1.7.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:2e48f27936aa838939c798f466c851ba4ae79e347e8dfce43b009c64b930df12"}, - {file = "yarl-1.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1beef4734ca1ad40a9d8c6b20a76ab46e3a2ed09f38561f01e4aa2ea82cafcef"}, - {file = "yarl-1.7.0-cp39-cp39-win32.whl", hash = "sha256:8ee78c9a5f3c642219d4607680a4693b59239c27a3aa608b64ef79ddc9698039"}, - {file = "yarl-1.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:d750503682605088a14d29a4701548c15c510da4f13c8b17409c4097d5b04c52"}, - {file = "yarl-1.7.0.tar.gz", hash = "sha256:8e7ebaf62e19c2feb097ffb7c94deb0f0c9fab52590784c8cd679d30ab009162"}, + {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f2a8508f7350512434e41065684076f640ecce176d262a7d54f0da41d99c5a95"}, + {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da6df107b9ccfe52d3a48165e48d72db0eca3e3029b5b8cb4fe6ee3cb870ba8b"}, + {file = "yarl-1.7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1d0894f238763717bdcfea74558c94e3bc34aeacd3351d769460c1a586a8b05"}, + {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe4b95b7e00c6635a72e2d00b478e8a28bfb122dc76349a06e20792eb53a523"}, + {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c145ab54702334c42237a6c6c4cc08703b6aa9b94e2f227ceb3d477d20c36c63"}, + {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ca56f002eaf7998b5fcf73b2421790da9d2586331805f38acd9997743114e98"}, + {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1d3d5ad8ea96bd6d643d80c7b8d5977b4e2fb1bab6c9da7322616fd26203d125"}, + {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:167ab7f64e409e9bdd99333fe8c67b5574a1f0495dcfd905bc7454e766729b9e"}, + {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:95a1873b6c0dd1c437fb3bb4a4aaa699a48c218ac7ca1e74b0bee0ab16c7d60d"}, + {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6152224d0a1eb254f97df3997d79dadd8bb2c1a02ef283dbb34b97d4f8492d23"}, + {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bb7d54b8f61ba6eee541fba4b83d22b8a046b4ef4d8eb7f15a7e35db2e1e245"}, + {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:9c1f083e7e71b2dd01f7cd7434a5f88c15213194df38bc29b388ccdf1492b739"}, + {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f44477ae29025d8ea87ec308539f95963ffdc31a82f42ca9deecf2d505242e72"}, + {file = "yarl-1.7.2-cp310-cp310-win32.whl", hash = "sha256:cff3ba513db55cc6a35076f32c4cdc27032bd075c9faef31fec749e64b45d26c"}, + {file = "yarl-1.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:c9c6d927e098c2d360695f2e9d38870b2e92e0919be07dbe339aefa32a090265"}, + {file = "yarl-1.7.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9b4c77d92d56a4c5027572752aa35082e40c561eec776048330d2907aead891d"}, + {file = 
"yarl-1.7.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c01a89a44bb672c38f42b49cdb0ad667b116d731b3f4c896f72302ff77d71656"}, + {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c19324a1c5399b602f3b6e7db9478e5b1adf5cf58901996fc973fe4fccd73eed"}, + {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3abddf0b8e41445426d29f955b24aeecc83fa1072be1be4e0d194134a7d9baee"}, + {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6a1a9fe17621af43e9b9fcea8bd088ba682c8192d744b386ee3c47b56eaabb2c"}, + {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8b0915ee85150963a9504c10de4e4729ae700af11df0dc5550e6587ed7891e92"}, + {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:29e0656d5497733dcddc21797da5a2ab990c0cb9719f1f969e58a4abac66234d"}, + {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:bf19725fec28452474d9887a128e98dd67eee7b7d52e932e6949c532d820dc3b"}, + {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d6f3d62e16c10e88d2168ba2d065aa374e3c538998ed04996cd373ff2036d64c"}, + {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ac10bbac36cd89eac19f4e51c032ba6b412b3892b685076f4acd2de18ca990aa"}, + {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aa32aaa97d8b2ed4e54dc65d241a0da1c627454950f7d7b1f95b13985afd6c5d"}, + {file = "yarl-1.7.2-cp36-cp36m-win32.whl", hash = "sha256:87f6e082bce21464857ba58b569370e7b547d239ca22248be68ea5d6b51464a1"}, + {file = "yarl-1.7.2-cp36-cp36m-win_amd64.whl", hash = "sha256:ac35ccde589ab6a1870a484ed136d49a26bcd06b6a1c6397b1967ca13ceb3913"}, + {file = "yarl-1.7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a467a431a0817a292121c13cbe637348b546e6ef47ca14a790aa2fa8cc93df63"}, + {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ab0c3274d0a846840bf6c27d2c60ba771a12e4d7586bf550eefc2df0b56b3b4"}, + {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d260d4dc495c05d6600264a197d9d6f7fc9347f21d2594926202fd08cf89a8ba"}, + {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc4dd8b01a8112809e6b636b00f487846956402834a7fd59d46d4f4267181c41"}, + {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c1164a2eac148d85bbdd23e07dfcc930f2e633220f3eb3c3e2a25f6148c2819e"}, + {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:67e94028817defe5e705079b10a8438b8cb56e7115fa01640e9c0bb3edf67332"}, + {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:89ccbf58e6a0ab89d487c92a490cb5660d06c3a47ca08872859672f9c511fc52"}, + {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8cce6f9fa3df25f55521fbb5c7e4a736683148bcc0c75b21863789e5185f9185"}, + {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:211fcd65c58bf250fb994b53bc45a442ddc9f441f6fec53e65de8cba48ded986"}, + {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c10ea1e80a697cf7d80d1ed414b5cb8f1eec07d618f54637067ae3c0334133c4"}, + {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:52690eb521d690ab041c3919666bea13ab9fbff80d615ec16fa81a297131276b"}, + {file = "yarl-1.7.2-cp37-cp37m-win32.whl", hash = "sha256:695ba021a9e04418507fa930d5f0704edbce47076bdcfeeaba1c83683e5649d1"}, + {file = "yarl-1.7.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c17965ff3706beedafd458c452bf15bac693ecd146a60a06a214614dc097a271"}, + {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fce78593346c014d0d986b7ebc80d782b7f5e19843ca798ed62f8e3ba8728576"}, + {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c2a1ac41a6aa980db03d098a5531f13985edcb451bcd9d00670b03129922cd0d"}, + {file = "yarl-1.7.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:39d5493c5ecd75c8093fa7700a2fb5c94fe28c839c8e40144b7ab7ccba6938c8"}, + {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eb6480ef366d75b54c68164094a6a560c247370a68c02dddb11f20c4c6d3c9d"}, + {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ba63585a89c9885f18331a55d25fe81dc2d82b71311ff8bd378fc8004202ff6"}, + {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e39378894ee6ae9f555ae2de332d513a5763276a9265f8e7cbaeb1b1ee74623a"}, + {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c0910c6b6c31359d2f6184828888c983d54d09d581a4a23547a35f1d0b9484b1"}, + {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6feca8b6bfb9eef6ee057628e71e1734caf520a907b6ec0d62839e8293e945c0"}, + {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8300401dc88cad23f5b4e4c1226f44a5aa696436a4026e456fe0e5d2f7f486e6"}, + {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:788713c2896f426a4e166b11f4ec538b5736294ebf7d5f654ae445fd44270832"}, + {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fd547ec596d90c8676e369dd8a581a21227fe9b4ad37d0dc7feb4ccf544c2d59"}, + {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:737e401cd0c493f7e3dd4db72aca11cfe069531c9761b8ea474926936b3c57c8"}, + {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baf81561f2972fb895e7844882898bda1eef4b07b5b385bcd308d2098f1a767b"}, + {file = "yarl-1.7.2-cp38-cp38-win32.whl", hash = "sha256:ede3b46cdb719c794427dcce9d8beb4abe8b9aa1e97526cc20de9bd6583ad1ef"}, + {file = "yarl-1.7.2-cp38-cp38-win_amd64.whl", hash = "sha256:cc8b7a7254c0fc3187d43d6cb54b5032d2365efd1df0cd1749c0c4df5f0ad45f"}, + {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:580c1f15500e137a8c37053e4cbf6058944d4c114701fa59944607505c2fe3a0"}, + {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ec1d9a0d7780416e657f1e405ba35ec1ba453a4f1511eb8b9fbab81cb8b3ce1"}, + {file = "yarl-1.7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3bf8cfe8856708ede6a73907bf0501f2dc4e104085e070a41f5d88e7faf237f3"}, + {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1be4bbb3d27a4e9aa5f3df2ab61e3701ce8fcbd3e9846dbce7c033a7e8136746"}, + {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:534b047277a9a19d858cde163aba93f3e1677d5acd92f7d10ace419d478540de"}, + {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6ddcd80d79c96eb19c354d9dca95291589c5954099836b7c8d29278a7ec0bda"}, + {file = 
"yarl-1.7.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9bfcd43c65fbb339dc7086b5315750efa42a34eefad0256ba114cd8ad3896f4b"}, + {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f64394bd7ceef1237cc604b5a89bf748c95982a84bcd3c4bbeb40f685c810794"}, + {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044daf3012e43d4b3538562da94a88fb12a6490652dbc29fb19adfa02cf72eac"}, + {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:368bcf400247318382cc150aaa632582d0780b28ee6053cd80268c7e72796dec"}, + {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:bab827163113177aee910adb1f48ff7af31ee0289f434f7e22d10baf624a6dfe"}, + {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0cba38120db72123db7c58322fa69e3c0efa933040ffb586c3a87c063ec7cae8"}, + {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:59218fef177296451b23214c91ea3aba7858b4ae3306dde120224cfe0f7a6ee8"}, + {file = "yarl-1.7.2-cp39-cp39-win32.whl", hash = "sha256:1edc172dcca3f11b38a9d5c7505c83c1913c0addc99cd28e993efeaafdfaa18d"}, + {file = "yarl-1.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:797c2c412b04403d2da075fb93c123df35239cd7b4cc4e0cd9e5839b73f52c58"}, + {file = "yarl-1.7.2.tar.gz", hash = "sha256:45399b46d60c253327a460e99856752009fcee5f5d3c80b2f7c0cae1c38d56dd"}, ] diff --git a/pyproject.toml b/pyproject.toml index 563bf4a27..c764910c2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,6 +8,8 @@ license = "MIT" [tool.poetry.dependencies] python = "3.9.*" "discord.py" = {url = "https://github.com/Rapptz/discord.py/archive/45d498c1b76deaf3b394d17ccf56112fa691d160.zip"} +# See https://bot-core.pythondiscord.com/ for docs. 
+bot-core = {url = "https://github.com/python-discord/bot-core/archive/511bcba1b0196cd498c707a525ea56921bd971db.zip"} aio-pika = "~=6.1" aiodns = "~=2.0" aiohttp = "~=3.7" @@ -21,7 +23,7 @@ deepdiff = "~=4.0" emoji = "~=0.6" feedparser = "~=6.0.2" rapidfuzz = "~=1.4" -lxml = "~=4.4" +lxml = "~=4.6" markdownify = "==0.6.1" more_itertools = "~=8.2" python-dateutil = "~=2.8" @@ -30,10 +32,10 @@ pyyaml = "~=5.1" regex = "==2021.4.4" sentry-sdk = "~=1.3" statsd = "~=3.3" +tldextract = "^3.1.2" [tool.poetry.dev-dependencies] coverage = "~=5.0" -coveralls = "~=2.1" flake8 = "~=3.8" flake8-annotations = "~=2.0" flake8-bugbear = "~=20.1" diff --git a/tests/bot/exts/info/test_information.py b/tests/bot/exts/info/test_information.py index 632287322..d896b7652 100644 --- a/tests/bot/exts/info/test_information.py +++ b/tests/bot/exts/info/test_information.py @@ -1,6 +1,7 @@ import textwrap import unittest import unittest.mock +from datetime import datetime import discord @@ -276,6 +277,10 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): f"{COG_PATH}.basic_user_infraction_counts", new=unittest.mock.AsyncMock(return_value=("Infractions", "basic infractions")) ) + @unittest.mock.patch( + f"{COG_PATH}.user_messages", + new=unittest.mock.AsyncMock(return_value=("Messsages", "user message count")) + ) async def test_create_user_embed_uses_string_representation_of_user_in_title_if_nick_is_not_available(self): """The embed should use the string representation of the user if they don't have a nick.""" ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=1)) @@ -284,8 +289,9 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): user.nick = None user.__str__ = unittest.mock.Mock(return_value="Mr. Hemlock") user.colour = 0 + user.created_at = user.joined_at = datetime.utcnow() - embed = await self.cog.create_user_embed(ctx, user) + embed = await self.cog.create_user_embed(ctx, user, False) self.assertEqual(embed.title, "Mr. Hemlock") @@ -293,6 +299,10 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): f"{COG_PATH}.basic_user_infraction_counts", new=unittest.mock.AsyncMock(return_value=("Infractions", "basic infractions")) ) + @unittest.mock.patch( + f"{COG_PATH}.user_messages", + new=unittest.mock.AsyncMock(return_value=("Messsages", "user message count")) + ) async def test_create_user_embed_uses_nick_in_title_if_available(self): """The embed should use the nick if it's available.""" ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=1)) @@ -301,8 +311,9 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): user.nick = "Cat lover" user.__str__ = unittest.mock.Mock(return_value="Mr. Hemlock") user.colour = 0 + user.created_at = user.joined_at = datetime.utcnow() - embed = await self.cog.create_user_embed(ctx, user) + embed = await self.cog.create_user_embed(ctx, user, False) self.assertEqual(embed.title, "Cat lover (Mr. 
Hemlock)") @@ -310,6 +321,10 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): f"{COG_PATH}.basic_user_infraction_counts", new=unittest.mock.AsyncMock(return_value=("Infractions", "basic infractions")) ) + @unittest.mock.patch( + f"{COG_PATH}.user_messages", + new=unittest.mock.AsyncMock(return_value=("Messsages", "user message count")) + ) async def test_create_user_embed_ignores_everyone_role(self): """Created `!user` embeds should not contain mention of the @everyone-role.""" ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=1)) @@ -317,14 +332,19 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): # A `MockMember` has the @Everyone role by default; we add the Admins to that. user = helpers.MockMember(roles=[admins_role], colour=100) + user.created_at = user.joined_at = datetime.utcnow() - embed = await self.cog.create_user_embed(ctx, user) + embed = await self.cog.create_user_embed(ctx, user, False) self.assertIn("&Admins", embed.fields[1].value) self.assertNotIn("&Everyone", embed.fields[1].value) @unittest.mock.patch(f"{COG_PATH}.expanded_user_infraction_counts", new_callable=unittest.mock.AsyncMock) @unittest.mock.patch(f"{COG_PATH}.user_nomination_counts", new_callable=unittest.mock.AsyncMock) + @unittest.mock.patch( + f"{COG_PATH}.user_messages", + new=unittest.mock.AsyncMock(return_value=("Messsages", "user message count")) + ) async def test_create_user_embed_expanded_information_in_moderation_channels( self, nomination_counts, @@ -339,7 +359,8 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): nomination_counts.return_value = ("Nominations", "nomination info") user = helpers.MockMember(id=314, roles=[moderators_role], colour=100) - embed = await self.cog.create_user_embed(ctx, user) + user.created_at = user.joined_at = datetime.utcfromtimestamp(1) + embed = await self.cog.create_user_embed(ctx, user, False) infraction_counts.assert_called_once_with(user) nomination_counts.assert_called_once_with(user) @@ -363,16 +384,23 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): ) @unittest.mock.patch(f"{COG_PATH}.basic_user_infraction_counts", new_callable=unittest.mock.AsyncMock) - async def test_create_user_embed_basic_information_outside_of_moderation_channels(self, infraction_counts): + @unittest.mock.patch(f"{COG_PATH}.user_messages", new_callable=unittest.mock.AsyncMock) + async def test_create_user_embed_basic_information_outside_of_moderation_channels( + self, + user_messages, + infraction_counts, + ): """The embed should contain only basic infraction data outside of mod channels.""" ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=100)) moderators_role = helpers.MockRole(name='Moderators') infraction_counts.return_value = ("Infractions", "basic infractions info") + user_messages.return_value = ("Messages", "user message counts") user = helpers.MockMember(id=314, roles=[moderators_role], colour=100) - embed = await self.cog.create_user_embed(ctx, user) + user.created_at = user.joined_at = datetime.utcfromtimestamp(1) + embed = await self.cog.create_user_embed(ctx, user, False) infraction_counts.assert_called_once_with(user) @@ -394,14 +422,23 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): ) self.assertEqual( - "basic infractions info", + "user message counts", embed.fields[2].value ) + self.assertEqual( + "basic infractions info", + embed.fields[3].value + ) + @unittest.mock.patch( f"{COG_PATH}.basic_user_infraction_counts", new=unittest.mock.AsyncMock(return_value=("Infractions", "basic infractions")) ) + 
@unittest.mock.patch( + f"{COG_PATH}.user_messages", + new=unittest.mock.AsyncMock(return_value=("Messsages", "user message count")) + ) async def test_create_user_embed_uses_top_role_colour_when_user_has_roles(self): """The embed should be created with the colour of the top role, if a top role is available.""" ctx = helpers.MockContext() @@ -409,7 +446,8 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): moderators_role = helpers.MockRole(name='Moderators') user = helpers.MockMember(id=314, roles=[moderators_role], colour=100) - embed = await self.cog.create_user_embed(ctx, user) + user.created_at = user.joined_at = datetime.utcnow() + embed = await self.cog.create_user_embed(ctx, user, False) self.assertEqual(embed.colour, discord.Colour(100)) @@ -417,12 +455,17 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): f"{COG_PATH}.basic_user_infraction_counts", new=unittest.mock.AsyncMock(return_value=("Infractions", "basic infractions")) ) + @unittest.mock.patch( + f"{COG_PATH}.user_messages", + new=unittest.mock.AsyncMock(return_value=("Messsages", "user message count")) + ) async def test_create_user_embed_uses_og_blurple_colour_when_user_has_no_roles(self): """The embed should be created with the og blurple colour if the user has no assigned roles.""" ctx = helpers.MockContext() user = helpers.MockMember(id=217, colour=discord.Colour.default()) - embed = await self.cog.create_user_embed(ctx, user) + user.created_at = user.joined_at = datetime.utcnow() + embed = await self.cog.create_user_embed(ctx, user, False) self.assertEqual(embed.colour, discord.Colour.og_blurple()) @@ -430,13 +473,18 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): f"{COG_PATH}.basic_user_infraction_counts", new=unittest.mock.AsyncMock(return_value=("Infractions", "basic infractions")) ) + @unittest.mock.patch( + f"{COG_PATH}.user_messages", + new=unittest.mock.AsyncMock(return_value=("Messsages", "user message count")) + ) async def test_create_user_embed_uses_png_format_of_user_avatar_as_thumbnail(self): """The embed thumbnail should be set to the user's avatar in `png` format.""" ctx = helpers.MockContext() user = helpers.MockMember(id=217, colour=0) + user.created_at = user.joined_at = datetime.utcnow() user.display_avatar.url = "avatar url" - embed = await self.cog.create_user_embed(ctx, user) + embed = await self.cog.create_user_embed(ctx, user, False) self.assertEqual(embed.thumbnail.url, "avatar url") @@ -489,7 +537,7 @@ class UserCommandTests(unittest.IsolatedAsyncioTestCase): await self.cog.user_info(self.cog, ctx) - create_embed.assert_called_once_with(ctx, self.author) + create_embed.assert_called_once_with(ctx, self.author, False) ctx.send.assert_called_once() @unittest.mock.patch("bot.exts.info.information.Information.create_user_embed") @@ -500,7 +548,7 @@ class UserCommandTests(unittest.IsolatedAsyncioTestCase): await self.cog.user_info(self.cog, ctx, self.author) - create_embed.assert_called_once_with(ctx, self.author) + create_embed.assert_called_once_with(ctx, self.author, False) ctx.send.assert_called_once() @unittest.mock.patch("bot.exts.info.information.Information.create_user_embed") @@ -511,7 +559,7 @@ class UserCommandTests(unittest.IsolatedAsyncioTestCase): await self.cog.user_info(self.cog, ctx) - create_embed.assert_called_once_with(ctx, self.moderator) + create_embed.assert_called_once_with(ctx, self.moderator, False) ctx.send.assert_called_once() @unittest.mock.patch("bot.exts.info.information.Information.create_user_embed") @@ -523,5 +571,5 @@ class 
UserCommandTests(unittest.IsolatedAsyncioTestCase): await self.cog.user_info(self.cog, ctx, self.target) - create_embed.assert_called_once_with(ctx, self.target) + create_embed.assert_called_once_with(ctx, self.target, False) ctx.send.assert_called_once() diff --git a/tests/bot/exts/moderation/infraction/test_infractions.py b/tests/bot/exts/moderation/infraction/test_infractions.py index 4d01e18a5..052048053 100644 --- a/tests/bot/exts/moderation/infraction/test_infractions.py +++ b/tests/bot/exts/moderation/infraction/test_infractions.py @@ -1,13 +1,15 @@ import inspect import textwrap import unittest -from unittest.mock import ANY, AsyncMock, MagicMock, Mock, patch +from unittest.mock import ANY, AsyncMock, DEFAULT, MagicMock, Mock, patch from discord.errors import NotFound from bot.constants import Event +from bot.exts.moderation.clean import Clean from bot.exts.moderation.infraction import _utils from bot.exts.moderation.infraction.infractions import Infractions +from bot.exts.moderation.infraction.management import ModManagement from tests.helpers import MockBot, MockContext, MockGuild, MockMember, MockRole, MockUser, autospec @@ -62,8 +64,8 @@ class TruncationTests(unittest.IsolatedAsyncioTestCase): @patch("bot.exts.moderation.infraction.infractions.constants.Roles.voice_verified", new=123456) -class VoiceBanTests(unittest.IsolatedAsyncioTestCase): - """Tests for voice ban related functions and commands.""" +class VoiceMuteTests(unittest.IsolatedAsyncioTestCase): + """Tests for voice mute related functions and commands.""" def setUp(self): self.bot = MockBot() @@ -73,59 +75,59 @@ class VoiceBanTests(unittest.IsolatedAsyncioTestCase): self.ctx = MockContext(bot=self.bot, author=self.mod) self.cog = Infractions(self.bot) - async def test_permanent_voice_ban(self): - """Should call voice ban applying function without expiry.""" - self.cog.apply_voice_ban = AsyncMock() - self.assertIsNone(await self.cog.voiceban(self.cog, self.ctx, self.user, reason="foobar")) - self.cog.apply_voice_ban.assert_awaited_once_with(self.ctx, self.user, "foobar", expires_at=None) + async def test_permanent_voice_mute(self): + """Should call voice mute applying function without expiry.""" + self.cog.apply_voice_mute = AsyncMock() + self.assertIsNone(await self.cog.voicemute(self.cog, self.ctx, self.user, reason="foobar")) + self.cog.apply_voice_mute.assert_awaited_once_with(self.ctx, self.user, "foobar", expires_at=None) - async def test_temporary_voice_ban(self): - """Should call voice ban applying function with expiry.""" - self.cog.apply_voice_ban = AsyncMock() - self.assertIsNone(await self.cog.tempvoiceban(self.cog, self.ctx, self.user, "baz", reason="foobar")) - self.cog.apply_voice_ban.assert_awaited_once_with(self.ctx, self.user, "foobar", expires_at="baz") + async def test_temporary_voice_mute(self): + """Should call voice mute applying function with expiry.""" + self.cog.apply_voice_mute = AsyncMock() + self.assertIsNone(await self.cog.tempvoicemute(self.cog, self.ctx, self.user, "baz", reason="foobar")) + self.cog.apply_voice_mute.assert_awaited_once_with(self.ctx, self.user, "foobar", expires_at="baz") - async def test_voice_unban(self): + async def test_voice_unmute(self): """Should call infraction pardoning function.""" self.cog.pardon_infraction = AsyncMock() - self.assertIsNone(await self.cog.unvoiceban(self.cog, self.ctx, self.user)) - self.cog.pardon_infraction.assert_awaited_once_with(self.ctx, "voice_ban", self.user) + self.assertIsNone(await self.cog.unvoicemute(self.cog, self.ctx, 
self.user)) + self.cog.pardon_infraction.assert_awaited_once_with(self.ctx, "voice_mute", self.user) @patch("bot.exts.moderation.infraction.infractions._utils.post_infraction") @patch("bot.exts.moderation.infraction.infractions._utils.get_active_infraction") - async def test_voice_ban_user_have_active_infraction(self, get_active_infraction, post_infraction_mock): - """Should return early when user already have Voice Ban infraction.""" + async def test_voice_mute_user_have_active_infraction(self, get_active_infraction, post_infraction_mock): + """Should return early when user already have Voice Mute infraction.""" get_active_infraction.return_value = {"foo": "bar"} - self.assertIsNone(await self.cog.apply_voice_ban(self.ctx, self.user, "foobar")) - get_active_infraction.assert_awaited_once_with(self.ctx, self.user, "voice_ban") + self.assertIsNone(await self.cog.apply_voice_mute(self.ctx, self.user, "foobar")) + get_active_infraction.assert_awaited_once_with(self.ctx, self.user, "voice_mute") post_infraction_mock.assert_not_awaited() @patch("bot.exts.moderation.infraction.infractions._utils.post_infraction") @patch("bot.exts.moderation.infraction.infractions._utils.get_active_infraction") - async def test_voice_ban_infraction_post_failed(self, get_active_infraction, post_infraction_mock): + async def test_voice_mute_infraction_post_failed(self, get_active_infraction, post_infraction_mock): """Should return early when posting infraction fails.""" self.cog.mod_log.ignore = MagicMock() get_active_infraction.return_value = None post_infraction_mock.return_value = None - self.assertIsNone(await self.cog.apply_voice_ban(self.ctx, self.user, "foobar")) + self.assertIsNone(await self.cog.apply_voice_mute(self.ctx, self.user, "foobar")) post_infraction_mock.assert_awaited_once() self.cog.mod_log.ignore.assert_not_called() @patch("bot.exts.moderation.infraction.infractions._utils.post_infraction") @patch("bot.exts.moderation.infraction.infractions._utils.get_active_infraction") - async def test_voice_ban_infraction_post_add_kwargs(self, get_active_infraction, post_infraction_mock): - """Should pass all kwargs passed to apply_voice_ban to post_infraction.""" + async def test_voice_mute_infraction_post_add_kwargs(self, get_active_infraction, post_infraction_mock): + """Should pass all kwargs passed to apply_voice_mute to post_infraction.""" get_active_infraction.return_value = None # We don't want that this continue yet post_infraction_mock.return_value = None - self.assertIsNone(await self.cog.apply_voice_ban(self.ctx, self.user, "foobar", my_kwarg=23)) + self.assertIsNone(await self.cog.apply_voice_mute(self.ctx, self.user, "foobar", my_kwarg=23)) post_infraction_mock.assert_awaited_once_with( - self.ctx, self.user, "voice_ban", "foobar", active=True, my_kwarg=23 + self.ctx, self.user, "voice_mute", "foobar", active=True, my_kwarg=23 ) @patch("bot.exts.moderation.infraction.infractions._utils.post_infraction") @patch("bot.exts.moderation.infraction.infractions._utils.get_active_infraction") - async def test_voice_ban_mod_log_ignore(self, get_active_infraction, post_infraction_mock): + async def test_voice_mute_mod_log_ignore(self, get_active_infraction, post_infraction_mock): """Should ignore Voice Verified role removing.""" self.cog.mod_log.ignore = MagicMock() self.cog.apply_infraction = AsyncMock() @@ -134,11 +136,11 @@ class VoiceBanTests(unittest.IsolatedAsyncioTestCase): get_active_infraction.return_value = None post_infraction_mock.return_value = {"foo": "bar"} - self.assertIsNone(await 
self.cog.apply_voice_ban(self.ctx, self.user, "foobar")) + self.assertIsNone(await self.cog.apply_voice_mute(self.ctx, self.user, "foobar")) self.cog.mod_log.ignore.assert_called_once_with(Event.member_update, self.user.id) async def action_tester(self, action, reason: str) -> None: - """Helper method to test voice ban action.""" + """Helper method to test voice mute action.""" self.assertTrue(inspect.iscoroutine(action)) await action @@ -147,7 +149,7 @@ class VoiceBanTests(unittest.IsolatedAsyncioTestCase): @patch("bot.exts.moderation.infraction.infractions._utils.post_infraction") @patch("bot.exts.moderation.infraction.infractions._utils.get_active_infraction") - async def test_voice_ban_apply_infraction(self, get_active_infraction, post_infraction_mock): + async def test_voice_mute_apply_infraction(self, get_active_infraction, post_infraction_mock): """Should ignore Voice Verified role removing.""" self.cog.mod_log.ignore = MagicMock() self.cog.apply_infraction = AsyncMock() @@ -156,22 +158,22 @@ class VoiceBanTests(unittest.IsolatedAsyncioTestCase): post_infraction_mock.return_value = {"foo": "bar"} reason = "foobar" - self.assertIsNone(await self.cog.apply_voice_ban(self.ctx, self.user, reason)) + self.assertIsNone(await self.cog.apply_voice_mute(self.ctx, self.user, reason)) self.cog.apply_infraction.assert_awaited_once_with(self.ctx, {"foo": "bar"}, self.user, ANY) await self.action_tester(self.cog.apply_infraction.call_args[0][-1], reason) @patch("bot.exts.moderation.infraction.infractions._utils.post_infraction") @patch("bot.exts.moderation.infraction.infractions._utils.get_active_infraction") - async def test_voice_ban_truncate_reason(self, get_active_infraction, post_infraction_mock): - """Should truncate reason for voice ban.""" + async def test_voice_mute_truncate_reason(self, get_active_infraction, post_infraction_mock): + """Should truncate reason for voice mute.""" self.cog.mod_log.ignore = MagicMock() self.cog.apply_infraction = AsyncMock() get_active_infraction.return_value = None post_infraction_mock.return_value = {"foo": "bar"} - self.assertIsNone(await self.cog.apply_voice_ban(self.ctx, self.user, "foobar" * 3000)) + self.assertIsNone(await self.cog.apply_voice_mute(self.ctx, self.user, "foobar" * 3000)) self.cog.apply_infraction.assert_awaited_once_with(self.ctx, {"foo": "bar"}, self.user, ANY) # Test action @@ -180,14 +182,14 @@ class VoiceBanTests(unittest.IsolatedAsyncioTestCase): @autospec(_utils, "post_infraction", "get_active_infraction", return_value=None) @autospec(Infractions, "apply_infraction") - async def test_voice_ban_user_left_guild(self, apply_infraction_mock, post_infraction_mock, _): - """Should voice ban user that left the guild without throwing an error.""" + async def test_voice_mute_user_left_guild(self, apply_infraction_mock, post_infraction_mock, _): + """Should voice mute user that left the guild without throwing an error.""" infraction = {"foo": "bar"} post_infraction_mock.return_value = {"foo": "bar"} user = MockUser() - await self.cog.voiceban(self.cog, self.ctx, user, reason=None) - post_infraction_mock.assert_called_once_with(self.ctx, user, "voice_ban", None, active=True, expires_at=None) + await self.cog.voicemute(self.cog, self.ctx, user, reason=None) + post_infraction_mock.assert_called_once_with(self.ctx, user, "voice_mute", None, active=True, expires_at=None) apply_infraction_mock.assert_called_once_with(self.cog, self.ctx, infraction, user, ANY) # Test action @@ -195,22 +197,22 @@ class 
VoiceBanTests(unittest.IsolatedAsyncioTestCase): self.assertTrue(inspect.iscoroutine(action)) await action - async def test_voice_unban_user_not_found(self): + async def test_voice_unmute_user_not_found(self): """Should include info to return dict when user was not found from guild.""" self.guild.get_member.return_value = None self.guild.fetch_member.side_effect = NotFound(Mock(status=404), "Not found") - result = await self.cog.pardon_voice_ban(self.user.id, self.guild) + result = await self.cog.pardon_voice_mute(self.user.id, self.guild) self.assertEqual(result, {"Info": "User was not found in the guild."}) @patch("bot.exts.moderation.infraction.infractions._utils.notify_pardon") @patch("bot.exts.moderation.infraction.infractions.format_user") - async def test_voice_unban_user_found(self, format_user_mock, notify_pardon_mock): + async def test_voice_unmute_user_found(self, format_user_mock, notify_pardon_mock): """Should add role back with ignoring, notify user and return log dictionary..""" self.guild.get_member.return_value = self.user notify_pardon_mock.return_value = True format_user_mock.return_value = "my-user" - result = await self.cog.pardon_voice_ban(self.user.id, self.guild) + result = await self.cog.pardon_voice_mute(self.user.id, self.guild) self.assertEqual(result, { "Member": "my-user", "DM": "Sent" @@ -219,15 +221,100 @@ class VoiceBanTests(unittest.IsolatedAsyncioTestCase): @patch("bot.exts.moderation.infraction.infractions._utils.notify_pardon") @patch("bot.exts.moderation.infraction.infractions.format_user") - async def test_voice_unban_dm_fail(self, format_user_mock, notify_pardon_mock): + async def test_voice_unmute_dm_fail(self, format_user_mock, notify_pardon_mock): """Should add role back with ignoring, notify user and return log dictionary..""" self.guild.get_member.return_value = self.user notify_pardon_mock.return_value = False format_user_mock.return_value = "my-user" - result = await self.cog.pardon_voice_ban(self.user.id, self.guild) + result = await self.cog.pardon_voice_mute(self.user.id, self.guild) self.assertEqual(result, { "Member": "my-user", "DM": "**Failed**" }) notify_pardon_mock.assert_awaited_once() + + +class CleanBanTests(unittest.IsolatedAsyncioTestCase): + """Tests for cleanban functionality.""" + + def setUp(self): + self.bot = MockBot() + self.mod = MockMember(roles=[MockRole(id=7890123, position=10)]) + self.user = MockMember(roles=[MockRole(id=123456, position=1)]) + self.guild = MockGuild() + self.ctx = MockContext(bot=self.bot, author=self.mod) + self.cog = Infractions(self.bot) + self.clean_cog = Clean(self.bot) + self.management_cog = ModManagement(self.bot) + + self.cog.apply_ban = AsyncMock(return_value={"id": 42}) + self.log_url = "https://www.youtube.com/watch?v=dQw4w9WgXcQ" + self.clean_cog._clean_messages = AsyncMock(return_value=self.log_url) + + def mock_get_cog(self, enable_clean, enable_manage): + """Mock get cog factory that allows the user to specify whether clean and manage cogs are enabled.""" + def inner(name): + if name == "ModManagement": + return self.management_cog if enable_manage else None + elif name == "Clean": + return self.clean_cog if enable_clean else None + else: + return DEFAULT + return inner + + async def test_cleanban_falls_back_to_native_purge_without_clean_cog(self): + """Should fallback to native purge if the Clean cog is not available.""" + self.bot.get_cog.side_effect = self.mock_get_cog(False, False) + + self.assertIsNone(await self.cog.cleanban(self.cog, self.ctx, self.user, None, 
reason="FooBar")) + self.cog.apply_ban.assert_awaited_once_with( + self.ctx, + self.user, + "FooBar", + purge_days=1, + expires_at=None, + ) + + async def test_cleanban_doesnt_purge_messages_if_clean_cog_available(self): + """Cleanban command should use the native purge messages if the clean cog is available.""" + self.bot.get_cog.side_effect = self.mock_get_cog(True, False) + + self.assertIsNone(await self.cog.cleanban(self.cog, self.ctx, self.user, None, reason="FooBar")) + self.cog.apply_ban.assert_awaited_once_with( + self.ctx, + self.user, + "FooBar", + expires_at=None, + ) + + @patch("bot.exts.moderation.infraction.infractions.Age") + async def test_cleanban_uses_clean_cog_when_available(self, mocked_age_converter): + """Test cleanban uses the clean cog to clean messages if it's available.""" + self.bot.api_client.patch = AsyncMock() + self.bot.get_cog.side_effect = self.mock_get_cog(True, False) + + mocked_age_converter.return_value.convert = AsyncMock(return_value="81M") + self.assertIsNone(await self.cog.cleanban(self.cog, self.ctx, self.user, None, reason="FooBar")) + + self.clean_cog._clean_messages.assert_awaited_once_with( + self.ctx, + users=[self.user], + channels="*", + first_limit="81M", + attempt_delete_invocation=False, + ) + + async def test_cleanban_edits_infraction_reason(self): + """Ensure cleanban edits the ban reason with a link to the clean log.""" + self.bot.get_cog.side_effect = self.mock_get_cog(True, True) + + self.management_cog.infraction_append = AsyncMock() + self.assertIsNone(await self.cog.cleanban(self.cog, self.ctx, self.user, None, reason="FooBar")) + + self.management_cog.infraction_append.assert_awaited_once_with( + self.ctx, + {"id": 42}, + None, + reason=f"[Clean log]({self.log_url})" + ) diff --git a/tests/bot/exts/moderation/infraction/test_utils.py b/tests/bot/exts/moderation/infraction/test_utils.py index 72eebb254..350274ecd 100644 --- a/tests/bot/exts/moderation/infraction/test_utils.py +++ b/tests/bot/exts/moderation/infraction/test_utils.py @@ -132,7 +132,7 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): """ test_cases = [ { - "args": (self.user, "ban", "2020-02-26 09:20 (23 hours and 59 minutes)"), + "args": (self.bot, self.user, 0, "ban", "2020-02-26 09:20 (23 hours and 59 minutes)"), "expected_output": Embed( title=utils.INFRACTION_TITLE, description=utils.INFRACTION_DESCRIPTION_TEMPLATE.format( @@ -150,7 +150,7 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): "send_result": True }, { - "args": (self.user, "warning", None, "Test reason."), + "args": (self.bot, self.user, 0, "warning", None, "Test reason."), "expected_output": Embed( title=utils.INFRACTION_TITLE, description=utils.INFRACTION_DESCRIPTION_TEMPLATE.format( @@ -170,7 +170,7 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): # Note that this test case asserts that the DM that *would* get sent to the user is formatted # correctly, even though that message is deliberately never sent. 
{ - "args": (self.user, "note", None, None, Icons.defcon_denied), + "args": (self.bot, self.user, 0, "note", None, None, Icons.defcon_denied), "expected_output": Embed( title=utils.INFRACTION_TITLE, description=utils.INFRACTION_DESCRIPTION_TEMPLATE.format( @@ -188,7 +188,15 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): "send_result": False }, { - "args": (self.user, "mute", "2020-02-26 09:20 (23 hours and 59 minutes)", "Test", Icons.defcon_denied), + "args": ( + self.bot, + self.user, + 0, + "mute", + "2020-02-26 09:20 (23 hours and 59 minutes)", + "Test", + Icons.defcon_denied + ), "expected_output": Embed( title=utils.INFRACTION_TITLE, description=utils.INFRACTION_DESCRIPTION_TEMPLATE.format( @@ -206,7 +214,7 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): "send_result": False }, { - "args": (self.user, "mute", None, "foo bar" * 4000, Icons.defcon_denied), + "args": (self.bot, self.user, 0, "mute", None, "foo bar" * 4000, Icons.defcon_denied), "expected_output": Embed( title=utils.INFRACTION_TITLE, description=utils.INFRACTION_DESCRIPTION_TEMPLATE.format( @@ -238,7 +246,7 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): self.assertEqual(embed.to_dict(), case["expected_output"].to_dict()) - send_private_embed_mock.assert_awaited_once_with(case["args"][0], embed) + send_private_embed_mock.assert_awaited_once_with(case["args"][1], embed) @patch("bot.exts.moderation.infraction._utils.send_private_embed") async def test_notify_pardon(self, send_private_embed_mock): @@ -313,7 +321,8 @@ class TestPostInfraction(unittest.IsolatedAsyncioTestCase): "type": "ban", "user": self.member.id, "active": False, - "expires_at": now.isoformat() + "expires_at": now.isoformat(), + "dm_sent": False } self.ctx.bot.api_client.post.return_value = "foo" @@ -350,7 +359,8 @@ class TestPostInfraction(unittest.IsolatedAsyncioTestCase): "reason": "Test reason", "type": "mute", "user": self.user.id, - "active": True + "active": True, + "dm_sent": False } self.bot.api_client.post.side_effect = [ResponseCodeError(MagicMock(status=400), {"user": "foo"}), "foo"] diff --git a/tests/bot/exts/moderation/test_clean.py b/tests/bot/exts/moderation/test_clean.py new file mode 100644 index 000000000..d7647fa48 --- /dev/null +++ b/tests/bot/exts/moderation/test_clean.py @@ -0,0 +1,104 @@ +import unittest +from unittest.mock import AsyncMock, MagicMock, patch + +from bot.exts.moderation.clean import Clean +from tests.helpers import MockBot, MockContext, MockGuild, MockMember, MockMessage, MockRole, MockTextChannel + + +class CleanTests(unittest.IsolatedAsyncioTestCase): + """Tests for clean cog functionality.""" + + def setUp(self): + self.bot = MockBot() + self.mod = MockMember(roles=[MockRole(id=7890123, position=10)]) + self.user = MockMember(roles=[MockRole(id=123456, position=1)]) + self.guild = MockGuild() + self.ctx = MockContext(bot=self.bot, author=self.mod) + self.cog = Clean(self.bot) + + self.log_url = "https://www.youtube.com/watch?v=dQw4w9WgXcQ" + self.cog._modlog_cleaned_messages = AsyncMock(return_value=self.log_url) + + self.cog._use_cache = MagicMock(return_value=True) + self.cog._delete_found = AsyncMock(return_value=[42, 84]) + + @patch("bot.exts.moderation.clean.is_mod_channel") + async def test_clean_deletes_invocation_in_non_mod_channel(self, mod_channel_check): + """Clean command should delete the invocation message if ran in a non mod channel.""" + mod_channel_check.return_value = False + self.ctx.message.delete = AsyncMock() + + self.assertIsNone(await 
self.cog._delete_invocation(self.ctx)) + + self.ctx.message.delete.assert_awaited_once() + + @patch("bot.exts.moderation.clean.is_mod_channel") + async def test_clean_doesnt_delete_invocation_in_mod_channel(self, mod_channel_check): + """Clean command should not delete the invocation message if ran in a mod channel.""" + mod_channel_check.return_value = True + self.ctx.message.delete = AsyncMock() + + self.assertIsNone(await self.cog._delete_invocation(self.ctx)) + + self.ctx.message.delete.assert_not_awaited() + + async def test_clean_doesnt_attempt_deletion_when_attempt_delete_invocation_is_false(self): + """Clean command should not attempt to delete the invocation message if attempt_delete_invocation is false.""" + self.cog._delete_invocation = AsyncMock() + self.bot.get_channel = MagicMock(return_value=False) + + self.assertEqual( + await self.cog._clean_messages( + self.ctx, + None, + first_limit=MockMessage(), + attempt_delete_invocation=False, + ), + self.log_url, + ) + + self.cog._delete_invocation.assert_not_awaited() + + @patch("bot.exts.moderation.clean.is_mod_channel") + async def test_clean_replies_with_success_message_when_ran_in_mod_channel(self, mod_channel_check): + """Clean command should reply to the message with a confirmation message if invoked in a mod channel.""" + mod_channel_check.return_value = True + self.ctx.reply = AsyncMock() + + self.assertEqual( + await self.cog._clean_messages( + self.ctx, + None, + first_limit=MockMessage(), + attempt_delete_invocation=False, + ), + self.log_url, + ) + + self.ctx.reply.assert_awaited_once() + sent_message = self.ctx.reply.await_args[0][0] + self.assertIn(self.log_url, sent_message) + self.assertIn("2 messages", sent_message) + + @patch("bot.exts.moderation.clean.is_mod_channel") + async def test_clean_send_success_message_to_mods_when_ran_in_non_mod_channel(self, mod_channel_check): + """Clean command should send a confirmation message to #mods if invoked in a non-mod channel.""" + mod_channel_check.return_value = False + mocked_mods = MockTextChannel(id=1234567) + mocked_mods.send = AsyncMock() + self.bot.get_channel = MagicMock(return_value=mocked_mods) + + self.assertEqual( + await self.cog._clean_messages( + self.ctx, + None, + first_limit=MockMessage(), + attempt_delete_invocation=False, + ), + self.log_url, + ) + + mocked_mods.send.assert_awaited_once() + sent_message = mocked_mods.send.await_args[0][0] + self.assertIn(self.log_url, sent_message) + self.assertIn("2 messages", sent_message) diff --git a/tests/bot/exts/utils/test_snekbox.py b/tests/bot/exts/utils/test_snekbox.py index 321a92445..8bdeedd27 100644 --- a/tests/bot/exts/utils/test_snekbox.py +++ b/tests/bot/exts/utils/test_snekbox.py @@ -2,6 +2,7 @@ import asyncio import unittest from unittest.mock import AsyncMock, MagicMock, Mock, call, create_autospec, patch +from discord import AllowedMentions from discord.ext import commands from bot import constants @@ -201,7 +202,7 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase): ctx = MockContext() ctx.message = MockMessage() ctx.send = AsyncMock() - ctx.author.mention = '@LemonLemonishBeard#0042' + ctx.author = MockUser(mention='@LemonLemonishBeard#0042') self.cog.post_eval = AsyncMock(return_value={'stdout': '', 'returncode': 0}) self.cog.get_results_message = MagicMock(return_value=('Return code 0', '')) @@ -213,9 +214,16 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase): self.bot.get_cog.return_value = mocked_filter_cog await self.cog.send_eval(ctx, 'MyAwesomeCode') - 
ctx.send.assert_called_once_with( + + ctx.send.assert_called_once() + self.assertEqual( + ctx.send.call_args.args[0], '@LemonLemonishBeard#0042 :yay!: Return code 0.\n\n```\n[No output]\n```' ) + allowed_mentions = ctx.send.call_args.kwargs['allowed_mentions'] + expected_allowed_mentions = AllowedMentions(everyone=False, roles=False, users=[ctx.author]) + self.assertEqual(allowed_mentions.to_dict(), expected_allowed_mentions.to_dict()) + self.cog.post_eval.assert_called_once_with('MyAwesomeCode') self.cog.get_status_emoji.assert_called_once_with({'stdout': '', 'returncode': 0}) self.cog.get_results_message.assert_called_once_with({'stdout': '', 'returncode': 0}) @@ -238,10 +246,14 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase): self.bot.get_cog.return_value = mocked_filter_cog await self.cog.send_eval(ctx, 'MyAwesomeCode') - ctx.send.assert_called_once_with( + + ctx.send.assert_called_once() + self.assertEqual( + ctx.send.call_args.args[0], '@LemonLemonishBeard#0042 :yay!: Return code 0.' '\n\n```\nWay too long beard\n```\nFull output: lookatmybeard.com' ) + self.cog.post_eval.assert_called_once_with('MyAwesomeCode') self.cog.get_status_emoji.assert_called_once_with({'stdout': 'Way too long beard', 'returncode': 0}) self.cog.get_results_message.assert_called_once_with({'stdout': 'Way too long beard', 'returncode': 0}) @@ -263,9 +275,13 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase): self.bot.get_cog.return_value = mocked_filter_cog await self.cog.send_eval(ctx, 'MyAwesomeCode') - ctx.send.assert_called_once_with( + + ctx.send.assert_called_once() + self.assertEqual( + ctx.send.call_args.args[0], '@LemonLemonishBeard#0042 :nope!: Return code 127.\n\n```\nBeard got stuck in the eval\n```' ) + self.cog.post_eval.assert_called_once_with('MyAwesomeCode') self.cog.get_status_emoji.assert_called_once_with({'stdout': 'ERROR', 'returncode': 127}) self.cog.get_results_message.assert_called_once_with({'stdout': 'ERROR', 'returncode': 127}) diff --git a/tests/bot/utils/test_time.py b/tests/bot/utils/test_time.py index a3dcbfc0a..120d65176 100644 --- a/tests/bot/utils/test_time.py +++ b/tests/bot/utils/test_time.py @@ -13,13 +13,15 @@ class TimeTests(unittest.TestCase): """humanize_delta should be able to handle unknown units, and will not abort.""" # Does not abort for unknown units, as the unit name is checked # against the attribute of the relativedelta instance. - self.assertEqual(time.humanize_delta(relativedelta(days=2, hours=2), 'elephants', 2), '2 days and 2 hours') + actual = time.humanize_delta(relativedelta(days=2, hours=2), precision='elephants', max_units=2) + self.assertEqual(actual, '2 days and 2 hours') def test_humanize_delta_handle_high_units(self): """humanize_delta should be able to handle very high units.""" # Very high maximum units, but it only ever iterates over # each value the relativedelta might have. 
- self.assertEqual(time.humanize_delta(relativedelta(days=2, hours=2), 'hours', 20), '2 days and 2 hours') + actual = time.humanize_delta(relativedelta(days=2, hours=2), precision='hours', max_units=20) + self.assertEqual(actual, '2 days and 2 hours') def test_humanize_delta_should_normal_usage(self): """Testing humanize delta.""" @@ -32,7 +34,8 @@ class TimeTests(unittest.TestCase): for delta, precision, max_units, expected in test_cases: with self.subTest(delta=delta, precision=precision, max_units=max_units, expected=expected): - self.assertEqual(time.humanize_delta(delta, precision, max_units), expected) + actual = time.humanize_delta(delta, precision=precision, max_units=max_units) + self.assertEqual(actual, expected) def test_humanize_delta_raises_for_invalid_max_units(self): """humanize_delta should raises ValueError('max_units must be positive') for invalid max_units.""" @@ -40,22 +43,11 @@ class TimeTests(unittest.TestCase): for max_units in test_cases: with self.subTest(max_units=max_units), self.assertRaises(ValueError) as error: - time.humanize_delta(relativedelta(days=2, hours=2), 'hours', max_units) - self.assertEqual(str(error.exception), 'max_units must be positive') - - def test_parse_rfc1123(self): - """Testing parse_rfc1123.""" - self.assertEqual( - time.parse_rfc1123('Sun, 15 Sep 2019 12:00:00 GMT'), - datetime(2019, 9, 15, 12, 0, 0, tzinfo=timezone.utc) - ) - - def test_format_infraction(self): - """Testing format_infraction.""" - self.assertEqual(time.format_infraction('2019-12-12T00:01:00Z'), '<t:1576108860:f>') + time.humanize_delta(relativedelta(days=2, hours=2), precision='hours', max_units=max_units) + self.assertEqual(str(error.exception), 'max_units must be positive.') - def test_format_infraction_with_duration_none_expiry(self): - """format_infraction_with_duration should work for None expiry.""" + def test_format_with_duration_none_expiry(self): + """format_with_duration should work for None expiry.""" test_cases = ( (None, None, None, None), @@ -67,10 +59,10 @@ class TimeTests(unittest.TestCase): for expiry, date_from, max_units, expected in test_cases: with self.subTest(expiry=expiry, date_from=date_from, max_units=max_units, expected=expected): - self.assertEqual(time.format_infraction_with_duration(expiry, date_from, max_units), expected) + self.assertEqual(time.format_with_duration(expiry, date_from, max_units), expected) - def test_format_infraction_with_duration_custom_units(self): - """format_infraction_with_duration should work for custom max_units.""" + def test_format_with_duration_custom_units(self): + """format_with_duration should work for custom max_units.""" test_cases = ( ('3000-12-12T00:01:00Z', datetime(3000, 12, 11, 12, 5, 5, tzinfo=timezone.utc), 6, '<t:32533488060:f> (11 hours, 55 minutes and 55 seconds)'), @@ -80,10 +72,10 @@ class TimeTests(unittest.TestCase): for expiry, date_from, max_units, expected in test_cases: with self.subTest(expiry=expiry, date_from=date_from, max_units=max_units, expected=expected): - self.assertEqual(time.format_infraction_with_duration(expiry, date_from, max_units), expected) + self.assertEqual(time.format_with_duration(expiry, date_from, max_units), expected) - def test_format_infraction_with_duration_normal_usage(self): - """format_infraction_with_duration should work for normal usage, across various durations.""" + def test_format_with_duration_normal_usage(self): + """format_with_duration should work for normal usage, across various durations.""" utc = timezone.utc test_cases = ( 
('2019-12-12T00:01:00Z', datetime(2019, 12, 11, 12, 0, 5, tzinfo=utc), 2,
@@ -105,11 +97,11 @@ TimeTests(unittest.TestCase):

        for expiry, date_from, max_units, expected in test_cases:
            with self.subTest(expiry=expiry, date_from=date_from, max_units=max_units, expected=expected):
-                self.assertEqual(time.format_infraction_with_duration(expiry, date_from, max_units), expected)
+                self.assertEqual(time.format_with_duration(expiry, date_from, max_units), expected)

    def test_until_expiration_with_duration_none_expiry(self):
-        """until_expiration should work for None expiry."""
-        self.assertEqual(time.until_expiration(None), None)
+        """until_expiration should return "Permanent" if expiry is None."""
+        self.assertEqual(time.until_expiration(None), "Permanent")

    def test_until_expiration_with_duration_custom_units(self):
        """until_expiration should work for custom max_units."""
@@ -130,7 +122,6 @@ TimeTests(unittest.TestCase):
        ('3000-12-12T00:00:00Z', '<t:32533488000:R>'),
        ('3000-11-23T20:09:00Z', '<t:32531918940:R>'),
        ('3000-11-23T20:09:00Z', '<t:32531918940:R>'),
-        (None, None),
    )

    for expiry, expected in test_cases: