author    ChrisJL <[email protected]>  2022-04-03 19:30:54 +0100
committer GitHub <[email protected]>  2022-04-03 19:30:54 +0100
commit    15e6267daa5c9d5fd98d7337b9c9e10f311ff0b0 (patch)
tree      bb03f53dcf7d8850aa9af4a53955146a34b7a3b1
parent    Update bot/resources/tags/type-hint.md (diff)
parent    Merge pull request #2069 from GDWR/feature/ping_on_bad_words_in_username (diff)
Merge branch 'main' into type-hint-tag
-rw-r--r--  .github/CODEOWNERS | 20
-rw-r--r--  .github/workflows/lint-test.yml | 4
-rw-r--r--  README.md | 1
-rw-r--r--  bot/__init__.py | 5
-rw-r--r--  bot/constants.py | 25
-rw-r--r--  bot/converters.py | 57
-rw-r--r--  bot/decorators.py | 40
-rw-r--r--  bot/exts/backend/error_handler.py | 26
-rw-r--r--  bot/exts/filters/antimalware.py | 10
-rw-r--r--  bot/exts/filters/antispam.py | 1
-rw-r--r--  bot/exts/filters/filter_lists.py | 13
-rw-r--r--  bot/exts/filters/filtering.py | 83
-rw-r--r--  bot/exts/fun/duck_pond.py | 20
-rw-r--r--  bot/exts/help_channels/_channel.py | 39
-rw-r--r--  bot/exts/help_channels/_cog.py | 100
-rw-r--r--  bot/exts/help_channels/_message.py | 93
-rw-r--r--  bot/exts/info/code_snippets.py | 3
-rw-r--r--  bot/exts/info/doc/_cog.py | 2
-rw-r--r--  bot/exts/info/help.py | 163
-rw-r--r--  bot/exts/info/information.py | 70
-rw-r--r--  bot/exts/info/pep.py | 11
-rw-r--r--  bot/exts/info/pypi.py | 2
-rw-r--r--  bot/exts/info/python_news.py | 2
-rw-r--r--  bot/exts/info/resources.py | 70
-rw-r--r--  bot/exts/info/site.py | 142
-rw-r--r--  bot/exts/info/source.py | 13
-rw-r--r--  bot/exts/info/subscribe.py | 201
-rw-r--r--  bot/exts/info/tags.py | 484
-rw-r--r--  bot/exts/moderation/clean.py | 252
-rw-r--r--  bot/exts/moderation/defcon.py | 31
-rw-r--r--  bot/exts/moderation/incidents.py | 264
-rw-r--r--  bot/exts/moderation/infraction/_scheduler.py | 31
-rw-r--r--  bot/exts/moderation/infraction/_utils.py | 66
-rw-r--r--  bot/exts/moderation/infraction/infractions.py | 174
-rw-r--r--  bot/exts/moderation/infraction/management.py | 126
-rw-r--r--  bot/exts/moderation/infraction/superstarify.py | 32
-rw-r--r--  bot/exts/moderation/modlog.py | 72
-rw-r--r--  bot/exts/moderation/modpings.py | 120
-rw-r--r--  bot/exts/moderation/slowmode.py | 5
-rw-r--r--  bot/exts/moderation/stream.py | 13
-rw-r--r--  bot/exts/moderation/verification.py | 71
-rw-r--r--  bot/exts/moderation/voice_gate.py | 14
-rw-r--r--  bot/exts/moderation/watchchannels/_watchchannel.py | 7
-rw-r--r--  bot/exts/moderation/watchchannels/bigbrother.py | 2
-rw-r--r--  bot/exts/recruitment/talentpool/_cog.py | 27
-rw-r--r--  bot/exts/recruitment/talentpool/_review.py | 63
-rw-r--r--  bot/exts/utils/bot.py | 17
-rw-r--r--  bot/exts/utils/reminders.py | 46
-rw-r--r--  bot/exts/utils/snekbox.py | 334
-rw-r--r--  bot/exts/utils/thread_bumper.py | 147
-rw-r--r--  bot/exts/utils/utils.py | 7
-rw-r--r--  bot/log.py | 15
-rw-r--r--  bot/monkey_patches.py | 24
-rw-r--r--  bot/resources/tags/contribute.md | 2
-rw-r--r--  bot/resources/tags/dictcomps.md | 2
-rw-r--r--  bot/resources/tags/docstring.md | 2
-rw-r--r--  bot/resources/tags/enumerate.md | 2
-rw-r--r--  bot/resources/tags/faq.md | 6
-rw-r--r--  bot/resources/tags/indent.md | 6
-rw-r--r--  bot/resources/tags/off-topic-names.md | 10
-rw-r--r--  bot/resources/tags/off-topic.md | 10
-rw-r--r--  bot/resources/tags/ot.md | 3
-rw-r--r--  bot/resources/tags/pathlib.md | 2
-rw-r--r--  bot/resources/tags/pep8.md | 2
-rw-r--r--  bot/resources/tags/positional-keyword.md | 6
-rw-r--r--  bot/resources/tags/quotes.md | 4
-rw-r--r--  bot/resources/tags/regex.md | 15
-rw-r--r--  bot/resources/tags/site.md | 6
-rw-r--r--  bot/resources/tags/sql-fstring.md | 2
-rw-r--r--  bot/resources/tags/star-imports.md | 2
-rw-r--r--  bot/resources/tags/strip-gotcha.md | 17
-rw-r--r--  bot/resources/tags/tools.md | 6
-rw-r--r--  bot/resources/tags/traceback.md | 21
-rw-r--r--  bot/resources/tags/with.md | 2
-rw-r--r--  bot/utils/members.py | 23
-rw-r--r--  bot/utils/regex.py | 14
-rw-r--r--  bot/utils/scheduling.py | 4
-rw-r--r--  bot/utils/time.py | 274
-rw-r--r--  config-default.yml | 30
-rw-r--r--  docker-compose.yml | 2
-rw-r--r--  poetry.lock | 803
-rw-r--r--  pyproject.toml | 6
-rw-r--r--  tests/bot/exts/backend/test_error_handler.py | 62
-rw-r--r--  tests/bot/exts/info/test_information.py | 76
-rw-r--r--  tests/bot/exts/moderation/infraction/test_infractions.py | 175
-rw-r--r--  tests/bot/exts/moderation/infraction/test_utils.py | 36
-rw-r--r--  tests/bot/exts/moderation/test_clean.py | 104
-rw-r--r--  tests/bot/exts/moderation/test_incidents.py | 94
-rw-r--r--  tests/bot/exts/utils/test_snekbox.py | 183
-rw-r--r--  tests/bot/test_converters.py | 17
-rw-r--r--  tests/bot/utils/test_time.py | 47
91 files changed, 3812 insertions, 1926 deletions
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 6dfe7e859..ea69f7677 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -4,10 +4,10 @@
**/bot/exts/moderation/*silence.py @MarkKoz
bot/exts/info/codeblock/** @MarkKoz
bot/exts/utils/extensions.py @MarkKoz
-bot/exts/utils/snekbox.py @MarkKoz @Akarys42 @jb3
-bot/exts/help_channels/** @MarkKoz @Akarys42
-bot/exts/moderation/** @Akarys42 @mbaruh @Den4200 @ks129 @jb3
-bot/exts/info/** @Akarys42 @Den4200 @jb3
+bot/exts/utils/snekbox.py @MarkKoz @jb3
+bot/exts/help_channels/** @MarkKoz
+bot/exts/moderation/** @mbaruh @Den4200 @ks129 @jb3
+bot/exts/info/** @Den4200 @jb3
bot/exts/info/information.py @mbaruh @jb3
bot/exts/filters/** @mbaruh @jb3
bot/exts/fun/** @ks129
@@ -21,22 +21,16 @@ bot/rules/** @mbaruh
bot/utils/extensions.py @MarkKoz
bot/utils/function.py @MarkKoz
bot/utils/lock.py @MarkKoz
-bot/utils/regex.py @Akarys42
bot/utils/scheduling.py @MarkKoz
# Tests
tests/_autospec.py @MarkKoz
tests/bot/exts/test_cogs.py @MarkKoz
-tests/** @Akarys42
# CI & Docker
-.github/workflows/** @MarkKoz @Akarys42 @SebastiaanZ @Den4200 @jb3
-Dockerfile @MarkKoz @Akarys42 @Den4200 @jb3
-docker-compose.yml @MarkKoz @Akarys42 @Den4200 @jb3
-
-# Tools
-poetry.lock @Akarys42
-pyproject.toml @Akarys42
+.github/workflows/** @MarkKoz @SebastiaanZ @Den4200 @jb3
+Dockerfile @MarkKoz @Den4200 @jb3
+docker-compose.yml @MarkKoz @Den4200 @jb3
# Statistics
bot/async_stats.py @jb3
diff --git a/.github/workflows/lint-test.yml b/.github/workflows/lint-test.yml
index f2c9dfb6c..57cc544d9 100644
--- a/.github/workflows/lint-test.yml
+++ b/.github/workflows/lint-test.yml
@@ -46,6 +46,10 @@ jobs:
PYTHONUSERBASE: ${{ github.workspace }}/.cache/py-user-base
PRE_COMMIT_HOME: ${{ github.workspace }}/.cache/pre-commit-cache
+ # See https://github.com/pre-commit/pre-commit/issues/2178#issuecomment-1002163763
+ # for why we set this.
+ SETUPTOOLS_USE_DISTUTILS: stdlib
+
steps:
- name: Add custom PYTHONUSERBASE to PATH
run: echo '${{ env.PYTHONUSERBASE }}/bin/' >> $GITHUB_PATH
diff --git a/README.md b/README.md
index 9df905dc8..06df4fd9a 100644
--- a/README.md
+++ b/README.md
@@ -4,7 +4,6 @@
[![Lint & Test][1]][2]
[![Build][3]][4]
[![Deploy][5]][6]
-[![Coverage Status](https://coveralls.io/repos/github/python-discord/bot/badge.svg)](https://coveralls.io/github/python-discord/bot)
[![License](https://img.shields.io/badge/license-MIT-green)](LICENSE)
This project is a Discord bot specifically for use with the Python Discord server. It provides numerous utilities
diff --git a/bot/__init__.py b/bot/__init__.py
index a1c4466f1..17d99105a 100644
--- a/bot/__init__.py
+++ b/bot/__init__.py
@@ -18,6 +18,11 @@ if os.name == "nt":
monkey_patches.patch_typing()
+# This patches any convertors that use PartialMessage, but not the PartialMessageConverter itself
+# as library objects are made by this mapping.
+# https://github.com/Rapptz/discord.py/blob/1a4e73d59932cdbe7bf2c281f25e32529fc7ae1f/discord/ext/commands/converter.py#L984-L1004
+commands.converter.PartialMessageConverter = monkey_patches.FixedPartialMessageConverter
+
# Monkey-patch discord.py decorators to use the Command subclass which supports root aliases.
# Must be patched before any cogs are added.
commands.command = partial(commands.command, cls=monkey_patches.Command)
diff --git a/bot/constants.py b/bot/constants.py
index e3846fb3d..4531b547d 100644
--- a/bot/constants.py
+++ b/bot/constants.py
@@ -429,10 +429,8 @@ class Channels(metaclass=YAMLGetter):
off_topic_1: int
off_topic_2: int
- black_formatter: int
-
bot_commands: int
- discord_py: int
+ discord_bots: int
esoteric: int
voice_gate: int
code_jam_planning: int
@@ -445,6 +443,7 @@ class Channels(metaclass=YAMLGetter):
incidents_archive: int
mod_alerts: int
mod_meta: int
+ mods: int
nominations: int
nomination_voting: int
organisation: int
@@ -476,6 +475,7 @@ class Webhooks(metaclass=YAMLGetter):
big_brother: int
dev_log: int
duck_pond: int
+ incidents: int
incidents_archive: int
@@ -483,7 +483,12 @@ class Roles(metaclass=YAMLGetter):
section = "guild"
subsection = "roles"
+ # Self-assignable roles, see the Subscribe cog
+ advent_of_code: int
announcements: int
+ lovefest: int
+ pyweek_announcements: int
+
contributors: int
help_cooldown: int
muted: int
@@ -613,10 +618,12 @@ class HelpChannels(metaclass=YAMLGetter):
max_available: int
max_total_channels: int
name_prefix: str
- notify: bool
notify_channel: int
notify_minutes: int
- notify_roles: List[int]
+ notify_none_remaining: bool
+ notify_none_remaining_roles: List[int]
+ notify_running_low: bool
+ notify_running_low_threshold: int
class RedirectOutput(metaclass=YAMLGetter):
@@ -682,8 +689,16 @@ class VideoPermission(metaclass=YAMLGetter):
default_permission_duration: int
+class ThreadArchiveTimes(Enum):
+ HOUR = 60
+ DAY = 1440
+ THREE_DAY = 4320
+ WEEK = 10080
+
+
# Debug mode
DEBUG_MODE: bool = _CONFIG_YAML["debug"] == "true"
+FILE_LOGS: bool = _CONFIG_YAML["file_logs"].lower() == "true"
# Paths
BOT_DIR = os.path.dirname(__file__)
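
For context, a minimal sketch of how the constants added above would be consumed elsewhere in the bot; the call sites here are assumptions for illustration, not part of this diff:

from bot.constants import FILE_LOGS, ThreadArchiveTimes

# Thread auto-archive durations are stored in minutes.
archive_minutes = ThreadArchiveTimes.DAY.value  # 1440

if FILE_LOGS:
    # File logging is only enabled when the "file_logs" config value is "true".
    print("file logging enabled")
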
diff --git a/bot/converters.py b/bot/converters.py
index 0984fa0a3..3522a32aa 100644
--- a/bot/converters.py
+++ b/bot/converters.py
@@ -6,9 +6,9 @@ from datetime import datetime, timezone
from ssl import CertificateError
import dateutil.parser
-import dateutil.tz
import discord
from aiohttp import ClientConnectorError
+from botcore.regex import DISCORD_INVITE
from dateutil.relativedelta import relativedelta
from discord.ext.commands import BadArgument, Bot, Context, Converter, IDConverter, MemberConverter, UserConverter
from discord.utils import escape_markdown, snowflake_time
@@ -18,10 +18,10 @@ from bot.api import ResponseCodeError
from bot.constants import URLs
from bot.errors import InvalidInfraction
from bot.exts.info.doc import _inventory_parser
+from bot.exts.info.tags import TagIdentifier
from bot.log import get_logger
+from bot.utils import time
from bot.utils.extensions import EXTENSIONS, unqualify
-from bot.utils.regex import INVITE_RE
-from bot.utils.time import parse_duration_string
if t.TYPE_CHECKING:
from bot.exts.info.source import SourceType
@@ -71,7 +71,7 @@ class ValidDiscordServerInvite(Converter):
async def convert(self, ctx: Context, server_invite: str) -> dict:
"""Check whether the string is a valid Discord server invite."""
- invite_code = INVITE_RE.match(server_invite)
+ invite_code = DISCORD_INVITE.match(server_invite)
if invite_code:
response = await ctx.bot.http_session.get(
f"{URLs.discord_invite_api}/{invite_code.group('invite')}"
@@ -286,41 +286,6 @@ class Snowflake(IDConverter):
return snowflake
-class TagNameConverter(Converter):
- """
- Ensure that a proposed tag name is valid.
-
- Valid tag names meet the following conditions:
- * All ASCII characters
- * Has at least one non-whitespace character
- * Not solely numeric
- * Shorter than 127 characters
- """
-
- @staticmethod
- async def convert(ctx: Context, tag_name: str) -> str:
- """Lowercase & strip whitespace from proposed tag_name & ensure it's valid."""
- tag_name = tag_name.lower().strip()
-
- # The tag name has at least one invalid character.
- if ascii(tag_name)[1:-1] != tag_name:
- raise BadArgument("Don't be ridiculous, you can't use that character!")
-
- # The tag name is either empty, or consists of nothing but whitespace.
- elif not tag_name:
- raise BadArgument("Tag names should not be empty, or filled with whitespace.")
-
- # The tag name is longer than 127 characters.
- elif len(tag_name) > 127:
- raise BadArgument("Are you insane? That's way too long!")
-
- # The tag name is ascii but does not contain any letters.
- elif not any(character.isalpha() for character in tag_name):
- raise BadArgument("Tag names must contain at least one letter.")
-
- return tag_name
-
-
class SourceConverter(Converter):
"""Convert an argument into a help command, tag, command, or cog."""
@@ -343,9 +308,10 @@ class SourceConverter(Converter):
if not tags_cog:
show_tag = False
- elif argument.lower() in tags_cog._cache:
- return argument.lower()
-
+ else:
+ identifier = TagIdentifier.from_string(argument.lower())
+ if identifier in tags_cog.tags:
+ return identifier
escaped_arg = escape_markdown(argument)
raise BadArgument(
@@ -371,7 +337,7 @@ class DurationDelta(Converter):
The units need to be provided in descending order of magnitude.
"""
- if not (delta := parse_duration_string(duration)):
+ if not (delta := time.parse_duration_string(duration)):
raise BadArgument(f"`{duration}` is not a valid duration string.")
return delta
@@ -487,9 +453,9 @@ class ISODateTime(Converter):
raise BadArgument(f"`{datetime_string}` is not a valid ISO-8601 datetime string")
if dt.tzinfo:
- dt = dt.astimezone(dateutil.tz.UTC)
+ dt = dt.astimezone(timezone.utc)
else: # Without a timezone, assume it represents UTC.
- dt = dt.replace(tzinfo=dateutil.tz.UTC)
+ dt = dt.replace(tzinfo=timezone.utc)
return dt
@@ -615,7 +581,6 @@ if t.TYPE_CHECKING:
ValidURL = str # noqa: F811
Inventory = t.Tuple[str, _inventory_parser.InventoryDict] # noqa: F811
Snowflake = int # noqa: F811
- TagNameConverter = str # noqa: F811
SourceConverter = SourceType # noqa: F811
DurationDelta = relativedelta # noqa: F811
Duration = datetime # noqa: F811
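
The `ISODateTime` change above swaps `dateutil.tz.UTC` for the standard library's `timezone.utc`; the behaviour is equivalent. A sketch of the two branches, not taken from the repository:

from datetime import timezone
import dateutil.parser

aware = dateutil.parser.isoparse("2022-04-03T19:30:54+01:00")
aware.astimezone(timezone.utc)       # aware input: converted to UTC (18:30:54+00:00)

naive = dateutil.parser.isoparse("2022-04-03T19:30:54")
naive.replace(tzinfo=timezone.utc)   # naive input: assumed to already represent UTC
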
diff --git a/bot/decorators.py b/bot/decorators.py
index 048a2a09a..8971898b3 100644
--- a/bot/decorators.py
+++ b/bot/decorators.py
@@ -4,6 +4,7 @@ import types
import typing as t
from contextlib import suppress
+import arrow
from discord import Member, NotFound
from discord.ext import commands
from discord.ext.commands import Cog, Context
@@ -188,7 +189,7 @@ def respect_role_hierarchy(member_arg: function.Argument) -> t.Callable:
"""
def decorator(func: types.FunctionType) -> types.FunctionType:
@command_wraps(func)
- async def wrapper(*args, **kwargs) -> None:
+ async def wrapper(*args, **kwargs) -> t.Any:
log.trace(f"{func.__name__}: respect role hierarchy decorator called")
bound_args = function.get_bound_args(func, args, kwargs)
@@ -196,8 +197,7 @@ def respect_role_hierarchy(member_arg: function.Argument) -> t.Callable:
if not isinstance(target, Member):
log.trace("The target is not a discord.Member; skipping role hierarchy check.")
- await func(*args, **kwargs)
- return
+ return await func(*args, **kwargs)
ctx = function.get_arg_value(1, bound_args)
cmd = ctx.command.name
@@ -214,7 +214,7 @@ def respect_role_hierarchy(member_arg: function.Argument) -> t.Callable:
)
else:
log.trace(f"{func.__name__}: {target.top_role=} < {actor.top_role=}; calling func")
- await func(*args, **kwargs)
+ return await func(*args, **kwargs)
return wrapper
return decorator
@@ -237,3 +237,35 @@ def mock_in_debug(return_value: t.Any) -> t.Callable:
return await func(*args, **kwargs)
return wrapped
return decorator
+
+
+def ensure_future_timestamp(timestamp_arg: function.Argument) -> t.Callable:
+ """
+ Ensure the timestamp argument is in the future.
+
+ If the condition fails, send a warning to the invoking context.
+
+ `timestamp_arg` is the keyword name or position index of the parameter of the decorated command
+ whose value is the target timestamp.
+
+ This decorator must go before (below) the `command` decorator.
+ """
+ def decorator(func: types.FunctionType) -> types.FunctionType:
+ @command_wraps(func)
+ async def wrapper(*args, **kwargs) -> t.Any:
+ bound_args = function.get_bound_args(func, args, kwargs)
+ target = function.get_arg_value(timestamp_arg, bound_args)
+
+ ctx = function.get_arg_value(1, bound_args)
+
+ try:
+ is_future = target > arrow.utcnow()
+ except TypeError:
+ is_future = True
+ if not is_future:
+ await ctx.send(":x: Provided timestamp is in the past.")
+ return
+
+ return await func(*args, **kwargs)
+ return wrapper
+ return decorator
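
A hypothetical usage of the new `ensure_future_timestamp` decorator (the command below is illustrative only; per the docstring it must sit below the `command` decorator, and `timestamp_arg` may be a keyword name or a position index):

@commands.command()
@ensure_future_timestamp(timestamp_arg=2)  # position 2 is the timestamp: 0 is self, 1 is ctx
async def remind(self, ctx: Context, timestamp: Duration, *, content: str) -> None:
    """Hypothetical command; only runs when `timestamp` is in the future."""
    ...
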
diff --git a/bot/exts/backend/error_handler.py b/bot/exts/backend/error_handler.py
index 6ab6634a6..c79c7b2a7 100644
--- a/bot/exts/backend/error_handler.py
+++ b/bot/exts/backend/error_handler.py
@@ -1,5 +1,4 @@
import difflib
-import typing as t
from discord import Embed
from discord.ext.commands import ChannelNotFound, Cog, Context, TextChannelConverter, VoiceChannelConverter, errors
@@ -8,7 +7,6 @@ from sentry_sdk import push_scope
from bot.api import ResponseCodeError
from bot.bot import Bot
from bot.constants import Colours, Icons, MODERATION_ROLES
-from bot.converters import TagNameConverter
from bot.errors import InvalidInfractedUserError, LockedResourceError
from bot.log import get_logger
from bot.utils.checks import ContextCheckFailure
@@ -97,13 +95,14 @@ class ErrorHandler(Cog):
# MaxConcurrencyReached, ExtensionError
await self.handle_unexpected_error(ctx, e)
- @staticmethod
- def get_help_command(ctx: Context) -> t.Coroutine:
+ async def send_command_help(self, ctx: Context) -> None:
"""Return a prepared `help` command invocation coroutine."""
if ctx.command:
- return ctx.send_help(ctx.command)
+ self.bot.help_command.context = ctx
+ await ctx.send_help(ctx.command)
+ return
- return ctx.send_help()
+ await ctx.send_help()
async def try_silence(self, ctx: Context) -> bool:
"""
@@ -174,16 +173,8 @@ class ErrorHandler(Cog):
await self.on_command_error(ctx, tag_error)
return
- try:
- tag_name = await TagNameConverter.convert(ctx, ctx.invoked_with)
- except errors.BadArgument:
- log.debug(
- f"{ctx.author} tried to use an invalid command "
- f"and the fallback tag failed validation in TagNameConverter."
- )
- else:
- if await ctx.invoke(tags_get_command, tag_name=tag_name):
- return
+ if await ctx.invoke(tags_get_command, argument_string=ctx.message.content):
+ return
if not any(role.id in MODERATION_ROLES for role in ctx.author.roles):
await self.send_command_suggestion(ctx, ctx.invoked_with)
@@ -245,7 +236,6 @@ class ErrorHandler(Cog):
elif isinstance(e, errors.ArgumentParsingError):
embed = self._get_error_embed("Argument parsing error", str(e))
await ctx.send(embed=embed)
- self.get_help_command(ctx).close()
self.bot.stats.incr("errors.argument_parsing_error")
return
else:
@@ -256,7 +246,7 @@ class ErrorHandler(Cog):
self.bot.stats.incr("errors.other_user_input_error")
await ctx.send(embed=embed)
- await self.get_help_command(ctx)
+ await self.send_command_help(ctx)
@staticmethod
async def handle_check_failure(ctx: Context, e: errors.CheckFailure) -> None:
diff --git a/bot/exts/filters/antimalware.py b/bot/exts/filters/antimalware.py
index d727f7940..6cccf3680 100644
--- a/bot/exts/filters/antimalware.py
+++ b/bot/exts/filters/antimalware.py
@@ -18,14 +18,8 @@ PY_EMBED_DESCRIPTION = (
TXT_LIKE_FILES = {".txt", ".csv", ".json"}
TXT_EMBED_DESCRIPTION = (
- "**Uh-oh!** It looks like your message got zapped by our spam filter. "
- "We currently don't allow `{blocked_extension}` attachments, "
- "so here are some tips to help you travel safely: \n\n"
- "• If you attempted to send a message longer than 2000 characters, try shortening your message "
- "to fit within the character limit or use a pasting service (see below) \n\n"
- "• If you tried to show someone your code, you can use codeblocks \n(run `!code-blocks` in "
- "{cmd_channel_mention} for more information) or use a pasting service like: "
- f"\n\n{URLs.site_schema}{URLs.site_paste}"
+ "You either uploaded a `{blocked_extension}` file or entered a message that was too long. "
+ f"Please use our [paste bin]({URLs.site_schema}{URLs.site_paste}) instead."
)
DISALLOWED_EMBED_DESCRIPTION = (
diff --git a/bot/exts/filters/antispam.py b/bot/exts/filters/antispam.py
index ddfd11231..bcd845a43 100644
--- a/bot/exts/filters/antispam.py
+++ b/bot/exts/filters/antispam.py
@@ -103,6 +103,7 @@ class DeletionContext:
mod_alert_message += content
await modlog.send_log_message(
+ content=", ".join(str(m.id) for m in self.members), # quality-of-life improvement for mobile moderators
icon_url=Icons.filtering,
colour=Colour(Colours.soft_red),
title="Spam detected!",
diff --git a/bot/exts/filters/filter_lists.py b/bot/exts/filters/filter_lists.py
index ee5bd89f3..a883ddf54 100644
--- a/bot/exts/filters/filter_lists.py
+++ b/bot/exts/filters/filter_lists.py
@@ -1,3 +1,4 @@
+import re
from typing import Optional
from discord import Colour, Embed
@@ -72,6 +73,18 @@ class FilterLists(Cog):
elif list_type == "FILE_FORMAT" and not content.startswith("."):
content = f".{content}"
+ # If it's a filter token, validate the passed regex
+ elif list_type == "FILTER_TOKEN":
+ try:
+ re.compile(content)
+ except re.error as e:
+ await ctx.message.add_reaction("❌")
+ await ctx.send(
+ f"{ctx.author.mention} that's not a valid regex! "
+ f"Regex error message: {e.msg}."
+ )
+ return
+
# Try to add the item to the database
log.trace(f"Trying to add the {content} item to the {list_type} {allow_type}")
payload = {
diff --git a/bot/exts/filters/filtering.py b/bot/exts/filters/filtering.py
index 79b7abe9f..b9f2a0e51 100644
--- a/bot/exts/filters/filtering.py
+++ b/bot/exts/filters/filtering.py
@@ -1,15 +1,17 @@
import asyncio
import re
+import unicodedata
from datetime import timedelta
from typing import Any, Dict, List, Mapping, NamedTuple, Optional, Tuple, Union
import arrow
import dateutil.parser
-import discord.errors
import regex
+import tldextract
from async_rediscache import RedisCache
+from botcore.regex import DISCORD_INVITE
from dateutil.relativedelta import relativedelta
-from discord import Colour, HTTPException, Member, Message, NotFound, TextChannel
+from discord import ChannelType, Colour, Embed, Forbidden, HTTPException, Member, Message, NotFound, TextChannel
from discord.ext.commands import Cog
from discord.utils import escape_markdown
@@ -21,7 +23,6 @@ from bot.exts.moderation.modlog import ModLog
from bot.log import get_logger
from bot.utils import scheduling
from bot.utils.messages import format_user
-from bot.utils.regex import INVITE_RE
log = get_logger(__name__)
@@ -61,14 +62,14 @@ AUTO_BAN_REASON = (
)
AUTO_BAN_DURATION = timedelta(days=4)
-FilterMatch = Union[re.Match, dict, bool, List[discord.Embed]]
+FilterMatch = Union[re.Match, dict, bool, List[Embed]]
class Stats(NamedTuple):
"""Additional stats on a triggered filter to append to a mod log."""
message_content: str
- additional_embeds: Optional[List[discord.Embed]]
+ additional_embeds: Optional[List[Embed]]
class Filtering(Cog):
@@ -205,15 +206,26 @@ class Filtering(Cog):
delta = relativedelta(after.edited_at, before.edited_at).microseconds
await self._filter_message(after, delta)
- def get_name_matches(self, name: str) -> List[re.Match]:
- """Check bad words from passed string (name). Return list of matches."""
- name = self.clean_input(name)
- matches = []
+ @Cog.listener()
+ async def on_voice_state_update(self, member: Member, *_) -> None:
+ """Checks for bad words in usernames when users join, switch or leave a voice channel."""
+ await self.check_bad_words_in_name(member)
+
+ def get_name_match(self, name: str) -> Optional[re.Match]:
+ """Check bad words from passed string (name). Return the first match found."""
+ normalised_name = unicodedata.normalize("NFKC", name)
+ cleaned_normalised_name = "".join([c for c in normalised_name if not unicodedata.combining(c)])
+
+ # Run filters against normalised, cleaned normalised and the original name,
+ # in case we have filters for one but not the other.
+ names_to_check = (name, normalised_name, cleaned_normalised_name)
+
watchlist_patterns = self._get_filterlist_items('filter_token', allowed=False)
for pattern in watchlist_patterns:
- if match := re.search(pattern, name, flags=re.IGNORECASE):
- matches.append(match)
- return matches
+ for name in names_to_check:
+ if match := re.search(pattern, name, flags=re.IGNORECASE):
+ return match
+ return None
async def check_send_alert(self, member: Member) -> bool:
"""When there is less than 3 days after last alert, return `False`, otherwise `True`."""
@@ -229,10 +241,14 @@ class Filtering(Cog):
"""Send a mod alert every 3 days if a username still matches a watchlist pattern."""
# Use lock to avoid race conditions
async with self.name_lock:
- # Check whether the users display name contains any words in our blacklist
- matches = self.get_name_matches(member.display_name)
+ # Check if we recently alerted about this user first,
+ # to avoid running all the filter tokens against their name again.
+ if not await self.check_send_alert(member):
+ return
- if not matches or not await self.check_send_alert(member):
+ # Check whether the users display name contains any words in our blacklist
+ match = self.get_name_match(member.display_name)
+ if not match:
return
log.info(f"Sending bad nickname alert for '{member.display_name}' ({member.id}).")
@@ -240,24 +256,26 @@ class Filtering(Cog):
log_string = (
f"**User:** {format_user(member)}\n"
f"**Display Name:** {escape_markdown(member.display_name)}\n"
- f"**Bad Matches:** {', '.join(match.group() for match in matches)}"
+ f"**Bad Match:** {match.group()}"
)
await self.mod_log.send_log_message(
+ content=str(member.id), # quality-of-life improvement for mobile moderators
icon_url=Icons.token_removed,
colour=Colours.soft_red,
title="Username filtering alert",
text=log_string,
channel_id=Channels.mod_alerts,
- thumbnail=member.display_avatar.url
+ thumbnail=member.display_avatar.url,
+ ping_everyone=True
)
# Update time when alert sent
await self.name_alerts.set(member.id, arrow.utcnow().timestamp())
- async def filter_eval(self, result: str, msg: Message) -> bool:
+ async def filter_snekbox_output(self, result: str, msg: Message) -> bool:
"""
- Filter the result of an !eval to see if it violates any of our rules, and then respond accordingly.
+ Filter the result of a snekbox command to see if it violates any of our rules, and then respond accordingly.
Also requires the original message, to check whether to filter and for mod logs.
Returns whether a filter was triggered or not.
@@ -326,7 +344,7 @@ class Filtering(Cog):
match = result
if match:
- is_private = msg.channel.type is discord.ChannelType.private
+ is_private = msg.channel.type is ChannelType.private
# If this is a filter (not a watchlist) and not in a DM, delete the message.
if _filter["type"] == "filter" and not is_private:
@@ -341,7 +359,7 @@ class Filtering(Cog):
# In addition, to avoid sending two notifications to the user, the
# logs, and mod_alert, we return if the message no longer exists.
await msg.delete()
- except discord.errors.NotFound:
+ except NotFound:
return
# Notify the user if the filter specifies
@@ -396,14 +414,14 @@ class Filtering(Cog):
self,
filter_name: str,
_filter: Dict[str, Any],
- msg: discord.Message,
+ msg: Message,
stats: Stats,
reason: Optional[str] = None,
*,
is_eval: bool = False,
) -> None:
"""Send a mod log for a triggered filter."""
- if msg.channel.type is discord.ChannelType.private:
+ if msg.channel.type is ChannelType.private:
channel_str = "via DM"
ping_everyone = False
else:
@@ -411,9 +429,12 @@ class Filtering(Cog):
# Allow specific filters to override ping_everyone
ping_everyone = Filter.ping_everyone and _filter.get("ping_everyone", True)
- # If we are going to autoban, we don't want to ping
+ content = str(msg.author.id) # quality-of-life improvement for mobile moderators
+
+ # If we are going to autoban, we don't want to ping and don't need the user ID
if reason and "[autoban]" in reason:
ping_everyone = False
+ content = None
eval_msg = "using !eval " if is_eval else ""
footer = f"Reason: {reason}" if reason else None
@@ -427,6 +448,7 @@ class Filtering(Cog):
# Send pretty mod log embed to mod-alerts
await self.mod_log.send_log_message(
+ content=content,
icon_url=Icons.filtering,
colour=Colour(Colours.soft_red),
title=f"{_filter['type'].title()} triggered!",
@@ -461,7 +483,7 @@ class Filtering(Cog):
additional_embeds = []
for _, data in match.items():
reason = f"Reason: {data['reason']} | " if data.get('reason') else ""
- embed = discord.Embed(description=(
+ embed = Embed(description=(
f"**Members:**\n{data['members']}\n"
f"**Active:**\n{data['active']}"
))
@@ -524,7 +546,10 @@ class Filtering(Cog):
for match in URL_RE.finditer(text):
for url in domain_blacklist:
if url.lower() in match.group(1).lower():
- return True, self._get_filterlist_value("domain_name", url, allowed=False)["comment"]
+ blacklisted_parsed = tldextract.extract(url.lower())
+ url_parsed = tldextract.extract(match.group(1).lower())
+ if blacklisted_parsed.registered_domain == url_parsed.registered_domain:
+ return True, self._get_filterlist_value("domain_name", url, allowed=False)["comment"]
return False, None
@staticmethod
@@ -551,7 +576,7 @@ class Filtering(Cog):
# discord\.gg/gdudes-pony-farm
text = text.replace("\\", "")
- invites = [m.group("invite") for m in INVITE_RE.finditer(text)]
+ invites = [m.group("invite") for m in DISCORD_INVITE.finditer(text)]
invite_data = dict()
for invite in invites:
if invite in invite_data:
@@ -606,7 +631,7 @@ class Filtering(Cog):
return invite_data if invite_data else False
@staticmethod
- async def _has_rich_embed(msg: Message) -> Union[bool, List[discord.Embed]]:
+ async def _has_rich_embed(msg: Message) -> Union[bool, List[Embed]]:
"""Determines if `msg` contains any rich embeds not auto-generated from a URL."""
if msg.embeds:
for embed in msg.embeds:
@@ -642,7 +667,7 @@ class Filtering(Cog):
"""
try:
await filtered_member.send(reason)
- except discord.errors.Forbidden:
+ except Forbidden:
await channel.send(f"{filtered_member.mention} {reason}")
def schedule_msg_delete(self, msg: dict) -> None:
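
The domain-blacklist change above tightens a plain substring check into a registered-domain comparison via `tldextract`, which stops unrelated domains that merely contain a blacklisted string from matching. A sketch, not from the repository:

import tldextract

blocked = tldextract.extract("example.com")
tldextract.extract("docs.example.com").registered_domain == blocked.registered_domain  # True: subdomain of the blocked domain
tldextract.extract("notexample.com").registered_domain == blocked.registered_domain    # False, even though "example.com" is a substring
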
diff --git a/bot/exts/fun/duck_pond.py b/bot/exts/fun/duck_pond.py
index c51656343..8a41a3116 100644
--- a/bot/exts/fun/duck_pond.py
+++ b/bot/exts/fun/duck_pond.py
@@ -2,7 +2,7 @@ import asyncio
from typing import Union
import discord
-from discord import Color, Embed, Message, RawReactionActionEvent, TextChannel, errors
+from discord import Color, Embed, Message, RawReactionActionEvent, errors
from discord.ext.commands import Cog, Context, command
from bot import constants
@@ -46,17 +46,6 @@ class DuckPond(Cog):
return True
return False
- @staticmethod
- def is_helper_viewable(channel: TextChannel) -> bool:
- """Check if helpers can view a specific channel."""
- guild = channel.guild
- helper_role = guild.get_role(constants.Roles.helpers)
- # check channel overwrites for both the Helper role and @everyone and
- # return True for channels that they have permissions to view.
- helper_overwrites = channel.overwrites_for(helper_role)
- default_overwrites = channel.overwrites_for(guild.default_role)
- return default_overwrites.view_channel is None or helper_overwrites.view_channel is True
-
async def has_green_checkmark(self, message: Message) -> bool:
"""Check if the message has a green checkmark reaction."""
for reaction in message.reactions:
@@ -165,12 +154,15 @@ class DuckPond(Cog):
if not self._payload_has_duckpond_emoji(payload.emoji):
return
- channel = discord.utils.get(self.bot.get_all_channels(), id=payload.channel_id)
+ await self.bot.wait_until_guild_available()
+ guild = self.bot.get_guild(payload.guild_id)
+ channel = guild.get_channel_or_thread(payload.channel_id)
if channel is None:
return
# Was the message sent in a channel Helpers can see?
- if not self.is_helper_viewable(channel):
+ helper_role = guild.get_role(constants.Roles.helpers)
+ if not channel.permissions_for(helper_role).view_channel:
return
try:
diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py
index e43c1e789..d9cebf215 100644
--- a/bot/exts/help_channels/_channel.py
+++ b/bot/exts/help_channels/_channel.py
@@ -1,3 +1,4 @@
+import re
import typing as t
from datetime import timedelta
from enum import Enum
@@ -16,13 +17,14 @@ log = get_logger(__name__)
MAX_CHANNELS_PER_CATEGORY = 50
EXCLUDED_CHANNELS = (constants.Channels.cooldown,)
+CLAIMED_BY_RE = re.compile(r"Channel claimed by <@!?(?P<user_id>\d{17,20})>\.$")
class ClosingReason(Enum):
"""All possible closing reasons for help channels."""
COMMAND = "command"
- LATEST_MESSSAGE = "auto.latest_message"
+ LATEST_MESSAGE = "auto.latest_message"
CLAIMANT_TIMEOUT = "auto.claimant_timeout"
OTHER_TIMEOUT = "auto.other_timeout"
DELETED = "auto.deleted"
@@ -75,7 +77,7 @@ async def get_closing_time(channel: discord.TextChannel, init_done: bool) -> t.T
# Use the greatest offset to avoid the possibility of prematurely closing the channel.
time = Arrow.fromdatetime(msg.created_at) + timedelta(minutes=idle_minutes_claimant)
- reason = ClosingReason.DELETED if is_empty else ClosingReason.LATEST_MESSSAGE
+ reason = ClosingReason.DELETED if is_empty else ClosingReason.LATEST_MESSAGE
return time, reason
claimant_time = Arrow.utcfromtimestamp(claimant_time)
@@ -157,3 +159,36 @@ async def move_to_bottom(channel: discord.TextChannel, category_id: int, **optio
# Now that the channel is moved, we can edit the other attributes
if options:
await channel.edit(**options)
+
+
+async def ensure_cached_claimant(channel: discord.TextChannel) -> None:
+ """
+ Ensure there is a claimant cached for each help channel.
+
+ Check the redis cache first, return early if there is already a claimant cached.
+ If there isn't an entry in redis, search for the "Claimed by X." embed in channel history.
+ Stopping early if we discover a dormant message first.
+
+ If a claimant could not be found, send a warning to #helpers and set the claimant to the bot.
+ """
+ if await _caches.claimants.get(channel.id):
+ return
+
+ async for message in channel.history(limit=1000):
+ if message.author.id != bot.instance.user.id:
+ # We only care about bot messages
+ continue
+ if message.embeds:
+ if _message._match_bot_embed(message, _message.DORMANT_MSG):
+ log.info("Hit the dormant message embed before finding a claimant in %s (%d).", channel, channel.id)
+ break
+ # Only set the claimant if the first embed matches the claimed channel embed regex
+ if match := CLAIMED_BY_RE.match(message.embeds[0].description):
+ await _caches.claimants.set(channel.id, int(match.group("user_id")))
+ return
+
+ await bot.instance.get_channel(constants.Channels.helpers).send(
+ f"I couldn't find a claimant for {channel.mention} in that last 1000 messages. "
+ "Please use your helper powers to close the channel if/when appropriate."
+ )
+ await _caches.claimants.set(channel.id, bot.instance.user.id)
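
`ensure_cached_claimant` recovers the claimant from the "Channel claimed by …" embed that the cog now posts on claim (see the `_cog.py` changes below). A quick, illustrative check of the regex against such a description:

import re

CLAIMED_BY_RE = re.compile(r"Channel claimed by <@!?(?P<user_id>\d{17,20})>\.$")
match = CLAIMED_BY_RE.match("Channel claimed by <@!123456789012345678>.")
match.group("user_id")  # '123456789012345678', the claimant ID cached back into redis
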
diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py
index 0905cb23d..a93acffb6 100644
--- a/bot/exts/help_channels/_cog.py
+++ b/bot/exts/help_channels/_cog.py
@@ -66,6 +66,9 @@ class HelpChannels(commands.Cog):
self.bot = bot
self.scheduler = scheduling.Scheduler(self.__class__.__name__)
+ self.guild: discord.Guild = None
+ self.cooldown_role: discord.Role = None
+
# Categories
self.available_category: discord.CategoryChannel = None
self.in_use_category: discord.CategoryChannel = None
@@ -75,7 +78,10 @@ class HelpChannels(commands.Cog):
self.channel_queue: asyncio.Queue[discord.TextChannel] = None
self.name_queue: t.Deque[str] = None
- self.last_notification: t.Optional[arrow.Arrow] = None
+ # Notifications
+ # Using a very old date so that we don't have to use Optional typing.
+ self.last_none_remaining_notification = arrow.get('1815-12-10T18:00:00.00000+00:00')
+ self.last_running_low_notification = arrow.get('1815-12-10T18:00:00.00000+00:00')
self.dynamic_message: t.Optional[int] = None
self.available_help_channels: t.Set[discord.TextChannel] = set()
@@ -95,24 +101,6 @@ class HelpChannels(commands.Cog):
self.scheduler.cancel_all()
- async def _handle_role_change(self, member: discord.Member, coro: t.Callable[..., t.Coroutine]) -> None:
- """
- Change `member`'s cooldown role via awaiting `coro` and handle errors.
-
- `coro` is intended to be `discord.Member.add_roles` or `discord.Member.remove_roles`.
- """
- try:
- await coro(self.bot.get_guild(constants.Guild.id).get_role(constants.Roles.help_cooldown))
- except discord.NotFound:
- log.debug(f"Failed to change role for {member} ({member.id}): member not found")
- except discord.Forbidden:
- log.debug(
- f"Forbidden to change role for {member} ({member.id}); "
- f"possibly due to role hierarchy"
- )
- except discord.HTTPException as e:
- log.error(f"Failed to change role for {member} ({member.id}): {e.status} {e.code}")
-
@lock.lock_arg(NAMESPACE, "message", attrgetter("channel.id"))
@lock.lock_arg(NAMESPACE, "message", attrgetter("author.id"))
@lock.lock_arg(f"{NAMESPACE}.unclaim", "message", attrgetter("author.id"), wait=True)
@@ -120,19 +108,42 @@ class HelpChannels(commands.Cog):
"""
Claim the channel in which the question `message` was sent.
- Move the channel to the In Use category and pin the `message`. Add a cooldown to the
- claimant to prevent them from asking another question. Lastly, make a new channel available.
+ Send an embed stating the claimant, move the channel to the In Use category, and pin the `message`.
+ Add a cooldown to the claimant to prevent them from asking another question.
+ Lastly, make a new channel available.
"""
log.info(f"Channel #{message.channel} was claimed by `{message.author.id}`.")
- await self.move_to_in_use(message.channel)
+
+ try:
+ await self.move_to_in_use(message.channel)
+ except discord.DiscordServerError:
+ try:
+ await message.channel.send(
+ "The bot encountered a Discord API error while trying to move this channel, please try again later."
+ )
+ except Exception as e:
+ log.warning("Error occurred while sending fail claim message:", exc_info=e)
+ log.info(
+ "500 error from Discord when moving #%s (%d) to in-use for %s (%d). Cancelling claim.",
+ message.channel.name,
+ message.channel.id,
+ message.author.name,
+ message.author.id,
+ )
+ self.bot.stats.incr("help.failed_claims.500_on_move")
+ return
+
+ embed = discord.Embed(
+ description=f"Channel claimed by {message.author.mention}.",
+ color=constants.Colours.bright_green,
+ )
+ await message.channel.send(embed=embed)
# Handle odd edge case of `message.author` not being a `discord.Member` (see bot#1839)
if not isinstance(message.author, discord.Member):
- log.warning(
- f"{message.author} ({message.author.id}) isn't a member. Not giving cooldown role or sending DM."
- )
+ log.debug(f"{message.author} ({message.author.id}) isn't a member. Not giving cooldown role or sending DM.")
else:
- await self._handle_role_change(message.author, message.author.add_roles)
+ await members.handle_role_change(message.author, message.author.add_roles, self.cooldown_role)
try:
await _message.dm_on_open(message)
@@ -244,13 +255,21 @@ class HelpChannels(commands.Cog):
if not channel:
log.info("Couldn't create a candidate channel; waiting to get one from the queue.")
- notify_channel = self.bot.get_channel(constants.HelpChannels.notify_channel)
- last_notification = await _message.notify(notify_channel, self.last_notification)
+ last_notification = await _message.notify_none_remaining(self.last_none_remaining_notification)
+
if last_notification:
- self.last_notification = last_notification
- self.bot.stats.incr("help.out_of_channel_alerts")
+ self.last_none_remaining_notification = last_notification
- channel = await self.wait_for_dormant_channel()
+ channel = await self.wait_for_dormant_channel() # Blocks until a new channel is available
+
+ else:
+ last_notification = await _message.notify_running_low(
+ self.channel_queue.qsize(),
+ self.last_running_low_notification
+ )
+
+ if last_notification:
+ self.last_running_low_notification = last_notification
return channel
@@ -304,6 +323,9 @@ class HelpChannels(commands.Cog):
await self.bot.wait_until_guild_available()
log.trace("Initialising the cog.")
+ self.guild = self.bot.get_guild(constants.Guild.id)
+ self.cooldown_role = self.guild.get_role(constants.Roles.help_cooldown)
+
await self.init_categories()
self.channel_queue = self.create_channel_queue()
@@ -315,6 +337,7 @@ class HelpChannels(commands.Cog):
log.trace("Moving or rescheduling in-use channels.")
for channel in _channel.get_category_channels(self.in_use_category):
+ await _channel.ensure_cached_claimant(channel)
await self.move_idle_channel(channel, has_task=False)
# Prevent the command from being used until ready.
@@ -440,18 +463,21 @@ class HelpChannels(commands.Cog):
async def _unclaim_channel(
self,
channel: discord.TextChannel,
- claimant_id: int,
+ claimant_id: t.Optional[int],
closed_on: _channel.ClosingReason
) -> None:
"""Actual implementation of `unclaim_channel`. See that for full documentation."""
await _caches.claimants.delete(channel.id)
await _caches.session_participants.delete(channel.id)
- claimant = await members.get_or_fetch_member(self.bot.get_guild(constants.Guild.id), claimant_id)
- if claimant is None:
- log.info(f"{claimant_id} left the guild during their help session; the cooldown role won't be removed")
+ if not claimant_id:
+ log.info("No claimant given when un-claiming %s (%d). Skipping role removal.", channel, channel.id)
else:
- await self._handle_role_change(claimant, claimant.remove_roles)
+ claimant = await members.get_or_fetch_member(self.guild, claimant_id)
+ if claimant is None:
+ log.info(f"{claimant_id} left the guild during their help session; the cooldown role won't be removed")
+ else:
+ await members.handle_role_change(claimant, claimant.remove_roles, self.cooldown_role)
await _message.unpin(channel)
await _stats.report_complete_session(channel.id, closed_on)
@@ -592,7 +618,7 @@ class HelpChannels(commands.Cog):
embed = discord.Embed(
title="Currently Helping",
description=f"You're currently helping in {message.channel.mention}",
- color=constants.Colours.soft_green,
+ color=constants.Colours.bright_green,
timestamp=message.created_at
)
embed.add_field(name="Conversation", value=f"[Jump to message]({message.jump_url})")
diff --git a/bot/exts/help_channels/_message.py b/bot/exts/help_channels/_message.py
index 241dd606c..7ceed9b4d 100644
--- a/bot/exts/help_channels/_message.py
+++ b/bot/exts/help_channels/_message.py
@@ -124,52 +124,93 @@ async def dm_on_open(message: discord.Message) -> None:
)
-async def notify(channel: discord.TextChannel, last_notification: t.Optional[Arrow]) -> t.Optional[Arrow]:
+async def notify_none_remaining(last_notification: Arrow) -> t.Optional[Arrow]:
"""
- Send a message in `channel` notifying about a lack of available help channels.
+ Send a pinging message in `channel` notifying about there being no dormant channels remaining.
If a notification was sent, return the time at which the message was sent.
Otherwise, return None.
Configuration:
-
- * `HelpChannels.notify` - toggle notifications
- * `HelpChannels.notify_minutes` - minimum interval between notifications
- * `HelpChannels.notify_roles` - roles mentioned in notifications
+ * `HelpChannels.notify_minutes` - minimum interval between notifications
+ * `HelpChannels.notify_none_remaining` - toggle none_remaining notifications
+ * `HelpChannels.notify_none_remaining_roles` - roles mentioned in notifications
"""
- if not constants.HelpChannels.notify:
- return
+ if not constants.HelpChannels.notify_none_remaining:
+ return None
+
+ if (arrow.utcnow() - last_notification).total_seconds() < (constants.HelpChannels.notify_minutes * 60):
+ log.trace("Did not send none_remaining notification as it hasn't been enough time since the last one.")
+ return None
log.trace("Notifying about lack of channels.")
- if last_notification:
- elapsed = (arrow.utcnow() - last_notification).seconds
- minimum_interval = constants.HelpChannels.notify_minutes * 60
- should_send = elapsed >= minimum_interval
- else:
- should_send = True
+ mentions = " ".join(f"<@&{role}>" for role in constants.HelpChannels.notify_none_remaining_roles)
+ allowed_roles = [discord.Object(id_) for id_ in constants.HelpChannels.notify_none_remaining_roles]
- if not should_send:
- log.trace("Notification not sent because it's too recent since the previous one.")
- return
+ channel = bot.instance.get_channel(constants.HelpChannels.notify_channel)
+ if channel is None:
+ log.trace("Did not send none_remaining notification as the notification channel couldn't be gathered.")
try:
- log.trace("Sending notification message.")
-
- mentions = " ".join(f"<@&{role}>" for role in constants.HelpChannels.notify_roles)
- allowed_roles = [discord.Object(id_) for id_ in constants.HelpChannels.notify_roles]
-
- message = await channel.send(
+ await channel.send(
f"{mentions} A new available help channel is needed but there "
- f"are no more dormant ones. Consider freeing up some in-use channels manually by "
+ "are no more dormant ones. Consider freeing up some in-use channels manually by "
f"using the `{constants.Bot.prefix}dormant` command within the channels.",
allowed_mentions=discord.AllowedMentions(everyone=False, roles=allowed_roles)
)
-
- return Arrow.fromdatetime(message.created_at)
except Exception:
# Handle it here cause this feature isn't critical for the functionality of the system.
log.exception("Failed to send notification about lack of dormant channels!")
+ else:
+ bot.instance.stats.incr("help.out_of_channel_alerts")
+ return arrow.utcnow()
+
+
+async def notify_running_low(number_of_channels_left: int, last_notification: Arrow) -> t.Optional[Arrow]:
+ """
+ Send a non-pinging message in `channel` notifying about there being a low amount of dormant channels.
+
+ This will include the number of dormant channels left `number_of_channels_left`
+
+ If a notification was sent, return the time at which the message was sent.
+ Otherwise, return None.
+
+ Configuration:
+ * `HelpChannels.notify_minutes` - minimum interval between notifications
+ * `HelpChannels.notify_running_low` - toggle running_low notifications
+ * `HelpChannels.notify_running_low_threshold` - minimum amount of channels to trigger running_low notifications
+ """
+ if not constants.HelpChannels.notify_running_low:
+ return None
+
+ if number_of_channels_left > constants.HelpChannels.notify_running_low_threshold:
+ log.trace("Did not send notify_running_low notification as the threshold was not met.")
+ return None
+
+ if (arrow.utcnow() - last_notification).total_seconds() < (constants.HelpChannels.notify_minutes * 60):
+ log.trace("Did not send notify_running_low notification as it hasn't been enough time since the last one.")
+ return None
+
+ log.trace("Notifying about getting close to no dormant channels.")
+
+ channel = bot.instance.get_channel(constants.HelpChannels.notify_channel)
+ if channel is None:
+ log.trace("Did not send notify_running notification as the notification channel couldn't be gathered.")
+
+ try:
+ if number_of_channels_left == 1:
+ message = f"There is only {number_of_channels_left} dormant channel left. "
+ else:
+ message = f"There are only {number_of_channels_left} dormant channels left. "
+ message += "Consider participating in some help channels so that we don't run out."
+ await channel.send(message)
+ except Exception:
+ # Handle it here cause this feature isn't critical for the functionality of the system.
+ log.exception("Failed to send notification about running low of dormant channels!")
+ else:
+ bot.instance.stats.incr("help.running_low_alerts")
+ return arrow.utcnow()
async def pin(message: discord.Message) -> None:
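
Both notifier functions above rate-limit on `(arrow.utcnow() - last_notification).total_seconds()`; unlike the previous `.seconds`, which wraps at one day, `total_seconds()` keeps growing, so long gaps are handled correctly. A sketch of the check, assuming a 60-minute `notify_minutes`:

import arrow

last_notification = arrow.get('1815-12-10T18:00:00.00000+00:00')  # the cog's "very old" sentinel default
notify_minutes = 60  # assumed config value

elapsed = (arrow.utcnow() - last_notification).total_seconds()
elapsed < notify_minutes * 60  # False here, so a notification would be sent
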
diff --git a/bot/exts/info/code_snippets.py b/bot/exts/info/code_snippets.py
index 07b1b8a2d..f2f29020f 100644
--- a/bot/exts/info/code_snippets.py
+++ b/bot/exts/info/code_snippets.py
@@ -246,6 +246,9 @@ class CodeSnippets(Cog):
if message.author.bot:
return
+ if message.guild is None:
+ return
+
message_to_send = await self._parse_snippets(message.content)
destination = message.channel
diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py
index ebf5f5932..4dc5276d9 100644
--- a/bot/exts/info/doc/_cog.py
+++ b/bot/exts/info/doc/_cog.py
@@ -464,7 +464,7 @@ class DocCog(commands.Cog):
) -> None:
"""Clear the persistent redis cache for `package`."""
if await doc_cache.delete(package_name):
- await self.item_fetcher.stale_inventory_notifier.symbol_counter.delete()
+ await self.item_fetcher.stale_inventory_notifier.symbol_counter.delete(package_name)
await ctx.send(f"Successfully cleared the cache for `{package_name}`.")
else:
await ctx.send("No keys matching the package found.")
diff --git a/bot/exts/info/help.py b/bot/exts/info/help.py
index 743dfdd3f..864e7edd2 100644
--- a/bot/exts/info/help.py
+++ b/bot/exts/info/help.py
@@ -1,10 +1,12 @@
+from __future__ import annotations
+
import itertools
import re
from collections import namedtuple
from contextlib import suppress
-from typing import List, Union
+from typing import List, Optional, Union
-from discord import Colour, Embed
+from discord import ButtonStyle, Colour, Embed, Emoji, Interaction, PartialEmoji, ui
from discord.ext.commands import Bot, Cog, Command, CommandError, Context, DisabledCommand, Group, HelpCommand
from rapidfuzz import fuzz, process
from rapidfuzz.utils import default_process
@@ -26,6 +28,135 @@ NOT_ALLOWED_TO_RUN_MESSAGE = "***You cannot run this command.***\n\n"
Category = namedtuple("Category", ["name", "description", "cogs"])
+class SubcommandButton(ui.Button):
+ """
+ A button shown in a group's help embed.
+
+ The button represents a subcommand, and pressing it will edit the help embed to that of the subcommand.
+ """
+
+ def __init__(
+ self,
+ help_command: CustomHelpCommand,
+ command: Command,
+ *,
+ style: ButtonStyle = ButtonStyle.primary,
+ label: Optional[str] = None,
+ disabled: bool = False,
+ custom_id: Optional[str] = None,
+ url: Optional[str] = None,
+ emoji: Optional[Union[str, Emoji, PartialEmoji]] = None,
+ row: Optional[int] = None
+ ):
+ super().__init__(
+ style=style, label=label, disabled=disabled, custom_id=custom_id, url=url, emoji=emoji, row=row
+ )
+
+ self.help_command = help_command
+ self.command = command
+
+ async def callback(self, interaction: Interaction) -> None:
+ """Edits the help embed to that of the subcommand."""
+ message = interaction.message
+ if not message:
+ return
+
+ subcommand = self.command
+ if isinstance(subcommand, Group):
+ embed, subcommand_view = await self.help_command.format_group_help(subcommand)
+ else:
+ embed, subcommand_view = await self.help_command.command_formatting(subcommand)
+ await message.edit(embed=embed, view=subcommand_view)
+
+
+class GroupButton(ui.Button):
+ """
+ A button shown in a subcommand's help embed.
+
+ The button represents the parent command, and pressing it will edit the help embed to that of the parent.
+ """
+
+ def __init__(
+ self,
+ help_command: CustomHelpCommand,
+ command: Command,
+ *,
+ style: ButtonStyle = ButtonStyle.secondary,
+ label: Optional[str] = None,
+ disabled: bool = False,
+ custom_id: Optional[str] = None,
+ url: Optional[str] = None,
+ emoji: Optional[Union[str, Emoji, PartialEmoji]] = None,
+ row: Optional[int] = None
+ ):
+ super().__init__(
+ style=style, label=label, disabled=disabled, custom_id=custom_id, url=url, emoji=emoji, row=row
+ )
+
+ self.help_command = help_command
+ self.command = command
+
+ async def callback(self, interaction: Interaction) -> None:
+ """Edits the help embed to that of the parent."""
+ message = interaction.message
+ if not message:
+ return
+
+ embed, group_view = await self.help_command.format_group_help(self.command.parent)
+ await message.edit(embed=embed, view=group_view)
+
+
+class CommandView(ui.View):
+ """
+ The view added to any command's help embed.
+
+ If the command has a parent, a button is added to the view to show that parent's help embed.
+ """
+
+ def __init__(self, help_command: CustomHelpCommand, command: Command, context: Context):
+ self.context = context
+ super().__init__()
+
+ if command.parent:
+ self.children.append(GroupButton(help_command, command, emoji="↩️"))
+
+ async def interaction_check(self, interaction: Interaction) -> bool:
+ """
+ Ensures that the button only works for the user who spawned the help command.
+
+ Also allows moderators to access buttons even when not the author of message.
+ """
+ if interaction.user is not None:
+ if any(role.id in constants.MODERATION_ROLES for role in interaction.user.roles):
+ return True
+
+ elif interaction.user.id == self.context.author.id:
+ return True
+
+ return False
+
+
+class GroupView(CommandView):
+ """
+ The view added to a group's help embed.
+
+ The view generates a SubcommandButton for every subcommand the group has.
+ """
+
+ MAX_BUTTONS_IN_ROW = 5
+ MAX_ROWS = 5
+
+ def __init__(self, help_command: CustomHelpCommand, group: Group, subcommands: list[Command], context: Context):
+ super().__init__(help_command, group, context)
+ # Don't add buttons if only a portion of the subcommands can be shown.
+ if len(subcommands) + len(self.children) > self.MAX_ROWS * self.MAX_BUTTONS_IN_ROW:
+ log.trace(f"Attempted to add navigation buttons for `{group.qualified_name}`, but there was no space.")
+ return
+
+ for subcommand in subcommands:
+ self.add_item(SubcommandButton(help_command, subcommand, label=subcommand.name))
+
+
class HelpQueryNotFound(ValueError):
"""
Raised when a HelpSession Query doesn't match a command or cog.
@@ -148,7 +279,7 @@ class CustomHelpCommand(HelpCommand):
await self.context.send(embed=embed)
- async def command_formatting(self, command: Command) -> Embed:
+ async def command_formatting(self, command: Command) -> tuple[Embed, Optional[CommandView]]:
"""
Takes a command and turns it into an embed.
@@ -186,12 +317,14 @@ class CustomHelpCommand(HelpCommand):
command_details += f"*{formatted_doc or 'No details provided.'}*\n"
embed.description = command_details
- return embed
+ # If the help is invoked in the context of an error, don't show subcommand navigation.
+ view = CommandView(self, command, self.context) if not self.context.command_failed else None
+ return embed, view
async def send_command_help(self, command: Command) -> None:
"""Send help for a single command."""
- embed = await self.command_formatting(command)
- message = await self.context.send(embed=embed)
+ embed, view = await self.command_formatting(command)
+ message = await self.context.send(embed=embed, view=view)
await wait_for_deletion(message, (self.context.author.id,))
@staticmethod
@@ -212,25 +345,31 @@ class CustomHelpCommand(HelpCommand):
else:
return "".join(details)
- async def send_group_help(self, group: Group) -> None:
- """Sends help for a group command."""
+ async def format_group_help(self, group: Group) -> tuple[Embed, Optional[CommandView]]:
+ """Formats help for a group command."""
subcommands = group.commands
if len(subcommands) == 0:
# no subcommands, just treat it like a regular command
- await self.send_command_help(group)
- return
+ return await self.command_formatting(group)
# remove commands that the user can't run and are hidden, and sort by name
commands_ = await self.filter_commands(subcommands, sort=True)
- embed = await self.command_formatting(group)
+ embed, _ = await self.command_formatting(group)
command_details = self.get_commands_brief_details(commands_)
if command_details:
embed.description += f"\n**Subcommands:**\n{command_details}"
- message = await self.context.send(embed=embed)
+ # If the help is invoked in the context of an error, don't show subcommand navigation.
+ view = GroupView(self, group, commands_, self.context) if not self.context.command_failed else None
+ return embed, view
+
+ async def send_group_help(self, group: Group) -> None:
+ """Sends help for a group command."""
+ embed, view = await self.format_group_help(group)
+ message = await self.context.send(embed=embed, view=view)
await wait_for_deletion(message, (self.context.author.id,))
async def send_cog_help(self, cog: Cog) -> None:
diff --git a/bot/exts/info/information.py b/bot/exts/info/information.py
index 7f4811a43..e616b9208 100644
--- a/bot/exts/info/information.py
+++ b/bot/exts/info/information.py
@@ -2,11 +2,12 @@ import colorsys
import pprint
import textwrap
from collections import defaultdict
+from textwrap import shorten
from typing import Any, DefaultDict, Mapping, Optional, Tuple, Union
import rapidfuzz
from discord import AllowedMentions, Colour, Embed, Guild, Message, Role
-from discord.ext.commands import BucketType, Cog, Context, Paginator, command, group, has_any_role
+from discord.ext.commands import BucketType, Cog, Context, Greedy, Paginator, command, group, has_any_role
from discord.utils import escape_markdown
from bot import constants
@@ -17,10 +18,10 @@ from bot.decorators import in_whitelist
from bot.errors import NonExistentRoleError
from bot.log import get_logger
from bot.pagination import LinePaginator
+from bot.utils import time
from bot.utils.channel import is_mod_channel, is_staff_channel
from bot.utils.checks import cooldown_with_role_bypass, has_no_roles_check, in_whitelist_check
from bot.utils.members import get_or_fetch_member
-from bot.utils.time import TimestampFormats, discord_timestamp, humanize_delta
log = get_logger(__name__)
@@ -82,7 +83,7 @@ class Information(Cog):
defcon_info = ""
if cog := self.bot.get_cog("Defcon"):
- threshold = humanize_delta(cog.threshold) if cog.threshold else "-"
+ threshold = time.humanize_delta(cog.threshold) if cog.threshold else "-"
defcon_info = f"Defcon threshold: {threshold}\n"
verification = f"Verification level: {ctx.guild.verification_level.name}\n"
@@ -172,13 +173,13 @@ class Information(Cog):
"""Returns an embed full of server information."""
embed = Embed(colour=Colour.og_blurple(), title="Server Information")
- created = discord_timestamp(ctx.guild.created_at, TimestampFormats.RELATIVE)
- region = ctx.guild.region
+ created = time.format_relative(ctx.guild.created_at)
num_roles = len(ctx.guild.roles) - 1 # Exclude @everyone
# Server Features are only useful in certain channels
if ctx.channel.id in (
- *constants.MODERATION_CHANNELS, constants.Channels.dev_core, constants.Channels.dev_contrib
+ *constants.MODERATION_CHANNELS,
+ constants.Channels.dev_core,
):
features = f"\nFeatures: {', '.join(ctx.guild.features)}"
else:
@@ -195,7 +196,6 @@ class Information(Cog):
embed.description = (
f"Created: {created}"
- f"\nVoice region: {region}"
f"{features}"
f"\nRoles: {num_roles}"
f"\nMember status: {member_status}"
@@ -225,7 +225,7 @@ class Information(Cog):
@command(name="user", aliases=["user_info", "member", "member_info", "u"])
async def user_info(self, ctx: Context, user_or_message: Union[MemberOrUser, Message] = None) -> None:
"""Returns info about a user."""
- if isinstance(user_or_message, Message):
+ if passed_as_message := isinstance(user_or_message, Message):
user = user_or_message.author
else:
user = user_or_message
@@ -240,20 +240,23 @@ class Information(Cog):
# Will redirect to #bot-commands if it fails.
if in_whitelist_check(ctx, roles=constants.STAFF_PARTNERS_COMMUNITY_ROLES):
- embed = await self.create_user_embed(ctx, user)
+ embed = await self.create_user_embed(ctx, user, passed_as_message)
await ctx.send(embed=embed)
- async def create_user_embed(self, ctx: Context, user: MemberOrUser) -> Embed:
+ async def create_user_embed(self, ctx: Context, user: MemberOrUser, passed_as_message: bool) -> Embed:
"""Creates an embed containing information on the `user`."""
on_server = bool(await get_or_fetch_member(ctx.guild, user.id))
- created = discord_timestamp(user.created_at, TimestampFormats.RELATIVE)
+ created = time.format_relative(user.created_at)
name = str(user)
if on_server and user.nick:
name = f"{user.nick} ({name})"
name = escape_markdown(name)
+ if passed_as_message:
+ name += " - From Message"
+
if user.public_flags.verified_bot:
name += f" {constants.Emojis.verified_bot}"
elif user.bot:
@@ -267,7 +270,7 @@ class Information(Cog):
if on_server:
if user.joined_at:
- joined = discord_timestamp(user.joined_at, TimestampFormats.RELATIVE)
+ joined = time.format_relative(user.joined_at)
else:
joined = "Unable to get join date"
@@ -280,7 +283,6 @@ class Information(Cog):
membership = textwrap.dedent("\n".join([f"{key}: {value}" for key, value in membership.items()]))
else:
- roles = None
membership = "The user is not a member of the server"
fields = [
@@ -296,11 +298,11 @@ class Information(Cog):
"Member information",
membership
),
+ await self.user_messages(user),
]
# Show more verbose output in moderation channels for infractions and nominations
if is_mod_channel(ctx.channel):
- fields.append(await self.user_messages(user))
fields.append(await self.expanded_user_infraction_counts(user))
fields.append(await self.user_nomination_counts(user))
else:
@@ -418,8 +420,8 @@ class Information(Cog):
if e.status == 404:
activity_output = "No activity"
else:
- activity_output.append(user_activity["total_messages"] or "No messages")
- activity_output.append(user_activity["activity_blocks"] or "No activity")
+ activity_output.append(f"{user_activity['total_messages']:,}" if user_activity['total_messages'] else "No messages")
+ activity_output.append(f"{user_activity['activity_blocks']:,}" if user_activity['activity_blocks'] else "No activity")
activity_output = "\n".join(
f"{name}: {metric}" for name, metric in zip(["Messages", "Activity blocks"], activity_output)
@@ -468,7 +470,7 @@ class Information(Cog):
If `json` is True, send the information in a copy-pasteable Python format.
"""
- if ctx.author not in message.channel.members:
+ if not message.channel.permissions_for(ctx.author).read_messages:
await ctx.send(":x: You do not have permissions to see the channel this message is in.")
return
@@ -515,6 +517,40 @@ class Information(Cog):
"""Shows information about the raw API response in a copy-pasteable Python format."""
await self.send_raw_content(ctx, message, json=True)
+ @command(aliases=("rule",))
+ async def rules(self, ctx: Context, rules: Greedy[int]) -> None:
+ """Provides a link to all rules or, if specified, displays specific rule(s)."""
+ rules_embed = Embed(title="Rules", color=Colour.og_blurple(), url="https://www.pythondiscord.com/pages/rules")
+
+ if not rules:
+ # Rules were not submitted. Return the default description.
+ rules_embed.description = (
+ "The rules and guidelines that apply to this community can be found on"
+ " our [rules page](https://www.pythondiscord.com/pages/rules). We expect"
+ " all members of the community to have read and understood these."
+ )
+
+ await ctx.send(embed=rules_embed)
+ return
+
+ full_rules = await self.bot.api_client.get("rules", params={"link_format": "md"})
+
+ # Remove duplicates and sort the rule indices
+ rules = sorted(set(rules))
+
+ invalid = ", ".join(str(index) for index in rules if index < 1 or index > len(full_rules))
+
+ if invalid:
+ await ctx.send(shorten(":x: Invalid rule indices: " + invalid, 75, placeholder=" ..."))
+ return
+
+ for rule in rules:
+ self.bot.stats.incr(f"rule_uses.{rule}")
+
+ final_rules = tuple(f"**{pick}.** {full_rules[pick - 1]}" for pick in rules)
+
+ await LinePaginator.paginate(final_rules, ctx, rules_embed, max_lines=3)
+
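The index handling in the new `rules` command boils down to a dedupe-sort-validate step before any stats counters are bumped. A standalone sketch of just that step (the helper name and example numbers are illustrative, not part of the patch):

    def validate_rule_indices(requested: list[int], total_rules: int) -> tuple[list[int], list[int]]:
        """Split requested rule indices into (valid, invalid), deduplicated and sorted as above."""
        unique = sorted(set(requested))
        invalid = [index for index in unique if index < 1 or index > total_rules]
        valid = [index for index in unique if 1 <= index <= total_rules]
        return valid, invalid


    # With, say, 10 rules fetched from the API:
    # validate_rule_indices([3, 3, 1, 42], 10) -> ([1, 3], [42])
    # The command itself aborts with a shortened error message as soon as any index is invalid.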
def setup(bot: Bot) -> None:
"""Load the Information cog."""
diff --git a/bot/exts/info/pep.py b/bot/exts/info/pep.py
index 259095b50..50c137d0f 100644
--- a/bot/exts/info/pep.py
+++ b/bot/exts/info/pep.py
@@ -15,8 +15,8 @@ from bot.utils.caching import AsyncCache
log = get_logger(__name__)
ICON_URL = "https://www.python.org/static/opengraph-icon-200x200.png"
-BASE_PEP_URL = "http://www.python.org/dev/peps/pep-"
-PEPS_LISTING_API_URL = "https://api.github.com/repos/python/peps/contents?ref=master"
+BASE_PEP_URL = "https://peps.python.org/pep-"
+PEPS_LISTING_API_URL = "https://api.github.com/repos/python/peps/contents?ref=main"
pep_cache = AsyncCache()
@@ -67,7 +67,7 @@ class PythonEnhancementProposals(Cog):
"""Get information embed about PEP 0."""
pep_embed = Embed(
title="**PEP 0 - Index of Python Enhancement Proposals (PEPs)**",
- url="https://www.python.org/dev/peps/"
+ url="https://peps.python.org/"
)
pep_embed.set_thumbnail(url=ICON_URL)
pep_embed.add_field(name="Status", value="Active")
@@ -97,9 +97,12 @@ class PythonEnhancementProposals(Cog):
def generate_pep_embed(self, pep_header: Dict, pep_nr: int) -> Embed:
"""Generate PEP embed based on PEP headers data."""
+ # The parsed header can be wrapped over multiple lines, so collapse it back into a single line.
+ # For an example of a PEP with this issue, see PEP 500.
+ title = " ".join(pep_header["Title"].split())
# Assemble the embed
pep_embed = Embed(
- title=f"**PEP {pep_nr} - {pep_header['Title']}**",
+ title=f"**PEP {pep_nr} - {title}**",
description=f"[Link]({BASE_PEP_URL}{pep_nr:04})",
)
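The title fix above relies on `str.split()` with no arguments splitting on any run of whitespace, including the newlines introduced when a long PEP header is folded, so joining with a single space flattens it back onto one line. For example (the wrapped value is made up):

    wrapped_title = "Some Long PEP Title\n        That Got Folded"
    print(" ".join(wrapped_title.split()))
    # -> "Some Long PEP Title That Got Folded"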
diff --git a/bot/exts/info/pypi.py b/bot/exts/info/pypi.py
index c3d2e2a3c..dacf7bc12 100644
--- a/bot/exts/info/pypi.py
+++ b/bot/exts/info/pypi.py
@@ -29,7 +29,7 @@ class PyPi(Cog):
def __init__(self, bot: Bot):
self.bot = bot
- @command(name="pypi", aliases=("package", "pack"))
+ @command(name="pypi", aliases=("package", "pack", "pip"))
async def get_package_info(self, ctx: Context, package: str) -> None:
"""Provide information about a specific package from PyPI."""
embed = Embed(title=random.choice(NEGATIVE_REPLIES), colour=Colours.soft_red)
diff --git a/bot/exts/info/python_news.py b/bot/exts/info/python_news.py
index 2fad9d2ab..c5b7183ce 100644
--- a/bot/exts/info/python_news.py
+++ b/bot/exts/info/python_news.py
@@ -14,7 +14,7 @@ from bot.log import get_logger
from bot.utils import scheduling
from bot.utils.webhooks import send_webhook
-PEPS_RSS_URL = "https://www.python.org/dev/peps/peps.rss/"
+PEPS_RSS_URL = "https://peps.python.org/peps.rss"
RECENT_THREADS_TEMPLATE = "https://mail.python.org/archives/list/{name}@python.org/recent-threads"
THREAD_TEMPLATE_URL = "https://mail.python.org/archives/api/list/{name}@python.org/thread/{id}/"
diff --git a/bot/exts/info/resources.py b/bot/exts/info/resources.py
new file mode 100644
index 000000000..e27357484
--- /dev/null
+++ b/bot/exts/info/resources.py
@@ -0,0 +1,70 @@
+import re
+from typing import Optional
+from urllib.parse import quote
+
+from discord import Embed
+from discord.ext import commands
+
+from bot.bot import Bot
+
+REGEX_CONSECUTIVE_NON_LETTERS = r"[^A-Za-z0-9]+"
+RESOURCE_URL = "https://www.pythondiscord.com/resources/"
+
+
+def to_kebabcase(resource_topic: str) -> str:
+ """
+ Convert any string to kebab-case.
+
+ For example, convert
+ "__Favorite FROOT¤#/$?is----LeMON???" to
+ "favorite-froot-is-lemon"
+
+ Code adopted from:
+ https://github.com/python-discord/site/blob/main/pydis_site/apps/resources/templatetags/to_kebabcase.py
+ """
+ # First, make it lowercase, and just remove any apostrophes.
+ # We remove the apostrophes because "wasnt" is better than "wasn-t"
+ resource_topic = resource_topic.casefold()
+ resource_topic = resource_topic.replace("'", '')
+
+ # Now, replace any non-alphanumerics that remain with a dash.
+ # If there are multiple consecutive non-letters, just replace them with a single dash.
+ # my-favorite-class is better than my-favorite------class
+ resource_topic = re.sub(
+ REGEX_CONSECUTIVE_NON_LETTERS,
+ "-",
+ resource_topic,
+ )
+
+ # Now we use strip to get rid of any leading or trailing dashes.
+ resource_topic = resource_topic.strip("-")
+ return resource_topic
+
+
+class Resources(commands.Cog):
+ """Display information about the Python Discord website Resources page."""
+
+ def __init__(self, bot: Bot):
+ self.bot = bot
+
+ @commands.command(name="resources", aliases=("res",))
+ async def resources_command(self, ctx: commands.Context, *, resource_topic: Optional[str]) -> None:
+ """Display information and a link to the Python Discord website Resources page."""
+ url = RESOURCE_URL
+
+ if resource_topic:
+ # Only use the first line (users can add a message below the command), then URL-encode the topic.
+ url = f"{url}?topics={quote(to_kebabcase(resource_topic.splitlines()[0]))}"
+
+ embed = Embed(
+ title="Resources",
+ description=f"The [Resources page]({url}) on our website contains a list "
+ f"of hand-selected learning resources that we "
+ f"regularly recommend to both beginners and experts."
+ )
+ await ctx.send(embed=embed)
+
+
+def setup(bot: Bot) -> None:
+ """Load the Resources cog."""
+ bot.add_cog(Resources(bot))
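A quick, self-contained check of the slug logic above against the example in its own docstring (same regex and steps, compressed here purely for illustration):

    import re

    REGEX_CONSECUTIVE_NON_LETTERS = r"[^A-Za-z0-9]+"


    def to_kebabcase(topic: str) -> str:
        # Same steps as the cog above: casefold, drop apostrophes,
        # collapse runs of other non-alphanumerics into single dashes, strip the ends.
        topic = topic.casefold().replace("'", "")
        topic = re.sub(REGEX_CONSECUTIVE_NON_LETTERS, "-", topic)
        return topic.strip("-")


    assert to_kebabcase("__Favorite FROOT¤#/$?is----LeMON???") == "favorite-froot-is-lemon"
    assert to_kebabcase("Wasn't It Great") == "wasnt-it-great"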
diff --git a/bot/exts/info/site.py b/bot/exts/info/site.py
deleted file mode 100644
index e8e71558b..000000000
--- a/bot/exts/info/site.py
+++ /dev/null
@@ -1,142 +0,0 @@
-from discord import Colour, Embed
-from discord.ext.commands import Cog, Context, Greedy, group
-
-from bot.bot import Bot
-from bot.constants import URLs
-from bot.log import get_logger
-from bot.pagination import LinePaginator
-
-log = get_logger(__name__)
-
-BASE_URL = f"{URLs.site_schema}{URLs.site}"
-
-
-class Site(Cog):
- """Commands for linking to different parts of the site."""
-
- def __init__(self, bot: Bot):
- self.bot = bot
-
- @group(name="site", aliases=("s",), invoke_without_command=True)
- async def site_group(self, ctx: Context) -> None:
- """Commands for getting info about our website."""
- await ctx.send_help(ctx.command)
-
- @site_group.command(name="home", aliases=("about",), root_aliases=("home",))
- async def site_main(self, ctx: Context) -> None:
- """Info about the website itself."""
- url = f"{URLs.site_schema}{URLs.site}/"
-
- embed = Embed(title="Python Discord website")
- embed.set_footer(text=url)
- embed.colour = Colour.og_blurple()
- embed.description = (
- f"[Our official website]({url}) is an open-source community project "
- "created with Python and Django. It contains information about the server "
- "itself, lets you sign up for upcoming events, has its own wiki, contains "
- "a list of valuable learning resources, and much more."
- )
-
- await ctx.send(embed=embed)
-
- @site_group.command(name="resources", root_aliases=("resources", "resource"))
- async def site_resources(self, ctx: Context) -> None:
- """Info about the site's Resources page."""
- learning_url = f"{BASE_URL}/resources"
-
- embed = Embed(title="Resources")
- embed.set_footer(text=f"{learning_url}")
- embed.colour = Colour.og_blurple()
- embed.description = (
- f"The [Resources page]({learning_url}) on our website contains a "
- "list of hand-selected learning resources that we regularly recommend "
- f"to both beginners and experts."
- )
-
- await ctx.send(embed=embed)
-
- @site_group.command(name="tools", root_aliases=("tools",))
- async def site_tools(self, ctx: Context) -> None:
- """Info about the site's Tools page."""
- tools_url = f"{BASE_URL}/resources/tools"
-
- embed = Embed(title="Tools")
- embed.set_footer(text=f"{tools_url}")
- embed.colour = Colour.og_blurple()
- embed.description = (
- f"The [Tools page]({tools_url}) on our website contains a "
- f"couple of the most popular tools for programming in Python."
- )
-
- await ctx.send(embed=embed)
-
- @site_group.command(name="help")
- async def site_help(self, ctx: Context) -> None:
- """Info about the site's Getting Help page."""
- url = f"{BASE_URL}/pages/guides/pydis-guides/asking-good-questions/"
-
- embed = Embed(title="Asking Good Questions")
- embed.set_footer(text=url)
- embed.colour = Colour.og_blurple()
- embed.description = (
- "Asking the right question about something that's new to you can sometimes be tricky. "
- f"To help with this, we've created a [guide to asking good questions]({url}) on our website. "
- "It contains everything you need to get the very best help from our community."
- )
-
- await ctx.send(embed=embed)
-
- @site_group.command(name="faq", root_aliases=("faq",))
- async def site_faq(self, ctx: Context) -> None:
- """Info about the site's FAQ page."""
- url = f"{BASE_URL}/pages/frequently-asked-questions"
-
- embed = Embed(title="FAQ")
- embed.set_footer(text=url)
- embed.colour = Colour.og_blurple()
- embed.description = (
- "As the largest Python community on Discord, we get hundreds of questions every day. "
- "Many of these questions have been asked before. We've compiled a list of the most "
- "frequently asked questions along with their answers, which can be found on "
- f"our [FAQ page]({url})."
- )
-
- await ctx.send(embed=embed)
-
- @site_group.command(name="rules", aliases=("r", "rule"), root_aliases=("rules", "rule"))
- async def site_rules(self, ctx: Context, rules: Greedy[int]) -> None:
- """Provides a link to all rules or, if specified, displays specific rule(s)."""
- rules_embed = Embed(title='Rules', color=Colour.og_blurple(), url=f'{BASE_URL}/pages/rules')
-
- if not rules:
- # Rules were not submitted. Return the default description.
- rules_embed.description = (
- "The rules and guidelines that apply to this community can be found on"
- f" our [rules page]({BASE_URL}/pages/rules). We expect"
- " all members of the community to have read and understood these."
- )
-
- await ctx.send(embed=rules_embed)
- return
-
- full_rules = await self.bot.api_client.get('rules', params={'link_format': 'md'})
-
- # Remove duplicates and sort the rule indices
- rules = sorted(set(rules))
- invalid = ', '.join(str(index) for index in rules if index < 1 or index > len(full_rules))
-
- if invalid:
- await ctx.send(f":x: Invalid rule indices: {invalid}")
- return
-
- for rule in rules:
- self.bot.stats.incr(f"rule_uses.{rule}")
-
- final_rules = tuple(f"**{pick}.** {full_rules[pick - 1]}" for pick in rules)
-
- await LinePaginator.paginate(final_rules, ctx, rules_embed, max_lines=3)
-
-
-def setup(bot: Bot) -> None:
- """Load the Site cog."""
- bot.add_cog(Site(bot))
diff --git a/bot/exts/info/source.py b/bot/exts/info/source.py
index 8ce25b4e8..e3e7029ca 100644
--- a/bot/exts/info/source.py
+++ b/bot/exts/info/source.py
@@ -8,8 +8,9 @@ from discord.ext import commands
from bot.bot import Bot
from bot.constants import URLs
from bot.converters import SourceConverter
+from bot.exts.info.tags import TagIdentifier
-SourceType = Union[commands.HelpCommand, commands.Command, commands.Cog, str, commands.ExtensionNotLoaded]
+SourceType = Union[commands.HelpCommand, commands.Command, commands.Cog, TagIdentifier, commands.ExtensionNotLoaded]
class BotSource(commands.Cog):
@@ -41,9 +42,9 @@ class BotSource(commands.Cog):
source_item = inspect.unwrap(source_item.callback)
src = source_item.__code__
filename = src.co_filename
- elif isinstance(source_item, str):
+ elif isinstance(source_item, TagIdentifier):
tags_cog = self.bot.get_cog("Tags")
- filename = tags_cog._cache[source_item]["location"]
+ filename = tags_cog.tags[source_item].file_path
else:
src = type(source_item)
try:
@@ -51,7 +52,7 @@ class BotSource(commands.Cog):
except TypeError:
raise commands.BadArgument("Cannot get source for a dynamically-created object.")
- if not isinstance(source_item, str):
+ if not isinstance(source_item, TagIdentifier):
try:
lines, first_line_no = inspect.getsourcelines(src)
except OSError:
@@ -64,7 +65,7 @@ class BotSource(commands.Cog):
# Handle tag file location differently than others to avoid errors in some cases
if not first_line_no:
- file_location = Path(filename).relative_to("/bot/")
+ file_location = Path(filename).relative_to("bot/")
else:
file_location = Path(filename).relative_to(Path.cwd()).as_posix()
@@ -82,7 +83,7 @@ class BotSource(commands.Cog):
elif isinstance(source_object, commands.Command):
description = source_object.short_doc
title = f"Command: {source_object.qualified_name}"
- elif isinstance(source_object, str):
+ elif isinstance(source_object, TagIdentifier):
title = f"Tag: {source_object}"
description = ""
else:
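The change from `relative_to("/bot/")` to `relative_to("bot/")` follows from tag locations now being relative paths: `initialize_tags` later in this patch builds them from `Path("bot", "resources", "tags")`, whereas the old cache stored strings of the form `/bot/...`. A tiny illustration (the tag filename is invented):

    from pathlib import Path

    # Shape of Tag.file_path after this patch (see initialize_tags further down).
    tag_path = Path("bot", "resources", "tags", "example.md")

    print(tag_path.relative_to("bot/"))   # resources/tags/example.md
    # tag_path.relative_to("/bot/") would raise ValueError, because the path is no longer rooted at /bot.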
diff --git a/bot/exts/info/subscribe.py b/bot/exts/info/subscribe.py
new file mode 100644
index 000000000..eff0c13b8
--- /dev/null
+++ b/bot/exts/info/subscribe.py
@@ -0,0 +1,201 @@
+import calendar
+import operator
+import typing as t
+from dataclasses import dataclass
+
+import arrow
+import discord
+from discord.ext import commands
+from discord.interactions import Interaction
+
+from bot import constants
+from bot.bot import Bot
+from bot.decorators import redirect_output
+from bot.log import get_logger
+from bot.utils import members, scheduling
+
+
+@dataclass(frozen=True)
+class AssignableRole:
+ """
+ A role that can be assigned to a user.
+
+ months_available is a tuple of the months in which the role should be
+ self-assignable, or None if the role should always be available.
+ """
+
+ role_id: int
+ months_available: t.Optional[tuple[int]]
+ name: t.Optional[str] = None # This gets populated within Subscribe.init_cog()
+
+ def is_currently_available(self) -> bool:
+ """Check if the role is available for the current month."""
+ if self.months_available is None:
+ return True
+ return arrow.utcnow().month in self.months_available
+
+ def get_readable_available_months(self) -> str:
+ """Get a readable string of the months the role is available."""
+ if self.months_available is None:
+ return f"{self.name} is always available."
+
+ # Join the months together with comma separators, but use "and" for the final separator.
+ month_names = [calendar.month_name[month] for month in self.months_available]
+ available_months_str = ", ".join(month_names[:-1]) + f" and {month_names[-1]}"
+ return f"{self.name} can only be assigned during {available_months_str}."
+
+
+ASSIGNABLE_ROLES = (
+ AssignableRole(constants.Roles.announcements, None),
+ AssignableRole(constants.Roles.pyweek_announcements, None),
+ AssignableRole(constants.Roles.lovefest, (1, 2)),
+ AssignableRole(constants.Roles.advent_of_code, (11, 12)),
+)
+
+ITEMS_PER_ROW = 3
+DELETE_MESSAGE_AFTER = 300 # Seconds
+
+log = get_logger(__name__)
+
+
+class RoleButtonView(discord.ui.View):
+ """A list of SingleRoleButtons to show to the member."""
+
+ def __init__(self, member: discord.Member):
+ super().__init__()
+ self.interaction_owner = member
+
+ async def interaction_check(self, interaction: Interaction) -> bool:
+ """Ensure that the user clicking the button is the member who invoked the command."""
+ if interaction.user != self.interaction_owner:
+ await interaction.response.send_message(
+ ":x: This is not your command to react to!",
+ ephemeral=True
+ )
+ return False
+ return True
+
+
+class SingleRoleButton(discord.ui.Button):
+ """A button that adds or removes a role from the member depending on its current state."""
+
+ ADD_STYLE = discord.ButtonStyle.success
+ REMOVE_STYLE = discord.ButtonStyle.red
+ UNAVAILABLE_STYLE = discord.ButtonStyle.secondary
+ LABEL_FORMAT = "{action} role {role_name}."
+ CUSTOM_ID_FORMAT = "subscribe-{role_id}"
+
+ def __init__(self, role: AssignableRole, assigned: bool, row: int):
+ if role.is_currently_available():
+ style = self.REMOVE_STYLE if assigned else self.ADD_STYLE
+ label = self.LABEL_FORMAT.format(action="Remove" if assigned else "Add", role_name=role.name)
+ else:
+ style = self.UNAVAILABLE_STYLE
+ label = f"🔒 {role.name}"
+
+ super().__init__(
+ style=style,
+ label=label,
+ custom_id=self.CUSTOM_ID_FORMAT.format(role_id=role.role_id),
+ row=row,
+ )
+ self.role = role
+ self.assigned = assigned
+
+ async def callback(self, interaction: Interaction) -> None:
+ """Update the member's role and change the button text to reflect the new state."""
+ if isinstance(interaction.user, discord.User):
+ log.trace("User %s is not a member", interaction.user)
+ await interaction.message.delete()
+ self.view.stop()
+ return
+
+ if not self.role.is_currently_available():
+ await interaction.response.send_message(self.role.get_readable_available_months(), ephemeral=True)
+ return
+
+ await members.handle_role_change(
+ interaction.user,
+ interaction.user.remove_roles if self.assigned else interaction.user.add_roles,
+ discord.Object(self.role.role_id),
+ )
+
+ self.assigned = not self.assigned
+ await self.update_view(interaction)
+ await interaction.response.send_message(
+ self.LABEL_FORMAT.format(action="Added" if self.assigned else "Removed", role_name=self.role.name),
+ ephemeral=True,
+ )
+
+ async def update_view(self, interaction: Interaction) -> None:
+ """Updates the original interaction message with a new view object containing the updated buttons."""
+ self.style = self.REMOVE_STYLE if self.assigned else self.ADD_STYLE
+ self.label = self.LABEL_FORMAT.format(action="Remove" if self.assigned else "Add", role_name=self.role.name)
+ try:
+ await interaction.message.edit(view=self.view)
+ except discord.NotFound:
+ log.debug("Subscribe message for %s removed before buttons could be updated", interaction.user)
+ self.view.stop()
+
+
+class Subscribe(commands.Cog):
+ """Cog to allow users to self-assign & remove the roles present in ASSIGNABLE_ROLES."""
+
+ def __init__(self, bot: Bot):
+ self.bot = bot
+ self.init_task = scheduling.create_task(self.init_cog(), event_loop=self.bot.loop)
+ self.assignable_roles: list[AssignableRole] = []
+ self.guild: discord.Guild = None
+
+ async def init_cog(self) -> None:
+ """Initialise the cog by resolving the role IDs in ASSIGNABLE_ROLES to role names."""
+ await self.bot.wait_until_guild_available()
+
+ self.guild = self.bot.get_guild(constants.Guild.id)
+
+ for role in ASSIGNABLE_ROLES:
+ discord_role = self.guild.get_role(role.role_id)
+ if discord_role is None:
+ log.warning("Could not resolve %d to a role in the guild, skipping.", role.role_id)
+ continue
+ self.assignable_roles.append(
+ AssignableRole(
+ role_id=role.role_id,
+ months_available=role.months_available,
+ name=discord_role.name,
+ )
+ )
+
+ # Sort by role name, then shift unavailable roles to the end of the list
+ self.assignable_roles.sort(key=operator.attrgetter("name"))
+ self.assignable_roles.sort(key=operator.methodcaller("is_currently_available"), reverse=True)
+
+ @commands.cooldown(1, 10, commands.BucketType.member)
+ @commands.command(name="subscribe", aliases=("unsubscribe",))
+ @redirect_output(
+ destination_channel=constants.Channels.bot_commands,
+ bypass_roles=constants.STAFF_PARTNERS_COMMUNITY_ROLES,
+ )
+ async def subscribe_command(self, ctx: commands.Context, *_) -> None: # We don't actually care about the args
+ """Display the member's current state for each role, and allow them to add/remove the roles."""
+ await self.init_task
+
+ button_view = RoleButtonView(ctx.author)
+ author_roles = [role.id for role in ctx.author.roles]
+ for index, role in enumerate(self.assignable_roles):
+ row = index // ITEMS_PER_ROW
+ button_view.add_item(SingleRoleButton(role, role.role_id in author_roles, row))
+
+ await ctx.send(
+ "Click the buttons below to add or remove your roles!",
+ view=button_view,
+ delete_after=DELETE_MESSAGE_AFTER,
+ )
+
+
+def setup(bot: Bot) -> None:
+ """Load the Subscribe cog."""
+ if len(ASSIGNABLE_ROLES) > ITEMS_PER_ROW*5: # Discord limits views to 5 rows of buttons.
+ log.error("Too many roles for 5 rows, not loading the Subscribe cog.")
+ else:
+ bot.add_cog(Subscribe(bot))
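Two small invariants in the new cog can be checked in isolation: buttons are packed `ITEMS_PER_ROW` per row by integer division, and the `setup` guard refuses to load the cog if the configured roles would overflow Discord's five-row component limit. A sketch with invented role names:

    ITEMS_PER_ROW = 3
    MAX_ROWS = 5  # Discord limits a view to 5 rows of components.

    role_names = ["announcements", "pyweek", "lovefest", "advent-of-code"]

    for index, name in enumerate(role_names):
        print(f"{name!r} -> row {index // ITEMS_PER_ROW}")
    # The first three land on row 0; the fourth wraps to row 1.

    assert len(role_names) <= ITEMS_PER_ROW * MAX_ROWS, "Too many roles for 5 rows of buttons"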
diff --git a/bot/exts/info/tags.py b/bot/exts/info/tags.py
index 842647555..f66237c8e 100644
--- a/bot/exts/info/tags.py
+++ b/bot/exts/info/tags.py
@@ -1,14 +1,18 @@
+from __future__ import annotations
+
+import enum
import re
import time
from pathlib import Path
-from typing import Callable, Dict, Iterable, List, Optional
+from typing import Callable, Iterable, Literal, NamedTuple, Optional, Union
-from discord import Colour, Embed, Member
+import discord
+import frontmatter
+from discord import Embed, Member
from discord.ext.commands import Cog, Context, group
from bot import constants
from bot.bot import Bot
-from bot.converters import TagNameConverter
from bot.log import get_logger
from bot.pagination import LinePaginator
from bot.utils.messages import wait_for_deletion
@@ -24,99 +28,168 @@ REGEX_NON_ALPHABET = re.compile(r"[^a-z]", re.MULTILINE & re.IGNORECASE)
FOOTER_TEXT = f"To show a tag, type {constants.Bot.prefix}tags <tagname>."
+class COOLDOWN(enum.Enum):
+ """Sentinel value to signal that a tag is on cooldown."""
+
+ obj = object()
+
+
+class TagIdentifier(NamedTuple):
+ """Stores the group and name used as an identifier for a tag."""
+
+ group: Optional[str]
+ name: str
+
+ def get_fuzzy_score(self, fuzz_tag_identifier: TagIdentifier) -> float:
+ """Get fuzzy score, using `fuzz_tag_identifier` as the identifier to fuzzy match with."""
+ if (self.group is None) != (fuzz_tag_identifier.group is None):
+ # Ignore tags without groups if the identifier has a group and vice versa
+ return .0
+ if self.group == fuzz_tag_identifier.group:
+ # Completely identical, or both None
+ group_score = 1
+ else:
+ group_score = _fuzzy_search(fuzz_tag_identifier.group, self.group)
+
+ fuzzy_score = group_score * _fuzzy_search(fuzz_tag_identifier.name, self.name) * 100
+ if fuzzy_score:
+ log.trace(f"Fuzzy score {fuzzy_score:=06.2f} for tag {self!r} with fuzz {fuzz_tag_identifier!r}")
+ return fuzzy_score
+
+ def __str__(self) -> str:
+ if self.group is not None:
+ return f"{self.group} {self.name}"
+ else:
+ return self.name
+
+ @classmethod
+ def from_string(cls, string: str) -> TagIdentifier:
+ """Create a `TagIdentifier` instance from the beginning of `string`."""
+ split_string = string.removeprefix(constants.Bot.prefix).split(" ", maxsplit=2)
+ if len(split_string) == 1:
+ return cls(None, split_string[0])
+ else:
+ return cls(split_string[0], split_string[1])
+
+
+class Tag:
+ """Provide an interface to a tag stored in the resources directory at `content_path`."""
+
+ def __init__(self, content_path: Path):
+ post = frontmatter.loads(content_path.read_text("utf8"))
+ self.file_path = content_path
+ self.content = post.content
+ self.metadata = post.metadata
+ self._restricted_to: set[int] = set(self.metadata.get("restricted_to", ()))
+ self._cooldowns: dict[discord.TextChannel, float] = {}
+
+ @property
+ def embed(self) -> Embed:
+ """Create an embed for the tag."""
+ embed = Embed.from_dict(self.metadata.get("embed", {}))
+ embed.description = self.content
+ return embed
+
+ def accessible_by(self, member: discord.Member) -> bool:
+ """Check whether `member` can access the tag."""
+ return bool(
+ not self._restricted_to
+ or self._restricted_to & {role.id for role in member.roles}
+ )
+
+ def on_cooldown_in(self, channel: discord.TextChannel) -> bool:
+ """Check whether the tag is on cooldown in `channel`."""
+ return self._cooldowns.get(channel, float("-inf")) > time.time()
+
+ def set_cooldown_for(self, channel: discord.TextChannel) -> None:
+ """Set the tag to be on cooldown in `channel` for `constants.Cooldowns.tags` seconds."""
+ self._cooldowns[channel] = time.time() + constants.Cooldowns.tags
+
+
+def _fuzzy_search(search: str, target: str) -> float:
+ """A simple scoring algorithm based on how many letters are found / total, with order in mind."""
+ _search = REGEX_NON_ALPHABET.sub("", search.lower())
+ if not _search:
+ return 0
+
+ _targets = iter(REGEX_NON_ALPHABET.split(target.lower()))
+
+ current = 0
+ for _target in _targets:
+ index = 0
+ try:
+ while index < len(_target) and _search[current] == _target[index]:
+ current += 1
+ index += 1
+ except IndexError:
+ # Exit when _search runs out
+ break
+
+ return current / len(_search)
+
+
class Tags(Cog):
- """Save new tags and fetch existing tags."""
+ """Fetch tags by name or content."""
+
+ PAGINATOR_DEFAULTS = dict(max_lines=15, empty=False, footer_text=FOOTER_TEXT)
def __init__(self, bot: Bot):
self.bot = bot
- self.tag_cooldowns = {}
- self._cache = self.get_tags()
-
- @staticmethod
- def get_tags() -> dict:
- """Get all tags."""
- cache = {}
+ self.tags: dict[TagIdentifier, Tag] = {}
+ self.initialize_tags()
+ def initialize_tags(self) -> None:
+ """Load all tags from resources into `self.tags`."""
base_path = Path("bot", "resources", "tags")
+
for file in base_path.glob("**/*"):
if file.is_file():
- tag_title = file.stem
- tag = {
- "title": tag_title,
- "embed": {
- "description": file.read_text(encoding="utf8"),
- },
- "restricted_to": None,
- "location": f"/bot/{file}"
- }
-
- # Convert to a list to allow negative indexing.
- parents = list(file.relative_to(base_path).parents)
- if len(parents) > 1:
- # -1 would be '.' hence -2 is used as the index.
- tag["restricted_to"] = parents[-2].name
-
- cache[tag_title] = tag
-
- return cache
-
- @staticmethod
- def check_accessibility(user: Member, tag: dict) -> bool:
- """Check if user can access a tag."""
- return not tag["restricted_to"] or tag["restricted_to"].lower() in [role.name.lower() for role in user.roles]
-
- @staticmethod
- def _fuzzy_search(search: str, target: str) -> float:
- """A simple scoring algorithm based on how many letters are found / total, with order in mind."""
- current, index = 0, 0
- _search = REGEX_NON_ALPHABET.sub('', search.lower())
- _targets = iter(REGEX_NON_ALPHABET.split(target.lower()))
- _target = next(_targets)
- try:
- while True:
- while index < len(_target) and _search[current] == _target[index]:
- current += 1
- index += 1
- index, _target = 0, next(_targets)
- except (StopIteration, IndexError):
- pass
- return current / len(_search) * 100
-
- def _get_suggestions(self, tag_name: str, thresholds: Optional[List[int]] = None) -> List[str]:
- """Return a list of suggested tags."""
- scores: Dict[str, int] = {
- tag_title: Tags._fuzzy_search(tag_name, tag['title'])
- for tag_title, tag in self._cache.items()
- }
-
- thresholds = thresholds or [100, 90, 80, 70, 60]
-
- for threshold in thresholds:
+ parent_dir = file.relative_to(base_path).parent
+ tag_name = file.stem
+ # Files directly under `base_path` have an empty string as the parent directory name
+ tag_group = parent_dir.name or None
+
+ self.tags[TagIdentifier(tag_group, tag_name)] = Tag(file)
+
+ def _get_suggestions(self, tag_identifier: TagIdentifier) -> list[tuple[TagIdentifier, Tag]]:
+ """Return a list of suggested tags for `tag_identifier`."""
+ for threshold in [100, 90, 80, 70, 60]:
suggestions = [
- self._cache[tag_title]
- for tag_title, matching_score in scores.items()
- if matching_score >= threshold
+ (identifier, tag)
+ for identifier, tag in self.tags.items()
+ if identifier.get_fuzzy_score(tag_identifier) >= threshold
]
if suggestions:
return suggestions
return []
- def _get_tag(self, tag_name: str) -> list:
- """Get a specific tag."""
- found = [self._cache.get(tag_name.lower(), None)]
- if not found[0]:
- return self._get_suggestions(tag_name)
- return found
+ def get_fuzzy_matches(self, tag_identifier: TagIdentifier) -> list[tuple[TagIdentifier, Tag]]:
+ """Get tags with identifiers similar to `tag_identifier`."""
+ suggestions = []
+
+ if tag_identifier.group is not None and len(tag_identifier.group) >= 2:
+ # Try fuzzy matching with only a name first
+ suggestions += self._get_suggestions(TagIdentifier(None, tag_identifier.group))
+
+ if len(tag_identifier.name) >= 2:
+ suggestions += self._get_suggestions(tag_identifier)
- def _get_tags_via_content(self, check: Callable[[Iterable], bool], keywords: str, user: Member) -> list:
+ return suggestions
+
+ def _get_tags_via_content(
+ self,
+ check: Callable[[Iterable], bool],
+ keywords: str,
+ user: Member,
+ ) -> list[tuple[TagIdentifier, Tag]]:
"""
Search for tags via contents.
`predicate` will be the built-in any, all, or a custom callable. Must return a bool.
"""
- keywords_processed: List[str] = []
- for keyword in keywords.split(','):
+ keywords_processed = []
+ for keyword in keywords.split(","):
keyword_sanitized = keyword.strip().casefold()
if not keyword_sanitized:
# this happens when there are leading / trailing / consecutive comma.
@@ -124,45 +197,48 @@ class Tags(Cog):
keywords_processed.append(keyword_sanitized)
if not keywords_processed:
- # after sanitizing, we can end up with an empty list, for example when keywords is ','
+ # after sanitizing, we can end up with an empty list, for example when keywords is ","
# in that case, we simply want to search for such keywords directly instead.
keywords_processed = [keywords]
matching_tags = []
- for tag in self._cache.values():
- matches = (query in tag['embed']['description'].casefold() for query in keywords_processed)
- if self.check_accessibility(user, tag) and check(matches):
- matching_tags.append(tag)
+ for identifier, tag in self.tags.items():
+ matches = (query in tag.content.casefold() for query in keywords_processed)
+ if tag.accessible_by(user) and check(matches):
+ matching_tags.append((identifier, tag))
return matching_tags
- async def _send_matching_tags(self, ctx: Context, keywords: str, matching_tags: list) -> None:
+ async def _send_matching_tags(
+ self,
+ ctx: Context,
+ keywords: str,
+ matching_tags: list[tuple[TagIdentifier, Tag]],
+ ) -> None:
"""Send the result of matching tags to user."""
- if not matching_tags:
- pass
- elif len(matching_tags) == 1:
- await ctx.send(embed=Embed().from_dict(matching_tags[0]['embed']))
- else:
- is_plural = keywords.strip().count(' ') > 0 or keywords.strip().count(',') > 0
+ if len(matching_tags) == 1:
+ await ctx.send(embed=matching_tags[0][1].embed)
+ elif matching_tags:
+ is_plural = keywords.strip().count(" ") > 0 or keywords.strip().count(",") > 0
embed = Embed(
title=f"Here are the tags containing the given keyword{'s' * is_plural}:",
- description='\n'.join(tag['title'] for tag in matching_tags[:10])
)
await LinePaginator.paginate(
- sorted(f"**»** {tag['title']}" for tag in matching_tags),
+ sorted(
+ f"**\N{RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK}** {identifier.name}"
+ for identifier, _ in matching_tags
+ ),
ctx,
embed,
- footer_text=FOOTER_TEXT,
- empty=False,
- max_lines=15
+ **self.PAGINATOR_DEFAULTS,
)
- @group(name='tags', aliases=('tag', 't'), invoke_without_command=True)
- async def tags_group(self, ctx: Context, *, tag_name: TagNameConverter = None) -> None:
+ @group(name="tags", aliases=("tag", "t"), invoke_without_command=True, usage="[tag_group] [tag_name]")
+ async def tags_group(self, ctx: Context, *, argument_string: Optional[str]) -> None:
"""Show all known tags, a single tag, or run a subcommand."""
- await self.get_command(ctx, tag_name=tag_name)
+ await self.get_command(ctx, argument_string=argument_string)
- @tags_group.group(name='search', invoke_without_command=True)
+ @tags_group.group(name="search", invoke_without_command=True)
async def search_tag_content(self, ctx: Context, *, keywords: str) -> None:
"""
Search inside tags' contents for tags. Allow searching for multiple keywords separated by comma.
@@ -172,123 +248,151 @@ class Tags(Cog):
matching_tags = self._get_tags_via_content(all, keywords, ctx.author)
await self._send_matching_tags(ctx, keywords, matching_tags)
- @search_tag_content.command(name='any')
- async def search_tag_content_any_keyword(self, ctx: Context, *, keywords: Optional[str] = 'any') -> None:
+ @search_tag_content.command(name="any")
+ async def search_tag_content_any_keyword(self, ctx: Context, *, keywords: Optional[str] = "any") -> None:
"""
Search inside tags' contents for tags. Allow searching for multiple keywords separated by comma.
Search for tags that have ANY of the keywords.
"""
- matching_tags = self._get_tags_via_content(any, keywords or 'any', ctx.author)
+ matching_tags = self._get_tags_via_content(any, keywords or "any", ctx.author)
await self._send_matching_tags(ctx, keywords, matching_tags)
- async def display_tag(self, ctx: Context, tag_name: str = None) -> bool:
+ async def get_tag_embed(
+ self,
+ ctx: Context,
+ tag_identifier: TagIdentifier,
+ ) -> Optional[Union[Embed, Literal[COOLDOWN.obj]]]:
"""
- If a tag is not found, display similar tag names as suggestions.
-
- If a tag is not specified, display a paginated embed of all tags.
+ Generate an embed of the requested tag or of suggestions if the tag doesn't exist/isn't accessible by the user.
- Tags are on cooldowns on a per-tag, per-channel basis. If a tag is on cooldown, display
- nothing and return True.
+ If the requested tag is on cooldown, return `COOLDOWN.obj`; otherwise, if no suggestions were found, return None.
"""
- def _command_on_cooldown(tag_name: str) -> bool:
- """
- Check if the command is currently on cooldown, on a per-tag, per-channel basis.
-
- The cooldown duration is set in constants.py.
- """
- now = time.time()
-
- cooldown_conditions = (
- tag_name
- and tag_name in self.tag_cooldowns
- and (now - self.tag_cooldowns[tag_name]["time"]) < constants.Cooldowns.tags
- and self.tag_cooldowns[tag_name]["channel"] == ctx.channel.id
+ filtered_tags = [
+ (ident, tag) for ident, tag in
+ self.get_fuzzy_matches(tag_identifier)[:10]
+ if tag.accessible_by(ctx.author)
+ ]
+
+ tag = self.tags.get(tag_identifier)
+
+ if tag is None and tag_identifier.group is not None:
+ # Try exact match with only the name
+ tag = self.tags.get(TagIdentifier(None, tag_identifier.group))
+
+ if tag is None and len(filtered_tags) == 1:
+ tag_identifier = filtered_tags[0][0]
+ tag = filtered_tags[0][1]
+
+ if tag is not None:
+ if tag.on_cooldown_in(ctx.channel):
+ log.debug(f"Tag {str(tag_identifier)!r} is on cooldown.")
+ return COOLDOWN.obj
+ tag.set_cooldown_for(ctx.channel)
+
+ self.bot.stats.incr(
+ f"tags.usages"
+ f"{'.' + tag_identifier.group.replace('-', '_') if tag_identifier.group else ''}"
+ f".{tag_identifier.name.replace('-', '_')}"
)
+ return tag.embed
- if cooldown_conditions:
- return True
- return False
-
- if _command_on_cooldown(tag_name):
- time_elapsed = time.time() - self.tag_cooldowns[tag_name]["time"]
- time_left = constants.Cooldowns.tags - time_elapsed
- log.info(
- f"{ctx.author} tried to get the '{tag_name}' tag, but the tag is on cooldown. "
- f"Cooldown ends in {time_left:.1f} seconds."
+ else:
+ if not filtered_tags:
+ return None
+ suggested_tags_text = "\n".join(
+ f"**\N{RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK}** {identifier}"
+ for identifier, tag in filtered_tags
+ if not tag.on_cooldown_in(ctx.channel)
+ )
+ return Embed(
+ title="Did you mean ...",
+ description=suggested_tags_text
)
- return True
-
- if tag_name is not None:
- temp_founds = self._get_tag(tag_name)
-
- founds = []
-
- for found_tag in temp_founds:
- if self.check_accessibility(ctx.author, found_tag):
- founds.append(found_tag)
- if len(founds) == 1:
- tag = founds[0]
- if ctx.channel.id not in TEST_CHANNELS:
- self.tag_cooldowns[tag_name] = {
- "time": time.time(),
- "channel": ctx.channel.id
- }
+ def accessible_tags(self, user: Member) -> list[str]:
+ """Return a formatted list of tags that are accessible by `user`; groups first, and alphabetically sorted."""
+ def tag_sort_key(tag_item: tuple[TagIdentifier, Tag]) -> str:
+ group, name = tag_item[0]
+ if group is None:
+ # Max codepoint character to force tags without a group to the end
+ group = chr(0x10ffff)
+
+ return group + name
+
+ result_lines = []
+ current_group = ""
+ group_accessible = True
+
+ for identifier, tag in sorted(self.tags.items(), key=tag_sort_key):
+
+ if identifier.group != current_group:
+ if not group_accessible:
+ # Remove group separator line if no tags in the previous group were accessible by the user.
+ result_lines.pop()
+ # A new group began, add a separator with the group name.
+ current_group = identifier.group
+ if current_group is not None:
+ group_accessible = False
+ result_lines.append(f"\n\N{BULLET} **{current_group}**")
+ else:
+ result_lines.append("\n\N{BULLET}")
+
+ if tag.accessible_by(user):
+ result_lines.append(f"**\N{RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK}** {identifier.name}")
+ group_accessible = True
+
+ return result_lines
+
+ def accessible_tags_in_group(self, group: str, user: discord.Member) -> list[str]:
+ """Return a formatted list of tags in `group` that are accessible by `user`."""
+ return sorted(
+ f"**\N{RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK}** {identifier}"
+ for identifier, tag in self.tags.items()
+ if identifier.group == group and tag.accessible_by(user)
+ )
+
+ @tags_group.command(name="get", aliases=("show", "g"), usage="[tag_group] [tag_name]")
+ async def get_command(self, ctx: Context, *, argument_string: Optional[str]) -> bool:
+ """
+ If a single argument matching a group name is given, list all accessible tags from that group.
+ Otherwise, display the tag if one was found for the given arguments, or try to display suggestions for that name.
- self.bot.stats.incr(f"tags.usages.{tag['title'].replace('-', '_')}")
+ With no arguments, list all accessible tags.
- await wait_for_deletion(
- await ctx.send(embed=Embed.from_dict(tag['embed'])),
- [ctx.author.id],
- )
- return True
- elif founds and len(tag_name) >= 3:
- await wait_for_deletion(
- await ctx.send(
- embed=Embed(
- title='Did you mean ...',
- description='\n'.join(tag['title'] for tag in founds[:10])
- )
- ),
- [ctx.author.id],
+ Returns True if a message was sent, or if the tag is on cooldown.
+ Returns False if no message was sent.
+ """ # noqa: D205, D415
+ if not argument_string:
+ if self.tags:
+ await LinePaginator.paginate(
+ self.accessible_tags(ctx.author), ctx, Embed(title="Available tags"), **self.PAGINATOR_DEFAULTS
)
- return True
-
- else:
- tags = self._cache.values()
- if not tags:
- await ctx.send(embed=Embed(
- description="**There are no tags in the database!**",
- colour=Colour.red()
- ))
- return True
else:
- embed: Embed = Embed(title="**Current tags**")
+ await ctx.send(embed=Embed(description="**There are no tags!**"))
+ return True
+
+ identifier = TagIdentifier.from_string(argument_string)
+
+ if identifier.group is None:
+ # Try to find accessible tags from a group matching the identifier's name.
+ if group_tags := self.accessible_tags_in_group(identifier.name, ctx.author):
await LinePaginator.paginate(
- sorted(
- f"**»** {tag['title']}" for tag in tags
- if self.check_accessibility(ctx.author, tag)
- ),
- ctx,
- embed,
- footer_text=FOOTER_TEXT,
- empty=False,
- max_lines=15
+ group_tags, ctx, Embed(title=f"Tags under *{identifier.name}*"), **self.PAGINATOR_DEFAULTS
)
return True
- return False
-
- @tags_group.command(name='get', aliases=('show', 'g'))
- async def get_command(self, ctx: Context, *, tag_name: TagNameConverter = None) -> bool:
- """
- Get a specified tag, or a list of all tags if no tag is specified.
+ embed = await self.get_tag_embed(ctx, identifier)
+ if embed is None:
+ return False
- Returns True if something can be sent, or if the tag is on cooldown.
- Returns False if no matches are found.
- """
- return await self.display_tag(ctx, tag_name)
+ if embed is not COOLDOWN.obj:
+ await wait_for_deletion(
+ await ctx.send(embed=embed),
+ (ctx.author.id,)
+ )
+ # A valid tag was found and was either sent, or is on cooldown
+ return True
def setup(bot: Bot) -> None:
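To make the scoring above concrete: `_fuzzy_search` strips the lower-cased search term down to letters, walks those letters in order through the target's alphabetic chunks, and returns matched letters / total letters; `TagIdentifier.get_fuzzy_score` then multiplies the group and name scores and scales the result by 100. A worked example of the scorer on its own (same walk, illustrative inputs):

    import re

    REGEX_NON_ALPHABET = re.compile(r"[^a-z]")  # inputs are lower-cased before matching


    def fuzzy_search(search: str, target: str) -> float:
        _search = REGEX_NON_ALPHABET.sub("", search.lower())
        if not _search:
            return 0
        current = 0
        for _target in REGEX_NON_ALPHABET.split(target.lower()):
            index = 0
            try:
                while index < len(_target) and _search[current] == _target[index]:
                    current += 1
                    index += 1
            except IndexError:
                break  # every letter of the search term has been matched
        return current / len(_search)


    print(fuzzy_search("fstring", "f-strings"))  # 1.0  -> a perfect name score
    print(fuzzy_search("fstr", "for-loop"))      # 0.25 -> only the leading "f" matches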
diff --git a/bot/exts/moderation/clean.py b/bot/exts/moderation/clean.py
index 94494b983..cb6836258 100644
--- a/bot/exts/moderation/clean.py
+++ b/bot/exts/moderation/clean.py
@@ -1,12 +1,11 @@
import contextlib
-import logging
import re
import time
from collections import defaultdict
from contextlib import suppress
from datetime import datetime
-from itertools import islice
-from typing import Any, Callable, Iterable, Literal, Optional, TYPE_CHECKING, Union
+from itertools import takewhile
+from typing import Callable, Iterable, Literal, Optional, TYPE_CHECKING, Union
from discord import Colour, Message, NotFound, TextChannel, User, errors
from discord.ext.commands import Cog, Context, Converter, Greedy, group, has_any_role
@@ -17,12 +16,11 @@ from bot.bot import Bot
from bot.constants import Channels, CleanMessages, Colours, Emojis, Event, Icons, MODERATION_ROLES
from bot.converters import Age, ISODateTime
from bot.exts.moderation.modlog import ModLog
+from bot.log import get_logger
from bot.utils.channel import is_mod_channel
-log = logging.getLogger(__name__)
+log = get_logger(__name__)
-# Default number of messages to look at in each channel.
-DEFAULT_TRAVERSE = 10
# Number of seconds before command invocations and responses are deleted in non-moderation channels.
MESSAGE_DELETE_DELAY = 5
@@ -33,12 +31,12 @@ CleanLimit = Union[Message, Age, ISODateTime]
class CleanChannels(Converter):
- """A converter that turns the given string to a list of channels to clean, or the literal `*` for all channels."""
+ """A converter to turn the string into a list of channels to clean, or the literal `*` for all public channels."""
_channel_converter = TextChannelConverter()
async def convert(self, ctx: Context, argument: str) -> Union[Literal["*"], list[TextChannel]]:
- """Converts a string to a list of channels to clean, or the literal `*` for all channels."""
+ """Converts a string to a list of channels to clean, or the literal `*` for all public channels."""
if argument == "*":
return "*"
return [await self._channel_converter.convert(ctx, channel) for channel in argument.split()]
@@ -87,7 +85,6 @@ class Clean(Cog):
@staticmethod
def _validate_input(
- traverse: int,
channels: Optional[CleanChannels],
bots_only: bool,
users: Optional[list[User]],
@@ -95,9 +92,9 @@ class Clean(Cog):
second_limit: Optional[CleanLimit],
) -> None:
"""Raise errors if an argument value or a combination of values is invalid."""
- # Is this an acceptable amount of messages to traverse?
- if traverse > CleanMessages.message_limit:
- raise BadArgument(f"Cannot traverse more than {CleanMessages.message_limit} messages.")
+ if first_limit is None:
+ # This is an optional argument for the sake of the master command, but it's actually required.
+ raise BadArgument("Missing cleaning limit.")
if (isinstance(first_limit, Message) or isinstance(second_limit, Message)) and channels:
raise BadArgument("Both a message limit and channels specified.")
@@ -110,10 +107,6 @@ class Clean(Cog):
if users and bots_only:
raise BadArgument("Marked as bots only, but users were specified.")
- # This is an implementation error rather than user error.
- if second_limit and not first_limit:
- raise ValueError("Second limit specified without the first.")
-
@staticmethod
async def _send_expiring_message(ctx: Context, content: str) -> None:
"""Send `content` to the context channel. Automatically delete if it's not a mod channel."""
@@ -121,12 +114,39 @@ class Clean(Cog):
await ctx.send(content, delete_after=delete_after)
@staticmethod
+ def _channels_set(
+ channels: CleanChannels, ctx: Context, first_limit: CleanLimit, second_limit: CleanLimit
+ ) -> set[TextChannel]:
+ """Standardize the input `channels` argument to a usable set of text channels."""
+ # Default to using the invoking context's channel or the channel of the message limit(s).
+ if not channels:
+ # Input was validated - if first_limit is a message, second_limit won't point at a different channel.
+ if isinstance(first_limit, Message):
+ channels = {first_limit.channel}
+ elif isinstance(second_limit, Message):
+ channels = {second_limit.channel}
+ else:
+ channels = {ctx.channel}
+ else:
+ if channels == "*":
+ channels = {
+ channel for channel in ctx.guild.channels
+ if isinstance(channel, TextChannel)
+ # Assume non-public channels don't need cleaning, to optimize for speed.
+ and channel.permissions_for(ctx.guild.default_role).view_channel
+ }
+ else:
+ channels = set(channels)
+
+ return channels
+
+ @staticmethod
def _build_predicate(
+ first_limit: datetime,
+ second_limit: Optional[datetime] = None,
bots_only: bool = False,
users: Optional[list[User]] = None,
regex: Optional[re.Pattern] = None,
- first_limit: Optional[datetime] = None,
- second_limit: Optional[datetime] = None,
) -> Predicate:
"""Return the predicate that decides whether to delete a given message."""
def predicate_bots_only(message: Message) -> bool:
@@ -167,20 +187,18 @@ class Clean(Cog):
predicates = []
# Set up the correct predicate
+ if second_limit:
+ predicates.append(predicate_range) # Delete messages in the specified age range
+ else:
+ predicates.append(predicate_after) # Delete messages older than the specified age
+
if bots_only:
predicates.append(predicate_bots_only) # Delete messages from bots
if users:
predicates.append(predicate_specific_users) # Delete messages from specific user
if regex:
predicates.append(predicate_regex) # Delete messages that match regex
- # Add up to one of the following:
- if second_limit:
- predicates.append(predicate_range) # Delete messages in the specified age range
- elif first_limit:
- predicates.append(predicate_after) # Delete messages older than specific message
- if not predicates:
- return lambda m: True
if len(predicates) == 1:
return predicates[0]
return lambda m: all(pred(m) for pred in predicates)
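The tail of `_build_predicate` above AND-composes whichever filters were selected (the mandatory time window plus any of bots-only, specific users, and regex). The same composition on a stand-in message type, just to show the shape (all names here are illustrative):

    from dataclasses import dataclass
    from typing import Callable


    @dataclass
    class FakeMessage:  # stand-in for discord.Message, purely for illustration
        author_is_bot: bool
        content: str


    Predicate = Callable[[FakeMessage], bool]

    predicates: list[Predicate] = [
        lambda m: m.author_is_bot,       # bots_only filter
        lambda m: "token" in m.content,  # regex filter stand-in
    ]


    def combine(preds: list[Predicate]) -> Predicate:
        # Same tail as _build_predicate: delete a message only when every filter agrees.
        return preds[0] if len(preds) == 1 else (lambda m: all(pred(m) for pred in preds))


    to_delete = combine(predicates)
    print(to_delete(FakeMessage(True, "leaked token")))   # True
    print(to_delete(FakeMessage(False, "leaked token")))  # False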
@@ -195,16 +213,25 @@ class Clean(Cog):
# Invocation message has already been deleted
log.info("Tried to delete invocation message, but it was already deleted.")
- def _get_messages_from_cache(self, traverse: int, to_delete: Predicate) -> tuple[defaultdict[Any, list], list[int]]:
+ def _use_cache(self, limit: datetime) -> bool:
+ """Tell whether all messages to be cleaned can be found in the cache."""
+ return self.bot.cached_messages[0].created_at <= limit
+
+ def _get_messages_from_cache(
+ self,
+ channels: set[TextChannel],
+ to_delete: Predicate,
+ lower_limit: datetime
+ ) -> tuple[defaultdict[TextChannel, list], list[int]]:
"""Helper function for getting messages from the cache."""
message_mappings = defaultdict(list)
message_ids = []
- for message in islice(self.bot.cached_messages, traverse):
+ for message in takewhile(lambda m: m.created_at > lower_limit, reversed(self.bot.cached_messages)):
if not self.cleaning:
# Cleaning was canceled
return message_mappings, message_ids
- if to_delete(message):
+ if message.channel in channels and to_delete(message):
message_mappings[message.channel].append(message)
message_ids.append(message.id)
@@ -212,17 +239,16 @@ class Clean(Cog):
async def _get_messages_from_channels(
self,
- traverse: int,
channels: Iterable[TextChannel],
to_delete: Predicate,
- before: Optional[datetime] = None,
+ before: datetime,
after: Optional[datetime] = None
- ) -> tuple[defaultdict[Any, list], list]:
+ ) -> tuple[defaultdict[TextChannel, list], list]:
message_mappings = defaultdict(list)
message_ids = []
for channel in channels:
- async for message in channel.history(limit=traverse, before=before, after=after):
+ async for message in channel.history(limit=CleanMessages.message_limit, before=before, after=after):
if not self.cleaning:
# Cleaning was canceled, return empty containers.
@@ -293,7 +319,8 @@ class Clean(Cog):
return deleted
if len(to_delete) > 0:
# Deleting any leftover messages if there are any
- await channel.delete_messages(to_delete)
+ with suppress(NotFound):
+ await channel.delete_messages(to_delete)
deleted.extend(to_delete)
if not self.cleaning:
@@ -304,12 +331,17 @@ class Clean(Cog):
return deleted
- async def _modlog_cleaned_messages(self, messages: list[Message], channels: CleanChannels, ctx: Context) -> bool:
- """Log the deleted messages to the modlog. Return True if logging was successful."""
+ async def _modlog_cleaned_messages(
+ self,
+ messages: list[Message],
+ channels: CleanChannels,
+ ctx: Context
+ ) -> Optional[str]:
+ """Log the deleted messages to the modlog, returning the log url if logging was successful."""
if not messages:
# Can't build an embed, nothing to clean!
await self._send_expiring_message(ctx, ":x: No matching messages could be found.")
- return False
+ return None
# Reverse the list to have reverse chronological order
log_messages = reversed(messages)
@@ -317,7 +349,7 @@ class Clean(Cog):
# Build the embed and send it
if channels == "*":
- target_channels = "all channels"
+ target_channels = "all public channels"
else:
target_channels = ", ".join(channel.mention for channel in channels)
@@ -335,42 +367,33 @@ class Clean(Cog):
channel_id=Channels.mod_log,
)
- return True
+ return log_url
# endregion
async def _clean_messages(
self,
ctx: Context,
- traverse: int,
channels: Optional[CleanChannels],
bots_only: bool = False,
users: Optional[list[User]] = None,
regex: Optional[re.Pattern] = None,
first_limit: Optional[CleanLimit] = None,
second_limit: Optional[CleanLimit] = None,
- use_cache: Optional[bool] = True
- ) -> None:
- """A helper function that does the actual message cleaning."""
- self._validate_input(traverse, channels, bots_only, users, first_limit, second_limit)
+ attempt_delete_invocation: bool = True,
+ ) -> Optional[str]:
+ """A helper function that does the actual message cleaning; returns the log URL if logging was successful."""
+ self._validate_input(channels, bots_only, users, first_limit, second_limit)
# Are we already performing a clean?
if self.cleaning:
await self._send_expiring_message(
ctx, ":x: Please wait for the currently ongoing clean operation to complete."
)
- return
+ return None
self.cleaning = True
- # Default to using the invoking context's channel or the channel of the message limit(s).
- if not channels:
- # Input was validated - if first_limit is a message, second_limit won't point at a different channel.
- if isinstance(first_limit, Message):
- channels = [first_limit.channel]
- elif isinstance(second_limit, Message):
- channels = [second_limit.channel]
- else:
- channels = [ctx.channel]
+ deletion_channels = self._channels_set(channels, ctx, first_limit, second_limit)
if isinstance(first_limit, Message):
first_limit = first_limit.created_at
@@ -380,19 +403,20 @@ class Clean(Cog):
first_limit, second_limit = sorted([first_limit, second_limit])
# Needs to be called after standardizing the input.
- predicate = self._build_predicate(bots_only, users, regex, first_limit, second_limit)
+ predicate = self._build_predicate(first_limit, second_limit, bots_only, users, regex)
- # Delete the invocation first
- await self._delete_invocation(ctx)
+ if attempt_delete_invocation:
+ # Delete the invocation first
+ await self._delete_invocation(ctx)
- if channels == "*" and use_cache:
- message_mappings, message_ids = self._get_messages_from_cache(traverse=traverse, to_delete=predicate)
+ if self._use_cache(first_limit):
+ log.trace(f"Messages for cleaning by {ctx.author.id} will be searched in the cache.")
+ message_mappings, message_ids = self._get_messages_from_cache(
+ channels=deletion_channels, to_delete=predicate, lower_limit=first_limit
+ )
else:
- deletion_channels = channels
- if channels == "*":
- deletion_channels = [channel for channel in ctx.guild.channels if isinstance(channel, TextChannel)]
+ log.trace(f"Messages for cleaning by {ctx.author.id} will be searched in channel histories.")
message_mappings, message_ids = await self._get_messages_from_channels(
- traverse=traverse,
channels=deletion_channels,
to_delete=predicate,
before=second_limit,
@@ -401,18 +425,27 @@ class Clean(Cog):
if not self.cleaning:
# Means that the cleaning was canceled
- return
+ return None
# Now let's delete the actual messages with purge.
self.mod_log.ignore(Event.message_delete, *message_ids)
deleted_messages = await self._delete_found(message_mappings)
self.cleaning = False
- logged = await self._modlog_cleaned_messages(deleted_messages, channels, ctx)
+ if not channels:
+ channels = deletion_channels
+ log_url = await self._modlog_cleaned_messages(deleted_messages, channels, ctx)
- if logged and is_mod_channel(ctx.channel):
- with suppress(NotFound): # Can happen if the invoker deleted their own messages.
- await ctx.message.add_reaction(Emojis.check_mark)
+ success_message = (
+ f"{Emojis.ok_hand} Deleted {len(deleted_messages)} messages. "
+ f"A log of the deleted messages can be found here: {log_url}."
+ )
+ if log_url and is_mod_channel(ctx.channel):
+ await ctx.reply(success_message)
+ elif log_url:
+ if mods := self.bot.get_channel(Channels.mods):
+ await mods.send(f"{ctx.author.mention} {success_message}")
+ return log_url
# region: Commands
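For reference, a minimal sketch of what the extracted `_channels_set` helper presumably does, reconstructed from the inlined logic removed above; the helper's name comes from the call site, but its body and signature here are assumptions:

def _channels_set(
    self,
    channels: Optional[CleanChannels],
    ctx: Context,
    first_limit: Optional[CleanLimit],
    second_limit: Optional[CleanLimit],
) -> list[TextChannel]:
    """Default to the invoking context's channel or the channel of the message limit(s)."""
    if not channels:
        # Input was validated - if first_limit is a message, second_limit won't point at a different channel.
        if isinstance(first_limit, Message):
            channels = [first_limit.channel]
        elif isinstance(second_limit, Message):
            channels = [second_limit.channel]
        else:
            channels = [ctx.channel]
    elif channels == "*":
        # An asterisk means every public text channel in the guild.
        channels = [channel for channel in ctx.guild.channels if isinstance(channel, TextChannel)]
    return channels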
@@ -421,12 +454,10 @@ class Clean(Cog):
self,
ctx: Context,
users: Greedy[User] = None,
- traverse: Optional[int] = None,
first_limit: Optional[CleanLimit] = None,
second_limit: Optional[CleanLimit] = None,
- use_cache: Optional[bool] = None,
- bots_only: Optional[bool] = False,
regex: Optional[Regex] = None,
+ bots_only: Optional[bool] = False,
*,
channels: CleanChannels = None # "Optional" with discord.py silently ignores incorrect input.
) -> None:
@@ -436,91 +467,74 @@ class Clean(Cog):
If arguments are provided, will act as a master command from which all subcommands can be derived.
\u2003• `users`: A series of user mentions, ID's, or names.
- \u2003• `traverse`: The number of messages to look at in each channel. If using the cache, will look at the
- first `traverse` messages in the cache.
\u2003• `first_limit` and `second_limit`: A message, a duration delta, or an ISO datetime.
+ At least one limit is required.
If a message is provided, cleaning will happen in that channel, and channels cannot be provided.
- If a limit is provided, multiple channels cannot be provided.
If only one of them is provided, acts as `clean until`. If both are provided, acts as `clean between`.
- \u2003• `use_cache`: Whether to use the message cache.
- If not provided, will default to False unless an asterisk is used for the channels.
- \u2003• `bots_only`: Whether to delete only bots. If specified, users cannot be specified.
\u2003• `regex`: A regex pattern the message must contain to be deleted.
The pattern must be provided enclosed in backticks.
If the pattern contains spaces, it still needs to be enclosed in double quotes on top of that.
- \u2003• `channels`: A series of channels to delete in, or an asterisk to delete from all channels.
+ \u2003• `bots_only`: Whether to delete only bots. If specified, users cannot be specified.
+ \u2003• `channels`: A series of channels to delete in, or an asterisk to delete from all public channels.
"""
- if not any([traverse, users, first_limit, second_limit, regex, channels]):
+ if not any([users, first_limit, second_limit, regex, channels]):
await ctx.send_help(ctx.command)
return
- if not traverse:
- if first_limit:
- traverse = CleanMessages.message_limit
- else:
- traverse = DEFAULT_TRAVERSE
- if use_cache is None:
- use_cache = channels == "*"
-
- await self._clean_messages(
- ctx, traverse, channels, bots_only, users, regex, first_limit, second_limit, use_cache
- )
+ await self._clean_messages(ctx, channels, bots_only, users, regex, first_limit, second_limit)
@clean_group.command(name="user", aliases=["users"])
async def clean_user(
self,
ctx: Context,
user: User,
- traverse: Optional[int] = DEFAULT_TRAVERSE,
- use_cache: Optional[bool] = True,
+ message_or_time: CleanLimit,
*,
channels: CleanChannels = None
) -> None:
- """Delete messages posted by the provided user, stop cleaning after traversing `traverse` messages."""
- await self._clean_messages(ctx, traverse, users=[user], channels=channels, use_cache=use_cache)
+ """
+ Delete messages posted by the provided user, stop cleaning after reaching `message_or_time`.
- @clean_group.command(name="all", aliases=["everything"])
- async def clean_all(
- self,
- ctx: Context,
- traverse: Optional[int] = DEFAULT_TRAVERSE,
- use_cache: Optional[bool] = True,
- *,
- channels: CleanChannels = None
- ) -> None:
- """Delete all messages, regardless of poster, stop cleaning after traversing `traverse` messages."""
- await self._clean_messages(ctx, traverse, channels=channels, use_cache=use_cache)
+ `message_or_time` can be either a message to stop at (exclusive), a timedelta for max message age, or an ISO
+ datetime.
+
+ If a message is specified, `channels` cannot be specified.
+ """
+ await self._clean_messages(ctx, users=[user], channels=channels, first_limit=message_or_time)
@clean_group.command(name="bots", aliases=["bot"])
- async def clean_bots(
- self,
- ctx: Context,
- traverse: Optional[int] = DEFAULT_TRAVERSE,
- use_cache: Optional[bool] = True,
- *,
- channels: CleanChannels = None
- ) -> None:
- """Delete all messages posted by a bot, stop cleaning after traversing `traverse` messages."""
- await self._clean_messages(ctx, traverse, bots_only=True, channels=channels, use_cache=use_cache)
+ async def clean_bots(self, ctx: Context, message_or_time: CleanLimit, *, channels: CleanChannels = None) -> None:
+ """
+ Delete all messages posted by a bot, stop cleaning after reaching `message_or_time`.
+
+ `message_or_time` can be either a message to stop at (exclusive), a timedelta for max message age, or an ISO
+ datetime.
+
+ If a message is specified, `channels` cannot be specified.
+ """
+ await self._clean_messages(ctx, bots_only=True, channels=channels, first_limit=message_or_time)
@clean_group.command(name="regex", aliases=["word", "expression", "pattern"])
async def clean_regex(
self,
ctx: Context,
regex: Regex,
- traverse: Optional[int] = DEFAULT_TRAVERSE,
- use_cache: Optional[bool] = True,
+ message_or_time: CleanLimit,
*,
channels: CleanChannels = None
) -> None:
"""
- Delete all messages that match a certain regex, stop cleaning after traversing `traverse` messages.
+ Delete all messages that match a certain regex, stop cleaning after reaching `message_or_time`.
+
+ `message_or_time` can be either a message to stop at (exclusive), a timedelta for max message age, or an ISO
+ datetime.
+ If a message is specified, `channels` cannot be specified.
The pattern must be provided enclosed in backticks.
If the pattern contains spaces, it still needs to be enclosed in double quotes on top of that.
For example: `[0-9]`
"""
- await self._clean_messages(ctx, traverse, regex=regex, channels=channels, use_cache=use_cache)
+ await self._clean_messages(ctx, regex=regex, channels=channels, first_limit=message_or_time)
@clean_group.command(name="until")
async def clean_until(
@@ -537,7 +551,6 @@ class Clean(Cog):
"""
await self._clean_messages(
ctx,
- CleanMessages.message_limit,
channels=[channel] if channel else None,
first_limit=until,
)
@@ -561,7 +574,6 @@ class Clean(Cog):
"""
await self._clean_messages(
ctx,
- CleanMessages.message_limit,
channels=[channel] if channel else None,
first_limit=first_limit,
second_limit=second_limit,
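As a usage sketch of the reworked helper: `_clean_messages` is now keyword-driven, no longer takes `traverse`/`use_cache`, and returns the clean-log URL. Roughly how another cog can drive it (this mirrors the `cleanban` change further down in this diff; the surrounding names are illustrative):

clean_cog = self.bot.get_cog("Clean")
if clean_cog is not None:
    log_url = await clean_cog._clean_messages(
        ctx,
        users=[user],
        channels="*",                                # all public channels
        first_limit=await Age().convert(ctx, "1h"),  # only messages from the last hour
        attempt_delete_invocation=False,             # keep the invoking message
    )
    if log_url:
        await ctx.send(f"A log of the deleted messages: {log_url}")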
diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py
index 14db37367..178be734d 100644
--- a/bot/exts/moderation/defcon.py
+++ b/bot/exts/moderation/defcon.py
@@ -17,12 +17,9 @@ from bot.constants import Channels, Colours, Emojis, Event, Icons, MODERATION_RO
from bot.converters import DurationDelta, Expiry
from bot.exts.moderation.modlog import ModLog
from bot.log import get_logger
-from bot.utils import scheduling
+from bot.utils import scheduling, time
from bot.utils.messages import format_user
from bot.utils.scheduling import Scheduler
-from bot.utils.time import (
- TimestampFormats, discord_timestamp, humanize_delta, parse_duration_string, relativedelta_to_timedelta
-)
log = get_logger(__name__)
@@ -88,7 +85,7 @@ class Defcon(Cog):
try:
settings = await self.defcon_settings.to_dict()
- self.threshold = parse_duration_string(settings["threshold"]) if settings.get("threshold") else None
+ self.threshold = time.parse_duration_string(settings["threshold"]) if settings.get("threshold") else None
self.expiry = datetime.fromisoformat(settings["expiry"]) if settings.get("expiry") else None
except RedisError:
log.exception("Unable to get DEFCON settings!")
@@ -102,7 +99,7 @@ class Defcon(Cog):
self.scheduler.schedule_at(self.expiry, 0, self._remove_threshold())
self._update_notifier()
- log.info(f"DEFCON synchronized: {humanize_delta(self.threshold) if self.threshold else '-'}")
+ log.info(f"DEFCON synchronized: {time.humanize_delta(self.threshold) if self.threshold else '-'}")
self._update_channel_topic()
@@ -112,7 +109,7 @@ class Defcon(Cog):
if self.threshold:
now = arrow.utcnow()
- if now - member.created_at < relativedelta_to_timedelta(self.threshold):
+ if now - member.created_at < time.relativedelta_to_timedelta(self.threshold):
log.info(f"Rejecting user {member}: Account is too new")
message_sent = False
@@ -151,11 +148,12 @@ class Defcon(Cog):
@has_any_role(*MODERATION_ROLES)
async def status(self, ctx: Context) -> None:
"""Check the current status of DEFCON mode."""
+ expiry = time.format_relative(self.expiry) if self.expiry else "-"
embed = Embed(
colour=Colour.og_blurple(), title="DEFCON Status",
description=f"""
- **Threshold:** {humanize_delta(self.threshold) if self.threshold else "-"}
- **Expires:** {discord_timestamp(self.expiry, TimestampFormats.RELATIVE) if self.expiry else "-"}
+ **Threshold:** {time.humanize_delta(self.threshold) if self.threshold else "-"}
+ **Expires:** {expiry}
**Verification level:** {ctx.guild.verification_level.name}
"""
)
@@ -213,7 +211,8 @@ class Defcon(Cog):
def _update_channel_topic(self) -> None:
"""Update the #defcon channel topic with the current DEFCON status."""
- new_topic = f"{BASE_CHANNEL_TOPIC}\n(Threshold: {humanize_delta(self.threshold) if self.threshold else '-'})"
+ threshold = time.humanize_delta(self.threshold) if self.threshold else '-'
+ new_topic = f"{BASE_CHANNEL_TOPIC}\n(Threshold: {threshold})"
self.mod_log.ignore(Event.guild_channel_update, Channels.defcon)
scheduling.create_task(self.channel.edit(topic=new_topic))
@@ -255,12 +254,12 @@ class Defcon(Cog):
expiry_message = ""
if expiry:
- activity_duration = relativedelta(expiry, arrow.utcnow().datetime)
- expiry_message = f" for the next {humanize_delta(activity_duration, max_units=2)}"
+ formatted_expiry = time.humanize_delta(expiry, max_units=2)
+ expiry_message = f" for the next {formatted_expiry}"
if self.threshold:
channel_message = (
- f"updated; accounts must be {humanize_delta(self.threshold)} "
+ f"updated; accounts must be {time.humanize_delta(self.threshold)} "
f"old to join the server{expiry_message}"
)
else:
@@ -290,7 +289,7 @@ class Defcon(Cog):
def _log_threshold_stat(self, threshold: relativedelta) -> None:
"""Adds the threshold to the bot stats in days."""
- threshold_days = relativedelta_to_timedelta(threshold).total_seconds() / SECONDS_IN_DAY
+ threshold_days = time.relativedelta_to_timedelta(threshold).total_seconds() / SECONDS_IN_DAY
self.bot.stats.gauge("defcon.threshold", threshold_days)
async def _send_defcon_log(self, action: Action, actor: User) -> None:
@@ -298,7 +297,7 @@ class Defcon(Cog):
info = action.value
log_msg: str = (
f"**Staffer:** {actor.mention} {actor} (`{actor.id}`)\n"
- f"{info.template.format(threshold=(humanize_delta(self.threshold) if self.threshold else '-'))}"
+ f"{info.template.format(threshold=(time.humanize_delta(self.threshold) if self.threshold else '-'))}"
)
status_msg = f"DEFCON {action.name.lower()}"
@@ -317,7 +316,7 @@ class Defcon(Cog):
@tasks.loop(hours=1)
async def defcon_notifier(self) -> None:
"""Routinely notify moderators that DEFCON is active."""
- await self.channel.send(f"Defcon is on and is set to {humanize_delta(self.threshold)}.")
+ await self.channel.send(f"Defcon is on and is set to {time.humanize_delta(self.threshold)}.")
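The defcon changes above are mostly mechanical: individual helpers are no longer imported from `bot.utils.time`; the module is used as a namespace instead. A minimal sketch of the resulting call pattern (helper signatures are inferred from the call sites above, not from documentation):

import arrow

from bot.utils import time

threshold = time.parse_duration_string("5d")   # a relativedelta, as used in the settings sync
created_at = arrow.utcnow().shift(days=-2)     # stand-in for member.created_at

threshold_text = time.humanize_delta(threshold) if threshold else "-"
too_new = arrow.utcnow() - created_at < time.relativedelta_to_timedelta(threshold)
expiry_text = time.format_relative(arrow.utcnow().shift(hours=6))  # replaces discord_timestamp(..., RELATIVE)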
def cog_unload(self) -> None:
"""Cancel the notifer and threshold removal tasks when the cog unloads."""
diff --git a/bot/exts/moderation/incidents.py b/bot/exts/moderation/incidents.py
index e265e29d3..b579416a6 100644
--- a/bot/exts/moderation/incidents.py
+++ b/bot/exts/moderation/incidents.py
@@ -1,16 +1,18 @@
import asyncio
-import typing as t
+import re
from datetime import datetime
from enum import Enum
+from typing import Optional
import discord
-from discord.ext.commands import Cog
+from async_rediscache import RedisCache
+from discord.ext.commands import Cog, Context, MessageConverter, MessageNotFound
from bot.bot import Bot
-from bot.constants import Channels, Colours, Emojis, Guild, Webhooks
+from bot.constants import Channels, Colours, Emojis, Guild, Roles, Webhooks
from bot.log import get_logger
from bot.utils import scheduling
-from bot.utils.messages import sub_clyde
+from bot.utils.messages import format_user, sub_clyde
log = get_logger(__name__)
@@ -22,6 +24,12 @@ CRAWL_LIMIT = 50
# Seconds for `crawl_task` to sleep after adding reactions to a message
CRAWL_SLEEP = 2
+DISCORD_MESSAGE_LINK_RE = re.compile(
+ r"(https?:\/\/(?:(ptb|canary|www)\.)?discord(?:app)?\.com\/channels\/"
+ r"[0-9]{15,20}"
+ r"\/[0-9]{15,20}\/[0-9]{15,20})"
+)
+
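Because the pattern above contains two capture groups, `re.findall` returns a tuple per match with the full link first, which is why `extract_message_links` below indexes `message_link[0]`. A quick illustration (the IDs are made up):

sample = "Report: https://discord.com/channels/267624335836053506/291284109232308226/123456789012345678 is spam"
matches = DISCORD_MESSAGE_LINK_RE.findall(sample)
# [("https://discord.com/channels/267624335836053506/291284109232308226/123456789012345678", "")]
full_link = matches[0][0]  # the second element is the optional ptb/canary/www subdomain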
class Signal(Enum):
"""
@@ -37,17 +45,17 @@ class Signal(Enum):
# Reactions from non-mod roles will be removed
-ALLOWED_ROLES: t.Set[int] = set(Guild.moderation_roles)
+ALLOWED_ROLES: set[int] = set(Guild.moderation_roles)
# Message must have all of these emoji to pass the `has_signals` check
-ALL_SIGNALS: t.Set[str] = {signal.value for signal in Signal}
+ALL_SIGNALS: set[str] = {signal.value for signal in Signal}
# An embed coupled with an optional file to be dispatched
# If the file is not None, the embed attempts to show it in its body
-FileEmbed = t.Tuple[discord.Embed, t.Optional[discord.File]]
+FileEmbed = tuple[discord.Embed, Optional[discord.File]]
-async def download_file(attachment: discord.Attachment) -> t.Optional[discord.File]:
+async def download_file(attachment: discord.Attachment) -> Optional[discord.File]:
"""
Download & return `attachment` file.
@@ -121,7 +129,7 @@ def is_incident(message: discord.Message) -> bool:
return all(conditions)
-def own_reactions(message: discord.Message) -> t.Set[str]:
+def own_reactions(message: discord.Message) -> set[str]:
"""Get the set of reactions placed on `message` by the bot itself."""
return {str(reaction.emoji) for reaction in message.reactions if reaction.me}
@@ -131,6 +139,109 @@ def has_signals(message: discord.Message) -> bool:
return ALL_SIGNALS.issubset(own_reactions(message))
+def shorten_text(text: str) -> str:
+ """
+ Truncate the text to at most 3 lines and 300 characters, or to 50 characters if it is a single word.
+
+ Including the "..." placeholder, the result is at most 303 characters across at most 3 lines.
+ """
+ original_length = len(text)
+ # Truncate text to a maximum of 300 characters
+ if len(text) > 300:
+ text = text[:300]
+
+ # Limit to a maximum of three lines
+ text = "\n".join(text.split("\n", maxsplit=3)[:3])
+
+ # If it is a single word, then truncate it to 50 characters
+ if text.find(" ") == -1:
+ text = text[:50]
+
+ # Remove extra whitespaces from the `text`
+ text = text.strip()
+
+ # Add placeholder if the text was shortened
+ if len(text) < original_length:
+ text = f"{text}..."
+
+ return text
+
+
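Two quick examples of the truncation rules implemented above:

shorten_text("A" * 400)
# -> 50 "A" characters followed by "...": first capped at 300 characters, then at 50 because it is a single word

shorten_text("one\ntwo\nthree\nfour")
# -> "one\ntwo\nthree...": only the first three lines are kept, so the "..." placeholder is appended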
+async def make_message_link_embed(ctx: Context, message_link: str) -> Optional[discord.Embed]:
+ """
+ Create an embedded representation of the discord message link contained in the incident report.
+
+ The embed will contain the following information:
+ Author: @Jason Terror ♦ (736234578745884682)
+ Channel: Special/#bot-commands (814190307980607493)
+ Content: This is a very important message!
+ """
+ embed = None
+
+ try:
+ message: discord.Message = await MessageConverter().convert(ctx, message_link)
+ except MessageNotFound:
+ mod_logs_channel = ctx.bot.get_channel(Channels.mod_log)
+
+ last_100_logs: list[discord.Message] = await mod_logs_channel.history(limit=100).flatten()
+
+ for log_entry in last_100_logs:
+ if not log_entry.embeds:
+ continue
+
+ log_embed: discord.Embed = log_entry.embeds[0]
+ if (
+ log_embed.author.name == "Message deleted"
+ and f"[Jump to message]({message_link})" in log_embed.description
+ ):
+ embed = discord.Embed(
+ colour=discord.Colour.dark_gold(),
+ title="Deleted Message Link",
+ description=(
+ f"Found <#{Channels.mod_log}> entry for deleted message: "
+ f"[Jump to message]({log_entry.jump_url})."
+ )
+ )
+ if not embed:
+ embed = discord.Embed(
+ colour=discord.Colour.red(),
+ title="Bad Message Link",
+ description=f"Message {message_link} not found."
+ )
+ except discord.DiscordException as e:
+ log.exception(f"Failed to make message link embed for '{message_link}', raised exception: {e}")
+ else:
+ channel = message.channel
+ if not channel.permissions_for(channel.guild.get_role(Roles.helpers)).view_channel:
+ log.info(
+ f"Helpers don't have read permissions in #{channel.name},"
+ f" not sending message link embed for {message_link}"
+ )
+ return
+
+ embed = discord.Embed(
+ colour=discord.Colour.gold(),
+ description=(
+ f"**Author:** {format_user(message.author)}\n"
+ f"**Channel:** {channel.mention} ({channel.category}"
+ f"{f'/#{channel.parent.name} - ' if isinstance(channel, discord.Thread) else '/#'}"
+ f"{channel.name})\n"
+ ),
+ timestamp=message.created_at
+ )
+ embed.set_author(name=message.author, icon_url=message.author.display_avatar.url)
+ embed.add_field(
+ name="Content",
+ value=shorten_text(message.content) if message.content else "[No Message Content]"
+ )
+ embed.set_footer(text=f"Message ID: {message.id}")
+
+ if message.attachments:
+ embed.set_image(url=message.attachments[0].url)
+
+ return embed
+
+
async def add_signals(incident: discord.Message) -> None:
"""
Add `Signal` member emoji to `incident` as reactions.
@@ -168,6 +279,7 @@ class Incidents(Cog):
* See: `crawl_incidents`
On message:
+ * Run the message through `extract_message_links` and send the resulting embeds to the channel
* Add `Signal` member emoji if message qualifies as an incident
* Ignore messages starting with #
* Use this if verbal communication is necessary
@@ -181,18 +293,35 @@ class Incidents(Cog):
* If `Signal.ACTIONED` or `Signal.NOT_ACTIONED` were chosen, attempt to
relay the incident message to #incidents-archive
* If relay successful, delete original message
+ * Delete quotation message if cached
* See: `on_raw_reaction_add`
Please refer to function docstrings for implementation details.
"""
+ # This cache maps the ID of an incident report message to the ID of the webhook message holding its link embeds
+ # RedisCache[discord.Message.id, discord.Message.id]
+ message_link_embeds_cache = RedisCache()
+
def __init__(self, bot: Bot) -> None:
"""Prepare `event_lock` and schedule `crawl_task` on start-up."""
self.bot = bot
+ self.incidents_webhook = None
+
+ scheduling.create_task(self.fetch_webhook(), event_loop=self.bot.loop)
self.event_lock = asyncio.Lock()
self.crawl_task = scheduling.create_task(self.crawl_incidents(), event_loop=self.bot.loop)
+ async def fetch_webhook(self) -> None:
+ """Fetch the incidents webhook object, so we can post message link embeds to it."""
+ await self.bot.wait_until_guild_available()
+
+ try:
+ self.incidents_webhook = await self.bot.fetch_webhook(Webhooks.incidents)
+ except discord.HTTPException:
+ log.error(f"Failed to fetch incidents webhook with id `{Webhooks.incidents}`.")
+
async def crawl_incidents(self) -> None:
"""
Crawl #incidents and add missing emoji where necessary.
@@ -292,8 +421,11 @@ class Incidents(Cog):
This ensures that if there is a racing event awaiting the lock, it will fail to find the
message, and will abort. There is a `timeout` to ensure that this doesn't hold the lock
forever should something go wrong.
+
+ Deletes cache value (`message_link_embeds_cache`) of `incident` if it exists. It then removes the
+ webhook message for that particular link from the channel.
"""
- members_roles: t.Set[int] = {role.id for role in member.roles}
+ members_roles: set[int] = {role.id for role in member.roles}
if not members_roles & ALLOWED_ROLES: # Intersection is truthy on at least 1 common element
log.debug(f"Removing invalid reaction: user {member} is not permitted to send signals")
try:
@@ -340,7 +472,11 @@ class Incidents(Cog):
else:
log.trace("Deletion was confirmed")
- async def resolve_message(self, message_id: int) -> t.Optional[discord.Message]:
+ if self.incidents_webhook:
+ # Deletes the message link embeds found in cache from the channel and cache.
+ await self.delete_msg_link_embed(incident.id)
+
+ async def resolve_message(self, message_id: int) -> Optional[discord.Message]:
"""
Get `discord.Message` for `message_id` from cache, or API.
@@ -355,7 +491,7 @@ class Incidents(Cog):
"""
await self.bot.wait_until_guild_available() # First make sure that the cache is ready
log.trace(f"Resolving message for: {message_id=}")
- message: t.Optional[discord.Message] = self.bot._connection._get_message(message_id)
+ message: Optional[discord.Message] = self.bot._connection._get_message(message_id)
if message is not None:
log.trace("Message was found in cache")
@@ -419,9 +555,107 @@ class Incidents(Cog):
@Cog.listener()
async def on_message(self, message: discord.Message) -> None:
- """Pass `message` to `add_signals` if and only if it satisfies `is_incident`."""
- if is_incident(message):
- await add_signals(message)
+ """
+ Pass `message` to `add_signals` and `extract_message_links` if it satisfies `is_incident`.
+
+ If `message` is an incident report, then run it through `extract_message_links` to get all
+ the message link embeds (embeds which contain information about that particular link).
+ These message link embeds are then sent into the channel.
+
+ Also passes the message into `add_signals` if the message is an incident.
+ """
+ if not is_incident(message):
+ return
+
+ await add_signals(message)
+
+ # Only use this feature if incidents webhook embed is found
+ if self.incidents_webhook:
+ if embed_list := await self.extract_message_links(message):
+ await self.send_message_link_embeds(embed_list, message, self.incidents_webhook)
+
+ @Cog.listener()
+ async def on_raw_message_delete(self, payload: discord.RawMessageDeleteEvent) -> None:
+ """
+ Delete message link embeds for `payload.message_id`.
+
+ Search the cache for the message; if found, delete it from both the cache and the channel.
+ """
+ if self.incidents_webhook:
+ await self.delete_msg_link_embed(payload.message_id)
+
+ async def extract_message_links(self, message: discord.Message) -> Optional[list[discord.Embed]]:
+ """
+ Check if there are any message links in the text content.
+
+ Then pass the message_link into `make_message_link_embed` to format an
+ embed for it containing information about the link.
+
+ As Discord only allows a maximum of 10 embeds in a single webhook message, only the
+ first 10 embeds are sent and the rest are ignored.
+
+ If no links are found for the message, just log a trace statement.
+ """
+ message_links = DISCORD_MESSAGE_LINK_RE.findall(message.content)
+ if not message_links:
+ log.trace(
+ f"No message links detected on incident message with id {message.id}."
+ )
+ return
+
+ embeds = []
+ for message_link in message_links[:10]:
+ ctx = await self.bot.get_context(message)
+ embed = await make_message_link_embed(ctx, message_link[0])
+ if embed:
+ embeds.append(embed)
+
+ return embeds
+
+ async def send_message_link_embeds(
+ self,
+ webhook_embed_list: list,
+ message: discord.Message,
+ webhook: discord.Webhook,
+ ) -> Optional[int]:
+ """
+ Send message link embeds to #incidents channel.
+
+ Uses the `webhook` passed in as a parameter to send
+ the embeds in `webhook_embed_list`.
+
+ After sending the embeds, it maps the `message.id`
+ to the sent webhook message's ID in the async redis-cache.
+ """
+ try:
+ webhook_msg = await webhook.send(
+ embeds=[embed for embed in webhook_embed_list if embed],
+ username=sub_clyde(message.author.name),
+ avatar_url=message.author.display_avatar.url,
+ wait=True,
+ )
+ except discord.DiscordException:
+ log.exception(
+ f"Failed to send message link embed {message.id} to #incidents."
+ )
+ else:
+ await self.message_link_embeds_cache.set(message.id, webhook_msg.id)
+ log.trace("Message link embeds sent successfully to #incidents!")
+ return webhook_msg.id
+
+ async def delete_msg_link_embed(self, message_id: int) -> None:
+ """Delete the Discord message link message found in cache for `message_id`."""
+ log.trace("Deleting Discord message link's webhook message.")
+ webhook_msg_id = await self.message_link_embeds_cache.get(int(message_id))
+
+ if webhook_msg_id:
+ try:
+ await self.incidents_webhook.delete_message(webhook_msg_id)
+ except discord.errors.NotFound:
+ log.trace(f"Incidents message link embed (`{webhook_msg_id}`) has already been deleted, skipping.")
+
+ await self.message_link_embeds_cache.delete(message_id)
+ log.trace("Successfully deleted the Discord message link's webhook message.")
def setup(bot: Bot) -> None:
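The embed lifecycle added above hinges on `wait=True`: discord.py only returns the created webhook message when `wait=True` is passed, and that returned message's ID is what gets cached so the embeds can be cleaned up later. A condensed sketch of the round trip:

# Send: post the link embeds and remember which webhook message contains them.
webhook_msg = await self.incidents_webhook.send(embeds=embeds, wait=True)
await self.message_link_embeds_cache.set(incident.id, webhook_msg.id)

# Delete: when the incident report goes away, drop the companion webhook message too.
if webhook_msg_id := await self.message_link_embeds_cache.get(incident.id):
    await self.incidents_webhook.delete_message(webhook_msg_id)
await self.message_link_embeds_cache.delete(incident.id)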
diff --git a/bot/exts/moderation/infraction/_scheduler.py b/bot/exts/moderation/infraction/_scheduler.py
index 762eb6afa..2fc54856f 100644
--- a/bot/exts/moderation/infraction/_scheduler.py
+++ b/bot/exts/moderation/infraction/_scheduler.py
@@ -136,7 +136,7 @@ class InfractionScheduler:
infr_type = infraction["type"]
icon = _utils.INFRACTION_ICONS[infr_type][0]
reason = infraction["reason"]
- expiry = time.format_infraction_with_duration(infraction["expires_at"])
+ expiry = time.format_with_duration(infraction["expires_at"])
id_ = infraction['id']
if user_reason is None:
@@ -166,13 +166,12 @@ class InfractionScheduler:
# apply kick/ban infractions first, this would mean that we'd make it
# impossible for us to deliver a DM. See python-discord/bot#982.
if not infraction["hidden"] and infr_type in {"ban", "kick"}:
- dm_result = f"{constants.Emojis.failmail} "
- dm_log_text = "\nDM: **Failed**"
-
- # Accordingly update whether the user was successfully notified via DM.
- if await _utils.notify_infraction(user, infr_type.replace("_", " ").title(), expiry, user_reason, icon):
+ if await _utils.notify_infraction(infraction, user, user_reason):
dm_result = ":incoming_envelope: "
dm_log_text = "\nDM: Sent"
+ else:
+ dm_result = f"{constants.Emojis.failmail} "
+ dm_log_text = "\nDM: **Failed**"
end_msg = ""
if is_mod_channel(ctx.channel):
@@ -221,7 +220,7 @@ class InfractionScheduler:
failed = True
if failed:
- log.trace(f"Deleted infraction {infraction['id']} from database because applying infraction failed.")
+ log.trace(f"Trying to delete infraction {id_} from database because applying infraction failed.")
try:
await self.bot.api_client.delete(f"bot/infractions/{id_}")
except ResponseCodeError as e:
@@ -234,13 +233,12 @@ class InfractionScheduler:
# If we need to DM and haven't already tried to
if not infraction["hidden"] and infr_type not in {"ban", "kick"}:
- dm_result = f"{constants.Emojis.failmail} "
- dm_log_text = "\nDM: **Failed**"
-
- # Accordingly update whether the user was successfully notified via DM.
- if await _utils.notify_infraction(user, infr_type.replace("_", " ").title(), expiry, user_reason, icon):
+ if await _utils.notify_infraction(infraction, user, user_reason):
dm_result = ":incoming_envelope: "
dm_log_text = "\nDM: Sent"
+ else:
+ dm_result = f"{constants.Emojis.failmail} "
+ dm_log_text = "\nDM: **Failed**"
# Send a confirmation message to the invoking context.
log.trace(f"Sending infraction #{id_} confirmation message.")
@@ -261,7 +259,7 @@ class InfractionScheduler:
{additional_info}
"""),
content=log_content,
- footer=f"ID {infraction['id']}"
+ footer=f"ID: {id_}"
)
log.info(f"Applied {purge}{infr_type} infraction #{id_} to {user}.")
@@ -377,20 +375,15 @@ class InfractionScheduler:
actor = infraction["actor"]
type_ = infraction["type"]
id_ = infraction["id"]
- inserted_at = infraction["inserted_at"]
- expiry = infraction["expires_at"]
log.info(f"Marking infraction #{id_} as inactive (expired).")
- expiry = dateutil.parser.isoparse(expiry) if expiry else None
- created = time.format_infraction_with_duration(inserted_at, expiry)
-
log_content = None
log_text = {
"Member": f"<@{user_id}>",
"Actor": f"<@{actor}>",
"Reason": infraction["reason"],
- "Created": created,
+ "Created": time.format_with_duration(infraction["inserted_at"], infraction["expires_at"]),
}
try:
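`format_infraction_with_duration` is replaced here by `time.format_with_duration`; judging from the two call sites in this hunk it accepts either a single expiry timestamp or a start/end pair. This is an inference from usage, not a documented signature:

# Single timestamp: used for the expiry string shown in confirmations.
expiry = time.format_with_duration(infraction["expires_at"])

# Start and end: used for the "Created" field when marking an infraction as expired.
created = time.format_with_duration(infraction["inserted_at"], infraction["expires_at"])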
diff --git a/bot/exts/moderation/infraction/_utils.py b/bot/exts/moderation/infraction/_utils.py
index c0ef80e3d..c1be18362 100644
--- a/bot/exts/moderation/infraction/_utils.py
+++ b/bot/exts/moderation/infraction/_utils.py
@@ -1,14 +1,17 @@
import typing as t
from datetime import datetime
+import arrow
import discord
from discord.ext.commands import Context
+import bot
from bot.api import ResponseCodeError
from bot.constants import Colours, Icons
from bot.converters import MemberOrUser
from bot.errors import InvalidInfractedUserError
from bot.log import get_logger
+from bot.utils import time
log = get_logger(__name__)
@@ -20,7 +23,7 @@ INFRACTION_ICONS = {
"note": (Icons.user_warn, None),
"superstar": (Icons.superstarify, Icons.unsuperstarify),
"warning": (Icons.user_warn, None),
- "voice_ban": (Icons.voice_state_red, Icons.voice_state_green),
+ "voice_mute": (Icons.voice_state_red, Icons.voice_state_green),
}
RULES_URL = "https://pythondiscord.com/pages/rules"
@@ -30,9 +33,9 @@ Infraction = t.Dict[str, t.Union[str, int, bool]]
APPEAL_SERVER_INVITE = "https://discord.gg/WXrCJxWBnm"
INFRACTION_TITLE = "Please review our rules"
-INFRACTION_APPEAL_SERVER_FOOTER = f"\n\nTo appeal this infraction, join our [appeals server]({APPEAL_SERVER_INVITE})."
+INFRACTION_APPEAL_SERVER_FOOTER = f"\nTo appeal this infraction, join our [appeals server]({APPEAL_SERVER_INVITE})."
INFRACTION_APPEAL_MODMAIL_FOOTER = (
- '\n\nIf you would like to discuss or appeal this infraction, '
+ '\nIf you would like to discuss or appeal this infraction, '
'send a message to the ModMail bot.'
)
INFRACTION_AUTHOR_NAME = "Infraction information"
@@ -42,6 +45,7 @@ LONGEST_EXTRAS = max(len(INFRACTION_APPEAL_SERVER_FOOTER), len(INFRACTION_APPEAL
INFRACTION_DESCRIPTION_TEMPLATE = (
"**Type:** {type}\n"
"**Expires:** {expires}\n"
+ "**Duration:** {duration}\n"
"**Reason:** {reason}\n"
)
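With the new `Duration` line, the DM description built from this template ends up looking roughly as follows; the values are illustrative, the real ones are computed in `notify_infraction` below:

text = INFRACTION_DESCRIPTION_TEMPLATE.format(
    type="Mute",
    expires="in 3 days",
    duration="3 days (2 days remaining)",
    reason="Please keep discussions on topic.",
)
# "**Type:** Mute\n**Expires:** in 3 days\n**Duration:** 3 days (2 days remaining)\n**Reason:** ...\n"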
@@ -78,7 +82,8 @@ async def post_infraction(
reason: str,
expires_at: datetime = None,
hidden: bool = False,
- active: bool = True
+ active: bool = True,
+ dm_sent: bool = False,
) -> t.Optional[dict]:
"""Posts an infraction to the API."""
if isinstance(user, (discord.Member, discord.User)) and user.bot:
@@ -93,7 +98,8 @@ async def post_infraction(
"reason": reason,
"type": infr_type,
"user": user.id,
- "active": active
+ "active": active,
+ "dm_sent": dm_sent
}
if expires_at:
payload['expires_at'] = expires_at.isoformat()
@@ -156,18 +162,44 @@ async def send_active_infraction_message(ctx: Context, infraction: Infraction) -
async def notify_infraction(
+ infraction: Infraction,
user: MemberOrUser,
- infr_type: str,
- expires_at: t.Optional[str] = None,
- reason: t.Optional[str] = None,
- icon_url: str = Icons.token_removed
+ reason: t.Optional[str] = None
) -> bool:
- """DM a user about their new infraction and return True if the DM is successful."""
+ """
+ DM a user about their new infraction and return True if the DM is successful.
+
+ `reason` can be used to override what is in `infraction`. Otherwise, this data will
+ be retrieved from `infraction`.
+ """
+ infr_id = infraction["id"]
+ infr_type = infraction["type"].replace("_", " ").title()
+ icon_url = INFRACTION_ICONS[infraction["type"]][0]
+
+ if infraction["expires_at"] is None:
+ expires_at = "Never"
+ duration = "Permanent"
+ else:
+ expiry = arrow.get(infraction["expires_at"])
+ expires_at = time.format_relative(expiry)
+ duration = time.humanize_delta(infraction["inserted_at"], expiry, max_units=2)
+
+ if infraction["active"]:
+ remaining = time.humanize_delta(expiry, arrow.utcnow(), max_units=2)
+ if duration != remaining:
+ duration += f" ({remaining} remaining)"
+ else:
+ expires_at += " (Inactive)"
+
log.trace(f"Sending {user} a DM about their {infr_type} infraction.")
+ if reason is None:
+ reason = infraction["reason"]
+
text = INFRACTION_DESCRIPTION_TEMPLATE.format(
type=infr_type.title(),
- expires=expires_at or "N/A",
+ expires=expires_at,
+ duration=duration,
reason=reason or "No reason provided."
)
@@ -175,7 +207,7 @@ async def notify_infraction(
if len(text) > 4096 - LONGEST_EXTRAS:
text = f"{text[:4093-LONGEST_EXTRAS]}..."
- text += INFRACTION_APPEAL_SERVER_FOOTER if infr_type.lower() == 'ban' else INFRACTION_APPEAL_MODMAIL_FOOTER
+ text += INFRACTION_APPEAL_SERVER_FOOTER if infraction["type"] == 'ban' else INFRACTION_APPEAL_MODMAIL_FOOTER
embed = discord.Embed(
description=text,
@@ -186,7 +218,15 @@ async def notify_infraction(
embed.title = INFRACTION_TITLE
embed.url = RULES_URL
- return await send_private_embed(user, embed)
+ dm_sent = await send_private_embed(user, embed)
+ if dm_sent:
+ await bot.instance.api_client.patch(
+ f"bot/infractions/{infr_id}",
+ json={"dm_sent": True}
+ )
+ log.debug(f"Updated infraction #{infr_id} dm_sent field to true.")
+
+ return dm_sent
async def notify_pardon(
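For callers, the `notify_infraction` change boils down to passing the infraction record instead of pre-formatted fields; a before/after sketch, where `infraction` is whatever `post_infraction` returned:

# Before: the caller formatted the type, expiry and icon by hand.
notified = await _utils.notify_infraction(
    user, "Voice Ban", expires_at="2022-05-01T00:00:00", reason="spam", icon_url=Icons.voice_state_red
)

# After: the helper derives type, icon, expiry and duration from the record,
# and also PATCHes the infraction's dm_sent field when the DM succeeds.
notified = await _utils.notify_infraction(infraction, user)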
diff --git a/bot/exts/moderation/infraction/infractions.py b/bot/exts/moderation/infraction/infractions.py
index e495a94b3..18bed5080 100644
--- a/bot/exts/moderation/infraction/infractions.py
+++ b/bot/exts/moderation/infraction/infractions.py
@@ -9,8 +9,8 @@ from discord.ext.commands import Context, command
from bot import constants
from bot.bot import Bot
from bot.constants import Event
-from bot.converters import Duration, Expiry, MemberOrUser, UnambiguousMemberOrUser
-from bot.decorators import respect_role_hierarchy
+from bot.converters import Age, Duration, Expiry, MemberOrUser, UnambiguousMemberOrUser
+from bot.decorators import ensure_future_timestamp, respect_role_hierarchy
from bot.exts.moderation.infraction import _utils
from bot.exts.moderation.infraction._scheduler import InfractionScheduler
from bot.log import get_logger
@@ -19,6 +19,11 @@ from bot.utils.messages import format_user
log = get_logger(__name__)
+if t.TYPE_CHECKING:
+ from bot.exts.moderation.clean import Clean
+ from bot.exts.moderation.infraction.management import ModManagement
+ from bot.exts.moderation.watchchannels.bigbrother import BigBrother
+
class Infractions(InfractionScheduler, commands.Cog):
"""Apply and pardon infractions on users for moderation purposes."""
@@ -27,7 +32,7 @@ class Infractions(InfractionScheduler, commands.Cog):
category_description = "Server moderation tools."
def __init__(self, bot: Bot):
- super().__init__(bot, supported_infractions={"ban", "kick", "mute", "note", "warning", "voice_ban"})
+ super().__init__(bot, supported_infractions={"ban", "kick", "mute", "note", "warning", "voice_mute"})
self.category = "Moderation"
self._muted_role = discord.Object(constants.Roles.muted)
@@ -76,6 +81,7 @@ class Infractions(InfractionScheduler, commands.Cog):
await self.apply_kick(ctx, user, reason)
@command()
+ @ensure_future_timestamp(timestamp_arg=3)
async def ban(
self,
ctx: Context,
@@ -91,8 +97,9 @@ class Infractions(InfractionScheduler, commands.Cog):
"""
await self.apply_ban(ctx, user, reason, expires_at=duration)
- @command(aliases=('pban',))
- async def purgeban(
+ @command(aliases=("cban", "purgeban", "pban"))
+ @ensure_future_timestamp(timestamp_arg=3)
+ async def cleanban(
self,
ctx: Context,
user: UnambiguousMemberOrUser,
@@ -101,14 +108,63 @@ class Infractions(InfractionScheduler, commands.Cog):
reason: t.Optional[str] = None
) -> None:
"""
- Same as ban but removes all their messages of the last 24 hours.
+ Same as ban, but also cleans all their messages from the last hour.
If duration is specified, it temporarily bans that user for the given duration.
"""
- await self.apply_ban(ctx, user, reason, 1, expires_at=duration)
+ clean_cog: t.Optional[Clean] = self.bot.get_cog("Clean")
+ if clean_cog is None:
+ # If we can't get the clean cog, fall back to native purgeban.
+ await self.apply_ban(ctx, user, reason, purge_days=1, expires_at=duration)
+ return
+
+ infraction = await self.apply_ban(ctx, user, reason, expires_at=duration)
+ if not infraction or not infraction.get("id"):
+ # Ban was unsuccessful, quit early.
+ await ctx.send(":x: Failed to apply ban.")
+ log.error("Failed to apply ban to user %d", user.id)
+ return
+
+ # Calling commands directly skips Discord.py's convertors, so we need to convert args manually.
+ clean_time = await Age().convert(ctx, "1h")
+
+ log_url = await clean_cog._clean_messages(
+ ctx,
+ users=[user],
+ channels="*",
+ first_limit=clean_time,
+ attempt_delete_invocation=False,
+ )
+ if not log_url:
+ # Cleaning failed, or there were no messages to clean, exit early.
+ return
+
+ infr_manage_cog: t.Optional[ModManagement] = self.bot.get_cog("ModManagement")
+ if infr_manage_cog is None:
+ # If we can't get the mod management cog, don't bother appending the log.
+ return
+
+ # Overwrite the context's send function so infraction append
+ # doesn't output the update infraction confirmation message.
+ async def send(*args, **kwargs) -> None:
+ pass
+ ctx.send = send
+ await infr_manage_cog.infraction_append(ctx, infraction, None, reason=f"[Clean log]({log_url})")
- @command(aliases=('vban',))
- async def voiceban(
+ @command(aliases=("vban",))
+ async def voiceban(self, ctx: Context) -> None:
+ """
+ NOT IMPLEMENTED.
+
+ Permanently ban a user from joining voice channels.
+
+ If duration is specified, it temporarily voice bans that user for the given duration.
+ """
+ await ctx.send(":x: This command is not yet implemented. Maybe you meant to use `voicemute`?")
+
+ @command(aliases=("vmute",))
+ @ensure_future_timestamp(timestamp_arg=3)
+ async def voicemute(
self,
ctx: Context,
user: UnambiguousMemberOrUser,
@@ -117,16 +173,17 @@ class Infractions(InfractionScheduler, commands.Cog):
reason: t.Optional[str]
) -> None:
"""
- Permanently ban user from using voice channels.
+ Permanently mute user in voice channels.
- If duration is specified, it temporarily voice bans that user for the given duration.
+ If duration is specified, it temporarily voice mutes that user for the given duration.
"""
- await self.apply_voice_ban(ctx, user, reason, expires_at=duration)
+ await self.apply_voice_mute(ctx, user, reason, expires_at=duration)
# endregion
# region: Temporary infractions
@command(aliases=["mute"])
+ @ensure_future_timestamp(timestamp_arg=3)
async def tempmute(
self, ctx: Context,
user: UnambiguousMemberOrUser,
@@ -160,6 +217,7 @@ class Infractions(InfractionScheduler, commands.Cog):
await self.apply_mute(ctx, user, reason, expires_at=duration)
@command(aliases=("tban",))
+ @ensure_future_timestamp(timestamp_arg=3)
async def tempban(
self,
ctx: Context,
@@ -186,16 +244,26 @@ class Infractions(InfractionScheduler, commands.Cog):
await self.apply_ban(ctx, user, reason, expires_at=duration)
@command(aliases=("tempvban", "tvban"))
- async def tempvoiceban(
- self,
- ctx: Context,
- user: UnambiguousMemberOrUser,
- duration: Expiry,
- *,
- reason: t.Optional[str]
+ async def tempvoiceban(self, ctx: Context) -> None:
+ """
+ NOT IMPLEMENTED.
+
+ Temporarily voice ban a user for the given duration.
+ """
+ await ctx.send(":x: This command is not yet implemented. Maybe you meant to use `tempvoicemute`?")
+
+ @command(aliases=("tempvmute", "tvmute"))
+ @ensure_future_timestamp(timestamp_arg=3)
+ async def tempvoicemute(
+ self,
+ ctx: Context,
+ user: UnambiguousMemberOrUser,
+ duration: Expiry,
+ *,
+ reason: t.Optional[str]
) -> None:
"""
- Temporarily voice ban a user for the given reason and duration.
+ Temporarily voice mute a user for the given reason and duration.
A unit of time should be appended to the duration.
Units (∗case-sensitive):
@@ -209,7 +277,7 @@ class Infractions(InfractionScheduler, commands.Cog):
Alternatively, an ISO 8601 timestamp can be provided for the duration.
"""
- await self.apply_voice_ban(ctx, user, reason, expires_at=duration)
+ await self.apply_voice_mute(ctx, user, reason, expires_at=duration)
# endregion
# region: Permanent shadow infractions
@@ -232,6 +300,7 @@ class Infractions(InfractionScheduler, commands.Cog):
# region: Temporary shadow infractions
@command(hidden=True, aliases=["shadowtempban", "stempban", "stban"])
+ @ensure_future_timestamp(timestamp_arg=3)
async def shadow_tempban(
self,
ctx: Context,
@@ -271,9 +340,18 @@ class Infractions(InfractionScheduler, commands.Cog):
await self.pardon_infraction(ctx, "ban", user)
@command(aliases=("uvban",))
- async def unvoiceban(self, ctx: Context, user: UnambiguousMemberOrUser) -> None:
- """Prematurely end the active voice ban infraction for the user."""
- await self.pardon_infraction(ctx, "voice_ban", user)
+ async def unvoiceban(self, ctx: Context) -> None:
+ """
+ NOT IMPLEMENTED.
+
+ Prematurely end the active voice ban infraction for the user.
+ """
+ await ctx.send(":x: This command is not yet implemented. Maybe you meant to use `unvoicemute`?")
+
+ @command(aliases=("uvmute",))
+ async def unvoicemute(self, ctx: Context, user: UnambiguousMemberOrUser) -> None:
+ """Prematurely end the active voice mute infraction for the user."""
+ await self.pardon_infraction(ctx, "voice_mute", user)
# endregion
# region: Base apply functions
@@ -339,7 +417,7 @@ class Infractions(InfractionScheduler, commands.Cog):
reason: t.Optional[str],
purge_days: t.Optional[int] = 0,
**kwargs
- ) -> None:
+ ) -> t.Optional[dict]:
"""
Apply a ban infraction with kwargs passed to `post_infraction`.
@@ -347,7 +425,7 @@ class Infractions(InfractionScheduler, commands.Cog):
"""
if isinstance(user, Member) and user.top_role >= ctx.me.top_role:
await ctx.send(":x: I can't ban users above or equal to me in the role hierarchy.")
- return
+ return None
# In the case of a permanent ban, we don't need get_active_infractions to tell us if one is active
is_temporary = kwargs.get("expires_at") is not None
@@ -356,19 +434,19 @@ class Infractions(InfractionScheduler, commands.Cog):
if active_infraction:
if is_temporary:
log.trace("Tempban ignored as it cannot overwrite an active ban.")
- return
+ return None
if active_infraction.get('expires_at') is None:
log.trace("Permaban already exists, notify.")
await ctx.send(f":x: User is already permanently banned (#{active_infraction['id']}).")
- return
+ return None
log.trace("Old tempban is being replaced by new permaban.")
await self.pardon_infraction(ctx, "ban", user, send_msg=is_temporary)
infraction = await _utils.post_infraction(ctx, user, "ban", reason, active=True, **kwargs)
if infraction is None:
- return
+ return None
infraction["purge"] = "purge " if purge_days else ""
@@ -380,27 +458,25 @@ class Infractions(InfractionScheduler, commands.Cog):
action = ctx.guild.ban(user, reason=reason, delete_message_days=purge_days)
await self.apply_infraction(ctx, infraction, user, action)
+ bb_cog: t.Optional[BigBrother] = self.bot.get_cog("Big Brother")
if infraction.get('expires_at') is not None:
log.trace(f"Ban isn't permanent; user {user} won't be unwatched by Big Brother.")
- return
-
- bb_cog = self.bot.get_cog("Big Brother")
- if not bb_cog:
+ elif not bb_cog:
log.error(f"Big Brother cog not loaded; perma-banned user {user} won't be unwatched.")
- return
-
- log.trace(f"Big Brother cog loaded; attempting to unwatch perma-banned user {user}.")
+ else:
+ log.trace(f"Big Brother cog loaded; attempting to unwatch perma-banned user {user}.")
+ bb_reason = "User has been permanently banned from the server. Automatically removed."
+ await bb_cog.apply_unwatch(ctx, user, bb_reason, send_message=False)
- bb_reason = "User has been permanently banned from the server. Automatically removed."
- await bb_cog.apply_unwatch(ctx, user, bb_reason, send_message=False)
+ return infraction
@respect_role_hierarchy(member_arg=2)
- async def apply_voice_ban(self, ctx: Context, user: MemberOrUser, reason: t.Optional[str], **kwargs) -> None:
- """Apply a voice ban infraction with kwargs passed to `post_infraction`."""
- if await _utils.get_active_infraction(ctx, user, "voice_ban"):
+ async def apply_voice_mute(self, ctx: Context, user: MemberOrUser, reason: t.Optional[str], **kwargs) -> None:
+ """Apply a voice mute infraction with kwargs passed to `post_infraction`."""
+ if await _utils.get_active_infraction(ctx, user, "voice_mute"):
return
- infraction = await _utils.post_infraction(ctx, user, "voice_ban", reason, active=True, **kwargs)
+ infraction = await _utils.post_infraction(ctx, user, "voice_mute", reason, active=True, **kwargs)
if infraction is None:
return
@@ -414,7 +490,7 @@ class Infractions(InfractionScheduler, commands.Cog):
if not isinstance(user, Member):
return
- await user.move_to(None, reason="Disconnected from voice to apply voiceban.")
+ await user.move_to(None, reason="Disconnected from voice to apply voice mute.")
await user.remove_roles(self._voice_verified_role, reason=reason)
await self.apply_infraction(ctx, infraction, user, action())
@@ -471,7 +547,7 @@ class Infractions(InfractionScheduler, commands.Cog):
return log_text
- async def pardon_voice_ban(
+ async def pardon_voice_mute(
self,
user_id: int,
guild: discord.Guild,
@@ -487,9 +563,9 @@ class Infractions(InfractionScheduler, commands.Cog):
# DM user about infraction expiration
notified = await _utils.notify_pardon(
user=user,
- title="Voice ban ended",
- content="You have been unbanned and can verify yourself again in the server.",
- icon_url=_utils.INFRACTION_ICONS["voice_ban"][1]
+ title="Voice mute ended",
+ content="You have been unmuted and can verify yourself again in the server.",
+ icon_url=_utils.INFRACTION_ICONS["voice_mute"][1]
)
log_text["DM"] = "Sent" if notified else "**Failed**"
@@ -514,8 +590,8 @@ class Infractions(InfractionScheduler, commands.Cog):
return await self.pardon_mute(user_id, guild, reason, notify=notify)
elif infraction["type"] == "ban":
return await self.pardon_ban(user_id, guild, reason)
- elif infraction["type"] == "voice_ban":
- return await self.pardon_voice_ban(user_id, guild, notify=notify)
+ elif infraction["type"] == "voice_mute":
+ return await self.pardon_voice_mute(user_id, guild, notify=notify)
# endregion
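To summarise the command-surface changes in this file: the voice "ban" family is renamed to "mute", with the old names kept as NOT IMPLEMENTED stubs that redirect users. A quick reference derived from the decorators above:

RENAMED_COMMANDS = {
    "purgeban": "cleanban",           # new aliases: cban, purgeban, pban
    "voiceban": "voicemute",          # voiceban remains as a stub pointing to voicemute
    "tempvoiceban": "tempvoicemute",  # tempvoiceban remains as a stub pointing to tempvoicemute
    "unvoiceban": "unvoicemute",      # unvoiceban remains as a stub pointing to unvoicemute
}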
diff --git a/bot/exts/moderation/infraction/management.py b/bot/exts/moderation/infraction/management.py
index 0a33ac5e2..62d349519 100644
--- a/bot/exts/moderation/infraction/management.py
+++ b/bot/exts/moderation/infraction/management.py
@@ -1,10 +1,7 @@
import textwrap
import typing as t
-from datetime import datetime, timezone
-import dateutil.parser
import discord
-from dateutil.relativedelta import relativedelta
from discord.ext import commands
from discord.ext.commands import Context
from discord.utils import escape_markdown
@@ -12,7 +9,9 @@ from discord.utils import escape_markdown
from bot import constants
from bot.bot import Bot
from bot.converters import Expiry, Infraction, MemberOrUser, Snowflake, UnambiguousUser, allowed_strings
+from bot.decorators import ensure_future_timestamp
from bot.errors import InvalidInfraction
+from bot.exts.moderation.infraction import _utils
from bot.exts.moderation.infraction.infractions import Infractions
from bot.exts.moderation.modlog import ModLog
from bot.log import get_logger
@@ -20,7 +19,6 @@ from bot.pagination import LinePaginator
from bot.utils import messages, time
from bot.utils.channel import is_mod_channel
from bot.utils.members import get_or_fetch_member
-from bot.utils.time import humanize_delta, until_expiration
log = get_logger(__name__)
@@ -43,12 +41,10 @@ class ModManagement(commands.Cog):
"""Get currently loaded Infractions cog instance."""
return self.bot.get_cog("Infractions")
- # region: Edit infraction commands
-
@commands.group(name='infraction', aliases=('infr', 'infractions', 'inf', 'i'), invoke_without_command=True)
async def infraction_group(self, ctx: Context, infraction: Infraction = None) -> None:
"""
- Infraction manipulation commands.
+ Infraction management commands.
If `infraction` is passed then this command fetches that infraction. The `Infraction` converter
supports 'l', 'last' and 'recent' to get the most recent infraction made by `ctx.author`.
@@ -63,6 +59,30 @@ class ModManagement(commands.Cog):
)
await self.send_infraction_list(ctx, embed, [infraction])
+ @infraction_group.command(name="resend", aliases=("send", "rs", "dm"))
+ async def infraction_resend(self, ctx: Context, infraction: Infraction) -> None:
+ """Resend a DM to a user about a given infraction of theirs."""
+ if infraction["hidden"]:
+ await ctx.send(f"{constants.Emojis.failmail} You may not resend hidden infractions.")
+ return
+
+ member_id = infraction["user"]["id"]
+ member = await get_or_fetch_member(ctx.guild, member_id)
+ if not member:
+ await ctx.send(f"{constants.Emojis.failmail} Cannot find member `{member_id}` in the guild.")
+ return
+
+ id_ = infraction["id"]
+ reason = infraction["reason"] or "No reason provided."
+ reason += "\n\n**This is a re-sent message for a previously applied infraction which may have been edited.**"
+
+ if await _utils.notify_infraction(infraction, member, reason):
+ await ctx.send(f":incoming_envelope: Resent DM for infraction `{id_}`.")
+ else:
+ await ctx.send(f"{constants.Emojis.failmail} Failed to resend DM for infraction `{id_}`.")
+
+ # region: Edit infraction commands
+
@infraction_group.command(name="append", aliases=("amend", "add", "a"))
async def infraction_append(
self,
@@ -103,6 +123,7 @@ class ModManagement(commands.Cog):
await self.infraction_edit(ctx, infraction, duration, reason=reason)
@infraction_group.command(name='edit', aliases=('e',))
+ @ensure_future_timestamp(timestamp_arg=3)
async def infraction_edit(
self,
ctx: Context,
@@ -151,7 +172,7 @@ class ModManagement(commands.Cog):
confirm_messages.append("marked as permanent")
elif duration is not None:
request_data['expires_at'] = duration.isoformat()
- expiry = time.format_infraction_with_duration(request_data['expires_at'])
+ expiry = time.format_with_duration(duration)
confirm_messages.append(f"set to expire on {expiry}")
else:
confirm_messages.append("expiry unchanged")
@@ -176,15 +197,15 @@ class ModManagement(commands.Cog):
if 'expires_at' in request_data:
# A scheduled task should only exist if the old infraction wasn't permanent
if infraction['expires_at']:
- self.infractions_cog.scheduler.cancel(new_infraction['id'])
+ self.infractions_cog.scheduler.cancel(infraction_id)
# If the infraction was not marked as permanent, schedule a new expiration task
if request_data['expires_at']:
self.infractions_cog.schedule_expiration(new_infraction)
log_text += f"""
- Previous expiry: {until_expiration(infraction['expires_at']) or "Permanent"}
- New expiry: {until_expiration(new_infraction['expires_at']) or "Permanent"}
+ Previous expiry: {time.until_expiration(infraction['expires_at'])}
+ New expiry: {time.until_expiration(new_infraction['expires_at'])}
""".rstrip()
changes = ' & '.join(confirm_messages)
@@ -210,7 +231,8 @@ class ModManagement(commands.Cog):
Member: {user_text}
Actor: <@{new_infraction['actor']}>
Edited by: {ctx.message.author.mention}{log_text}
- """)
+ """),
+ footer=f"ID: {infraction_id}"
)
# endregion
@@ -243,8 +265,9 @@ class ModManagement(commands.Cog):
else:
user_str = str(user.id)
+ formatted_infraction_count = self.format_infraction_count(len(infraction_list))
embed = discord.Embed(
- title=f"Infractions for {user_str} ({len(infraction_list)} total)",
+ title=f"Infractions for {user_str} ({formatted_infraction_count} total)",
colour=discord.Colour.orange()
)
await self.send_infraction_list(ctx, embed, infraction_list)
@@ -256,15 +279,70 @@ class ModManagement(commands.Cog):
'bot/infractions/expanded',
params={'search': reason}
)
+
+ formatted_infraction_count = self.format_infraction_count(len(infraction_list))
+ embed = discord.Embed(
+ title=f"Infractions matching `{reason}` ({formatted_infraction_count} total)",
+ colour=discord.Colour.orange()
+ )
+ await self.send_infraction_list(ctx, embed, infraction_list)
+
+ # endregion
+ # region: Search for infractions by given actor
+
+ @infraction_group.command(name="by", aliases=("b",))
+ async def search_by_actor(
+ self,
+ ctx: Context,
+ actor: t.Union[t.Literal["m", "me"], UnambiguousUser],
+ oldest_first: bool = False
+ ) -> None:
+ """
+ Search for infractions made by `actor`.
+
+ Use "m" or "me" as the `actor` to get infractions by author.
+
+ Use "1" for `oldest_first` to send oldest infractions first.
+ """
+ if isinstance(actor, str):
+ actor = ctx.author
+
+ if oldest_first:
+ ordering = 'inserted_at' # oldest infractions first
+ else:
+ ordering = '-inserted_at' # newest infractions first
+
+ infraction_list = await self.bot.api_client.get(
+ 'bot/infractions/expanded',
+ params={
+ 'actor__id': str(actor.id),
+ 'ordering': ordering
+ }
+ )
+
+ formatted_infraction_count = self.format_infraction_count(len(infraction_list))
embed = discord.Embed(
- title=f"Infractions matching `{reason}` ({len(infraction_list)} total)",
+ title=f"Infractions by {actor} ({formatted_infraction_count} total)",
colour=discord.Colour.orange()
)
+
await self.send_infraction_list(ctx, embed, infraction_list)
# endregion
# region: Utility functions
+ @staticmethod
+ def format_infraction_count(infraction_count: int) -> str:
+ """
+ Returns a string-formatted infraction count.
+
+ The API limits the number of returned infractions to a maximum of 100, so if
+ `infraction_count` is 100 then we return `"100+"`. Otherwise, return `str(infraction_count)`.
+ """
+ if infraction_count == 100:
+ return "100+"
+ return str(infraction_count)
+
async def send_infraction_list(
self,
ctx: Context,
@@ -295,7 +373,9 @@ class ModManagement(commands.Cog):
active = infraction["active"]
user = infraction["user"]
expires_at = infraction["expires_at"]
- created = time.format_infraction(infraction["inserted_at"])
+ inserted_at = infraction["inserted_at"]
+ created = time.discord_timestamp(inserted_at)
+ dm_sent = infraction["dm_sent"]
# Format the user string.
if user_obj := self.bot.get_user(user["id"]):
@@ -307,25 +387,27 @@ class ModManagement(commands.Cog):
user_str = f"<@{user['id']}> ({name}#{user['discriminator']:04})"
if active:
- remaining = time.until_expiration(expires_at) or "Expired"
+ remaining = time.until_expiration(expires_at)
else:
remaining = "Inactive"
if expires_at is None:
duration = "*Permanent*"
else:
- date_from = datetime.fromtimestamp(
- float(time.DISCORD_TIMESTAMP_REGEX.match(created).group(1)),
- timezone.utc
- )
- date_to = dateutil.parser.isoparse(expires_at)
- duration = humanize_delta(relativedelta(date_to, date_from))
+ duration = time.humanize_delta(inserted_at, expires_at)
+
+ # Format `dm_sent`
+ if dm_sent is None:
+ dm_sent_text = "N/A"
+ else:
+ dm_sent_text = "Yes" if dm_sent else "No"
lines = textwrap.dedent(f"""
{"**===============**" if active else "==============="}
Status: {"__**Active**__" if active else "Inactive"}
User: {user_str}
Type: **{infraction["type"]}**
+ DM Sent: {dm_sent_text}
Shadow: {infraction["hidden"]}
Created: {created}
Expires: {remaining}
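A short usage note for the new helper: because the API caps list endpoints at 100 results, an exact count of 100 is displayed as open-ended:

ModManagement.format_infraction_count(37)   # -> "37"
ModManagement.format_infraction_count(100)  # -> "100+", the API never returns more than 100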
diff --git a/bot/exts/moderation/infraction/superstarify.py b/bot/exts/moderation/infraction/superstarify.py
index 08c92b8f3..c4a7e5081 100644
--- a/bot/exts/moderation/infraction/superstarify.py
+++ b/bot/exts/moderation/infraction/superstarify.py
@@ -11,12 +11,13 @@ from discord.utils import escape_markdown
from bot import constants
from bot.bot import Bot
from bot.converters import Duration, Expiry
+from bot.decorators import ensure_future_timestamp
from bot.exts.moderation.infraction import _utils
from bot.exts.moderation.infraction._scheduler import InfractionScheduler
from bot.log import get_logger
+from bot.utils import time
from bot.utils.members import get_or_fetch_member
from bot.utils.messages import format_user
-from bot.utils.time import format_infraction
log = get_logger(__name__)
NICKNAME_POLICY_URL = "https://pythondiscord.com/pages/rules/#nickname-policy"
@@ -57,32 +58,28 @@ class Superstarify(InfractionScheduler, Cog):
return
infraction = active_superstarifies[0]
- forced_nick = self.get_nick(infraction["id"], before.id)
+ infr_id = infraction["id"]
+
+ forced_nick = self.get_nick(infr_id, before.id)
if after.display_name == forced_nick:
return # Nick change was triggered by this event. Ignore.
+ reason = (
+ "You have tried to change your nickname on the **Python Discord** server "
+ f"from **{before.display_name}** to **{after.display_name}**, but as you "
+ "are currently in superstar-prison, you do not have permission to do so."
+ )
+
log.info(
f"{after.display_name} ({after.id}) tried to escape superstar prison. "
f"Changing the nick back to {before.display_name}."
)
await after.edit(
nick=forced_nick,
- reason=f"Superstarified member tried to escape the prison: {infraction['id']}"
- )
-
- notified = await _utils.notify_infraction(
- user=after,
- infr_type="Superstarify",
- expires_at=format_infraction(infraction["expires_at"]),
- reason=(
- "You have tried to change your nickname on the **Python Discord** server "
- f"from **{before.display_name}** to **{after.display_name}**, but as you "
- "are currently in superstar-prison, you do not have permission to do so."
- ),
- icon_url=_utils.INFRACTION_ICONS["superstar"][0]
+ reason=f"Superstarified member tried to escape the prison: {infr_id}"
)
- if not notified:
+ if not await _utils.notify_infraction(infraction, after, reason):
log.info("Failed to DM user about why they cannot change their nickname.")
@Cog.listener()
@@ -107,6 +104,7 @@ class Superstarify(InfractionScheduler, Cog):
await self.reapply_infraction(infraction, action)
@command(name="superstarify", aliases=("force_nick", "star", "starify", "superstar"))
+ @ensure_future_timestamp(timestamp_arg=3)
async def superstarify(
self,
ctx: Context,
@@ -150,7 +148,7 @@ class Superstarify(InfractionScheduler, Cog):
id_ = infraction["id"]
forced_nick = self.get_nick(id_, member.id)
- expiry_str = format_infraction(infraction["expires_at"])
+ expiry_str = time.discord_timestamp(infraction["expires_at"])
# Apply the infraction
async def action() -> None:
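Several hunks in this patch swap `format_infraction` for `time.discord_timestamp`, which emits Discord's native timestamp markup so clients render the time in the viewer's locale. A minimal sketch of such a helper, assuming it maps a datetime or ISO string to the documented `<t:unix:style>` markup (the project's actual implementation may differ):
from datetime import datetime, timezone
from typing import Union

def discord_timestamp(timestamp: Union[str, datetime], style: str = "f") -> str:
    # Styles documented by Discord include "f" (date and time), "D" (date), "T" (time), "R" (relative).
    if isinstance(timestamp, str):
        timestamp = datetime.fromisoformat(timestamp)
    if timestamp.tzinfo is None:
        timestamp = timestamp.replace(tzinfo=timezone.utc)
    return f"<t:{int(timestamp.timestamp())}:{style}>"

# discord_timestamp("2022-04-03T19:30:54+00:00", "R") == "<t:1649014254:R>"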
diff --git a/bot/exts/moderation/modlog.py b/bot/exts/moderation/modlog.py
index 462f8533d..796c1f021 100644
--- a/bot/exts/moderation/modlog.py
+++ b/bot/exts/moderation/modlog.py
@@ -11,13 +11,13 @@ from deepdiff import DeepDiff
from discord import Colour, Message, Thread
from discord.abc import GuildChannel
from discord.ext.commands import Cog, Context
-from discord.utils import escape_markdown
+from discord.utils import escape_markdown, format_dt, snowflake_time
from bot.bot import Bot
from bot.constants import Categories, Channels, Colours, Emojis, Event, Guild as GuildConstant, Icons, Roles, URLs
from bot.log import get_logger
+from bot.utils import time
from bot.utils.messages import format_user
-from bot.utils.time import humanize_delta
log = get_logger(__name__)
@@ -41,7 +41,6 @@ class ModLog(Cog, name="ModLog"):
self.bot = bot
self._ignored = {event: [] for event in Event}
- self._cached_deletes = []
self._cached_edits = []
async def upload_log(
@@ -97,6 +96,7 @@ class ModLog(Cog, name="ModLog"):
footer: t.Optional[str] = None,
) -> Context:
"""Generate log embed and send to logging channel."""
+ await self.bot.wait_until_guild_available()
# Truncate string directly here to avoid removing newlines
embed = discord.Embed(
description=text[:4093] + "..." if len(text) > 4096 else text
@@ -116,7 +116,7 @@ class ModLog(Cog, name="ModLog"):
if ping_everyone:
if content:
- content = f"<@&{Roles.moderators}>\n{content}"
+ content = f"<@&{Roles.moderators}> {content}"
else:
content = f"<@&{Roles.moderators}>"
@@ -407,7 +407,7 @@ class ModLog(Cog, name="ModLog"):
now = datetime.now(timezone.utc)
difference = abs(relativedelta(now, member.created_at))
- message = format_user(member) + "\n\n**Account age:** " + humanize_delta(difference)
+ message = format_user(member) + "\n\n**Account age:** " + time.humanize_delta(difference)
if difference.days < 1 and difference.months < 1 and difference.years < 1: # New user account!
message = f"{Emojis.new} {message}"
@@ -552,29 +552,28 @@ class ModLog(Cog, name="ModLog"):
return channel.id in GuildConstant.modlog_blacklist
- @Cog.listener()
- async def on_message_delete(self, message: discord.Message) -> None:
- """Log message delete event to message change log."""
+ async def log_cached_deleted_message(self, message: discord.Message) -> None:
+ """
+ Log the message's details to message change log.
+
+ This is called when a cached message is deleted.
+ """
channel = message.channel
author = message.author
if self.is_message_blacklisted(message):
return
- self._cached_deletes.append(message.id)
-
if message.id in self._ignored[Event.message_delete]:
self._ignored[Event.message_delete].remove(message.id)
return
- if author.bot:
- return
-
if channel.category:
response = (
f"**Author:** {format_user(author)}\n"
f"**Channel:** {channel.category}/#{channel.name} (`{channel.id}`)\n"
f"**Message ID:** `{message.id}`\n"
+ f"**Sent at:** {format_dt(message.created_at)}\n"
f"[Jump to message]({message.jump_url})\n"
"\n"
)
@@ -583,6 +582,7 @@ class ModLog(Cog, name="ModLog"):
f"**Author:** {format_user(author)}\n"
f"**Channel:** #{channel.name} (`{channel.id}`)\n"
f"**Message ID:** `{message.id}`\n"
+ f"**Sent at:** {format_dt(message.created_at)}\n"
f"[Jump to message]({message.jump_url})\n"
"\n"
)
@@ -610,17 +610,15 @@ class ModLog(Cog, name="ModLog"):
channel_id=Channels.message_log
)
- @Cog.listener()
- async def on_raw_message_delete(self, event: discord.RawMessageDeleteEvent) -> None:
- """Log raw message delete event to message change log."""
- if self.is_channel_ignored(event.channel_id):
- return
-
- await asyncio.sleep(1) # Wait here in case the normal event was fired
+ async def log_uncached_deleted_message(self, event: discord.RawMessageDeleteEvent) -> None:
+ """
+ Log the message's details to message change log.
- if event.message_id in self._cached_deletes:
- # It was in the cache and the normal event was fired, so we can just ignore it
- self._cached_deletes.remove(event.message_id)
+ This is called when a message absent from the cache is deleted.
+ Hence, the message contents aren't logged.
+ """
+ await self.bot.wait_until_guild_available()
+ if self.is_channel_ignored(event.channel_id):
return
if event.message_id in self._ignored[Event.message_delete]:
@@ -633,6 +631,7 @@ class ModLog(Cog, name="ModLog"):
response = (
f"**Channel:** {channel.category}/#{channel.name} (`{channel.id}`)\n"
f"**Message ID:** `{event.message_id}`\n"
+ f"**Sent at:** {format_dt(snowflake_time(event.message_id))}\n"
"\n"
"This message was not cached, so the message content cannot be displayed."
)
@@ -640,6 +639,7 @@ class ModLog(Cog, name="ModLog"):
response = (
f"**Channel:** #{channel.name} (`{channel.id}`)\n"
f"**Message ID:** `{event.message_id}`\n"
+ f"**Sent at:** {format_dt(snowflake_time(event.message_id))}\n"
"\n"
"This message was not cached, so the message content cannot be displayed."
)
@@ -652,6 +652,14 @@ class ModLog(Cog, name="ModLog"):
)
@Cog.listener()
+ async def on_raw_message_delete(self, event: discord.RawMessageDeleteEvent) -> None:
+ """Log message deletions to message change log."""
+ if event.cached_message is not None:
+ await self.log_cached_deleted_message(event.cached_message)
+ else:
+ await self.log_uncached_deleted_message(event)
+
+ @Cog.listener()
async def on_message_edit(self, msg_before: discord.Message, msg_after: discord.Message) -> None:
"""Log message edit event to message change log."""
if self.is_message_blacklisted(msg_before):
@@ -709,7 +717,7 @@ class ModLog(Cog, name="ModLog"):
# datetime as the baseline and create a human-readable delta between this edit event
# and the last time the message was edited
timestamp = msg_before.edited_at
- delta = humanize_delta(relativedelta(msg_after.edited_at, msg_before.edited_at))
+ delta = time.humanize_delta(msg_after.edited_at, msg_before.edited_at)
footer = f"Last edited {delta} ago"
else:
# Message was not previously edited, use the created_at datetime as the baseline, no
@@ -725,6 +733,10 @@ class ModLog(Cog, name="ModLog"):
@Cog.listener()
async def on_raw_message_edit(self, event: discord.RawMessageUpdateEvent) -> None:
"""Log raw message edit event to message change log."""
+ if event.guild_id is None:
+ return # ignore DM edits
+
+ await self.bot.wait_until_guild_available()
try:
channel = self.bot.get_channel(int(event.data["channel_id"]))
message = await channel.fetch_message(event.message_id)
@@ -773,6 +785,10 @@ class ModLog(Cog, name="ModLog"):
@Cog.listener()
async def on_thread_update(self, before: Thread, after: Thread) -> None:
"""Log thread archiving, un-archiving and name edits."""
+ if self.is_channel_ignored(after.id):
+ log.trace("Ignoring update of thread %s (%d)", after.mention, after.id)
+ return
+
if before.name != after.name:
await self.send_log_message(
Icons.hash_blurple,
@@ -809,6 +825,10 @@ class ModLog(Cog, name="ModLog"):
@Cog.listener()
async def on_thread_delete(self, thread: Thread) -> None:
"""Log thread deletion."""
+ if self.is_channel_ignored(thread.id):
+ log.trace("Ignoring deletion of thread %s (%d)", thread.mention, thread.id)
+ return
+
await self.send_log_message(
Icons.hash_red,
Colours.soft_red,
@@ -827,6 +847,10 @@ class ModLog(Cog, name="ModLog"):
if thread.me:
return
+ if self.is_channel_ignored(thread.id):
+ log.trace("Ignoring creation of thread %s (%d)", thread.mention, thread.id)
+ return
+
await self.send_log_message(
Icons.hash_green,
Colours.soft_green,
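The deletion-logging refactor above works because discord.py attaches `cached_message` to `RawMessageDeleteEvent` whenever the deleted message was in the client cache, so a single raw listener can cover both paths. A minimal standalone sketch of that pattern (not the cog's actual code):
import discord
from discord.ext import commands

class DeletionLogger(commands.Cog):
    @commands.Cog.listener()
    async def on_raw_message_delete(self, event: discord.RawMessageDeleteEvent) -> None:
        if event.cached_message is not None:
            # Cache hit: author and content are available for the log entry.
            message = event.cached_message
            print(f"{message.author} deleted: {message.content!r}")
        else:
            # Cache miss: only IDs survive, but the send time can be recovered from the snowflake.
            sent_at = discord.utils.snowflake_time(event.message_id)
            print(f"Uncached message {event.message_id} (sent {sent_at}) was deleted")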
diff --git a/bot/exts/moderation/modpings.py b/bot/exts/moderation/modpings.py
index f67d8f662..b5cd29b12 100644
--- a/bot/exts/moderation/modpings.py
+++ b/bot/exts/moderation/modpings.py
@@ -1,8 +1,9 @@
+import asyncio
import datetime
import arrow
from async_rediscache import RedisCache
-from dateutil.parser import isoparse
+from dateutil.parser import isoparse, parse as dateutil_parse
from discord import Embed, Member
from discord.ext.commands import Cog, Context, group, has_any_role
@@ -10,11 +11,13 @@ from bot.bot import Bot
from bot.constants import Colours, Emojis, Guild, Icons, MODERATION_ROLES, Roles
from bot.converters import Expiry
from bot.log import get_logger
-from bot.utils import scheduling
+from bot.utils import scheduling, time
from bot.utils.scheduling import Scheduler
log = get_logger(__name__)
+MAXIMUM_WORK_LIMIT = 16
+
class ModPings(Cog):
"""Commands for a moderator to turn moderator pings on and off."""
@@ -24,13 +27,23 @@ class ModPings(Cog):
# The cache's values are the times when the role should be re-applied to them, stored in ISO format.
pings_off_mods = RedisCache()
+ # RedisCache[discord.Member.id, 'start timestamp|total worktime in seconds']
+ # The cache's keys are the moderators' IDs
+ # The cache's values are the schedule's start timestamp and the total work time in seconds until pings are turned off
+ modpings_schedule = RedisCache()
+
def __init__(self, bot: Bot):
self.bot = bot
- self._role_scheduler = Scheduler(self.__class__.__name__)
+ self._role_scheduler = Scheduler("ModPingsOnOff")
+ self._modpings_scheduler = Scheduler("ModPingsSchedule")
self.guild = None
self.moderators_role = None
+ self.modpings_schedule_task = scheduling.create_task(
+ self.reschedule_modpings_schedule(),
+ event_loop=self.bot.loop
+ )
self.reschedule_task = scheduling.create_task(
self.reschedule_roles(),
name="mod-pings-reschedule",
@@ -61,6 +74,53 @@ class ModPings(Cog):
expiry = isoparse(pings_off[mod.id])
self._role_scheduler.schedule_at(expiry, mod.id, self.reapply_role(mod))
+ async def reschedule_modpings_schedule(self) -> None:
+ """Reschedule moderators schedule ping."""
+ await self.bot.wait_until_guild_available()
+ schedule_cache = await self.modpings_schedule.to_dict()
+
+ log.info("Scheduling modpings schedule for applicable moderators found in cache.")
+ for mod_id, schedule in schedule_cache.items():
+ start_timestamp, work_time = schedule.split("|")
+ start = datetime.datetime.fromtimestamp(float(start_timestamp))
+
+ mod = await self.bot.fetch_user(mod_id)
+ self._modpings_scheduler.schedule_at(
+ start,
+ mod_id,
+ self.add_role_schedule(mod, work_time, start)
+ )
+
+ async def remove_role_schedule(self, mod: Member, work_time: int, schedule_start: datetime.datetime) -> None:
+ """Removes the moderator's role to the given moderator."""
+ log.trace(f"Removing moderator role from mod with ID {mod.id}")
+ await mod.remove_roles(self.moderators_role, reason="Moderator schedule time expired.")
+
+ # Remove the task before scheduling it again
+ self._modpings_scheduler.cancel(mod.id)
+
+ # Add the task again
+ log.trace(f"Adding mod pings schedule task again for mod with ID {mod.id}")
+ schedule_start += datetime.timedelta(days=1)
+ self._modpings_scheduler.schedule_at(
+ schedule_start,
+ mod.id,
+ self.add_role_schedule(mod, work_time, schedule_start)
+ )
+
+ async def add_role_schedule(self, mod: Member, work_time: int, schedule_start: datetime.datetime) -> None:
+ """Adds the moderator's role to the given moderator."""
+ # If the moderator has pings off, then skip adding role
+ if mod.id in await self.pings_off_mods.to_dict():
+ log.trace(f"Skipping adding moderator role to mod with ID {mod.id} - found in pings off cache.")
+ else:
+ log.trace(f"Applying moderator role to mod with ID {mod.id}")
+ await mod.add_roles(self.moderators_role, reason="Moderator scheduled time started!")
+
+ log.trace(f"Sleeping for {work_time} seconds, worktime for mod with ID {mod.id}")
+ await asyncio.sleep(work_time)
+ await self.remove_role_schedule(mod, work_time, schedule_start)
+
async def reapply_role(self, mod: Member) -> None:
"""Reapply the moderator's role to the given moderator."""
log.trace(f"Re-applying role to mod with ID {mod.id}.")
@@ -132,12 +192,66 @@ class ModPings(Cog):
await ctx.send(f"{Emojis.check_mark} Moderators role has been re-applied.")
+ @modpings_group.group(
+ name='schedule',
+ aliases=('s',),
+ invoke_without_command=True
+ )
+ @has_any_role(*MODERATION_ROLES)
+ async def schedule_modpings(self, ctx: Context, start: str, end: str) -> None:
+ """Schedule modpings role to be added at <start> and removed at <end> everyday at UTC time!"""
+ start, end = dateutil_parse(start), dateutil_parse(end)
+
+ if end < start:
+ end += datetime.timedelta(days=1)
+
+ if (end - start) > datetime.timedelta(hours=MAXIMUM_WORK_LIMIT):
+ await ctx.send(
+ f":x: {ctx.author.mention} You can't have the modpings role for"
+ f" more than {MAXIMUM_WORK_LIMIT} hours!"
+ )
+ return
+
+ if start < datetime.datetime.utcnow():
+ # Today's start time has already passed, so move it to tomorrow;
+ # otherwise the scheduler would run it immediately.
+ start += datetime.timedelta(days=1)
+
+ work_time = (end - start).total_seconds()
+
+ await self.modpings_schedule.set(ctx.author.id, f"{start.timestamp()}|{work_time}")
+
+ if ctx.author.id in self._modpings_scheduler:
+ self._modpings_scheduler.cancel(ctx.author.id)
+
+ self._modpings_scheduler.schedule_at(
+ start,
+ ctx.author.id,
+ self.add_role_schedule(ctx.author, work_time, start)
+ )
+
+ await ctx.send(
+ f"{Emojis.ok_hand} {ctx.author.mention} Scheduled mod pings from "
+ f"{time.discord_timestamp(start, time.TimestampFormats.TIME)} to "
+ f"{time.discord_timestamp(end, time.TimestampFormats.TIME)}!"
+ )
+
+ @schedule_modpings.command(name='delete', aliases=('del', 'd'))
+ async def modpings_schedule_delete(self, ctx: Context) -> None:
+ """Delete your modpings schedule."""
+ self._modpings_scheduler.cancel(ctx.author.id)
+ await self.modpings_schedule.delete(ctx.author.id)
+ await ctx.send(f"{Emojis.ok_hand} {ctx.author.mention} Deleted your modpings schedule!")
+
def cog_unload(self) -> None:
"""Cancel role tasks when the cog unloads."""
log.trace("Cog unload: canceling role tasks.")
self.reschedule_task.cancel()
self._role_scheduler.cancel_all()
+ self.modpings_schedule_task.cancel()
+ self._modpings_scheduler.cancel_all()
+
def setup(bot: Bot) -> None:
"""Load the ModPings cog."""
diff --git a/bot/exts/moderation/slowmode.py b/bot/exts/moderation/slowmode.py
index 9583597e0..b6a771441 100644
--- a/bot/exts/moderation/slowmode.py
+++ b/bot/exts/moderation/slowmode.py
@@ -16,7 +16,7 @@ SLOWMODE_MAX_DELAY = 21600 # seconds
COMMONLY_SLOWMODED_CHANNELS = {
Channels.python_general: "python_general",
- Channels.discord_py: "discordpy",
+ Channels.discord_bots: "discord_bots",
Channels.off_topic_0: "ot0",
}
@@ -39,8 +39,7 @@ class Slowmode(Cog):
if channel is None:
channel = ctx.channel
- delay = relativedelta(seconds=channel.slowmode_delay)
- humanized_delay = time.humanize_delta(delay)
+ humanized_delay = time.humanize_delta(seconds=channel.slowmode_delay)
await ctx.send(f'The slowmode delay for {channel.mention} is {humanized_delay}.')
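The slowmode change above relies on `time.humanize_delta` now accepting a bare `seconds` argument. For illustration only, a duration in seconds can be humanized with dateutil by diffing two datetimes, which is roughly what such a helper has to do (the project's implementation is more general):
import datetime
from dateutil.relativedelta import relativedelta

def humanize_seconds(seconds: int, max_units: int = 2) -> str:
    # Diffing two datetimes lets relativedelta carry seconds up into minutes, hours, days, etc.
    anchor = datetime.datetime(2022, 1, 1)
    delta = relativedelta(anchor + datetime.timedelta(seconds=seconds), anchor)
    parts = []
    for unit in ("years", "months", "days", "hours", "minutes", "seconds"):
        value = getattr(delta, unit)
        if value:
            parts.append(f"{value} {unit.rstrip('s') if value == 1 else unit}")
        if len(parts) == max_units:
            break
    return " ".join(parts) or "0 seconds"

# humanize_seconds(21600) == "6 hours"  (the SLOWMODE_MAX_DELAY above)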
diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py
index 99bbd8721..985cc6eb1 100644
--- a/bot/exts/moderation/stream.py
+++ b/bot/exts/moderation/stream.py
@@ -14,9 +14,8 @@ from bot.constants import (
from bot.converters import Expiry
from bot.log import get_logger
from bot.pagination import LinePaginator
-from bot.utils import scheduling
+from bot.utils import scheduling, time
from bot.utils.members import get_or_fetch_member
-from bot.utils.time import discord_timestamp, format_infraction_with_duration
log = get_logger(__name__)
@@ -131,11 +130,15 @@ class Stream(commands.Cog):
await member.add_roles(discord.Object(Roles.video), reason="Temporary streaming access granted")
- await ctx.send(f"{Emojis.check_mark} {member.mention} can now stream until {discord_timestamp(duration)}.")
+ await ctx.send(f"{Emojis.check_mark} {member.mention} can now stream until {time.discord_timestamp(duration)}.")
# Convert here for nicer logging
- revoke_time = format_infraction_with_duration(str(duration))
- log.debug(f"Successfully gave {member} ({member.id}) permission to stream until {revoke_time}.")
+ humanized_duration = time.humanize_delta(duration, arrow.utcnow(), max_units=2)
+ end_time = duration.strftime("%Y-%m-%d %H:%M:%S")
+ log.debug(
+ f"Successfully gave {member} ({member.id}) permission "
+ f"to stream for {humanized_duration} (until {end_time})."
+ )
@commands.command(aliases=("pstream",))
@commands.has_any_role(*MODERATION_ROLES)
diff --git a/bot/exts/moderation/verification.py b/bot/exts/moderation/verification.py
index ed5571d2a..37338d19c 100644
--- a/bot/exts/moderation/verification.py
+++ b/bot/exts/moderation/verification.py
@@ -5,9 +5,7 @@ from discord.ext.commands import Cog, Context, command, has_any_role
from bot import constants
from bot.bot import Bot
-from bot.decorators import in_whitelist
from bot.log import get_logger
-from bot.utils.checks import InWhitelistCheckFailure
log = get_logger(__name__)
@@ -29,11 +27,11 @@ You can find a copy of our rules for reference at <https://pythondiscord.com/pag
Additionally, if you'd like to receive notifications for the announcements \
we post in <#{constants.Channels.announcements}>
-from time to time, you can send `!subscribe` to <#{constants.Channels.bot_commands}> at any time \
+from time to time, you can send `{constants.Bot.prefix}subscribe` to <#{constants.Channels.bot_commands}> at any time \
to assign yourself the **Announcements** role. We'll mention this role every time we make an announcement.
-If you'd like to unsubscribe from the announcement notifications, simply send `!unsubscribe` to \
-<#{constants.Channels.bot_commands}>.
+If you'd like to unsubscribe from the announcement notifications, simply send `{constants.Bot.prefix}subscribe` to \
+<#{constants.Channels.bot_commands}> and click the role again!
To introduce you to our community, we've made the following video:
https://youtu.be/ZH26PuX3re0
@@ -61,11 +59,9 @@ async def safe_dm(coro: t.Coroutine) -> None:
class Verification(Cog):
"""
- User verification and role management.
+ User verification.
Statistics are collected in the 'verification.' namespace.
-
- Additionally, this cog offers the !subscribe and !unsubscribe commands,
"""
def __init__(self, bot: Bot) -> None:
@@ -108,67 +104,8 @@ class Verification(Cog):
log.exception("DM dispatch failed on unexpected error code")
# endregion
- # region: subscribe commands
-
- @command(name='subscribe')
- @in_whitelist(channels=(constants.Channels.bot_commands,))
- async def subscribe_command(self, ctx: Context, *_) -> None: # We don't actually care about the args
- """Subscribe to announcement notifications by assigning yourself the role."""
- has_role = False
-
- for role in ctx.author.roles:
- if role.id == constants.Roles.announcements:
- has_role = True
- break
-
- if has_role:
- await ctx.send(f"{ctx.author.mention} You're already subscribed!")
- return
-
- log.debug(f"{ctx.author} called !subscribe. Assigning the 'Announcements' role.")
- await ctx.author.add_roles(discord.Object(constants.Roles.announcements), reason="Subscribed to announcements")
-
- log.trace(f"Deleting the message posted by {ctx.author}.")
-
- await ctx.send(
- f"{ctx.author.mention} Subscribed to <#{constants.Channels.announcements}> notifications.",
- )
-
- @command(name='unsubscribe')
- @in_whitelist(channels=(constants.Channels.bot_commands,))
- async def unsubscribe_command(self, ctx: Context, *_) -> None: # We don't actually care about the args
- """Unsubscribe from announcement notifications by removing the role from yourself."""
- has_role = False
-
- for role in ctx.author.roles:
- if role.id == constants.Roles.announcements:
- has_role = True
- break
-
- if not has_role:
- await ctx.send(f"{ctx.author.mention} You're already unsubscribed!")
- return
-
- log.debug(f"{ctx.author} called !unsubscribe. Removing the 'Announcements' role.")
- await ctx.author.remove_roles(
- discord.Object(constants.Roles.announcements), reason="Unsubscribed from announcements"
- )
-
- log.trace(f"Deleting the message posted by {ctx.author}.")
-
- await ctx.send(
- f"{ctx.author.mention} Unsubscribed from <#{constants.Channels.announcements}> notifications."
- )
-
- # endregion
# region: miscellaneous
- # This cannot be static (must have a __func__ attribute).
- async def cog_command_error(self, ctx: Context, error: Exception) -> None:
- """Check for & ignore any InWhitelistCheckFailure."""
- if isinstance(error, InWhitelistCheckFailure):
- error.handled = True
-
@command(name='verify')
@has_any_role(*constants.MODERATION_ROLES)
async def perform_manual_verification(self, ctx: Context, user: discord.Member) -> None:
diff --git a/bot/exts/moderation/voice_gate.py b/bot/exts/moderation/voice_gate.py
index 31799ec73..d6b8f1239 100644
--- a/bot/exts/moderation/voice_gate.py
+++ b/bot/exts/moderation/voice_gate.py
@@ -10,7 +10,7 @@ from discord.ext.commands import Cog, Context, command
from bot.api import ResponseCodeError
from bot.bot import Bot
-from bot.constants import Channels, Event, MODERATION_ROLES, Roles, VoiceGate as GateConf
+from bot.constants import Channels, MODERATION_ROLES, Roles, VoiceGate as GateConf
from bot.decorators import has_no_roles, in_whitelist
from bot.exts.moderation.modlog import ModLog
from bot.log import get_logger
@@ -30,7 +30,7 @@ FAILED_MESSAGE = (
MESSAGE_FIELD_MAP = {
"joined_at": f"have been on the server for less than {GateConf.minimum_days_member} days",
- "voice_banned": "have an active voice ban infraction",
+ "voice_gate_blocked": "have an active voice infraction",
"total_messages": f"have sent less than {GateConf.minimum_messages} messages",
"activity_blocks": f"have been active for fewer than {GateConf.minimum_activity_blocks} ten-minute blocks",
}
@@ -170,9 +170,10 @@ class VoiceGate(Cog):
ctx.author.joined_at > arrow.utcnow() - timedelta(days=GateConf.minimum_days_member)
),
"total_messages": data["total_messages"] < GateConf.minimum_messages,
- "voice_banned": data["voice_banned"],
- "activity_blocks": data["activity_blocks"] < GateConf.minimum_activity_blocks
+ "voice_gate_blocked": data["voice_gate_blocked"],
+ "activity_blocks": data["activity_blocks"] < GateConf.minimum_activity_blocks,
}
+
failed = any(checks.values())
failed_reasons = [MESSAGE_FIELD_MAP[key] for key, value in checks.items() if value is True]
[self.bot.stats.incr(f"voice_gate.failed.{key}") for key, value in checks.items() if value is True]
@@ -190,7 +191,6 @@ class VoiceGate(Cog):
await ctx.channel.send(ctx.author.mention, embed=embed)
return
- self.mod_log.ignore(Event.member_update, ctx.author.id)
embed = discord.Embed(
title="Voice gate passed",
description="You have been granted permission to use voice channels in Python Discord.",
@@ -237,10 +237,6 @@ class VoiceGate(Cog):
log.trace(f"Excluding moderator message {message.id} from deletion in #{message.channel}.")
return
- # Ignore deleted voice verification messages
- if ctx.command is not None and ctx.command.name == "voice_verify":
- self.mod_log.ignore(Event.message_delete, message.id)
-
with suppress(discord.NotFound):
await message.delete()
diff --git a/bot/exts/moderation/watchchannels/_watchchannel.py b/bot/exts/moderation/watchchannels/_watchchannel.py
index 34d445912..ee9b6ba45 100644
--- a/bot/exts/moderation/watchchannels/_watchchannel.py
+++ b/bot/exts/moderation/watchchannels/_watchchannel.py
@@ -18,9 +18,8 @@ from bot.exts.filters.webhook_remover import WEBHOOK_URL_RE
from bot.exts.moderation.modlog import ModLog
from bot.log import CustomLogger, get_logger
from bot.pagination import LinePaginator
-from bot.utils import CogABCMeta, messages, scheduling
+from bot.utils import CogABCMeta, messages, scheduling, time
from bot.utils.members import get_or_fetch_member
-from bot.utils.time import get_time_delta
log = get_logger(__name__)
@@ -286,7 +285,7 @@ class WatchChannel(metaclass=CogABCMeta):
actor = actor.display_name if actor else self.watched_users[user_id]['actor']
inserted_at = self.watched_users[user_id]['inserted_at']
- time_delta = get_time_delta(inserted_at)
+ time_delta = time.format_relative(inserted_at)
reason = self.watched_users[user_id]['reason']
@@ -360,7 +359,7 @@ class WatchChannel(metaclass=CogABCMeta):
if member:
line += f" ({member.name}#{member.discriminator})"
inserted_at = user_data['inserted_at']
- line += f", added {get_time_delta(inserted_at)}"
+ line += f", added {time.format_relative(inserted_at)}"
if not member: # Cross off users who left the server.
line = f"~~{line}~~"
list_data["info"][user_id] = line
diff --git a/bot/exts/moderation/watchchannels/bigbrother.py b/bot/exts/moderation/watchchannels/bigbrother.py
index ab37b1b80..31b106a20 100644
--- a/bot/exts/moderation/watchchannels/bigbrother.py
+++ b/bot/exts/moderation/watchchannels/bigbrother.py
@@ -22,7 +22,7 @@ class BigBrother(WatchChannel, Cog, name="Big Brother"):
destination=Channels.big_brother_logs,
webhook_id=Webhooks.big_brother,
api_endpoint='bot/infractions',
- api_default_params={'active': 'true', 'type': 'watch', 'ordering': '-inserted_at'},
+ api_default_params={'active': 'true', 'type': 'watch', 'ordering': '-inserted_at', 'limit': 10_000},
logger=log
)
diff --git a/bot/exts/recruitment/talentpool/_cog.py b/bot/exts/recruitment/talentpool/_cog.py
index 2fafaec97..0554bf37a 100644
--- a/bot/exts/recruitment/talentpool/_cog.py
+++ b/bot/exts/recruitment/talentpool/_cog.py
@@ -17,7 +17,6 @@ from bot.log import get_logger
from bot.pagination import LinePaginator
from bot.utils import scheduling, time
from bot.utils.members import get_or_fetch_member
-from bot.utils.time import get_time_delta
AUTOREVIEW_ENABLED_KEY = "autoreview_enabled"
REASON_MAX_CHARS = 1000
@@ -181,7 +180,7 @@ class TalentPool(Cog, name="Talentpool"):
if member:
line += f" ({member.name}#{member.discriminator})"
inserted_at = user_data['inserted_at']
- line += f", added {get_time_delta(inserted_at)}"
+ line += f", added {time.format_relative(inserted_at)}"
if not member: # Cross off users who left the server.
line = f"~~{line}~~"
if user_data['reviewed']:
@@ -260,7 +259,7 @@ class TalentPool(Cog, name="Talentpool"):
return
if len(reason) > REASON_MAX_CHARS:
- await ctx.send(f":x: Maxiumum allowed characters for the reason is {REASON_MAX_CHARS}.")
+ await ctx.send(f":x: Maximum allowed characters for the reason is {REASON_MAX_CHARS}.")
return
# Manual request with `raise_for_status` as False because we want the actual response
@@ -445,7 +444,7 @@ class TalentPool(Cog, name="Talentpool"):
async def edit_end_reason_command(self, ctx: Context, nomination_id: int, *, reason: str) -> None:
"""Edits the unnominate reason for the nomination with the given `id`."""
if len(reason) > REASON_MAX_CHARS:
- await ctx.send(f":x: Maxiumum allowed characters for the end reason is {REASON_MAX_CHARS}.")
+ await ctx.send(f":x: Maximum allowed characters for the end reason is {REASON_MAX_CHARS}.")
return
try:
@@ -483,12 +482,9 @@ class TalentPool(Cog, name="Talentpool"):
@has_any_role(*MODERATION_ROLES)
async def get_review(self, ctx: Context, user_id: int) -> None:
"""Get the user's review as a markdown file."""
- review = (await self.reviewer.make_review(user_id))[0]
- if review:
- file = discord.File(StringIO(review), f"{user_id}_review.md")
- await ctx.send(file=file)
- else:
- await ctx.send(f"There doesn't appear to be an active nomination for {user_id}")
+ review, _, _ = await self.reviewer.make_review(user_id)
+ file = discord.File(StringIO(review), f"{user_id}_review.md")
+ await ctx.send(file=file)
@nomination_group.command(aliases=('review',))
@has_any_role(*MODERATION_ROLES)
@@ -501,7 +497,7 @@ class TalentPool(Cog, name="Talentpool"):
await ctx.message.add_reaction(Emojis.check_mark)
@Cog.listener()
- async def on_member_ban(self, guild: Guild, user: Union[MemberOrUser]) -> None:
+ async def on_member_ban(self, guild: Guild, user: MemberOrUser) -> None:
"""Remove `user` from the talent pool after they are banned."""
await self.end_nomination(user.id, "User was banned.")
@@ -516,6 +512,9 @@ class TalentPool(Cog, name="Talentpool"):
if payload.channel_id != Channels.nomination_voting:
return
+ if payload.user_id == self.bot.user.id:
+ return
+
message: PartialMessage = self.bot.get_channel(payload.channel_id).get_partial_message(payload.message_id)
emoji = str(payload.emoji)
@@ -562,7 +561,7 @@ class TalentPool(Cog, name="Talentpool"):
actor = await get_or_fetch_member(guild, actor_id)
reason = site_entry["reason"] or "*None*"
- created = time.format_infraction(site_entry["inserted_at"])
+ created = time.discord_timestamp(site_entry["inserted_at"])
entries.append(
f"Actor: {actor.mention if actor else actor_id}\nCreated: {created}\nReason: {reason}"
)
@@ -571,7 +570,7 @@ class TalentPool(Cog, name="Talentpool"):
active = nomination_object["active"]
- start_date = time.format_infraction(nomination_object["inserted_at"])
+ start_date = time.discord_timestamp(nomination_object["inserted_at"])
if active:
lines = textwrap.dedent(
f"""
@@ -585,7 +584,7 @@ class TalentPool(Cog, name="Talentpool"):
"""
)
else:
- end_date = time.format_infraction(nomination_object["ended_at"])
+ end_date = time.discord_timestamp(nomination_object["ended_at"])
lines = textwrap.dedent(
f"""
===============
diff --git a/bot/exts/recruitment/talentpool/_review.py b/bot/exts/recruitment/talentpool/_review.py
index d880c524c..b4d177622 100644
--- a/bot/exts/recruitment/talentpool/_review.py
+++ b/bot/exts/recruitment/talentpool/_review.py
@@ -10,17 +10,17 @@ from typing import List, Optional, Union
import arrow
from dateutil.parser import isoparse
-from discord import Embed, Emoji, Member, Message, NoMoreItems, PartialMessage, TextChannel
+from discord import Embed, Emoji, Member, Message, NoMoreItems, NotFound, PartialMessage, TextChannel
from discord.ext.commands import Context
from bot.api import ResponseCodeError
from bot.bot import Bot
-from bot.constants import Channels, Colours, Emojis, Guild
+from bot.constants import Channels, Colours, Emojis, Guild, Roles
from bot.log import get_logger
+from bot.utils import time
from bot.utils.members import get_or_fetch_member
from bot.utils.messages import count_unique_users_reaction, pin_no_system_message
from bot.utils.scheduling import Scheduler
-from bot.utils.time import get_time_delta, time_since
if typing.TYPE_CHECKING:
from bot.exts.recruitment.talentpool._cog import TalentPool
@@ -36,9 +36,8 @@ MAX_MESSAGE_SIZE = 2000
MAX_EMBED_SIZE = 4000
# Regex for finding the first message of a nomination, and extracting the nominee.
-# Historic nominations will have 2 role mentions at the start, new ones won't, optionally match for this.
NOMINATION_MESSAGE_REGEX = re.compile(
- r"(?:<@&\d+> <@&\d+>\n)*?<@!?(\d+?)> \(.+#\d{4}\) for Helper!\n\n\*\*Nominated by:\*\*",
+ r"<@!?(\d+)> \(.+#\d{4}\) for Helper!\n\n",
re.MULTILINE
)
@@ -78,14 +77,14 @@ class Reviewer:
async def post_review(self, user_id: int, update_database: bool) -> None:
"""Format the review of a user and post it to the nomination voting channel."""
- review, reviewed_emoji = await self.make_review(user_id)
- if not review:
+ review, reviewed_emoji, nominee = await self.make_review(user_id)
+ if not nominee:
return
guild = self.bot.get_guild(Guild.id)
channel = guild.get_channel(Channels.nomination_voting)
- log.trace(f"Posting the review of {user_id}")
+ log.trace(f"Posting the review of {nominee} ({nominee.id})")
messages = await self._bulk_send(channel, review)
await pin_no_system_message(messages[0])
@@ -95,12 +94,17 @@ class Reviewer:
for reaction in (reviewed_emoji, "\N{THUMBS UP SIGN}", "\N{THUMBS DOWN SIGN}"):
await last_message.add_reaction(reaction)
+ thread = await last_message.create_thread(
+ name=f"Nomination - {nominee}",
+ )
+ await thread.send(fr"<@&{Roles.mod_team}> <@&{Roles.admins}>")
+
if update_database:
nomination = self._pool.cache.get(user_id)
await self.bot.api_client.patch(f"bot/nominations/{nomination['id']}", json={"reviewed": True})
- async def make_review(self, user_id: int) -> typing.Tuple[str, Optional[Emoji]]:
- """Format a generic review of a user and return it with the reviewed emoji."""
+ async def make_review(self, user_id: int) -> typing.Tuple[str, Optional[Emoji], Optional[Member]]:
+ """Format a generic review of a user and return it with the reviewed emoji and the user themselves."""
log.trace(f"Formatting the review of {user_id}")
# Since `cache` is a defaultdict, we should take care
@@ -110,17 +114,17 @@ class Reviewer:
nomination = self._pool.cache.get(user_id)
if not nomination:
log.trace(f"There doesn't appear to be an active nomination for {user_id}")
- return "", None
+ return f"There doesn't appear to be an active nomination for {user_id}", None, None
guild = self.bot.get_guild(Guild.id)
- member = await get_or_fetch_member(guild, user_id)
+ nominee = await get_or_fetch_member(guild, user_id)
- if not member:
+ if not nominee:
return (
f"I tried to review the user with ID `{user_id}`, but they don't appear to be on the server :pensive:"
- ), None
+ ), None, None
- opening = f"{member.mention} ({member}) for Helper!"
+ opening = f"{nominee.mention} ({nominee}) for Helper!"
current_nominations = "\n\n".join(
f"**<@{entry['actor']}>:** {entry['reason'] or '*no reason given*'}"
@@ -128,7 +132,7 @@ class Reviewer:
)
current_nominations = f"**Nominated by:**\n{current_nominations}"
- review_body = await self._construct_review_body(member)
+ review_body = await self._construct_review_body(nominee)
reviewed_emoji = self._random_ducky(guild)
vote_request = (
@@ -138,7 +142,7 @@ class Reviewer:
)
review = "\n\n".join((opening, current_nominations, review_body, vote_request))
- return review, reviewed_emoji
+ return review, reviewed_emoji, nominee
async def archive_vote(self, message: PartialMessage, passed: bool) -> None:
"""Archive this vote to #nomination-archive."""
@@ -210,8 +214,21 @@ class Reviewer:
colour=colour
))
+ # Thread channel IDs are the same as the message ID of the parent message.
+ nomination_thread = message.guild.get_thread(message.id)
+ if not nomination_thread:
+ try:
+ nomination_thread = await message.guild.fetch_channel(message.id)
+ except NotFound:
+ log.warning(f"Could not find a thread linked to {message.channel.id}-{message.id}")
+ return
+
for message_ in messages:
- await message_.delete()
+ with contextlib.suppress(NotFound):
+ await message_.delete()
+
+ with contextlib.suppress(NotFound):
+ await nomination_thread.edit(archived=True)
async def _construct_review_body(self, member: Member) -> str:
"""Formats the body of the nomination, with details of activity, infractions, and previous nominations."""
@@ -256,7 +273,7 @@ class Reviewer:
last_channel = user_activity["top_channel_activity"][-1]
channels += f", and {last_channel[1]} in {last_channel[0]}"
- joined_at_formatted = time_since(member.joined_at)
+ joined_at_formatted = time.format_relative(member.joined_at)
review = (
f"{member.name} joined the server **{joined_at_formatted}**"
f" and has **{messages} messages**{channels}."
@@ -304,7 +321,7 @@ class Reviewer:
infractions += ", with the last infraction issued "
# Infractions were ordered by time since insertion descending.
- infractions += get_time_delta(infraction_list[0]['inserted_at'])
+ infractions += time.format_relative(infraction_list[0]['inserted_at'])
return f"They have {infractions}."
@@ -348,7 +365,7 @@ class Reviewer:
nomination_times = f"{num_entries} times" if num_entries > 1 else "once"
rejection_times = f"{len(history)} times" if len(history) > 1 else "once"
- end_time = time_since(isoparse(history[0]['ended_at']))
+ end_time = time.format_relative(history[0]['ended_at'])
review = (
f"They were nominated **{nomination_times}** before"
@@ -360,10 +377,10 @@ class Reviewer:
@staticmethod
def _random_ducky(guild: Guild) -> Union[Emoji, str]:
- """Picks a random ducky emoji. If no duckies found returns :eyes:."""
+ """Picks a random ducky emoji. If no duckies found returns 👀."""
duckies = [emoji for emoji in guild.emojis if emoji.name.startswith("ducky")]
if not duckies:
- return ":eyes:"
+ return "\N{EYES}"
return random.choice(duckies)
@staticmethod
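The archive step above leans on the fact that a thread's channel ID equals the ID of the message it was created from, so the nomination thread can be recovered from the vote message alone. A minimal sketch of that cache-then-fetch lookup pattern (not the reviewer's exact code):
from typing import Optional

import discord

async def find_thread_for_message(guild: discord.Guild, message_id: int) -> Optional[discord.Thread]:
    # Try the cache first; fall back to an API call if the thread isn't cached (e.g. it is archived).
    thread = guild.get_thread(message_id)
    if thread is not None:
        return thread
    try:
        channel = await guild.fetch_channel(message_id)
    except discord.NotFound:
        return None
    return channel if isinstance(channel, discord.Thread) else None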
diff --git a/bot/exts/utils/bot.py b/bot/exts/utils/bot.py
index 788692777..8f0094bc9 100644
--- a/bot/exts/utils/bot.py
+++ b/bot/exts/utils/bot.py
@@ -1,7 +1,6 @@
-from contextlib import suppress
from typing import Optional
-from discord import Embed, Forbidden, TextChannel, Thread
+from discord import Embed, TextChannel
from discord.ext.commands import Cog, Context, command, group, has_any_role
from bot.bot import Bot
@@ -17,20 +16,6 @@ class BotCog(Cog, name="Bot"):
def __init__(self, bot: Bot):
self.bot = bot
- @Cog.listener()
- async def on_thread_join(self, thread: Thread) -> None:
- """
- Try to join newly created threads.
-
- Despite the event name being misleading, this is dispatched when new threads are created.
- """
- if thread.me:
- # We have already joined this thread
- return
-
- with suppress(Forbidden):
- await thread.join()
-
@group(invoke_without_command=True, name="bot", hidden=True)
async def botinfo_group(self, ctx: Context) -> None:
"""Bot informational commands."""
diff --git a/bot/exts/utils/reminders.py b/bot/exts/utils/reminders.py
index 86e4505fa..ad82d49c9 100644
--- a/bot/exts/utils/reminders.py
+++ b/bot/exts/utils/reminders.py
@@ -13,13 +13,12 @@ from bot.constants import Guild, Icons, MODERATION_ROLES, POSITIVE_REPLIES, Role
from bot.converters import Duration, UnambiguousUser
from bot.log import get_logger
from bot.pagination import LinePaginator
-from bot.utils import scheduling
+from bot.utils import scheduling, time
from bot.utils.checks import has_any_role_check, has_no_roles_check
from bot.utils.lock import lock_arg
from bot.utils.members import get_or_fetch_member
from bot.utils.messages import send_denial
from bot.utils.scheduling import Scheduler
-from bot.utils.time import TimestampFormats, discord_timestamp
log = get_logger(__name__)
@@ -67,20 +66,19 @@ class Reminders(Cog):
else:
self.schedule_reminder(reminder)
- def ensure_valid_reminder(self, reminder: dict) -> t.Tuple[bool, discord.User, discord.TextChannel]:
- """Ensure reminder author and channel can be fetched otherwise delete the reminder."""
- user = self.bot.get_user(reminder['author'])
+ def ensure_valid_reminder(self, reminder: dict) -> t.Tuple[bool, discord.TextChannel]:
+ """Ensure reminder channel can be fetched otherwise delete the reminder."""
channel = self.bot.get_channel(reminder['channel_id'])
is_valid = True
- if not user or not channel:
+ if not channel:
is_valid = False
log.info(
f"Reminder {reminder['id']} invalid: "
- f"User {reminder['author']}={user}, Channel {reminder['channel_id']}={channel}."
+ f"Channel {reminder['channel_id']}={channel}."
)
scheduling.create_task(self.bot.api_client.delete(f"bot/reminders/{reminder['id']}"))
- return is_valid, user, channel
+ return is_valid, channel
@staticmethod
async def _send_confirmation(
@@ -169,9 +167,9 @@ class Reminders(Cog):
self.schedule_reminder(reminder)
@lock_arg(LOCK_NAMESPACE, "reminder", itemgetter("id"), raise_error=True)
- async def send_reminder(self, reminder: dict, expected_time: datetime = None) -> None:
+ async def send_reminder(self, reminder: dict, expected_time: t.Optional[time.Timestamp] = None) -> None:
"""Send the reminder."""
- is_valid, user, channel = self.ensure_valid_reminder(reminder)
+ is_valid, channel = self.ensure_valid_reminder(reminder)
if not is_valid:
# No need to cancel the task too; it'll simply be done once this coroutine returns.
return
@@ -207,14 +205,14 @@ class Reminders(Cog):
f"There was an error when trying to reply to a reminder invocation message, {e}, "
"fall back to using jump_url"
)
- await channel.send(content=f"{user.mention} {additional_mentions}", embed=embed)
+ await channel.send(content=f"<@{reminder['author']}> {additional_mentions}", embed=embed)
log.debug(f"Deleting reminder #{reminder['id']} (the user has been reminded).")
await self.bot.api_client.delete(f"bot/reminders/{reminder['id']}")
@group(name="remind", aliases=("reminder", "reminders", "remindme"), invoke_without_command=True)
async def remind_group(
- self, ctx: Context, mentions: Greedy[ReminderMention], expiration: Duration, *, content: str
+ self, ctx: Context, mentions: Greedy[ReminderMention], expiration: Duration, *, content: t.Optional[str] = None
) -> None:
"""
Commands for managing your reminders.
@@ -234,7 +232,7 @@ class Reminders(Cog):
@remind_group.command(name="new", aliases=("add", "create"))
async def new_reminder(
- self, ctx: Context, mentions: Greedy[ReminderMention], expiration: Duration, *, content: str
+ self, ctx: Context, mentions: Greedy[ReminderMention], expiration: Duration, *, content: t.Optional[str] = None
) -> None:
"""
Set yourself a simple reminder.
@@ -283,6 +281,20 @@ class Reminders(Cog):
mention_ids = [mention.id for mention in mentions]
+ # If `content` isn't provided then we try to get message content of a replied message
+ if not content:
+ if reference := ctx.message.reference:
+ if isinstance((resolved_message := reference.resolved), discord.Message):
+ content = resolved_message.content
+ # If we weren't able to get the content of a replied message
+ if content is None:
+ await send_denial(ctx, "Your reminder must have content and/or reply to a message.")
+ return
+
+ # If the replied message has no content (e.g. only attachments/embeds)
+ if content == "":
+ content = "See referenced message."
+
# Now we can attempt to actually set the reminder.
reminder = await self.bot.api_client.post(
'bot/reminders',
@@ -296,7 +308,8 @@ class Reminders(Cog):
}
)
- mention_string = f"Your reminder will arrive on {discord_timestamp(expiration, TimestampFormats.DAY_TIME)}"
+ formatted_time = time.discord_timestamp(expiration, time.TimestampFormats.DAY_TIME)
+ mention_string = f"Your reminder will arrive on {formatted_time}"
if mentions:
mention_string += f" and will mention {len(mentions)} other(s)"
@@ -333,8 +346,7 @@ class Reminders(Cog):
for content, remind_at, id_, mentions in reminders:
# Parse and humanize the time, make it pretty :D
- remind_datetime = isoparse(remind_at)
- time = discord_timestamp(remind_datetime, TimestampFormats.RELATIVE)
+ expiry = time.format_relative(remind_at)
mentions = ", ".join([
# Both Role and User objects have the `name` attribute
@@ -343,7 +355,7 @@ class Reminders(Cog):
mention_string = f"\n**Mentions:** {mentions}" if mentions else ""
text = textwrap.dedent(f"""
- **Reminder #{id_}:** *expires {time}* (ID: {id_}){mention_string}
+ **Reminder #{id_}:** *expires {expiry}* (ID: {id_}){mention_string}
{content}
""").strip()
diff --git a/bot/exts/utils/snekbox.py b/bot/exts/utils/snekbox.py
index fbfc58d0b..3c1009d2a 100644
--- a/bot/exts/utils/snekbox.py
+++ b/bot/exts/utils/snekbox.py
@@ -2,13 +2,14 @@ import asyncio
import contextlib
import datetime
import re
-import textwrap
from functools import partial
from signal import Signals
+from textwrap import dedent
from typing import Optional, Tuple
-from discord import HTTPException, Message, NotFound, Reaction, User
-from discord.ext.commands import Cog, Context, command, guild_only
+from botcore.regex import FORMATTED_CODE_REGEX, RAW_CODE_REGEX
+from discord import AllowedMentions, HTTPException, Message, NotFound, Reaction, User
+from discord.ext.commands import Cog, Command, Context, Converter, command, guild_only
from bot.bot import Bot
from bot.constants import Categories, Channels, Roles, URLs
@@ -20,33 +21,97 @@ from bot.utils.messages import wait_for_deletion
log = get_logger(__name__)
ESCAPE_REGEX = re.compile("[`\u202E\u200B]{3,}")
-FORMATTED_CODE_REGEX = re.compile(
- r"(?P<delim>(?P<block>```)|``?)" # code delimiter: 1-3 backticks; (?P=block) only matches if it's a block
- r"(?(block)(?:(?P<lang>[a-z]+)\n)?)" # if we're in a block, match optional language (only letters plus newline)
- r"(?:[ \t]*\n)*" # any blank (empty or tabs/spaces only) lines before the code
- r"(?P<code>.*?)" # extract all code inside the markup
- r"\s*" # any more whitespace before the end of the code markup
- r"(?P=delim)", # match the exact same delimiter from the start again
- re.DOTALL | re.IGNORECASE # "." also matches newlines, case insensitive
-)
-RAW_CODE_REGEX = re.compile(
- r"^(?:[ \t]*\n)*" # any blank (empty or tabs/spaces only) lines before the code
- r"(?P<code>.*?)" # extract all the rest as code
- r"\s*$", # any trailing whitespace until the end of the string
- re.DOTALL # "." also matches newlines
-)
+
+# The timeit command should only output the very last line, so all other output should be suppressed.
+# This will be used as the setup code along with any setup code provided.
+TIMEIT_SETUP_WRAPPER = """
+import atexit
+import sys
+from collections import deque
+
+if not hasattr(sys, "_setup_finished"):
+ class Writer(deque):
+ '''A single-item deque wrapper for sys.stdout that will return the last line when read() is called.'''
+
+ def __init__(self):
+ super().__init__(maxlen=1)
+
+ def write(self, string):
+ '''Append the line to the queue if it is not empty.'''
+ if string.strip():
+ self.append(string)
+
+ def read(self):
+ '''This method will be called when print() is called.
+
+ The queue is emptied as we don't need the output later.
+ '''
+ return self.pop()
+
+ def flush(self):
+ '''This method will be called eventually, but we don't need to do anything here.'''
+ pass
+
+ sys.stdout = Writer()
+
+ def print_last_line():
+ if sys.stdout: # If the deque is empty (i.e. an error happened), calling read() will raise an error
+ # Use sys.__stdout__ here because sys.stdout is set to a Writer() instance
+ print(sys.stdout.read(), file=sys.__stdout__)
+
+ atexit.register(print_last_line) # When exiting, print the last line (hopefully it will be the timeit output)
+ sys._setup_finished = None
+{setup}
+"""
MAX_PASTE_LEN = 10000
-# `!eval` command whitelists and blacklists.
-NO_EVAL_CHANNELS = (Channels.python_general,)
-NO_EVAL_CATEGORIES = ()
-EVAL_ROLES = (Roles.helpers, Roles.moderators, Roles.admins, Roles.owners, Roles.python_community, Roles.partners)
+# The Snekbox commands' whitelists and blacklists.
+NO_SNEKBOX_CHANNELS = (Channels.python_general,)
+NO_SNEKBOX_CATEGORIES = ()
+SNEKBOX_ROLES = (Roles.helpers, Roles.moderators, Roles.admins, Roles.owners, Roles.python_community, Roles.partners)
SIGKILL = 9
-REEVAL_EMOJI = '\U0001f501' # :repeat:
-REEVAL_TIMEOUT = 30
+REDO_EMOJI = '\U0001f501' # :repeat:
+REDO_TIMEOUT = 30
+
+
+class CodeblockConverter(Converter):
+ """Attempts to extract code from a codeblock, if provided."""
+
+ @classmethod
+ async def convert(cls, ctx: Context, code: str) -> list[str]:
+ """
+ Extract code from the Markdown, format it, and insert it into the code template.
+
+ If there is any code block, ignore text outside the code block.
+ Use the first code block, but prefer a fenced code block.
+ If there are several fenced code blocks, concatenate only the fenced code blocks.
+
+ Return a list of code blocks if any, otherwise return a list with a single string of code.
+ """
+ if match := list(FORMATTED_CODE_REGEX.finditer(code)):
+ blocks = [block for block in match if block.group("block")]
+
+ if len(blocks) > 1:
+ codeblocks = [block.group("code") for block in blocks]
+ info = "several code blocks"
+ else:
+ match = match[0] if len(blocks) == 0 else blocks[0]
+ code, block, lang, delim = match.group("code", "block", "lang", "delim")
+ codeblocks = [dedent(code)]
+ if block:
+ info = (f"'{lang}' highlighted" if lang else "plain") + " code block"
+ else:
+ info = f"{delim}-enclosed inline code"
+ else:
+ codeblocks = [dedent(RAW_CODE_REGEX.fullmatch(code).group("code"))]
+ info = "unformatted or badly formatted code"
+
+ code = "\n".join(codeblocks)
+ log.trace(f"Extracted {info} for evaluation:\n{code}")
+ return codeblocks
class Snekbox(Cog):
@@ -56,15 +121,19 @@ class Snekbox(Cog):
self.bot = bot
self.jobs = {}
- async def post_eval(self, code: str) -> dict:
+ async def post_job(self, code: str, *, args: Optional[list[str]] = None) -> dict:
"""Send a POST request to the Snekbox API to evaluate code and return the results."""
url = URLs.snekbox_eval_api
data = {"input": code}
+
+ if args is not None:
+ data["args"] = args
+
async with self.bot.http_session.post(url, json=data, raise_for_status=True) as resp:
return await resp.json()
async def upload_output(self, output: str) -> Optional[str]:
- """Upload the eval output to a paste service and return a URL to it if successful."""
+ """Upload the job's output to a paste service and return a URL to it if successful."""
log.trace("Uploading full output to paste service...")
if len(output) > MAX_PASTE_LEN:
@@ -73,49 +142,37 @@ class Snekbox(Cog):
return await send_to_paste_service(output, extension="txt")
@staticmethod
- def prepare_input(code: str) -> str:
+ def prepare_timeit_input(codeblocks: list[str]) -> tuple[str, list[str]]:
"""
- Extract code from the Markdown, format it, and insert it into the code template.
+ Join the codeblocks into a single string, then return the code and the arguments in a tuple.
- If there is any code block, ignore text outside the code block.
- Use the first code block, but prefer a fenced code block.
- If there are several fenced code blocks, concatenate only the fenced code blocks.
+ If there are multiple codeblocks, insert the first one into the wrapped setup code.
"""
- if match := list(FORMATTED_CODE_REGEX.finditer(code)):
- blocks = [block for block in match if block.group("block")]
+ args = ["-m", "timeit"]
+ setup = ""
+ if len(codeblocks) > 1:
+ setup = codeblocks.pop(0)
- if len(blocks) > 1:
- code = '\n'.join(block.group("code") for block in blocks)
- info = "several code blocks"
- else:
- match = match[0] if len(blocks) == 0 else blocks[0]
- code, block, lang, delim = match.group("code", "block", "lang", "delim")
- if block:
- info = (f"'{lang}' highlighted" if lang else "plain") + " code block"
- else:
- info = f"{delim}-enclosed inline code"
- else:
- code = RAW_CODE_REGEX.fullmatch(code).group("code")
- info = "unformatted or badly formatted code"
+ code = "\n".join(codeblocks)
- code = textwrap.dedent(code)
- log.trace(f"Extracted {info} for evaluation:\n{code}")
- return code
+ args.extend(["-s", TIMEIT_SETUP_WRAPPER.format(setup=setup)])
+
+ return code, args
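The args built here mirror the standard library's timeit command line: the wrapped setup goes to -s and the remaining code becomes the timed statement. Roughly, the job ends up running something like the following (an illustration of the intent, not the sandbox's actual invocation):
import subprocess
import sys

# Equivalent shape of the job: python -m timeit -s "<setup wrapper + first block>" "<remaining code>"
subprocess.run(
    [sys.executable, "-m", "timeit", "-s", "data = list(range(1000))", "sum(data)"],
    check=True,
)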
@staticmethod
- def get_results_message(results: dict) -> Tuple[str, str]:
+ def get_results_message(results: dict, job_name: str) -> Tuple[str, str]:
"""Return a user-friendly message and error corresponding to the process's return code."""
stdout, returncode = results["stdout"], results["returncode"]
- msg = f"Your eval job has completed with return code {returncode}"
+ msg = f"Your {job_name} job has completed with return code {returncode}"
error = ""
if returncode is None:
- msg = "Your eval job has failed"
+ msg = f"Your {job_name} job has failed"
error = stdout.strip()
elif returncode == 128 + SIGKILL:
- msg = "Your eval job timed out or ran out of memory"
+ msg = f"Your {job_name} job timed out or ran out of memory"
elif returncode == 255:
- msg = "Your eval job has failed"
+ msg = f"Your {job_name} job has failed"
error = "A fatal NsJail error occurred"
else:
# Try to append signal's name if one exists
@@ -144,8 +201,6 @@ class Snekbox(Cog):
Prepend each line with a line number. Truncate if there are over 10 lines or 1000 characters
and upload the full output to a paste service.
"""
- log.trace("Formatting output...")
-
output = output.rstrip("\n")
original_output = output # To be uploaded to a pasting service if needed
paste_link = None
@@ -185,19 +240,27 @@ class Snekbox(Cog):
return output, paste_link
- async def send_eval(self, ctx: Context, code: str) -> Message:
+ async def send_job(
+ self,
+ ctx: Context,
+ code: str,
+ *,
+ args: Optional[list[str]] = None,
+ job_name: str
+ ) -> Message:
"""
Evaluate code, format it, and send the output to the corresponding channel.
Return the bot response.
"""
async with ctx.typing():
- results = await self.post_eval(code)
- msg, error = self.get_results_message(results)
+ results = await self.post_job(code, args=args)
+ msg, error = self.get_results_message(results, job_name)
if error:
output, paste_link = error, None
else:
+ log.trace("Formatting output...")
output, paste_link = await self.format_output(results["stdout"])
icon = self.get_status_emoji(results)
@@ -205,7 +268,7 @@ class Snekbox(Cog):
if paste_link:
msg = f"{msg}\nFull output: {paste_link}"
- # Collect stats of eval fails + successes
+ # Collect stats of job fails + successes
if icon == ":x:":
self.bot.stats.incr("snekbox.python.fail")
else:
@@ -214,90 +277,93 @@ class Snekbox(Cog):
filter_cog = self.bot.get_cog("Filtering")
filter_triggered = False
if filter_cog:
- filter_triggered = await filter_cog.filter_eval(msg, ctx.message)
+ filter_triggered = await filter_cog.filter_snekbox_output(msg, ctx.message)
if filter_triggered:
response = await ctx.send("Attempt to circumvent filter detected. Moderator team has been alerted.")
else:
- response = await ctx.send(msg)
+ allowed_mentions = AllowedMentions(everyone=False, roles=False, users=[ctx.author])
+ response = await ctx.send(msg, allowed_mentions=allowed_mentions)
scheduling.create_task(wait_for_deletion(response, (ctx.author.id,)), event_loop=self.bot.loop)
- log.info(f"{ctx.author}'s job had a return code of {results['returncode']}")
+ log.info(f"{ctx.author}'s {job_name} job had a return code of {results['returncode']}")
return response
- async def continue_eval(self, ctx: Context, response: Message) -> Optional[str]:
+ async def continue_job(
+ self, ctx: Context, response: Message, command: Command
+ ) -> tuple[Optional[str], Optional[list[str]]]:
"""
- Check if the eval session should continue.
+ Check if the job's session should continue.
- Return the new code to evaluate or None if the eval session should be terminated.
+ If the code is to be re-evaluated, return the new code, and the args if the command is the timeit command.
+ Otherwise, return (None, None) to signal that the job's session should be terminated.
"""
- _predicate_eval_message_edit = partial(predicate_eval_message_edit, ctx)
- _predicate_emoji_reaction = partial(predicate_eval_emoji_reaction, ctx)
+ _predicate_message_edit = partial(predicate_message_edit, ctx)
+ _predicate_emoji_reaction = partial(predicate_emoji_reaction, ctx)
with contextlib.suppress(NotFound):
try:
_, new_message = await self.bot.wait_for(
'message_edit',
- check=_predicate_eval_message_edit,
- timeout=REEVAL_TIMEOUT
+ check=_predicate_message_edit,
+ timeout=REDO_TIMEOUT
)
- await ctx.message.add_reaction(REEVAL_EMOJI)
+ await ctx.message.add_reaction(REDO_EMOJI)
await self.bot.wait_for(
'reaction_add',
check=_predicate_emoji_reaction,
timeout=10
)
- code = await self.get_code(new_message)
- await ctx.message.clear_reaction(REEVAL_EMOJI)
+ code = await self.get_code(new_message, ctx.command)
+ await ctx.message.clear_reaction(REDO_EMOJI)
with contextlib.suppress(HTTPException):
await response.delete()
+ if code is None:
+ return None, None
+
except asyncio.TimeoutError:
- await ctx.message.clear_reaction(REEVAL_EMOJI)
- return None
+ await ctx.message.clear_reaction(REDO_EMOJI)
+ return None, None
+
+ codeblocks = await CodeblockConverter.convert(ctx, code)
- return code
+ if command is self.timeit_command:
+ return self.prepare_timeit_input(codeblocks)
+ else:
+ return "\n".join(codeblocks), None
+
+ return None, None
- async def get_code(self, message: Message) -> Optional[str]:
+ async def get_code(self, message: Message, command: Command) -> Optional[str]:
"""
Return the code from `message` to be evaluated.
- If the message is an invocation of the eval command, return the first argument or None if it
+ If the message is an invocation of the command, return the first argument or None if it
doesn't exist. Otherwise, return the full content of the message.
"""
log.trace(f"Getting context for message {message.id}.")
new_ctx = await self.bot.get_context(message)
- if new_ctx.command is self.eval_command:
- log.trace(f"Message {message.id} invokes eval command.")
+ if new_ctx.command is command:
+ log.trace(f"Message {message.id} invokes {command} command.")
split = message.content.split(maxsplit=1)
code = split[1] if len(split) > 1 else None
else:
- log.trace(f"Message {message.id} does not invoke eval command.")
+ log.trace(f"Message {message.id} does not invoke {command} command.")
code = message.content
return code
- @command(name="eval", aliases=("e",))
- @guild_only()
- @redirect_output(
- destination_channel=Channels.bot_commands,
- bypass_roles=EVAL_ROLES,
- categories=NO_EVAL_CATEGORIES,
- channels=NO_EVAL_CHANNELS,
- ping_user=False
- )
- async def eval_command(self, ctx: Context, *, code: str = None) -> None:
- """
- Run Python code and get the results.
-
- This command supports multiple lines of code, including code wrapped inside a formatted code
- block. Code can be re-evaluated by editing the original message within 10 seconds and
- clicking the reaction that subsequently appears.
-
- We've done our best to make this sandboxed, but do let us know if you manage to find an
- issue with it!
- """
+ async def run_job(
+ self,
+ job_name: str,
+ ctx: Context,
+ code: str,
+ *,
+ args: Optional[list[str]] = None,
+ ) -> None:
+ """Handles checks, stats and re-evaluation of a snekbox job."""
if ctx.author.id in self.jobs:
await ctx.send(
f"{ctx.author.mention} You've already got a job running - "
@@ -305,10 +371,6 @@ class Snekbox(Cog):
)
return
- if not code: # None or empty string
- await ctx.send_help(ctx.command)
- return
-
if Roles.helpers in (role.id for role in ctx.author.roles):
self.bot.stats.incr("snekbox_usages.roles.helpers")
else:
@@ -325,26 +387,74 @@ class Snekbox(Cog):
while True:
self.jobs[ctx.author.id] = datetime.datetime.now()
- code = self.prepare_input(code)
try:
- response = await self.send_eval(ctx, code)
+ response = await self.send_job(ctx, code, args=args, job_name=job_name)
finally:
del self.jobs[ctx.author.id]
- code = await self.continue_eval(ctx, response)
+ code, args = await self.continue_job(ctx, response, ctx.command)
if not code:
break
log.info(f"Re-evaluating code from message {ctx.message.id}:\n{code}")
+ @command(name="eval", aliases=("e",))
+ @guild_only()
+ @redirect_output(
+ destination_channel=Channels.bot_commands,
+ bypass_roles=SNEKBOX_ROLES,
+ categories=NO_SNEKBOX_CATEGORIES,
+ channels=NO_SNEKBOX_CHANNELS,
+ ping_user=False
+ )
+ async def eval_command(self, ctx: Context, *, code: CodeblockConverter) -> None:
+ """
+ Run Python code and get the results.
+
+ This command supports multiple lines of code, including code wrapped inside a formatted code
+ block. Code can be re-evaluated by editing the original message within 10 seconds and
+ clicking the reaction that subsequently appears.
+
+ We've done our best to make this sandboxed, but do let us know if you manage to find an
+ issue with it!
+ """
+ await self.run_job("eval", ctx, "\n".join(code))
+
+ @command(name="timeit", aliases=("ti",))
+ @guild_only()
+ @redirect_output(
+ destination_channel=Channels.bot_commands,
+ bypass_roles=SNEKBOX_ROLES,
+ categories=NO_SNEKBOX_CATEGORIES,
+ channels=NO_SNEKBOX_CHANNELS,
+ ping_user=False
+ )
+ async def timeit_command(self, ctx: Context, *, code: CodeblockConverter) -> None:
+ """
+ Profile Python code to find its execution time.
+
+ This command supports multiple lines of code, including code wrapped inside a formatted code
+ block. Code can be re-evaluated by editing the original message within 10 seconds and
+ clicking the reaction that subsequently appears.
+
+ If multiple formatted codeblocks are provided, the first one will be the setup code, which will
+ not be timed. The remaining codeblocks will be joined together and timed.
+
+ We've done our best to make this sandboxed, but do let us know if you manage to find an
+ issue with it!
+ """
+ code, args = self.prepare_timeit_input(code)
+
+ await self.run_job("timeit", ctx, code=code, args=args)
+
-def predicate_eval_message_edit(ctx: Context, old_msg: Message, new_msg: Message) -> bool:
+def predicate_message_edit(ctx: Context, old_msg: Message, new_msg: Message) -> bool:
"""Return True if the edited message is the context message and the content was indeed modified."""
return new_msg.id == ctx.message.id and old_msg.content != new_msg.content
-def predicate_eval_emoji_reaction(ctx: Context, reaction: Reaction, user: User) -> bool:
- """Return True if the reaction REEVAL_EMOJI was added by the context message author on this message."""
- return reaction.message.id == ctx.message.id and user.id == ctx.author.id and str(reaction) == REEVAL_EMOJI
+def predicate_emoji_reaction(ctx: Context, reaction: Reaction, user: User) -> bool:
+ """Return True if the reaction REDO_EMOJI was added by the context message author on this message."""
+ return reaction.message.id == ctx.message.id and user.id == ctx.author.id and str(reaction) == REDO_EMOJI
def setup(bot: Bot) -> None:
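
For context on the new timeit flow: `prepare_timeit_input` is referenced above but its body is not part of this hunk. A minimal sketch, assuming the helper splits the converted codeblocks into untimed setup code and the timed statement and builds `python -m timeit` style arguments (the exact names and flags here are an assumption, not the committed implementation):
```py
def prepare_timeit_input(codeblocks: list[str]) -> tuple[str, list[str]]:
    """Sketch: return (timed code, interpreter args); the first block is untimed setup."""
    # With more than one block, treat the first as setup (-s) and time the rest.
    setup = codeblocks.pop(0) if len(codeblocks) > 1 else ""
    code = "\n".join(codeblocks)
    # `python -m timeit -s SETUP STATEMENT` runs the stdlib timeit CLI inside the sandbox.
    args = ["-m", "timeit", "-s", setup]
    return code, args
```
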
diff --git a/bot/exts/utils/thread_bumper.py b/bot/exts/utils/thread_bumper.py
new file mode 100644
index 000000000..35057f1fe
--- /dev/null
+++ b/bot/exts/utils/thread_bumper.py
@@ -0,0 +1,147 @@
+import typing as t
+
+import discord
+from async_rediscache import RedisCache
+from discord.ext import commands
+
+from bot import constants
+from bot.bot import Bot
+from bot.log import get_logger
+from bot.pagination import LinePaginator
+from bot.utils import channel, scheduling
+
+log = get_logger(__name__)
+
+
+class ThreadBumper(commands.Cog):
+ """Cog that allow users to add the current thread to a list that get reopened on archive."""
+
+ # RedisCache[discord.Thread.id, "sentinel"]
+ threads_to_bump = RedisCache()
+
+ def __init__(self, bot: Bot):
+ self.bot = bot
+ self.init_task = scheduling.create_task(self.ensure_bumped_threads_are_active(), event_loop=self.bot.loop)
+
+ async def unarchive_threads_not_manually_archived(self, threads: list[discord.Thread]) -> None:
+ """
+ Iterate through and unarchive any threads that weren't manually archived recently.
+
+ This is done by extracting the manually archived threads from the audit log.
+
+ Only the last 200 thread_update logs are checked,
+ as this is assumed to be more than enough to cover bot downtime.
+ """
+ guild = self.bot.get_guild(constants.Guild.id)
+
+ recent_manually_archived_thread_ids = []
+ async for thread_update in guild.audit_logs(limit=200, action=discord.AuditLogAction.thread_update):
+ if getattr(thread_update.after, "archived", False):
+ recent_manually_archived_thread_ids.append(thread_update.target.id)
+
+ for thread in threads:
+ if thread.id in recent_manually_archived_thread_ids:
+ log.info(
+ "#%s (%d) was manually archived. Leaving archived, and removing from bumped threads.",
+ thread.name,
+ thread.id
+ )
+ await self.threads_to_bump.delete(thread.id)
+ else:
+ await thread.edit(archived=False)
+
+ async def ensure_bumped_threads_are_active(self) -> None:
+ """Ensure bumped threads are active, since threads could have been archived while the bot was down."""
+ await self.bot.wait_until_guild_available()
+
+ threads_to_maybe_bump = []
+ for thread_id, _ in await self.threads_to_bump.items():
+ try:
+ thread = await channel.get_or_fetch_channel(thread_id)
+ except discord.NotFound:
+ log.info("Thread %d has been deleted, removing from bumped threads.", thread_id)
+ await self.threads_to_bump.delete(thread_id)
+ continue
+
+ if thread.archived:
+ threads_to_maybe_bump.append(thread)
+
+ await self.unarchive_threads_not_manually_archived(threads_to_maybe_bump)
+
+ @commands.group(name="bump")
+ async def thread_bump_group(self, ctx: commands.Context) -> None:
+ """A group of commands to manage the bumping of threads."""
+ if not ctx.invoked_subcommand:
+ await ctx.send_help(ctx.command)
+
+ @thread_bump_group.command(name="add", aliases=("a",))
+ async def add_thread_to_bump_list(self, ctx: commands.Context, thread: t.Optional[discord.Thread]) -> None:
+ """Add a thread to the bump list."""
+ await self.init_task
+
+ if not thread:
+ if isinstance(ctx.channel, discord.Thread):
+ thread = ctx.channel
+ else:
+ raise commands.BadArgument("You must provide a thread, or run this command within a thread.")
+
+ if await self.threads_to_bump.contains(thread.id):
+ raise commands.BadArgument("This thread is already in the bump list.")
+
+ await self.threads_to_bump.set(thread.id, "sentinel")
+ await ctx.send(f":ok_hand:{thread.mention} has been added to the bump list.")
+
+ @thread_bump_group.command(name="remove", aliases=("r", "rem", "d", "del", "delete"))
+ async def remove_thread_from_bump_list(self, ctx: commands.Context, thread: t.Optional[discord.Thread]) -> None:
+ """Remove a thread from the bump list."""
+ await self.init_task
+
+ if not thread:
+ if isinstance(ctx.channel, discord.Thread):
+ thread = ctx.channel
+ else:
+ raise commands.BadArgument("You must provide a thread, or run this command within a thread.")
+
+ if not await self.threads_to_bump.contains(thread.id):
+ raise commands.BadArgument("This thread is not in the bump list.")
+
+ await self.threads_to_bump.delete(thread.id)
+ await ctx.send(f":ok_hand: {thread.mention} has been removed from the bump list.")
+
+ @thread_bump_group.command(name="list", aliases=("get",))
+ async def list_all_threads_in_bump_list(self, ctx: commands.Context) -> None:
+ """List all the threads in the bump list."""
+ await self.init_task
+
+ lines = [f"<#{k}>" for k, _ in await self.threads_to_bump.items()]
+ embed = discord.Embed(
+ title="Threads in the bump list",
+ colour=constants.Colours.blue
+ )
+ await LinePaginator.paginate(lines, ctx, embed)
+
+ @commands.Cog.listener()
+ async def on_thread_update(self, _: discord.Thread, after: discord.Thread) -> None:
+ """
+ Listen for thread updates and check if the thread has been archived.
+
+ If the thread has been archived, and is in the bump list, un-archive it.
+ """
+ await self.init_task
+
+ if not after.archived:
+ return
+
+ if await self.threads_to_bump.contains(after.id):
+ await self.unarchive_threads_not_manually_archived([after])
+
+ async def cog_check(self, ctx: commands.Context) -> bool:
+ """Only allow staff & partner roles to invoke the commands in this cog."""
+ return await commands.has_any_role(
+ *constants.STAFF_PARTNERS_COMMUNITY_ROLES
+ ).predicate(ctx)
+
+
+def setup(bot: Bot) -> None:
+ """Load the ThreadBumper cog."""
+ bot.add_cog(ThreadBumper(bot))
diff --git a/bot/exts/utils/utils.py b/bot/exts/utils/utils.py
index 821cebd8c..2a074788e 100644
--- a/bot/exts/utils/utils.py
+++ b/bot/exts/utils/utils.py
@@ -13,8 +13,7 @@ from bot.converters import Snowflake
from bot.decorators import in_whitelist
from bot.log import get_logger
from bot.pagination import LinePaginator
-from bot.utils import messages
-from bot.utils.time import time_since
+from bot.utils import messages, time
log = get_logger(__name__)
@@ -49,7 +48,7 @@ class Utils(Cog):
self.bot = bot
@command()
- @in_whitelist(channels=(Channels.bot_commands, Channels.discord_py), roles=STAFF_PARTNERS_COMMUNITY_ROLES)
+ @in_whitelist(channels=(Channels.bot_commands, Channels.discord_bots), roles=STAFF_PARTNERS_COMMUNITY_ROLES)
async def charinfo(self, ctx: Context, *, characters: str) -> None:
"""Shows you information on up to 50 unicode characters."""
match = re.match(r"<(a?):(\w+):(\d+)>", characters)
@@ -173,7 +172,7 @@ class Utils(Cog):
lines = []
for snowflake in snowflakes:
created_at = snowflake_time(snowflake)
- lines.append(f"**{snowflake}**\nCreated at {created_at} ({time_since(created_at)}).")
+ lines.append(f"**{snowflake}**\nCreated at {created_at} ({time.format_relative(created_at)}).")
await LinePaginator.paginate(
lines,
diff --git a/bot/log.py b/bot/log.py
index b3cecdcf2..100cd06f6 100644
--- a/bot/log.py
+++ b/bot/log.py
@@ -48,16 +48,17 @@ def setup() -> None:
logging.addLevelName(TRACE_LEVEL, "TRACE")
logging.setLoggerClass(CustomLogger)
+ root_log = get_logger()
+
format_string = "%(asctime)s | %(name)s | %(levelname)s | %(message)s"
log_format = logging.Formatter(format_string)
- log_file = Path("logs", "bot.log")
- log_file.parent.mkdir(exist_ok=True)
- file_handler = handlers.RotatingFileHandler(log_file, maxBytes=5242880, backupCount=7, encoding="utf8")
- file_handler.setFormatter(log_format)
-
- root_log = get_logger()
- root_log.addHandler(file_handler)
+ if constants.FILE_LOGS:
+ log_file = Path("logs", "bot.log")
+ log_file.parent.mkdir(exist_ok=True)
+ file_handler = handlers.RotatingFileHandler(log_file, maxBytes=5242880, backupCount=7, encoding="utf8")
+ file_handler.setFormatter(log_format)
+ root_log.addHandler(file_handler)
if "COLOREDLOGS_LEVEL_STYLES" not in os.environ:
coloredlogs.DEFAULT_LEVEL_STYLES = {
diff --git a/bot/monkey_patches.py b/bot/monkey_patches.py
index 23482f7c3..4840fa454 100644
--- a/bot/monkey_patches.py
+++ b/bot/monkey_patches.py
@@ -1,3 +1,4 @@
+import re
from datetime import timedelta
import arrow
@@ -7,6 +8,7 @@ from discord.ext import commands
from bot.log import get_logger
log = get_logger(__name__)
+MESSAGE_ID_RE = re.compile(r'(?P<message_id>[0-9]{15,20})$')
class Command(commands.Command):
@@ -50,3 +52,25 @@ def patch_typing() -> None:
pass
http.HTTPClient.send_typing = honeybadger_type
+
+
+class FixedPartialMessageConverter(commands.PartialMessageConverter):
+ """
+ Make the Message converter infer channelID from the given context if only a messageID is given.
+
+ Discord.py's Message converter is supposed to infer channelID based
+ on ctx.channel if only a messageID is given. A refactor commit (linked below), made
+ a few weeks before d.py's archival, broke this defined behaviour of the converter.
+ Currently, if only a messageID is given to the converter, it will only find that message
+ if it's in the bot's cache.
+
+ https://github.com/Rapptz/discord.py/commit/1a4e73d59932cdbe7bf2c281f25e32529fc7ae1f
+ """
+
+ @staticmethod
+ def _get_id_matches(ctx: commands.Context, argument: str) -> tuple[int, int, int]:
+ """Inserts ctx.channel.id before calling super method if argument is just a messageID."""
+ match = MESSAGE_ID_RE.match(argument)
+ if match:
+ argument = f"{ctx.channel.id}-{match.group('message_id')}"
+ return commands.PartialMessageConverter._get_id_matches(ctx, argument)
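
A standalone illustration (not part of the commit) of what the `MESSAGE_ID_RE` normalisation in `FixedPartialMessageConverter` does with a bare message ID; the IDs below are placeholders:
```py
import re

MESSAGE_ID_RE = re.compile(r'(?P<message_id>[0-9]{15,20})$')

def normalise(channel_id: int, argument: str) -> str:
    """Prefix a bare message ID with the invoking channel's ID, otherwise leave it untouched."""
    match = MESSAGE_ID_RE.match(argument)
    if match:
        return f"{channel_id}-{match.group('message_id')}"
    return argument

print(normalise(111111111111111111, "222222222222222222"))
# 111111111111111111-222222222222222222
print(normalise(111111111111111111, "333333333333333333-222222222222222222"))
# unchanged: the argument is already in channelID-messageID form
```
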
diff --git a/bot/resources/tags/contribute.md b/bot/resources/tags/contribute.md
index 070975646..50c5cd11f 100644
--- a/bot/resources/tags/contribute.md
+++ b/bot/resources/tags/contribute.md
@@ -7,6 +7,6 @@ Looking to contribute to Open Source Projects for the first time? Want to add a
• [Site](https://github.com/python-discord/site) - resources, guides, and more
**Where to start**
-1. Read our [contributing guidelines](https://pythondiscord.com/pages/guides/pydis-guides/contributing/)
+1. Read our [contribution guide](https://pythondiscord.com/pages/guides/pydis-guides/contributing/)
2. Chat with us in <#635950537262759947> if you're ready to jump in or have any questions
3. Open an issue or ask to be assigned to an issue to work on
diff --git a/bot/resources/tags/dictcomps.md b/bot/resources/tags/dictcomps.md
index 6c8018761..75fbe0f8a 100644
--- a/bot/resources/tags/dictcomps.md
+++ b/bot/resources/tags/dictcomps.md
@@ -11,4 +11,4 @@ One can use a dict comp to change an existing dictionary using its `items` metho
>>> {key.upper(): value * 2 for key, value in first_dict.items()}
{'I': 2, 'LOVE': 8, 'PYTHON': 12}
```
-For more information and examples, check out [PEP 274](https://www.python.org/dev/peps/pep-0274/)
+For more information and examples, check out [PEP 274](https://peps.python.org/pep-0274/)
diff --git a/bot/resources/tags/docstring.md b/bot/resources/tags/docstring.md
index 20043131e..6e9d9aa09 100644
--- a/bot/resources/tags/docstring.md
+++ b/bot/resources/tags/docstring.md
@@ -15,4 +15,4 @@ You can get the docstring by using the [`inspect.getdoc`](https://docs.python.or
For the last example, you can print it by doing this: `print(inspect.getdoc(greet))`.
-For more details about what a docstring is and its usage, check out this guide by [Real Python](https://realpython.com/documenting-python-code/#docstrings-background), or the [official docstring specification](https://www.python.org/dev/peps/pep-0257/#what-is-a-docstring).
+For more details about what a docstring is and its usage, check out this guide by [Real Python](https://realpython.com/documenting-python-code/#docstrings-background), or the [official docstring specification](https://peps.python.org/pep-0257/#what-is-a-docstring).
diff --git a/bot/resources/tags/enumerate.md b/bot/resources/tags/enumerate.md
index dd984af52..da9c86a36 100644
--- a/bot/resources/tags/enumerate.md
+++ b/bot/resources/tags/enumerate.md
@@ -10,4 +10,4 @@ into beautiful, _pythonic_ code:
for index, item in enumerate(my_list):
print(f"{index}: {item}")
```
-For more information, check out [the official docs](https://docs.python.org/3/library/functions.html#enumerate), or [PEP 279](https://www.python.org/dev/peps/pep-0279/).
+For more information, check out [the official docs](https://docs.python.org/3/library/functions.html#enumerate), or [PEP 279](https://peps.python.org/pep-0279/).
diff --git a/bot/resources/tags/faq.md b/bot/resources/tags/faq.md
new file mode 100644
index 000000000..e1c57b3a0
--- /dev/null
+++ b/bot/resources/tags/faq.md
@@ -0,0 +1,6 @@
+---
+embed:
+ title: "Frequently asked questions"
+---
+
+As the largest Python community on Discord, we get hundreds of questions every day. Many of these questions have been asked before. We've compiled a list of the most frequently asked questions along with their answers, which can be found on our [FAQ page](https://www.pythondiscord.com/pages/frequently-asked-questions/).
diff --git a/bot/resources/tags/indent.md b/bot/resources/tags/indent.md
index dec8407b0..4c3cdd126 100644
--- a/bot/resources/tags/indent.md
+++ b/bot/resources/tags/indent.md
@@ -16,9 +16,9 @@ The first line is not indented. The next two lines are indented to be inside of
**Indentation is used after:**
**1.** [Compound statements](https://docs.python.org/3/reference/compound_stmts.html) (eg. `if`, `while`, `for`, `try`, `with`, `def`, `class`, and their counterparts)
-**2.** [Continuation lines](https://www.python.org/dev/peps/pep-0008/#indentation)
+**2.** [Continuation lines](https://peps.python.org/pep-0008/#indentation)
**More Info**
-**1.** [Indentation style guide](https://www.python.org/dev/peps/pep-0008/#indentation)
-**2.** [Tabs or Spaces?](https://www.python.org/dev/peps/pep-0008/#tabs-or-spaces)
+**1.** [Indentation style guide](https://peps.python.org/pep-0008/#indentation)
+**2.** [Tabs or Spaces?](https://peps.python.org/pep-0008/#tabs-or-spaces)
**3.** [Official docs on indentation](https://docs.python.org/3/reference/lexical_analysis.html#indentation)
diff --git a/bot/resources/tags/off-topic-names.md b/bot/resources/tags/off-topic-names.md
new file mode 100644
index 000000000..5d0614aaa
--- /dev/null
+++ b/bot/resources/tags/off-topic-names.md
@@ -0,0 +1,10 @@
+**Off-topic channels**
+
+There are three off-topic channels:
+• <#291284109232308226>
+• <#463035241142026251>
+• <#463035268514185226>
+
+The channel names change every night at midnight UTC and are often fun meta references to jokes or conversations that happened on the server.
+
+See our [off-topic etiquette](https://pythondiscord.com/pages/resources/guides/off-topic-etiquette/) page for more guidance on how the channels should be used.
diff --git a/bot/resources/tags/off-topic.md b/bot/resources/tags/off-topic.md
deleted file mode 100644
index 6a864a1d5..000000000
--- a/bot/resources/tags/off-topic.md
+++ /dev/null
@@ -1,10 +0,0 @@
-**Off-topic channels**
-
-There are three off-topic channels:
-• <#291284109232308226>
-• <#463035241142026251>
-• <#463035268514185226>
-
-Their names change randomly every 24 hours, but you can always find them under the `OFF-TOPIC/GENERAL` category in the channel list.
-
-Please read our [off-topic etiquette](https://pythondiscord.com/pages/resources/guides/off-topic-etiquette/) before participating in conversations.
diff --git a/bot/resources/tags/ot.md b/bot/resources/tags/ot.md
new file mode 100644
index 000000000..636e59110
--- /dev/null
+++ b/bot/resources/tags/ot.md
@@ -0,0 +1,3 @@
+**Off-topic channel:** <#463035268514185226>
+
+Please read our [off-topic etiquette](https://pythondiscord.com/pages/resources/guides/off-topic-etiquette/) before participating in conversations.
diff --git a/bot/resources/tags/pathlib.md b/bot/resources/tags/pathlib.md
index dfeb7ecac..24ca895d8 100644
--- a/bot/resources/tags/pathlib.md
+++ b/bot/resources/tags/pathlib.md
@@ -18,4 +18,4 @@ Python 3 comes with a new module named `Pathlib`. Since Python 3.6, `pathlib.Pat
• [**Why you should use pathlib** - Trey Hunner](https://treyhunner.com/2018/12/why-you-should-be-using-pathlib/)
• [**Answering concerns about pathlib** - Trey Hunner](https://treyhunner.com/2019/01/no-really-pathlib-is-great/)
• [**Official Documentation**](https://docs.python.org/3/library/pathlib.html)
-• [**PEP 519** - Adding a file system path protocol](https://www.python.org/dev/peps/pep-0519/)
+• [**PEP 519** - Adding a file system path protocol](https://peps.python.org/pep-0519/)
diff --git a/bot/resources/tags/pep8.md b/bot/resources/tags/pep8.md
index 57b176122..a2510d697 100644
--- a/bot/resources/tags/pep8.md
+++ b/bot/resources/tags/pep8.md
@@ -1,5 +1,5 @@
**PEP 8** is the official style guide for Python. It includes comprehensive guidelines for code formatting, variable naming, and making your code easy to read. Professional Python developers are usually required to follow the guidelines, and will often use code-linters like flake8 to verify that the code they're writing complies with the style guide.
More information:
-• [PEP 8 document](https://www.python.org/dev/peps/pep-0008)
+• [PEP 8 document](https://peps.python.org/pep-0008/)
• [Our PEP 8 song!](https://www.youtube.com/watch?v=hgI0p1zf31k) :notes:
diff --git a/bot/resources/tags/positional-keyword.md b/bot/resources/tags/positional-keyword.md
index dd6ddfc4b..d6b4e0cd4 100644
--- a/bot/resources/tags/positional-keyword.md
+++ b/bot/resources/tags/positional-keyword.md
@@ -19,7 +19,7 @@ def sum(a, b=1):
sum(1, b=5)
sum(1, 5) # same as above
```
-[Somtimes this is forced](https://www.python.org/dev/peps/pep-0570/#history-of-positional-only-parameter-semantics-in-python), in the case of the `pow()` function.
+[Sometimes this is forced](https://peps.python.org/pep-0570/#history-of-positional-only-parameter-semantics-in-python), as in the case of the `pow()` function.
The reverse is also true:
```py
@@ -33,6 +33,6 @@ The reverse is also true:
```
**More info**
-• [Keyword only arguments](https://www.python.org/dev/peps/pep-3102/)
-• [Positional only arguments](https://www.python.org/dev/peps/pep-0570/)
+• [Keyword only arguments](https://peps.python.org/pep-3102/)
+• [Positional only arguments](https://peps.python.org/pep-0570/)
• `!tags param-arg` (Parameters vs. Arguments)
diff --git a/bot/resources/tags/quotes.md b/bot/resources/tags/quotes.md
index 8421748a1..99ce93f61 100644
--- a/bot/resources/tags/quotes.md
+++ b/bot/resources/tags/quotes.md
@@ -16,5 +16,5 @@ Example:
If you need both single and double quotes inside your string, use the version that would result in the least amount of escapes. In the case of a tie, use the quotation you use the most.
**References:**
-• [pep-8 on quotes](https://www.python.org/dev/peps/pep-0008/#string-quotes)
-• [convention for triple quoted strings](https://www.python.org/dev/peps/pep-0257/)
+• [pep-8 on quotes](https://peps.python.org/pep-0008/#string-quotes)
+• [convention for triple quoted strings](https://peps.python.org/pep-0257/)
diff --git a/bot/resources/tags/regex.md b/bot/resources/tags/regex.md
new file mode 100644
index 000000000..35fee45a9
--- /dev/null
+++ b/bot/resources/tags/regex.md
@@ -0,0 +1,15 @@
+**Regular expressions**
+Regular expressions (regex) are a tool for finding patterns in strings. The standard library's `re` module defines functions for using regex patterns.
+
+**Example**
+We can use regex to pull out all the numbers in a sentence:
+```py
+>>> import re
+>>> x = "On Oct 18 1963 a cat was launched aboard rocket #47"
+>>> regex_pattern = r"[0-9]{1,3}" # Matches 1-3 digits
+>>> re.findall(regex_pattern, x)
+['18', '196', '3', '47'] # Notice the year is cut off
+```
+**See Also**
+• [The re docs](https://docs.python.org/3/library/re.html) - for functions that use regex
+• [regex101.com](https://regex101.com) - an interactive site for testing your regular expression
diff --git a/bot/resources/tags/site.md b/bot/resources/tags/site.md
new file mode 100644
index 000000000..376f84742
--- /dev/null
+++ b/bot/resources/tags/site.md
@@ -0,0 +1,6 @@
+---
+embed:
+ title: "Python Discord Website"
+---
+
+[Our official website](https://www.pythondiscord.com/) is an open-source community project created with Python and Django. It contains information about the server itself, lets you sign up for upcoming events, has its own wiki, contains a list of valuable learning resources, and much more.
diff --git a/bot/resources/tags/sql-fstring.md b/bot/resources/tags/sql-fstring.md
index 94dd870fd..538a0aa87 100644
--- a/bot/resources/tags/sql-fstring.md
+++ b/bot/resources/tags/sql-fstring.md
@@ -13,4 +13,4 @@ Note: Different database libraries support different placeholder styles, e.g. `%
**See Also**
• [Extended Example with SQLite](https://docs.python.org/3/library/sqlite3.html) (search for "Instead, use the DB-API's parameter substitution")
-• [PEP-249](https://www.python.org/dev/peps/pep-0249) - A specification of how database libraries in Python should work
+• [PEP-249](https://peps.python.org/pep-0249/) - A specification of how database libraries in Python should work
diff --git a/bot/resources/tags/star-imports.md b/bot/resources/tags/star-imports.md
index 3b1b6a858..6e20e2b09 100644
--- a/bot/resources/tags/star-imports.md
+++ b/bot/resources/tags/star-imports.md
@@ -36,4 +36,4 @@ Conclusion: Namespaces are one honking great idea -- let's do more of those! *[3
**[1]** If the module defines the variable `__all__`, the names defined in `__all__` will get imported by the wildcard import, otherwise all the names in the module get imported (except for names with a leading underscore)
**[2]** [Namespaces and scopes](https://www.programiz.com/python-programming/namespace)
-**[3]** [Zen of Python](https://www.python.org/dev/peps/pep-0020/)
+**[3]** [Zen of Python](https://peps.python.org/pep-0020/)
diff --git a/bot/resources/tags/strip-gotcha.md b/bot/resources/tags/strip-gotcha.md
new file mode 100644
index 000000000..9ad495cd2
--- /dev/null
+++ b/bot/resources/tags/strip-gotcha.md
@@ -0,0 +1,17 @@
+When working with `strip`, `lstrip`, or `rstrip`, you might think that this would be the case:
+```py
+>>> "Monty Python".rstrip(" Python")
+"Monty"
+```
+While this seems intuitive, it would actually result in:
+```py
+"M"
+```
+as Python interprets the argument to these functions as a set of characters rather than a substring.
+
+If you want to remove a prefix/suffix from a string, `str.removeprefix` and `str.removesuffix` are recommended and were added in 3.9.
+```py
+>>> "Monty Python".removesuffix(" Python")
+"Monty"
+```
+See the documentation of [str.removeprefix](https://docs.python.org/3.10/library/stdtypes.html#str.removeprefix) and [str.removesuffix](https://docs.python.org/3.10/library/stdtypes.html#str.removesuffix) for more information.
diff --git a/bot/resources/tags/tools.md b/bot/resources/tags/tools.md
new file mode 100644
index 000000000..3cae75552
--- /dev/null
+++ b/bot/resources/tags/tools.md
@@ -0,0 +1,6 @@
+---
+embed:
+ title: "Tools"
+---
+
+The [Tools page](https://www.pythondiscord.com/resources/tools/) on our website contains a couple of the most popular tools for programming in Python.
diff --git a/bot/resources/tags/traceback.md b/bot/resources/tags/traceback.md
index 321737aac..e21fa6c6e 100644
--- a/bot/resources/tags/traceback.md
+++ b/bot/resources/tags/traceback.md
@@ -1,18 +1,15 @@
Please provide the full traceback for your exception in order to help us identify your issue.
+While the last line of the error message tells us what kind of error you got,
+the full traceback will tell us which line it came from, along with other critical information needed to solve your problem.
+Please avoid screenshots so we can copy and paste parts of the message.
A full traceback could look like:
```py
Traceback (most recent call last):
- File "tiny", line 3, in
- do_something()
- File "tiny", line 2, in do_something
- a = 6 / b
-ZeroDivisionError: division by zero
+ File "my_file.py", line 5, in <module>
+ add_three("6")
+ File "my_file.py", line 2, in add_three
+ a = num + 3
+TypeError: can only concatenate str (not "int") to str
```
-The best way to read your traceback is bottom to top.
-
-• Identify the exception raised (in this case `ZeroDivisionError`)
-• Make note of the line number (in this case `2`), and navigate there in your program.
-• Try to understand why the error occurred (in this case because `b` is `0`).
-
-To read more about exceptions and errors, please refer to the [PyDis Wiki](https://pythondiscord.com/pages/guides/pydis-guides/asking-good-questions/#examining-tracebacks) or the [official Python tutorial](https://docs.python.org/3.7/tutorial/errors.html).
+If the traceback is long, use [our pastebin](https://paste.pythondiscord.com/).
diff --git a/bot/resources/tags/with.md b/bot/resources/tags/with.md
index 62d5612f2..83f160b4f 100644
--- a/bot/resources/tags/with.md
+++ b/bot/resources/tags/with.md
@@ -5,4 +5,4 @@ with open("test.txt", "r") as file:
```
The above code automatically closes `file` when the `with` block exits, so you never have to manually do a `file.close()`. Most connection types, including file readers and database connections, support this.
-For more information, read [the official docs](https://docs.python.org/3/reference/compound_stmts.html#with), watch [Corey Schafer\'s context manager video](https://www.youtube.com/watch?v=-aKFBoZpiqA), or see [PEP 343](https://www.python.org/dev/peps/pep-0343/).
+For more information, read [the official docs](https://docs.python.org/3/reference/compound_stmts.html#with), watch [Corey Schafer\'s context manager video](https://www.youtube.com/watch?v=-aKFBoZpiqA), or see [PEP 343](https://peps.python.org/pep-0343/).
diff --git a/bot/utils/members.py b/bot/utils/members.py
index 77ddf1696..693286045 100644
--- a/bot/utils/members.py
+++ b/bot/utils/members.py
@@ -23,3 +23,26 @@ async def get_or_fetch_member(guild: discord.Guild, member_id: int) -> t.Optiona
return None
log.trace("%s fetched from API.", member)
return member
+
+
+async def handle_role_change(
+ member: discord.Member,
+ coro: t.Callable[..., t.Coroutine],
+ role: discord.Role
+) -> None:
+ """
+ Change `member`'s cooldown role by awaiting `coro` and handle errors.
+
+ `coro` is intended to be `discord.Member.add_roles` or `discord.Member.remove_roles`.
+ """
+ try:
+ await coro(role)
+ except discord.NotFound:
+ log.debug(f"Failed to change role for {member} ({member.id}): member not found")
+ except discord.Forbidden:
+ log.debug(
+ f"Forbidden to change role for {member} ({member.id}); "
+ f"possibly due to role hierarchy"
+ )
+ except discord.HTTPException as e:
+ log.error(f"Failed to change role for {member} ({member.id}): {e.status} {e.code}")
diff --git a/bot/utils/regex.py b/bot/utils/regex.py
deleted file mode 100644
index d77f5950b..000000000
--- a/bot/utils/regex.py
+++ /dev/null
@@ -1,14 +0,0 @@
-import re
-
-INVITE_RE = re.compile(
- r"(discord([\.,]|dot)gg|" # Could be discord.gg/
- r"discord([\.,]|dot)com(\/|slash)invite|" # or discord.com/invite/
- r"discordapp([\.,]|dot)com(\/|slash)invite|" # or discordapp.com/invite/
- r"discord([\.,]|dot)me|" # or discord.me
- r"discord([\.,]|dot)li|" # or discord.li
- r"discord([\.,]|dot)io|" # or discord.io.
- r"((?<!\w)([\.,]|dot))gg" # or .gg/
- r")([\/]|slash)" # / or 'slash'
- r"(?P<invite>[a-zA-Z0-9\-]+)", # the invite code itself
- flags=re.IGNORECASE
-)
diff --git a/bot/utils/scheduling.py b/bot/utils/scheduling.py
index 7b4c8e2de..23acacf74 100644
--- a/bot/utils/scheduling.py
+++ b/bot/utils/scheduling.py
@@ -5,6 +5,8 @@ import typing as t
from datetime import datetime
from functools import partial
+from arrow import Arrow
+
from bot.log import get_logger
@@ -58,7 +60,7 @@ class Scheduler:
self._scheduled_tasks[task_id] = task
self._log.debug(f"Scheduled task #{task_id} {id(task)}.")
- def schedule_at(self, time: datetime, task_id: t.Hashable, coroutine: t.Coroutine) -> None:
+ def schedule_at(self, time: t.Union[datetime, Arrow], task_id: t.Hashable, coroutine: t.Coroutine) -> None:
"""
Schedule `coroutine` to be executed at the given `time`.
diff --git a/bot/utils/time.py b/bot/utils/time.py
index eaa9b72e9..a0379c3ef 100644
--- a/bot/utils/time.py
+++ b/bot/utils/time.py
@@ -1,15 +1,12 @@
import datetime
import re
from enum import Enum
-from typing import Optional, Union
+from time import struct_time
+from typing import Literal, Optional, Union, overload
import arrow
-import dateutil.parser
from dateutil.relativedelta import relativedelta
-RFC1123_FORMAT = "%a, %d %b %Y %H:%M:%S GMT"
-DISCORD_TIMESTAMP_REGEX = re.compile(r"<t:(\d+):f>")
-
_DURATION_REGEX = re.compile(
r"((?P<years>\d+?) ?(years|year|Y|y) ?)?"
r"((?P<months>\d+?) ?(months|month|m) ?)?"
@@ -20,8 +17,19 @@ _DURATION_REGEX = re.compile(
r"((?P<seconds>\d+?) ?(seconds|second|S|s))?"
)
-
-ValidTimestamp = Union[int, datetime.datetime, datetime.date, datetime.timedelta, relativedelta]
+# All supported types for the single-argument overload of arrow.get(). tzinfo is excluded because
+# it's too implicit of a way for the caller to specify that they want the current time.
+Timestamp = Union[
+ arrow.Arrow,
+ datetime.datetime,
+ datetime.date,
+ struct_time,
+ int, # POSIX timestamp
+ float, # POSIX timestamp
+ str, # ISO 8601-formatted string
+ tuple[int, int, int], # ISO calendar tuple
+]
+_Precision = Literal["years", "months", "days", "hours", "minutes", "seconds"]
class TimestampFormats(Enum):
@@ -42,7 +50,7 @@ class TimestampFormats(Enum):
def _stringify_time_unit(value: int, unit: str) -> str:
"""
- Returns a string to represent a value and time unit, ensuring that it uses the right plural form of the unit.
+ Return a string to represent a value and time unit, ensuring the unit's correct plural form is used.
>>> _stringify_time_unit(1, "seconds")
"1 second"
@@ -61,33 +69,140 @@ def _stringify_time_unit(value: int, unit: str) -> str:
return f"{value} {unit}"
-def discord_timestamp(timestamp: ValidTimestamp, format: TimestampFormats = TimestampFormats.DATE_TIME) -> str:
- """Create and format a Discord flavored markdown timestamp."""
- if format not in TimestampFormats:
- raise ValueError(f"Format can only be one of {', '.join(TimestampFormats.args)}, not {format}.")
+def discord_timestamp(timestamp: Timestamp, format: TimestampFormats = TimestampFormats.DATE_TIME) -> str:
+ """
+ Format a timestamp as a Discord-flavored Markdown timestamp.
+
+ `timestamp` can be any type supported by the single-arg `arrow.get()`, except for a `tzinfo`.
+ """
+ timestamp = int(arrow.get(timestamp).timestamp())
+ return f"<t:{timestamp}:{format.value}>"
+
+
+# region humanize_delta overloads
+@overload
+def humanize_delta(
+ arg1: Union[relativedelta, Timestamp],
+ /,
+ *,
+ precision: _Precision = "seconds",
+ max_units: int = 6,
+ absolute: bool = True,
+) -> str:
+ ...
+
+
+@overload
+def humanize_delta(
+ end: Timestamp,
+ start: Timestamp,
+ /,
+ *,
+ precision: _Precision = "seconds",
+ max_units: int = 6,
+ absolute: bool = True,
+) -> str:
+ ...
+
+
+@overload
+def humanize_delta(
+ *,
+ years: int = 0,
+ months: int = 0,
+ weeks: float = 0,
+ days: float = 0,
+ hours: float = 0,
+ minutes: float = 0,
+ seconds: float = 0,
+ precision: _Precision = "seconds",
+ max_units: int = 6,
+ absolute: bool = True,
+) -> str:
+ ...
+# endregion
+
+
+def humanize_delta(
+ *args,
+ precision: _Precision = "seconds",
+ max_units: int = 6,
+ absolute: bool = True,
+ **kwargs,
+) -> str:
+ """
+ Return a human-readable version of a time duration.
+
+ `precision` is the smallest unit of time to include (e.g. "seconds", "minutes").
- # Convert each possible timestamp class to an integer.
- if isinstance(timestamp, datetime.datetime):
- timestamp = (timestamp - arrow.get(0)).total_seconds()
- elif isinstance(timestamp, datetime.date):
- timestamp = (timestamp - arrow.get(0)).total_seconds()
- elif isinstance(timestamp, datetime.timedelta):
- timestamp = timestamp.total_seconds()
- elif isinstance(timestamp, relativedelta):
- timestamp = timestamp.seconds
+ `max_units` is the maximum number of units of time to include.
+ Count units from largest to smallest (e.g. count months before days).
- return f"<t:{int(timestamp)}:{format.value}>"
+ Use the absolute value of the duration if `absolute` is True.
+ Usage:
-def humanize_delta(delta: relativedelta, precision: str = "seconds", max_units: int = 6) -> str:
- """
- Returns a human-readable version of the relativedelta.
+ Keyword arguments specifying values for time units, to construct a `relativedelta` and humanize
+ the duration represented by it:
+
+ >>> humanize_delta(days=2, hours=16, seconds=23)
+ '2 days, 16 hours and 23 seconds'
+
+ **One** `relativedelta` object, to humanize the duration represented by it:
+
+ >>> humanize_delta(relativedelta(years=12, months=6))
+ '12 years and 6 months'
+
+ Note that `leapdays` and absolute info (singular names) will be ignored during humanization.
+
+ **One** timestamp of a type supported by the single-arg `arrow.get()`, except for `tzinfo`,
+ to humanize the duration between it and the current time:
+
+ >>> humanize_delta('2021-08-06T12:43:01Z', absolute=True) # now = 2021-08-06T12:33:33Z
+ '9 minutes and 28 seconds'
+
+ >>> humanize_delta('2021-08-06T12:43:01Z', absolute=False) # now = 2021-08-06T12:33:33Z
+ '-9 minutes and -28 seconds'
+
+ **Two** timestamps, each of a type supported by the single-arg `arrow.get()`, except for
+ `tzinfo`, to humanize the duration between them:
+
+ >>> humanize_delta(datetime.datetime(2020, 1, 1), '2021-01-01T12:00:00Z', absolute=False)
+ '1 year and 12 hours'
+
+ >>> humanize_delta('2021-01-01T12:00:00Z', datetime.datetime(2020, 1, 1), absolute=False)
+ '-1 years and -12 hours'
+
+ Note that the order of the arguments can result in a different output even if `absolute` is True:
+
+ >>> x = datetime.datetime(3000, 11, 1)
+ >>> y = datetime.datetime(3000, 9, 2)
+ >>> humanize_delta(y, x, absolute=True), humanize_delta(x, y, absolute=True)
+ ('1 month and 30 days', '1 month and 29 days')
- precision specifies the smallest unit of time to include (e.g. "seconds", "minutes").
- max_units specifies the maximum number of units of time to include (e.g. 1 may include days but not hours).
+ This is due to the nature of `relativedelta`; it does not represent a fixed period of time.
+ Instead, it's relative to the `datetime` to which it's added to get the other `datetime`.
+ In the example, the difference arises because all months don't have the same number of days.
"""
+ if args and kwargs:
+ raise ValueError("Unsupported combination of positional and keyword arguments.")
+
+ if len(args) == 0:
+ delta = relativedelta(**kwargs)
+ elif len(args) == 1 and isinstance(args[0], relativedelta):
+ delta = args[0]
+ elif len(args) <= 2:
+ end = arrow.get(args[0])
+ start = arrow.get(args[1]) if len(args) == 2 else arrow.utcnow()
+
+ delta = relativedelta(end.datetime, start.datetime)
+ if absolute:
+ delta = abs(delta)
+ else:
+ raise ValueError(f"Received {len(args)} positional arguments, but expected 1 or 2.")
+
if max_units <= 0:
- raise ValueError("max_units must be positive")
+ raise ValueError("max_units must be positive.")
units = (
("years", delta.years),
@@ -98,7 +213,7 @@ def humanize_delta(delta: relativedelta, precision: str = "seconds", max_units:
("seconds", delta.seconds),
)
- # Add the time units that are >0, but stop at accuracy or max_units.
+ # Add the time units that are >0, but stop at precision or max_units.
time_strings = []
unit_count = 0
for unit, value in units:
@@ -109,7 +224,7 @@ def humanize_delta(delta: relativedelta, precision: str = "seconds", max_units:
if unit == precision or unit_count >= max_units:
break
- # Add the 'and' between the last two units, if necessary
+ # Add the 'and' between the last two units, if necessary.
if len(time_strings) > 1:
time_strings[-1] = f"{time_strings[-2]} and {time_strings[-1]}"
del time_strings[-2]
@@ -123,19 +238,12 @@ def humanize_delta(delta: relativedelta, precision: str = "seconds", max_units:
return humanized
-def get_time_delta(time_string: str) -> str:
- """Returns the time in human-readable time delta format."""
- date_time = dateutil.parser.isoparse(time_string)
- time_delta = time_since(date_time)
-
- return time_delta
-
-
def parse_duration_string(duration: str) -> Optional[relativedelta]:
"""
- Converts a `duration` string to a relativedelta object.
+ Convert a `duration` string to a relativedelta object.
+
+ The following symbols are supported for each unit of time:
- The function supports the following symbols for each unit of time:
- years: `Y`, `y`, `year`, `years`
- months: `m`, `month`, `months`
- weeks: `w`, `W`, `week`, `weeks`
@@ -143,8 +251,9 @@ def parse_duration_string(duration: str) -> Optional[relativedelta]:
- hours: `H`, `h`, `hour`, `hours`
- minutes: `M`, `minute`, `minutes`
- seconds: `S`, `s`, `second`, `seconds`
+
The units need to be provided in descending order of magnitude.
- If the string does represent a durationdelta object, it will return None.
+ Return None if the `duration` string cannot be parsed according to the symbols above.
"""
match = _DURATION_REGEX.fullmatch(duration)
if not match:
@@ -157,76 +266,63 @@ def parse_duration_string(duration: str) -> Optional[relativedelta]:
def relativedelta_to_timedelta(delta: relativedelta) -> datetime.timedelta:
- """Converts a relativedelta object to a timedelta object."""
+ """Convert a relativedelta object to a timedelta object."""
utcnow = arrow.utcnow()
return utcnow + delta - utcnow
-def time_since(past_datetime: datetime.datetime) -> str:
- """Takes a datetime and returns a discord timestamp that describes how long ago that datetime was."""
- return discord_timestamp(past_datetime, TimestampFormats.RELATIVE)
-
-
-def parse_rfc1123(stamp: str) -> datetime.datetime:
- """Parse RFC1123 time string into datetime."""
- return datetime.datetime.strptime(stamp, RFC1123_FORMAT).replace(tzinfo=datetime.timezone.utc)
+def format_relative(timestamp: Timestamp) -> str:
+ """
+ Format `timestamp` as a relative Discord timestamp.
+ A relative timestamp describes how much time has elapsed since `timestamp` or how much time
+ remains until `timestamp` is reached.
-def format_infraction(timestamp: str) -> str:
- """Format an infraction timestamp to a discord timestamp."""
- return discord_timestamp(dateutil.parser.isoparse(timestamp))
+ `timestamp` can be any type supported by the single-arg `arrow.get()`, except for a `tzinfo`.
+ """
+ return discord_timestamp(timestamp, TimestampFormats.RELATIVE)
-def format_infraction_with_duration(
- date_to: Optional[str],
- date_from: Optional[datetime.datetime] = None,
+def format_with_duration(
+ timestamp: Optional[Timestamp],
+ other_timestamp: Optional[Timestamp] = None,
max_units: int = 2,
- absolute: bool = True
) -> Optional[str]:
"""
- Return `date_to` formatted as a discord timestamp with the timestamp duration since `date_from`.
+ Return `timestamp` formatted as a discord timestamp, with the duration since `other_timestamp` appended.
+
+ `timestamp` and `other_timestamp` can be any type supported by the single-arg `arrow.get()`,
+ except for a `tzinfo`. Use the current time if `other_timestamp` is None or unspecified.
- `max_units` specifies the maximum number of units of time to include in the duration. For
- example, a value of 1 may include days but not hours.
+ `max_units` is forwarded to `time.humanize_delta`. See its documentation for more information.
- If `absolute` is True, the absolute value of the duration delta is used. This prevents negative
- values in the case that `date_to` is in the past relative to `date_from`.
+ Return None if `timestamp` is None.
"""
- if not date_to:
+ if timestamp is None:
return None
- date_to_formatted = format_infraction(date_to)
-
- date_from = date_from or datetime.datetime.now(datetime.timezone.utc)
- date_to = dateutil.parser.isoparse(date_to).replace(microsecond=0)
+ if other_timestamp is None:
+ other_timestamp = arrow.utcnow()
- delta = relativedelta(date_to, date_from)
- if absolute:
- delta = abs(delta)
+ formatted_timestamp = discord_timestamp(timestamp)
+ duration = humanize_delta(timestamp, other_timestamp, max_units=max_units)
- duration = humanize_delta(delta, max_units=max_units)
- duration_formatted = f" ({duration})" if duration else ""
+ return f"{formatted_timestamp} ({duration})"
- return f"{date_to_formatted}{duration_formatted}"
-
-def until_expiration(
- expiry: Optional[str]
-) -> Optional[str]:
+def until_expiration(expiry: Optional[Timestamp]) -> str:
"""
- Get the remaining time until infraction's expiration, in a discord timestamp.
+ Get the remaining time until an infraction's expiration as a Discord timestamp.
- Returns a human-readable version of the remaining duration between arrow.utcnow() and an expiry.
- Similar to time_since, except that this function doesn't error on a null input
- and return null if the expiry is in the paste
- """
- if not expiry:
- return None
+ `expiry` can be any type supported by the single-arg `arrow.get()`, except for a `tzinfo`.
- now = arrow.utcnow()
- since = dateutil.parser.isoparse(expiry).replace(microsecond=0)
+ Return "Permanent" if `expiry` is None. Return "Expired" if `expiry` is in the past.
+ """
+ if expiry is None:
+ return "Permanent"
- if since < now:
- return None
+ expiry = arrow.get(expiry)
+ if expiry < arrow.utcnow():
+ return "Expired"
- return discord_timestamp(since, TimestampFormats.RELATIVE)
+ return format_relative(expiry)
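
A short usage sketch of the refactored `bot.utils.time` helpers above (assumes the bot's own environment where `bot.utils.time` is importable; the rendered outputs in the comments are illustrative):
```py
import arrow
from dateutil.relativedelta import relativedelta

from bot.utils import time

now = arrow.utcnow()
print(time.discord_timestamp(now))                 # e.g. "<t:1649000000:f>"
print(time.format_relative(now.shift(hours=-2)))   # "<t:...:R>", shown by Discord as "2 hours ago"
print(time.humanize_delta(relativedelta(days=2, hours=16, seconds=23)))
# 2 days, 16 hours and 23 seconds
print(time.until_expiration(None))                 # Permanent
```
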
diff --git a/config-default.yml b/config-default.yml
index 4a85ccc56..dae923158 100644
--- a/config-default.yml
+++ b/config-default.yml
@@ -1,4 +1,5 @@
-debug: !ENV ["BOT_DEBUG", "true"]
+debug: !ENV ["BOT_DEBUG", "true"]
+file_logs: !ENV ["FILE_LOGS", "false"]
bot:
@@ -173,7 +174,7 @@ guild:
how_to_get_help: 704250143020417084
# Topical
- discord_py: 343944376055103488
+ discord_bots: 343944376055103488
# Logs
attachment_log: &ATTACH_LOG 649243850006855680
@@ -263,7 +264,12 @@ guild:
- *BLACK_FORMATTER
roles:
+ # Self-assignable roles, see the Subscribe cog
+ advent_of_code: 518565788744024082
announcements: 463658397560995840
+ lovefest: 542431903886606399
+ pyweek_announcements: 897568414044938310
+
contributors: 295488872404484098
help_cooldown: 699189276025421825
muted: &MUTED_ROLE 277914926603829249
@@ -307,6 +313,7 @@ guild:
big_brother: 569133704568373283
dev_log: 680501655111729222
duck_pond: 637821475327311927
+ incidents: 816650601844572212
incidents_archive: 720671599790915702
python_news: &PYNEWS_WEBHOOK 704381182279942324
@@ -370,7 +377,7 @@ urls:
site_logs_view: !JOIN [*STAFF, "/bot/logs"]
# Snekbox
- snekbox_eval_api: "http://snekbox.default.svc.cluster.local/eval"
+ snekbox_eval_api: !ENV ["SNEKBOX_EVAL_API", "http://snekbox.default.svc.cluster.local/eval"]
# Discord API URLs
discord_api: &DISCORD_API "https://discordapp.com/api/v7/"
@@ -506,19 +513,16 @@ help_channels:
# Prefix for help channel names
name_prefix: 'help-'
- # Notify if more available channels are needed but there are no more dormant ones
- notify: true
-
- # Channel in which to send notifications
- notify_channel: *HELPERS
+ notify_channel: *HELPERS # Channel in which to send notification messages
+ notify_minutes: 15 # Minimum interval between none_remaining or running_low notifications
- # Minimum interval between helper notifications
- notify_minutes: 15
-
- # Mention these roles in notifications
- notify_roles:
+ notify_none_remaining: true # Pinging notification for the Helper role when no dormant channels remain
+ notify_none_remaining_roles: # Mention these roles in the none_remaining notification
- *HELPERS_ROLE
+ notify_running_low: true # Non-pinging notification triggered when the channel count is equal to or less than the threshold
+ notify_running_low_threshold: 4 # The number of channels at or below which a running_low notification will be sent
+
redirect_output:
delete_delay: 15
diff --git a/docker-compose.yml b/docker-compose.yml
index b3ca6baa4..ce78f65aa 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -38,6 +38,7 @@ services:
metricity:
<< : *logging
+ << : *restart_policy
restart: on-failure # USE_METRICITY=false will stop the container, so this ensures it only restarts on error
depends_on:
postgres:
@@ -90,7 +91,6 @@ services:
context: .
dockerfile: Dockerfile
volumes:
- - ./logs:/bot/logs
- .:/bot:ro
tty: true
depends_on:
diff --git a/poetry.lock b/poetry.lock
index d91941d45..6d3bd44bb 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,10 +1,10 @@
[[package]]
name = "aio-pika"
-version = "6.8.0"
+version = "6.8.1"
description = "Wrapper for the aiormq for asyncio and humans."
category = "main"
optional = false
-python-versions = ">3.5.*, <4"
+python-versions = ">=3.5, <4"
[package.dependencies]
aiormq = ">=3.2.3,<4"
@@ -114,29 +114,17 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "attrs"
-version = "21.2.0"
+version = "21.4.0"
description = "Classes Without Boilerplate"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[package.extras]
-dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"]
+dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
-tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"]
-tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"]
-
-[[package]]
-name = "backports.entry-points-selectable"
-version = "1.1.0"
-description = "Compatibility shim providing selectable entry points for older implementations"
-category = "dev"
-optional = false
-python-versions = ">=2.7"
-
-[package.extras]
-docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
-testing = ["pytest (>=4.6)", "pytest-flake8", "pytest-cov", "pytest-black (>=0.3.7)", "pytest-mypy", "pytest-checkdocs (>=2.4)", "pytest-enabler (>=1.0.1)"]
+tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
+tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"]
[[package]]
name = "beautifulsoup4"
@@ -154,6 +142,21 @@ html5lib = ["html5lib"]
lxml = ["lxml"]
[[package]]
+name = "bot-core"
+version = "1.2.0"
+description = "Bot-Core provides the core functionality and utilities for the bots of the Python Discord community."
+category = "main"
+optional = false
+python-versions = "3.9.*"
+
+[package.dependencies]
+"discord.py" = {url = "https://github.com/Rapptz/discord.py/archive/45d498c1b76deaf3b394d17ccf56112fa691d160.zip"}
+
+[package.source]
+type = "url"
+url = "https://github.com/python-discord/bot-core/archive/511bcba1b0196cd498c707a525ea56921bd971db.zip"
+
+[[package]]
name = "certifi"
version = "2021.10.8"
description = "Python package for providing Mozilla's CA Bundle."
@@ -190,9 +193,9 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "charset-normalizer"
-version = "2.0.7"
+version = "2.0.10"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
-category = "dev"
+category = "main"
optional = false
python-versions = ">=3.5.0"
@@ -233,34 +236,32 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
toml = ["toml"]
[[package]]
-name = "coveralls"
-version = "2.2.0"
-description = "Show coverage stats online via coveralls.io"
-category = "dev"
+name = "deepdiff"
+version = "4.3.2"
+description = "Deep Difference and Search of any Python object/data."
+category = "main"
optional = false
-python-versions = ">= 3.5"
+python-versions = ">=3.5"
[package.dependencies]
-coverage = ">=4.1,<6.0"
-docopt = ">=0.6.1"
-requests = ">=1.0.0"
+ordered-set = ">=3.1.1"
[package.extras]
-yaml = ["PyYAML (>=3.10)"]
+murmur = ["mmh3"]
[[package]]
-name = "deepdiff"
-version = "4.3.2"
-description = "Deep Difference and Search of any Python object/data."
+name = "deprecated"
+version = "1.2.13"
+description = "Python @deprecated decorator to deprecate old python classes, functions or methods."
category = "main"
optional = false
-python-versions = ">=3.5"
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[package.dependencies]
-ordered-set = ">=3.1.1"
+wrapt = ">=1.10,<2"
[package.extras]
-murmur = ["mmh3"]
+dev = ["tox", "bump2version (<1)", "sphinx (<2)", "importlib-metadata (<3)", "importlib-resources (<4)", "configparser (<5)", "sphinxcontrib-websupport (<2)", "zipp (<2)", "PyTest (<5)", "PyTest-Cov (<2.6)", "pytest", "pytest-cov"]
[[package]]
name = "discord.py"
@@ -281,23 +282,16 @@ voice = ["PyNaCl (>=1.3.0,<1.5)"]
[package.source]
type = "url"
url = "https://github.com/Rapptz/discord.py/archive/45d498c1b76deaf3b394d17ccf56112fa691d160.zip"
+
[[package]]
name = "distlib"
-version = "0.3.3"
+version = "0.3.4"
description = "Distribution utilities"
category = "dev"
optional = false
python-versions = "*"
[[package]]
-name = "docopt"
-version = "0.6.2"
-description = "Pythonic argument parser, that will make you smile"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[[package]]
name = "emoji"
version = "0.6.0"
description = "Emoji for Python"
@@ -321,7 +315,7 @@ testing = ["pre-commit"]
[[package]]
name = "fakeredis"
-version = "1.6.1"
+version = "1.7.0"
description = "Fake implementation of redis API for testing purposes."
category = "main"
optional = false
@@ -329,7 +323,7 @@ python-versions = ">=3.5"
[package.dependencies]
packaging = "*"
-redis = "<3.6.0"
+redis = "<4.1.0"
six = ">=1.12"
sortedcontainers = "*"
@@ -350,11 +344,11 @@ sgmllib3k = "*"
[[package]]
name = "filelock"
-version = "3.3.1"
+version = "3.4.2"
description = "A platform independent file lock."
-category = "dev"
+category = "main"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
[package.extras]
docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"]
@@ -492,14 +486,14 @@ pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_ve
[[package]]
name = "identify"
-version = "2.3.0"
+version = "2.4.2"
description = "File identification library for Python"
category = "dev"
optional = false
python-versions = ">=3.6.1"
[package.extras]
-license = ["editdistance-s"]
+license = ["ukkonen"]
[[package]]
name = "idna"
@@ -519,7 +513,7 @@ python-versions = "*"
[[package]]
name = "isort"
-version = "5.9.3"
+version = "5.10.1"
description = "A Python utility / library to sort Python imports."
category = "dev"
optional = false
@@ -533,7 +527,7 @@ plugins = ["setuptools"]
[[package]]
name = "lxml"
-version = "4.6.3"
+version = "4.7.1"
description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API."
category = "main"
optional = false
@@ -567,7 +561,7 @@ python-versions = "*"
[[package]]
name = "more-itertools"
-version = "8.10.0"
+version = "8.12.0"
description = "More routines for operating on iterables, beyond itertools"
category = "main"
optional = false
@@ -607,14 +601,14 @@ python-versions = ">=3.5"
[[package]]
name = "packaging"
-version = "21.0"
+version = "21.3"
description = "Core utilities for Python packages"
category = "main"
optional = false
python-versions = ">=3.6"
[package.dependencies]
-pyparsing = ">=2.0.2"
+pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
[[package]]
name = "pamqp"
@@ -655,11 +649,11 @@ test = ["docutils", "pytest-cov", "pytest-pycodestyle", "pytest-runner"]
[[package]]
name = "platformdirs"
-version = "2.4.0"
+version = "2.4.1"
description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
category = "dev"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
[package.extras]
docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"]
@@ -679,7 +673,7 @@ testing = ["pytest", "pytest-benchmark"]
[[package]]
name = "pre-commit"
-version = "2.15.0"
+version = "2.16.0"
description = "A framework for managing and maintaining multi-language pre-commit hooks."
category = "dev"
optional = false
@@ -695,7 +689,7 @@ virtualenv = ">=20.0.8"
[[package]]
name = "psutil"
-version = "5.8.0"
+version = "5.9.0"
description = "Cross-platform lib for process and system monitoring in Python."
category = "dev"
optional = false
@@ -714,11 +708,11 @@ python-versions = "*"
[[package]]
name = "py"
-version = "1.10.0"
+version = "1.11.0"
description = "library with cross-python path, ini-parsing, io, code, log facilities"
category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "pycares"
@@ -744,7 +738,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "pycparser"
-version = "2.20"
+version = "2.21"
description = "C parser in Python"
category = "main"
optional = false
@@ -774,11 +768,14 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "pyparsing"
-version = "2.4.7"
+version = "3.0.6"
description = "Python parsing module"
category = "main"
optional = false
-python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+python-versions = ">=3.6"
+
+[package.extras]
+diagrams = ["jinja2", "railroad-diagrams"]
[[package]]
name = "pyreadline3"
@@ -827,11 +824,11 @@ testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtuale
[[package]]
name = "pytest-forked"
-version = "1.3.0"
+version = "1.4.0"
description = "run tests in isolated forked subprocesses"
category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = ">=3.6"
[package.dependencies]
py = "*"
@@ -902,7 +899,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
[[package]]
name = "rapidfuzz"
-version = "1.8.0"
+version = "1.9.1"
description = "rapid fuzzy string matching"
category = "main"
optional = false
@@ -913,14 +910,17 @@ full = ["numpy"]
[[package]]
name = "redis"
-version = "3.5.3"
-description = "Python client for Redis key-value store"
+version = "4.0.2"
+description = "Python client for Redis database and key-value store"
category = "main"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = ">=3.6"
+
+[package.dependencies]
+deprecated = "*"
[package.extras]
-hiredis = ["hiredis (>=0.1.3)"]
+hiredis = ["hiredis (>=1.0.0)"]
[[package]]
name = "regex"
@@ -932,9 +932,9 @@ python-versions = "*"
[[package]]
name = "requests"
-version = "2.26.0"
+version = "2.27.1"
description = "Python HTTP for Humans."
-category = "dev"
+category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
@@ -949,8 +949,20 @@ socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"]
use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"]
[[package]]
+name = "requests-file"
+version = "1.5.1"
+description = "File transport adapter for Requests"
+category = "main"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+requests = ">=1.0.0"
+six = "*"
+
+[[package]]
name = "sentry-sdk"
-version = "1.4.3"
+version = "1.5.1"
description = "Python client for Sentry (https://sentry.io)"
category = "main"
optional = false
@@ -995,7 +1007,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
name = "snowballstemmer"
-version = "2.1.0"
+version = "2.2.0"
description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms."
category = "dev"
optional = false
@@ -1011,7 +1023,7 @@ python-versions = "*"
[[package]]
name = "soupsieve"
-version = "2.2.1"
+version = "2.3.1"
description = "A modern CSS selector implementation for Beautiful Soup."
category = "main"
optional = false
@@ -1052,6 +1064,20 @@ docs = ["sphinx", "zope.component", "sybil", "twisted", "mock", "django (<2)", "
test = ["pytest (>=3.6)", "pytest-cov", "pytest-django", "zope.component", "sybil", "twisted", "mock", "django (<2)", "django"]
[[package]]
+name = "tldextract"
+version = "3.1.2"
+description = "Accurately separate the TLD from the registered domain and subdomains of a URL, using the Public Suffix List. By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well."
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+filelock = ">=3.0.8"
+idna = "*"
+requests = ">=2.1.0"
+requests-file = ">=1.4"
+
+[[package]]
name = "toml"
version = "0.10.2"
description = "Python Library for Tom's Obvious, Minimal Language"
@@ -1061,15 +1087,15 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
name = "typing-extensions"
-version = "3.10.0.2"
-description = "Backported and Experimental Type Hints for Python 3.5+"
+version = "4.0.1"
+description = "Backported and Experimental Type Hints for Python 3.6+"
category = "main"
optional = false
-python-versions = "*"
+python-versions = ">=3.6"
[[package]]
name = "urllib3"
-version = "1.26.7"
+version = "1.26.8"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "main"
optional = false
@@ -1082,26 +1108,33 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[[package]]
name = "virtualenv"
-version = "20.8.1"
+version = "20.13.0"
description = "Virtual Python Environment builder"
category = "dev"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
[package.dependencies]
-"backports.entry-points-selectable" = ">=1.0.4"
distlib = ">=0.3.1,<1"
-filelock = ">=3.0.0,<4"
+filelock = ">=3.2,<4"
platformdirs = ">=2,<3"
six = ">=1.9.0,<2"
[package.extras]
-docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=19.9.0rc1)"]
+docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"]
testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"]
[[package]]
+name = "wrapt"
+version = "1.13.3"
+description = "Module for decorators, wrappers and monkey patching."
+category = "main"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+
+[[package]]
name = "yarl"
-version = "1.7.0"
+version = "1.7.2"
description = "Yet another URL library"
category = "main"
optional = false
@@ -1114,12 +1147,12 @@ multidict = ">=4.0"
[metadata]
lock-version = "1.1"
python-versions = "3.9.*"
-content-hash = "da321f13297501e62dd1eb362eccb586ea1a9c21ddb395e11a91b93a2f92e9d4"
+content-hash = "0248fc7488c79af0cdb3a6db9528f4c3129db50b3a8d1dd3ba57dbc31b381c31"
[metadata.files]
aio-pika = [
- {file = "aio-pika-6.8.0.tar.gz", hash = "sha256:1d4305a5f78af3857310b4fe48348cdcf6c097e0e275ea88c2cd08570531a369"},
- {file = "aio_pika-6.8.0-py3-none-any.whl", hash = "sha256:e69afef8695f47c5d107bbdba21bdb845d5c249acb3be53ef5c2d497b02657c0"},
+ {file = "aio-pika-6.8.1.tar.gz", hash = "sha256:c2b2b46949a34252ff0e64c3bc208eef1893e5791b51aeefabf1676788d56b66"},
+ {file = "aio_pika-6.8.1-py3-none-any.whl", hash = "sha256:059ab8ecc03d73997f64ed28df7269105984232174d0e6406389c4e8ed30941c"},
]
aiodns = [
{file = "aiodns-2.0.0-py2.py3-none-any.whl", hash = "sha256:aaa5ac584f40fe778013df0aa6544bf157799bd3f608364b451840ed2c8688de"},
@@ -1189,17 +1222,14 @@ atomicwrites = [
{file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
]
attrs = [
- {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"},
- {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"},
-]
-"backports.entry-points-selectable" = [
- {file = "backports.entry_points_selectable-1.1.0-py2.py3-none-any.whl", hash = "sha256:a6d9a871cde5e15b4c4a53e3d43ba890cc6861ec1332c9c2428c92f977192acc"},
- {file = "backports.entry_points_selectable-1.1.0.tar.gz", hash = "sha256:988468260ec1c196dab6ae1149260e2f5472c9110334e5d51adcb77867361f6a"},
+ {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"},
+ {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
]
beautifulsoup4 = [
{file = "beautifulsoup4-4.10.0-py3-none-any.whl", hash = "sha256:9a315ce70049920ea4572a4055bc4bd700c940521d36fc858205ad4fcde149bf"},
{file = "beautifulsoup4-4.10.0.tar.gz", hash = "sha256:c23ad23c521d818955a4151a67d81580319d4bf548d3d49f4223ae041ff98891"},
]
+bot-core = []
certifi = [
{file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"},
{file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"},
@@ -1265,8 +1295,8 @@ chardet = [
{file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"},
]
charset-normalizer = [
- {file = "charset-normalizer-2.0.7.tar.gz", hash = "sha256:e019de665e2bcf9c2b64e2e5aa025fa991da8720daa3c1138cadd2fd1856aed0"},
- {file = "charset_normalizer-2.0.7-py3-none-any.whl", hash = "sha256:f7af805c321bfa1ce6714c51f254e0d5bb5e5834039bc17db7ebe3a4cec9492b"},
+ {file = "charset-normalizer-2.0.10.tar.gz", hash = "sha256:876d180e9d7432c5d1dfd4c5d26b72f099d503e8fcc0feb7532c9289be60fcbd"},
+ {file = "charset_normalizer-2.0.10-py3-none-any.whl", hash = "sha256:cb957888737fc0bbcd78e3df769addb41fd1ff8cf950dc9e7ad7793f1bf44455"},
]
colorama = [
{file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
@@ -1330,21 +1360,18 @@ coverage = [
{file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"},
{file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"},
]
-coveralls = [
- {file = "coveralls-2.2.0-py2.py3-none-any.whl", hash = "sha256:2301a19500b06649d2ec4f2858f9c69638d7699a4c63027c5d53daba666147cc"},
- {file = "coveralls-2.2.0.tar.gz", hash = "sha256:b990ba1f7bc4288e63340be0433698c1efe8217f78c689d254c2540af3d38617"},
-]
deepdiff = [
{file = "deepdiff-4.3.2-py3-none-any.whl", hash = "sha256:59fc1e3e7a28dd0147b0f2b00e3e27181f0f0ef4286b251d5f214a5bcd9a9bc4"},
{file = "deepdiff-4.3.2.tar.gz", hash = "sha256:91360be1d9d93b1d9c13ae9c5048fa83d9cff17a88eb30afaa0d7ff2d0fee17d"},
]
+deprecated = [
+ {file = "Deprecated-1.2.13-py2.py3-none-any.whl", hash = "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"},
+ {file = "Deprecated-1.2.13.tar.gz", hash = "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"},
+]
"discord.py" = []
distlib = [
- {file = "distlib-0.3.3-py2.py3-none-any.whl", hash = "sha256:c8b54e8454e5bf6237cc84c20e8264c3e991e824ef27e8f1e81049867d861e31"},
- {file = "distlib-0.3.3.zip", hash = "sha256:d982d0751ff6eaaab5e2ec8e691d949ee80eddf01a62eaa96ddb11531fe16b05"},
-]
-docopt = [
- {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"},
+ {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"},
+ {file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"},
]
emoji = [
{file = "emoji-0.6.0.tar.gz", hash = "sha256:e42da4f8d648f8ef10691bc246f682a1ec6b18373abfd9be10ec0b398823bd11"},
@@ -1354,16 +1381,16 @@ execnet = [
{file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"},
]
fakeredis = [
- {file = "fakeredis-1.6.1-py3-none-any.whl", hash = "sha256:5eb1516f1fe1813e9da8f6c482178fc067af09f53de587ae03887ef5d9d13024"},
- {file = "fakeredis-1.6.1.tar.gz", hash = "sha256:0d06a9384fb79da9f2164ce96e34eb9d4e2ea46215070805ea6fd3c174590b47"},
+ {file = "fakeredis-1.7.0-py3-none-any.whl", hash = "sha256:6f1e04f64557ad3b6835bdc6e5a8d022cbace4bdc24a47ad58f6a72e0fbff760"},
+ {file = "fakeredis-1.7.0.tar.gz", hash = "sha256:c9bd12e430336cbd3e189fae0e91eb99997b93e76dbfdd6ed67fa352dc684c71"},
]
feedparser = [
{file = "feedparser-6.0.8-py3-none-any.whl", hash = "sha256:1b7f57841d9cf85074deb316ed2c795091a238adb79846bc46dccdaf80f9c59a"},
{file = "feedparser-6.0.8.tar.gz", hash = "sha256:5ce0410a05ab248c8c7cfca3a0ea2203968ee9ff4486067379af4827a59f9661"},
]
filelock = [
- {file = "filelock-3.3.1-py3-none-any.whl", hash = "sha256:2b5eb3589e7fdda14599e7eb1a50e09b4cc14f34ed98b8ba56d33bfaafcbef2f"},
- {file = "filelock-3.3.1.tar.gz", hash = "sha256:34a9f35f95c441e7b38209775d6e0337f9a3759f3565f6c5798f19618527c76f"},
+ {file = "filelock-3.4.2-py3-none-any.whl", hash = "sha256:cf0fc6a2f8d26bd900f19bf33915ca70ba4dd8c56903eeb14e1e7a2fd7590146"},
+ {file = "filelock-3.4.2.tar.gz", hash = "sha256:38b4f4c989f9d06d44524df1b24bd19e167d851f19b50bf3e3559952dddc5b80"},
]
flake8 = [
{file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"},
@@ -1448,8 +1475,8 @@ humanfriendly = [
{file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"},
]
identify = [
- {file = "identify-2.3.0-py2.py3-none-any.whl", hash = "sha256:d1e82c83d063571bb88087676f81261a4eae913c492dafde184067c584bc7c05"},
- {file = "identify-2.3.0.tar.gz", hash = "sha256:fd08c97f23ceee72784081f1ce5125c8f53a02d3f2716dde79a6ab8f1039fea5"},
+ {file = "identify-2.4.2-py2.py3-none-any.whl", hash = "sha256:67c1e66225870dce721228176637a8ef965e8dd58450bcc7592249d0dfc4da6c"},
+ {file = "identify-2.4.2.tar.gz", hash = "sha256:93e8ec965e888f2212aa5c24b2b662f4832c39acb1d7196a70ea45acb626a05e"},
]
idna = [
{file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
@@ -1460,58 +1487,70 @@ iniconfig = [
{file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
]
isort = [
- {file = "isort-5.9.3-py3-none-any.whl", hash = "sha256:e17d6e2b81095c9db0a03a8025a957f334d6ea30b26f9ec70805411e5c7c81f2"},
- {file = "isort-5.9.3.tar.gz", hash = "sha256:9c2ea1e62d871267b78307fe511c0838ba0da28698c5732d54e2790bf3ba9899"},
+ {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"},
+ {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"},
]
lxml = [
- {file = "lxml-4.6.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:df7c53783a46febb0e70f6b05df2ba104610f2fb0d27023409734a3ecbb78fb2"},
- {file = "lxml-4.6.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:1b7584d421d254ab86d4f0b13ec662a9014397678a7c4265a02a6d7c2b18a75f"},
- {file = "lxml-4.6.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:079f3ae844f38982d156efce585bc540c16a926d4436712cf4baee0cce487a3d"},
- {file = "lxml-4.6.3-cp27-cp27m-win32.whl", hash = "sha256:bc4313cbeb0e7a416a488d72f9680fffffc645f8a838bd2193809881c67dd106"},
- {file = "lxml-4.6.3-cp27-cp27m-win_amd64.whl", hash = "sha256:8157dadbb09a34a6bd95a50690595e1fa0af1a99445e2744110e3dca7831c4ee"},
- {file = "lxml-4.6.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7728e05c35412ba36d3e9795ae8995e3c86958179c9770e65558ec3fdfd3724f"},
- {file = "lxml-4.6.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:4bff24dfeea62f2e56f5bab929b4428ae6caba2d1eea0c2d6eb618e30a71e6d4"},
- {file = "lxml-4.6.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:64812391546a18896adaa86c77c59a4998f33c24788cadc35789e55b727a37f4"},
- {file = "lxml-4.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c1a40c06fd5ba37ad39caa0b3144eb3772e813b5fb5b084198a985431c2f1e8d"},
- {file = "lxml-4.6.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:74f7d8d439b18fa4c385f3f5dfd11144bb87c1da034a466c5b5577d23a1d9b51"},
- {file = "lxml-4.6.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f90ba11136bfdd25cae3951af8da2e95121c9b9b93727b1b896e3fa105b2f586"},
- {file = "lxml-4.6.3-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:4c61b3a0db43a1607d6264166b230438f85bfed02e8cff20c22e564d0faff354"},
- {file = "lxml-4.6.3-cp35-cp35m-manylinux2014_x86_64.whl", hash = "sha256:5c8c163396cc0df3fd151b927e74f6e4acd67160d6c33304e805b84293351d16"},
- {file = "lxml-4.6.3-cp35-cp35m-win32.whl", hash = "sha256:f2380a6376dfa090227b663f9678150ef27543483055cc327555fb592c5967e2"},
- {file = "lxml-4.6.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c4f05c5a7c49d2fb70223d0d5bcfbe474cf928310ac9fa6a7c6dddc831d0b1d4"},
- {file = "lxml-4.6.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d2e35d7bf1c1ac8c538f88d26b396e73dd81440d59c1ef8522e1ea77b345ede4"},
- {file = "lxml-4.6.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:289e9ca1a9287f08daaf796d96e06cb2bc2958891d7911ac7cae1c5f9e1e0ee3"},
- {file = "lxml-4.6.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:bccbfc27563652de7dc9bdc595cb25e90b59c5f8e23e806ed0fd623755b6565d"},
- {file = "lxml-4.6.3-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d916d31fd85b2f78c76400d625076d9124de3e4bda8b016d25a050cc7d603f24"},
- {file = "lxml-4.6.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:820628b7b3135403540202e60551e741f9b6d3304371712521be939470b454ec"},
- {file = "lxml-4.6.3-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:c47ff7e0a36d4efac9fd692cfa33fbd0636674c102e9e8d9b26e1b93a94e7617"},
- {file = "lxml-4.6.3-cp36-cp36m-win32.whl", hash = "sha256:5a0a14e264069c03e46f926be0d8919f4105c1623d620e7ec0e612a2e9bf1c04"},
- {file = "lxml-4.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:92e821e43ad382332eade6812e298dc9701c75fe289f2a2d39c7960b43d1e92a"},
- {file = "lxml-4.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:efd7a09678fd8b53117f6bae4fa3825e0a22b03ef0a932e070c0bdbb3a35e654"},
- {file = "lxml-4.6.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:efac139c3f0bf4f0939f9375af4b02c5ad83a622de52d6dfa8e438e8e01d0eb0"},
- {file = "lxml-4.6.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:0fbcf5565ac01dff87cbfc0ff323515c823081c5777a9fc7703ff58388c258c3"},
- {file = "lxml-4.6.3-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:36108c73739985979bf302006527cf8a20515ce444ba916281d1c43938b8bb96"},
- {file = "lxml-4.6.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:122fba10466c7bd4178b07dba427aa516286b846b2cbd6f6169141917283aae2"},
- {file = "lxml-4.6.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:cdaf11d2bd275bf391b5308f86731e5194a21af45fbaaaf1d9e8147b9160ea92"},
- {file = "lxml-4.6.3-cp37-cp37m-win32.whl", hash = "sha256:3439c71103ef0e904ea0a1901611863e51f50b5cd5e8654a151740fde5e1cade"},
- {file = "lxml-4.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:4289728b5e2000a4ad4ab8da6e1db2e093c63c08bdc0414799ee776a3f78da4b"},
- {file = "lxml-4.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b007cbb845b28db4fb8b6a5cdcbf65bacb16a8bd328b53cbc0698688a68e1caa"},
- {file = "lxml-4.6.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:76fa7b1362d19f8fbd3e75fe2fb7c79359b0af8747e6f7141c338f0bee2f871a"},
- {file = "lxml-4.6.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:26e761ab5b07adf5f555ee82fb4bfc35bf93750499c6c7614bd64d12aaa67927"},
- {file = "lxml-4.6.3-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:e1cbd3f19a61e27e011e02f9600837b921ac661f0c40560eefb366e4e4fb275e"},
- {file = "lxml-4.6.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:66e575c62792c3f9ca47cb8b6fab9e35bab91360c783d1606f758761810c9791"},
- {file = "lxml-4.6.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:1b38116b6e628118dea5b2186ee6820ab138dbb1e24a13e478490c7db2f326ae"},
- {file = "lxml-4.6.3-cp38-cp38-win32.whl", hash = "sha256:89b8b22a5ff72d89d48d0e62abb14340d9e99fd637d046c27b8b257a01ffbe28"},
- {file = "lxml-4.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:2a9d50e69aac3ebee695424f7dbd7b8c6d6eb7de2a2eb6b0f6c7db6aa41e02b7"},
- {file = "lxml-4.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ce256aaa50f6cc9a649c51be3cd4ff142d67295bfc4f490c9134d0f9f6d58ef0"},
- {file = "lxml-4.6.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:7610b8c31688f0b1be0ef882889817939490a36d0ee880ea562a4e1399c447a1"},
- {file = "lxml-4.6.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f8380c03e45cf09f8557bdaa41e1fa7c81f3ae22828e1db470ab2a6c96d8bc23"},
- {file = "lxml-4.6.3-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:3082c518be8e97324390614dacd041bb1358c882d77108ca1957ba47738d9d59"},
- {file = "lxml-4.6.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:884ab9b29feaca361f7f88d811b1eea9bfca36cf3da27768d28ad45c3ee6f969"},
- {file = "lxml-4.6.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:6f12e1427285008fd32a6025e38e977d44d6382cf28e7201ed10d6c1698d2a9a"},
- {file = "lxml-4.6.3-cp39-cp39-win32.whl", hash = "sha256:33bb934a044cf32157c12bfcfbb6649807da20aa92c062ef51903415c704704f"},
- {file = "lxml-4.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:542d454665a3e277f76954418124d67516c5f88e51a900365ed54a9806122b83"},
- {file = "lxml-4.6.3.tar.gz", hash = "sha256:39b78571b3b30645ac77b95f7c69d1bffc4cf8c3b157c435a34da72e78c82468"},
+ {file = "lxml-4.7.1-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:d546431636edb1d6a608b348dd58cc9841b81f4116745857b6cb9f8dadb2725f"},
+ {file = "lxml-4.7.1-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6308062534323f0d3edb4e702a0e26a76ca9e0e23ff99be5d82750772df32a9e"},
+ {file = "lxml-4.7.1-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f76dbe44e31abf516114f6347a46fa4e7c2e8bceaa4b6f7ee3a0a03c8eba3c17"},
+ {file = "lxml-4.7.1-cp27-cp27m-win32.whl", hash = "sha256:d5618d49de6ba63fe4510bdada62d06a8acfca0b4b5c904956c777d28382b419"},
+ {file = "lxml-4.7.1-cp27-cp27m-win_amd64.whl", hash = "sha256:9393a05b126a7e187f3e38758255e0edf948a65b22c377414002d488221fdaa2"},
+ {file = "lxml-4.7.1-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:50d3dba341f1e583265c1a808e897b4159208d814ab07530202b6036a4d86da5"},
+ {file = "lxml-4.7.1-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:44f552e0da3c8ee3c28e2eb82b0b784200631687fc6a71277ea8ab0828780e7d"},
+ {file = "lxml-4.7.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:e662c6266e3a275bdcb6bb049edc7cd77d0b0f7e119a53101d367c841afc66dc"},
+ {file = "lxml-4.7.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4c093c571bc3da9ebcd484e001ba18b8452903cd428c0bc926d9b0141bcb710e"},
+ {file = "lxml-4.7.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:3e26ad9bc48d610bf6cc76c506b9e5ad9360ed7a945d9be3b5b2c8535a0145e3"},
+ {file = "lxml-4.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a5f623aeaa24f71fce3177d7fee875371345eb9102b355b882243e33e04b7175"},
+ {file = "lxml-4.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7b5e2acefd33c259c4a2e157119c4373c8773cf6793e225006a1649672ab47a6"},
+ {file = "lxml-4.7.1-cp310-cp310-win32.whl", hash = "sha256:67fa5f028e8a01e1d7944a9fb616d1d0510d5d38b0c41708310bd1bc45ae89f6"},
+ {file = "lxml-4.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:b1d381f58fcc3e63fcc0ea4f0a38335163883267f77e4c6e22d7a30877218a0e"},
+ {file = "lxml-4.7.1-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:38d9759733aa04fb1697d717bfabbedb21398046bd07734be7cccc3d19ea8675"},
+ {file = "lxml-4.7.1-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:dfd0d464f3d86a1460683cd742306d1138b4e99b79094f4e07e1ca85ee267fe7"},
+ {file = "lxml-4.7.1-cp35-cp35m-win32.whl", hash = "sha256:534e946bce61fd162af02bad7bfd2daec1521b71d27238869c23a672146c34a5"},
+ {file = "lxml-4.7.1-cp35-cp35m-win_amd64.whl", hash = "sha256:6ec829058785d028f467be70cd195cd0aaf1a763e4d09822584ede8c9eaa4b03"},
+ {file = "lxml-4.7.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:ade74f5e3a0fd17df5782896ddca7ddb998845a5f7cd4b0be771e1ffc3b9aa5b"},
+ {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:41358bfd24425c1673f184d7c26c6ae91943fe51dfecc3603b5e08187b4bcc55"},
+ {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6e56521538f19c4a6690f439fefed551f0b296bd785adc67c1777c348beb943d"},
+ {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5b0f782f0e03555c55e37d93d7a57454efe7495dab33ba0ccd2dbe25fc50f05d"},
+ {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:490712b91c65988012e866c411a40cc65b595929ececf75eeb4c79fcc3bc80a6"},
+ {file = "lxml-4.7.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:34c22eb8c819d59cec4444d9eebe2e38b95d3dcdafe08965853f8799fd71161d"},
+ {file = "lxml-4.7.1-cp36-cp36m-win32.whl", hash = "sha256:2a906c3890da6a63224d551c2967413b8790a6357a80bf6b257c9a7978c2c42d"},
+ {file = "lxml-4.7.1-cp36-cp36m-win_amd64.whl", hash = "sha256:36b16fecb10246e599f178dd74f313cbdc9f41c56e77d52100d1361eed24f51a"},
+ {file = "lxml-4.7.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:a5edc58d631170de90e50adc2cc0248083541affef82f8cd93bea458e4d96db8"},
+ {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:87c1b0496e8c87ec9db5383e30042357b4839b46c2d556abd49ec770ce2ad868"},
+ {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:0a5f0e4747f31cff87d1eb32a6000bde1e603107f632ef4666be0dc065889c7a"},
+ {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:bf6005708fc2e2c89a083f258b97709559a95f9a7a03e59f805dd23c93bc3986"},
+ {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fc15874816b9320581133ddc2096b644582ab870cf6a6ed63684433e7af4b0d3"},
+ {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0b5e96e25e70917b28a5391c2ed3ffc6156513d3db0e1476c5253fcd50f7a944"},
+ {file = "lxml-4.7.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ec9027d0beb785a35aa9951d14e06d48cfbf876d8ff67519403a2522b181943b"},
+ {file = "lxml-4.7.1-cp37-cp37m-win32.whl", hash = "sha256:9fbc0dee7ff5f15c4428775e6fa3ed20003140560ffa22b88326669d53b3c0f4"},
+ {file = "lxml-4.7.1-cp37-cp37m-win_amd64.whl", hash = "sha256:1104a8d47967a414a436007c52f533e933e5d52574cab407b1e49a4e9b5ddbd1"},
+ {file = "lxml-4.7.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:fc9fb11b65e7bc49f7f75aaba1b700f7181d95d4e151cf2f24d51bfd14410b77"},
+ {file = "lxml-4.7.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:317bd63870b4d875af3c1be1b19202de34c32623609ec803b81c99193a788c1e"},
+ {file = "lxml-4.7.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:610807cea990fd545b1559466971649e69302c8a9472cefe1d6d48a1dee97440"},
+ {file = "lxml-4.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:09b738360af8cb2da275998a8bf79517a71225b0de41ab47339c2beebfff025f"},
+ {file = "lxml-4.7.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6a2ab9d089324d77bb81745b01f4aeffe4094306d939e92ba5e71e9a6b99b71e"},
+ {file = "lxml-4.7.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eed394099a7792834f0cb4a8f615319152b9d801444c1c9e1b1a2c36d2239f9e"},
+ {file = "lxml-4.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:735e3b4ce9c0616e85f302f109bdc6e425ba1670a73f962c9f6b98a6d51b77c9"},
+ {file = "lxml-4.7.1-cp38-cp38-win32.whl", hash = "sha256:772057fba283c095db8c8ecde4634717a35c47061d24f889468dc67190327bcd"},
+ {file = "lxml-4.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:13dbb5c7e8f3b6a2cf6e10b0948cacb2f4c9eb05029fe31c60592d08ac63180d"},
+ {file = "lxml-4.7.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:718d7208b9c2d86aaf0294d9381a6acb0158b5ff0f3515902751404e318e02c9"},
+ {file = "lxml-4.7.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:5bee1b0cbfdb87686a7fb0e46f1d8bd34d52d6932c0723a86de1cc532b1aa489"},
+ {file = "lxml-4.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:e410cf3a2272d0a85526d700782a2fa92c1e304fdcc519ba74ac80b8297adf36"},
+ {file = "lxml-4.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:585ea241ee4961dc18a95e2f5581dbc26285fcf330e007459688096f76be8c42"},
+ {file = "lxml-4.7.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a555e06566c6dc167fbcd0ad507ff05fd9328502aefc963cb0a0547cfe7f00db"},
+ {file = "lxml-4.7.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:adaab25be351fff0d8a691c4f09153647804d09a87a4e4ea2c3f9fe9e8651851"},
+ {file = "lxml-4.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:82d16a64236970cb93c8d63ad18c5b9f138a704331e4b916b2737ddfad14e0c4"},
+ {file = "lxml-4.7.1-cp39-cp39-win32.whl", hash = "sha256:59e7da839a1238807226f7143c68a479dee09244d1b3cf8c134f2fce777d12d0"},
+ {file = "lxml-4.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:a1bbc4efa99ed1310b5009ce7f3a1784698082ed2c1ef3895332f5df9b3b92c2"},
+ {file = "lxml-4.7.1-pp37-pypy37_pp73-macosx_10_14_x86_64.whl", hash = "sha256:0607ff0988ad7e173e5ddf7bf55ee65534bd18a5461183c33e8e41a59e89edf4"},
+ {file = "lxml-4.7.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:6c198bfc169419c09b85ab10cb0f572744e686f40d1e7f4ed09061284fc1303f"},
+ {file = "lxml-4.7.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a58d78653ae422df6837dd4ca0036610b8cb4962b5cfdbd337b7b24de9e5f98a"},
+ {file = "lxml-4.7.1-pp38-pypy38_pp73-macosx_10_14_x86_64.whl", hash = "sha256:e18281a7d80d76b66a9f9e68a98cf7e1d153182772400d9a9ce855264d7d0ce7"},
+ {file = "lxml-4.7.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8e54945dd2eeb50925500957c7c579df3cd07c29db7810b83cf30495d79af267"},
+ {file = "lxml-4.7.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:447d5009d6b5447b2f237395d0018901dcc673f7d9f82ba26c1b9f9c3b444b60"},
+ {file = "lxml-4.7.1.tar.gz", hash = "sha256:a1613838aa6b89af4ba10a0f3a972836128801ed008078f8c1244e65958f1b24"},
]
markdownify = [
{file = "markdownify-0.6.1-py3-none-any.whl", hash = "sha256:7489fd5c601536996a376c4afbcd1dd034db7690af807120681461e82fbc0acc"},
@@ -1522,8 +1561,8 @@ mccabe = [
{file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
]
more-itertools = [
- {file = "more-itertools-8.10.0.tar.gz", hash = "sha256:1debcabeb1df793814859d64a81ad7cb10504c24349368ccf214c664c474f41f"},
- {file = "more_itertools-8.10.0-py3-none-any.whl", hash = "sha256:56ddac45541718ba332db05f464bebfb0768110111affd27f66e0051f276fa43"},
+ {file = "more-itertools-8.12.0.tar.gz", hash = "sha256:7dc6ad46f05f545f900dd59e8dfb4e84a4827b97b3cfecb175ea0c7d247f6064"},
+ {file = "more_itertools-8.12.0-py3-none-any.whl", hash = "sha256:43e6dd9942dffd72661a2c4ef383ad7da1e6a3e968a927ad7a6083ab410a688b"},
]
mslex = [
{file = "mslex-0.3.0-py2.py3-none-any.whl", hash = "sha256:380cb14abf8fabf40e56df5c8b21a6d533dc5cbdcfe42406bbf08dda8f42e42a"},
@@ -1611,8 +1650,8 @@ ordered-set = [
{file = "ordered-set-4.0.2.tar.gz", hash = "sha256:ba93b2df055bca202116ec44b9bead3df33ea63a7d5827ff8e16738b97f33a95"},
]
packaging = [
- {file = "packaging-21.0-py3-none-any.whl", hash = "sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14"},
- {file = "packaging-21.0.tar.gz", hash = "sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7"},
+ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
+ {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
]
pamqp = [
{file = "pamqp-2.3.0-py2.py3-none-any.whl", hash = "sha256:2f81b5c186f668a67f165193925b6bfd83db4363a6222f599517f29ecee60b02"},
@@ -1627,53 +1666,57 @@ pip-licenses = [
{file = "pip_licenses-3.5.3-py3-none-any.whl", hash = "sha256:59c148d6a03784bf945d232c0dc0e9de4272a3675acaa0361ad7712398ca86ba"},
]
platformdirs = [
- {file = "platformdirs-2.4.0-py3-none-any.whl", hash = "sha256:8868bbe3c3c80d42f20156f22e7131d2fb321f5bc86a2a345375c6481a67021d"},
- {file = "platformdirs-2.4.0.tar.gz", hash = "sha256:367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2"},
+ {file = "platformdirs-2.4.1-py3-none-any.whl", hash = "sha256:1d7385c7db91728b83efd0ca99a5afb296cab9d0ed8313a45ed8ba17967ecfca"},
+ {file = "platformdirs-2.4.1.tar.gz", hash = "sha256:440633ddfebcc36264232365d7840a970e75e1018d15b4327d11f91909045fda"},
]
pluggy = [
{file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
{file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
]
pre-commit = [
- {file = "pre_commit-2.15.0-py2.py3-none-any.whl", hash = "sha256:a4ed01000afcb484d9eb8d504272e642c4c4099bbad3a6b27e519bd6a3e928a6"},
- {file = "pre_commit-2.15.0.tar.gz", hash = "sha256:3c25add78dbdfb6a28a651780d5c311ac40dd17f160eb3954a0c59da40a505a7"},
+ {file = "pre_commit-2.16.0-py2.py3-none-any.whl", hash = "sha256:758d1dc9b62c2ed8881585c254976d66eae0889919ab9b859064fc2fe3c7743e"},
+ {file = "pre_commit-2.16.0.tar.gz", hash = "sha256:fe9897cac830aa7164dbd02a4e7b90cae49630451ce88464bca73db486ba9f65"},
]
psutil = [
- {file = "psutil-5.8.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0066a82f7b1b37d334e68697faba68e5ad5e858279fd6351c8ca6024e8d6ba64"},
- {file = "psutil-5.8.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:0ae6f386d8d297177fd288be6e8d1afc05966878704dad9847719650e44fc49c"},
- {file = "psutil-5.8.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:12d844996d6c2b1d3881cfa6fa201fd635971869a9da945cf6756105af73d2df"},
- {file = "psutil-5.8.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:02b8292609b1f7fcb34173b25e48d0da8667bc85f81d7476584d889c6e0f2131"},
- {file = "psutil-5.8.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6ffe81843131ee0ffa02c317186ed1e759a145267d54fdef1bc4ea5f5931ab60"},
- {file = "psutil-5.8.0-cp27-none-win32.whl", hash = "sha256:ea313bb02e5e25224e518e4352af4bf5e062755160f77e4b1767dd5ccb65f876"},
- {file = "psutil-5.8.0-cp27-none-win_amd64.whl", hash = "sha256:5da29e394bdedd9144c7331192e20c1f79283fb03b06e6abd3a8ae45ffecee65"},
- {file = "psutil-5.8.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:74fb2557d1430fff18ff0d72613c5ca30c45cdbfcddd6a5773e9fc1fe9364be8"},
- {file = "psutil-5.8.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:74f2d0be88db96ada78756cb3a3e1b107ce8ab79f65aa885f76d7664e56928f6"},
- {file = "psutil-5.8.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:99de3e8739258b3c3e8669cb9757c9a861b2a25ad0955f8e53ac662d66de61ac"},
- {file = "psutil-5.8.0-cp36-cp36m-win32.whl", hash = "sha256:36b3b6c9e2a34b7d7fbae330a85bf72c30b1c827a4366a07443fc4b6270449e2"},
- {file = "psutil-5.8.0-cp36-cp36m-win_amd64.whl", hash = "sha256:52de075468cd394ac98c66f9ca33b2f54ae1d9bff1ef6b67a212ee8f639ec06d"},
- {file = "psutil-5.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c6a5fd10ce6b6344e616cf01cc5b849fa8103fbb5ba507b6b2dee4c11e84c935"},
- {file = "psutil-5.8.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:61f05864b42fedc0771d6d8e49c35f07efd209ade09a5afe6a5059e7bb7bf83d"},
- {file = "psutil-5.8.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:0dd4465a039d343925cdc29023bb6960ccf4e74a65ad53e768403746a9207023"},
- {file = "psutil-5.8.0-cp37-cp37m-win32.whl", hash = "sha256:1bff0d07e76114ec24ee32e7f7f8d0c4b0514b3fae93e3d2aaafd65d22502394"},
- {file = "psutil-5.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:fcc01e900c1d7bee2a37e5d6e4f9194760a93597c97fee89c4ae51701de03563"},
- {file = "psutil-5.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6223d07a1ae93f86451d0198a0c361032c4c93ebd4bf6d25e2fb3edfad9571ef"},
- {file = "psutil-5.8.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d225cd8319aa1d3c85bf195c4e07d17d3cd68636b8fc97e6cf198f782f99af28"},
- {file = "psutil-5.8.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:28ff7c95293ae74bf1ca1a79e8805fcde005c18a122ca983abf676ea3466362b"},
- {file = "psutil-5.8.0-cp38-cp38-win32.whl", hash = "sha256:ce8b867423291cb65cfc6d9c4955ee9bfc1e21fe03bb50e177f2b957f1c2469d"},
- {file = "psutil-5.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:90f31c34d25b1b3ed6c40cdd34ff122b1887a825297c017e4cbd6796dd8b672d"},
- {file = "psutil-5.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6323d5d845c2785efb20aded4726636546b26d3b577aded22492908f7c1bdda7"},
- {file = "psutil-5.8.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:245b5509968ac0bd179287d91210cd3f37add77dad385ef238b275bad35fa1c4"},
- {file = "psutil-5.8.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:90d4091c2d30ddd0a03e0b97e6a33a48628469b99585e2ad6bf21f17423b112b"},
- {file = "psutil-5.8.0-cp39-cp39-win32.whl", hash = "sha256:ea372bcc129394485824ae3e3ddabe67dc0b118d262c568b4d2602a7070afdb0"},
- {file = "psutil-5.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:f4634b033faf0d968bb9220dd1c793b897ab7f1189956e1aa9eae752527127d3"},
- {file = "psutil-5.8.0.tar.gz", hash = "sha256:0c9ccb99ab76025f2f0bbecf341d4656e9c1351db8cc8a03ccd62e318ab4b5c6"},
+ {file = "psutil-5.9.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:55ce319452e3d139e25d6c3f85a1acf12d1607ddedea5e35fb47a552c051161b"},
+ {file = "psutil-5.9.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:7336292a13a80eb93c21f36bde4328aa748a04b68c13d01dfddd67fc13fd0618"},
+ {file = "psutil-5.9.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:cb8d10461c1ceee0c25a64f2dd54872b70b89c26419e147a05a10b753ad36ec2"},
+ {file = "psutil-5.9.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:7641300de73e4909e5d148e90cc3142fb890079e1525a840cf0dfd39195239fd"},
+ {file = "psutil-5.9.0-cp27-none-win32.whl", hash = "sha256:ea42d747c5f71b5ccaa6897b216a7dadb9f52c72a0fe2b872ef7d3e1eacf3ba3"},
+ {file = "psutil-5.9.0-cp27-none-win_amd64.whl", hash = "sha256:ef216cc9feb60634bda2f341a9559ac594e2eeaadd0ba187a4c2eb5b5d40b91c"},
+ {file = "psutil-5.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90a58b9fcae2dbfe4ba852b57bd4a1dded6b990a33d6428c7614b7d48eccb492"},
+ {file = "psutil-5.9.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff0d41f8b3e9ebb6b6110057e40019a432e96aae2008951121ba4e56040b84f3"},
+ {file = "psutil-5.9.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:742c34fff804f34f62659279ed5c5b723bb0195e9d7bd9907591de9f8f6558e2"},
+ {file = "psutil-5.9.0-cp310-cp310-win32.whl", hash = "sha256:8293942e4ce0c5689821f65ce6522ce4786d02af57f13c0195b40e1edb1db61d"},
+ {file = "psutil-5.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:9b51917c1af3fa35a3f2dabd7ba96a2a4f19df3dec911da73875e1edaf22a40b"},
+ {file = "psutil-5.9.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e9805fed4f2a81de98ae5fe38b75a74c6e6ad2df8a5c479594c7629a1fe35f56"},
+ {file = "psutil-5.9.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c51f1af02334e4b516ec221ee26b8fdf105032418ca5a5ab9737e8c87dafe203"},
+ {file = "psutil-5.9.0-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32acf55cb9a8cbfb29167cd005951df81b567099295291bcfd1027365b36591d"},
+ {file = "psutil-5.9.0-cp36-cp36m-win32.whl", hash = "sha256:e5c783d0b1ad6ca8a5d3e7b680468c9c926b804be83a3a8e95141b05c39c9f64"},
+ {file = "psutil-5.9.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d62a2796e08dd024b8179bd441cb714e0f81226c352c802fca0fd3f89eeacd94"},
+ {file = "psutil-5.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3d00a664e31921009a84367266b35ba0aac04a2a6cad09c550a89041034d19a0"},
+ {file = "psutil-5.9.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7779be4025c540d1d65a2de3f30caeacc49ae7a2152108adeaf42c7534a115ce"},
+ {file = "psutil-5.9.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:072664401ae6e7c1bfb878c65d7282d4b4391f1bc9a56d5e03b5a490403271b5"},
+ {file = "psutil-5.9.0-cp37-cp37m-win32.whl", hash = "sha256:df2c8bd48fb83a8408c8390b143c6a6fa10cb1a674ca664954de193fdcab36a9"},
+ {file = "psutil-5.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1d7b433519b9a38192dfda962dd8f44446668c009833e1429a52424624f408b4"},
+ {file = "psutil-5.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c3400cae15bdb449d518545cbd5b649117de54e3596ded84aacabfbb3297ead2"},
+ {file = "psutil-5.9.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2237f35c4bbae932ee98902a08050a27821f8f6dfa880a47195e5993af4702d"},
+ {file = "psutil-5.9.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1070a9b287846a21a5d572d6dddd369517510b68710fca56b0e9e02fd24bed9a"},
+ {file = "psutil-5.9.0-cp38-cp38-win32.whl", hash = "sha256:76cebf84aac1d6da5b63df11fe0d377b46b7b500d892284068bacccf12f20666"},
+ {file = "psutil-5.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:3151a58f0fbd8942ba94f7c31c7e6b310d2989f4da74fcbf28b934374e9bf841"},
+ {file = "psutil-5.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:539e429da49c5d27d5a58e3563886057f8fc3868a5547b4f1876d9c0f007bccf"},
+ {file = "psutil-5.9.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58c7d923dc209225600aec73aa2c4ae8ea33b1ab31bc11ef8a5933b027476f07"},
+ {file = "psutil-5.9.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3611e87eea393f779a35b192b46a164b1d01167c9d323dda9b1e527ea69d697d"},
+ {file = "psutil-5.9.0-cp39-cp39-win32.whl", hash = "sha256:4e2fb92e3aeae3ec3b7b66c528981fd327fb93fd906a77215200404444ec1845"},
+ {file = "psutil-5.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:7d190ee2eaef7831163f254dc58f6d2e2a22e27382b936aab51c835fc080c3d3"},
+ {file = "psutil-5.9.0.tar.gz", hash = "sha256:869842dbd66bb80c3217158e629d6fceaecc3a3166d3d1faee515b05dd26ca25"},
]
ptable = [
{file = "PTable-0.9.2.tar.gz", hash = "sha256:aa7fc151cb40f2dabcd2275ba6f7fd0ff8577a86be3365cd3fb297cbe09cc292"},
]
py = [
- {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"},
- {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"},
+ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
+ {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
]
pycares = [
{file = "pycares-4.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:71b99b9e041ae3356b859822c511f286f84c8889ec9ed1fbf6ac30fb4da13e4c"},
@@ -1713,8 +1756,8 @@ pycodestyle = [
{file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"},
]
pycparser = [
- {file = "pycparser-2.20-py2.py3-none-any.whl", hash = "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"},
- {file = "pycparser-2.20.tar.gz", hash = "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"},
+ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
+ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
]
pydocstyle = [
{file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"},
@@ -1725,8 +1768,8 @@ pyflakes = [
{file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"},
]
pyparsing = [
- {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"},
- {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"},
+ {file = "pyparsing-3.0.6-py3-none-any.whl", hash = "sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4"},
+ {file = "pyparsing-3.0.6.tar.gz", hash = "sha256:d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81"},
]
pyreadline3 = [
{file = "pyreadline3-3.3-py3-none-any.whl", hash = "sha256:0003fd0079d152ecbd8111202c5a7dfa6a5569ffd65b235e45f3c2ecbee337b4"},
@@ -1741,8 +1784,8 @@ pytest-cov = [
{file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"},
]
pytest-forked = [
- {file = "pytest-forked-1.3.0.tar.gz", hash = "sha256:6aa9ac7e00ad1a539c41bec6d21011332de671e938c7637378ec9710204e37ca"},
- {file = "pytest_forked-1.3.0-py2.py3-none-any.whl", hash = "sha256:dc4147784048e70ef5d437951728825a131b81714b398d5d52f17c7c144d8815"},
+ {file = "pytest-forked-1.4.0.tar.gz", hash = "sha256:8b67587c8f98cbbadfdd804539ed5455b6ed03802203485dd2f53c1422d7440e"},
+ {file = "pytest_forked-1.4.0-py3-none-any.whl", hash = "sha256:bbbb6717efc886b9d64537b41fb1497cfaf3c9601276be8da2cccfea5a3c8ad8"},
]
pytest-xdist = [
{file = "pytest-xdist-2.3.0.tar.gz", hash = "sha256:e8ecde2f85d88fbcadb7d28cb33da0fa29bca5cf7d5967fa89fc0e97e5299ea5"},
@@ -1792,68 +1835,62 @@ pyyaml = [
{file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"},
]
rapidfuzz = [
- {file = "rapidfuzz-1.8.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:91f094562c683802e6c972bce27a692dad70d6cd1114e626b29d990c3704c653"},
- {file = "rapidfuzz-1.8.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:4a20682121e245cf5ad2dbdd771360763ea11b77520632a1034c4bb9ad1e854c"},
- {file = "rapidfuzz-1.8.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:8810e75d8f9c4453bbd6209c372bf97514359b0b5efff555caf85b15f8a9d862"},
- {file = "rapidfuzz-1.8.0-cp27-cp27m-win32.whl", hash = "sha256:00cf713d843735b5958d87294f08b05c653a593ced7c4120be34f5d26d7a320a"},
- {file = "rapidfuzz-1.8.0-cp27-cp27m-win_amd64.whl", hash = "sha256:2baca64e23a623e077f57e5470de21af2765af15aa1088676eb2d475e664eed0"},
- {file = "rapidfuzz-1.8.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:9bf7a6c61bacedd84023be356e057e1d209dd6997cfaa3c1cee77aa21d642f88"},
- {file = "rapidfuzz-1.8.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:61b6434e3341ca5158ecb371b1ceb4c1f6110563a72d28bdce4eb2a084493e47"},
- {file = "rapidfuzz-1.8.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e425e690383f6cf308e8c2e8d630fa9596f67d233344efd8fae11e70a9f5635f"},
- {file = "rapidfuzz-1.8.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:93db5e693b76d616b09df27ca5c79e0dda169af7f1b8f5ab3262826d981e37e2"},
- {file = "rapidfuzz-1.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a8c4f76ed1c8a65892d98dc2913027c9acdb219d18f3a441cfa427a32861af9"},
- {file = "rapidfuzz-1.8.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71e217fd30901214cc96c0c15057278bafb7072aa9b2be4c97459c1fedf3e731"},
- {file = "rapidfuzz-1.8.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d579dd447b8e851462e79054b68f94b66b09df8b3abb2aa5ca07fe00912ef5e8"},
- {file = "rapidfuzz-1.8.0-cp310-cp310-win32.whl", hash = "sha256:5808064555273496dcd594d659bd28ee8d399149dd31575321034424455dc955"},
- {file = "rapidfuzz-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:798fef1671ca66c78b47802228e9583f7ab32b99bdfe3984ebb1f96e93e38b5f"},
- {file = "rapidfuzz-1.8.0-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:c9e0ed210831f5c73533bf11099ea7897db491e76c3443bef281d9c1c67d7f3a"},
- {file = "rapidfuzz-1.8.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:c819bb19eb615a31ddc9cb8248a285bf04f58158b53ce096451178631f99b652"},
- {file = "rapidfuzz-1.8.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:942ee45564f28ef70320d1229f02dc998bd93e3519c1f3a80f33ce144b51039c"},
- {file = "rapidfuzz-1.8.0-cp35-cp35m-win32.whl", hash = "sha256:7e6ae2e5a3bc9acc51e118f25d32b8efcd431c5d8deb408336dd2ed0f21d087c"},
- {file = "rapidfuzz-1.8.0-cp35-cp35m-win_amd64.whl", hash = "sha256:98901fba67c89ad2506f3946642cf6eb8f489592fb7eb307ebdf8bdb0c4e97f9"},
- {file = "rapidfuzz-1.8.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:705e1686f406a0c77ef323cdb7369b7cf9e68f2abfcb83ff5f1e0a5b21f5a534"},
- {file = "rapidfuzz-1.8.0-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:da0c5fe5fdbbd74206c1778af6b8c5ff8dfbe2dd04ae12bbe96642b358acefce"},
- {file = "rapidfuzz-1.8.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:535253bc9224215131ae450aad6c9f7ef1b24f15c685045eab2b52511268bd06"},
- {file = "rapidfuzz-1.8.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:acdad83f07d886705fce164b0d1f4e3b56788a205602ed3a7fc8b10ceaf05fbf"},
- {file = "rapidfuzz-1.8.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35097f649831f8375d6c65a237deccac3aceb573aa7fae1e5d3fa942e89de1c8"},
- {file = "rapidfuzz-1.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6f4db142e5b4b44314166a90e11603220db659bd2f9c23dd5db402c13eac8eb7"},
- {file = "rapidfuzz-1.8.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:19a3f55f27411d68360540484874beda0b428b062596d5f0f141663ef0738bfd"},
- {file = "rapidfuzz-1.8.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:22b4c1a7f6fe29bd8dae49f7d5ab085dc42c3964f1a78b6dca22fdf83b5c9bfa"},
- {file = "rapidfuzz-1.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8bfb2fbc147904b78d5c510ee75dc8704b606e956df23f33a9e89abc03f45c3"},
- {file = "rapidfuzz-1.8.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6dc5111ebfed2c4f2e4d120a9b280ea13ea4fbb60b6915dd239817b4fc092ed"},
- {file = "rapidfuzz-1.8.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:db5ee2457d97cb967ffe08446a8c595c03fe747fdc2e145266713f9c516d1c4a"},
- {file = "rapidfuzz-1.8.0-cp37-cp37m-win32.whl", hash = "sha256:12c1b78cc15fc26f555a4bf66088d5afb6354b5a5aa149a123f01a15af6c411b"},
- {file = "rapidfuzz-1.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:693e9579048d8db4ff020715dd6f25aa315fd6445bc94e7400d7a94a227dad27"},
- {file = "rapidfuzz-1.8.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b4fe19df3edcf7de359448b872aec08e6592b4ca2d3df4d8ee57b5812d68bebf"},
- {file = "rapidfuzz-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f3670b9df0e1f479637cad1577afca7766a02775dc08c14837cf495c82861d7c"},
- {file = "rapidfuzz-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:61d118f36eb942649b0db344f7b7a19ad7e9b5749d831788187eb03b57ce1bfa"},
- {file = "rapidfuzz-1.8.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fce3a2c8a1d10da12aff4a0d367624e8ae9e15c1b84a5144843681d39be0c355"},
- {file = "rapidfuzz-1.8.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1577ef26e3647ccc4cc9754c34ffaa731639779f4d7779e91a761c72adac093e"},
- {file = "rapidfuzz-1.8.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fec9b7e60fde51990c3b48fc1aa9dba9ac3acaf78f623dbb645a6fe21a9654e"},
- {file = "rapidfuzz-1.8.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b954469d93858bc8b48129bc63fd644382a4df5f3fb1b4b290f48eac1d00a2da"},
- {file = "rapidfuzz-1.8.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:190ba709069a7e5a6b39b7c8bc413a08cfa7f1f4defec5d974c4128b510e0234"},
- {file = "rapidfuzz-1.8.0-cp38-cp38-win32.whl", hash = "sha256:97b2d13d6323649b43d1b113681e4013ba230bd6e9827cc832dcebee447d7250"},
- {file = "rapidfuzz-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:81c3091209b75f6611efe2af18834180946d4ce28f41ca8d44fce816187840d2"},
- {file = "rapidfuzz-1.8.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d610afa33e92aa0481a514ffda3ec51ca5df3c684c1c1c795307589c62025931"},
- {file = "rapidfuzz-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d976f33ca6b5fabbb095c0a662f5b86baf706184fc24c7f125d4ddb54b8bf036"},
- {file = "rapidfuzz-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0f5ca7bca2af598d4ddcf5b93b64b50654a9ff684e6f18d865f6e13fee442b3e"},
- {file = "rapidfuzz-1.8.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2aac5ea6b0306dcd28a6d1a89d35ed2c6ac426f2673ee1b92cf3f1d0fd5cd"},
- {file = "rapidfuzz-1.8.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f145c9831c0454a696a3136a6380ea4e01434e9cc2f2bc10d032864c16d1d0e5"},
- {file = "rapidfuzz-1.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4ce53291575b56c9d45add73ea013f43bafcea55eee9d5139aa759918d7685f"},
- {file = "rapidfuzz-1.8.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de5773a39c00a0f23cfc5da9e0e5fd0fb512b0ebe23dc7289a38e1f9a4b5cefc"},
- {file = "rapidfuzz-1.8.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87a802e55792bfbe192e2d557f38867dbe3671b49b3d5ecd873859c7460746ba"},
- {file = "rapidfuzz-1.8.0-cp39-cp39-win32.whl", hash = "sha256:9391abf1121df831316222f28cea37397a0f72bd7978f3be6e7da29a7821e4e5"},
- {file = "rapidfuzz-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:9eeca1b436042b5523dcf314f5822b1131597898c1d967f140d1917541a8a3d1"},
- {file = "rapidfuzz-1.8.0-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:a01f2495aca479b49d3b3a8863d6ba9bea2043447a1ced74ae5ec5270059cbc1"},
- {file = "rapidfuzz-1.8.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:b7d4b1a5d16817f8cdb34365c7b58ae22d5cf1b3207720bb2fa0b55968bdb034"},
- {file = "rapidfuzz-1.8.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c738d0d7f1744646d48d19b4c775926082bcefebd2460f45ca383a0e882f5672"},
- {file = "rapidfuzz-1.8.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0fb9c6078c17c12b52e66b7d0a2a1674f6bbbdc6a76e454c8479b95147018123"},
- {file = "rapidfuzz-1.8.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:1482b385d83670eb069577c9667f72b41eec4f005aee32f1a4ff4e71e88afde2"},
- {file = "rapidfuzz-1.8.0.tar.gz", hash = "sha256:83fff37acf0367314879231264169dcbc5e7de969a94f4b82055d06a7fddab9a"},
+ {file = "rapidfuzz-1.9.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:68227a8b25291d6a2140aef049271ea30a77be5ef672a58e582a55a5cc1fce93"},
+ {file = "rapidfuzz-1.9.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:c33541995b96ff40025c1456b8c74b7dd2ab9cbf91943fc35a7bb621f48940e2"},
+ {file = "rapidfuzz-1.9.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:c2fafbbf97a4632822248f4201601b691e2eac5fdb30e5d7a96d07a6d058a7d4"},
+ {file = "rapidfuzz-1.9.1-cp27-cp27m-win32.whl", hash = "sha256:364795f617a99e1dbb55ac3947ab8366588b72531cb2d6152666287d20610706"},
+ {file = "rapidfuzz-1.9.1-cp27-cp27m-win_amd64.whl", hash = "sha256:f171d9e66144b0647f9b998ef10bdd919a640e4b1357250c8ef6259deb5ffe0d"},
+ {file = "rapidfuzz-1.9.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:c83801a7c5209663aa120b815a4f2c39e95fe8e0b774ec58a1e0affd6a2fcfc6"},
+ {file = "rapidfuzz-1.9.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:67e61c2baa6bb1848c4a33752f1781124dcc90bf3f31b18b44db1ae4e4e26634"},
+ {file = "rapidfuzz-1.9.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8ab7eb003a18991347174910f11d38ff40399081185d9e3199ec277535f7828b"},
+ {file = "rapidfuzz-1.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5ad450badf06ddf98a246140b5059ba895ee8445e8102a5a289908327f551f81"},
+ {file = "rapidfuzz-1.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:402b2174bded62a793c5f7d9aec16bc32c661402360a934819ae72b54cfbce1e"},
+ {file = "rapidfuzz-1.9.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:92066ccb054efc2e17afb4049c98b550969653cd58f71dd756cfcc8e6864630a"},
+ {file = "rapidfuzz-1.9.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8dc0bf1814accee08a9c9bace6672ef06eae6b0446fce88e3e97e23dfaf3ea10"},
+ {file = "rapidfuzz-1.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdbd387efb8478605951344f327dd03bf053c138d757369a43404305b99e55db"},
+ {file = "rapidfuzz-1.9.1-cp310-cp310-win32.whl", hash = "sha256:b1c54807e556dbcc6caf4ce0f24446c01b195f3cc46e2a6e74b82d3a21eaa45d"},
+ {file = "rapidfuzz-1.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:ac3273364cd1619cab3bf0ba731efea5405833f9eba362da7dcd70bd42073d8e"},
+ {file = "rapidfuzz-1.9.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:d9faf62606c08a0a6992dd480c72b6a068733ae02688dc35f2e36ba0d44673f4"},
+ {file = "rapidfuzz-1.9.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:f6a56a48be047637b1b0b2459a11cf7cd5aa7bbe16a439bd4f73b4af39e620e4"},
+ {file = "rapidfuzz-1.9.1-cp35-cp35m-win32.whl", hash = "sha256:aa91609979e9d2700f0ff100df99b36e7d700b70169ee385d43d5de9e471ae97"},
+ {file = "rapidfuzz-1.9.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b4cfdd0915ab4cec86c2ff6bab9f01b03454f3de0963c37f9f219df2ddf42b95"},
+ {file = "rapidfuzz-1.9.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c6bfa4ad0158a093cd304f795ceefdc3861ae6942a61432b2a50858be6de88ca"},
+ {file = "rapidfuzz-1.9.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:eb0ea02295d9278bd2dcd2df4760b0f2887b6c3f2f374005ec5af320d8d3a37e"},
+ {file = "rapidfuzz-1.9.1-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d5187cd5cd6273e9fee07de493a42a2153134a4914df74cb1abb0744551c548a"},
+ {file = "rapidfuzz-1.9.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6e5b8af63f9c05b64454460759ed84a715d581d598ec4484f4ec512f398e8b1"},
+ {file = "rapidfuzz-1.9.1-cp36-cp36m-win32.whl", hash = "sha256:36137f88f2b28115af506118e64e11c816611eab2434293af7fdacd1290ffb9d"},
+ {file = "rapidfuzz-1.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:fcc420cad46be7c9887110edf04cdee545f26dbf22650a443d89790fc35f7b88"},
+ {file = "rapidfuzz-1.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b06de314f426aebff8a44319016bbe2b22f7848c84e44224f80b0690b7b08b18"},
+ {file = "rapidfuzz-1.9.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e5de44e719faea79e45322b037f0d4a141d750b80d2204fa68f43a42a24f0fbc"},
+ {file = "rapidfuzz-1.9.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f9439df09a782afd01b67005a3b110c70bbf9e1cf06d2ac9b293ce2d02d3c549"},
+ {file = "rapidfuzz-1.9.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e903d4702647465721e2d0431c95f04fd56a06577f06f41e2960c83fd63c1bad"},
+ {file = "rapidfuzz-1.9.1-cp37-cp37m-win32.whl", hash = "sha256:a5298f4ac1975edcbb15583eab659a44b33aebaf3bccf172e185cfea68771c08"},
+ {file = "rapidfuzz-1.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:103193a01921b54fcdad6b01cfda3a68e00aeafca236b7ecd5b1b2c2e7e96337"},
+ {file = "rapidfuzz-1.9.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1d98a3187040dca855e02179a35c137f72ef83ce243783d44ea59efa86b94b3a"},
+ {file = "rapidfuzz-1.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cb92bf7fc911b787055a88d9295ca3b4fe8576e3b59271f070f1b1b181eb087d"},
+ {file = "rapidfuzz-1.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3f014a0f5f8159a94c6ee884fedd1c30e07fb866a5d76ff2c18091bc6363b76f"},
+ {file = "rapidfuzz-1.9.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:31474074a99f72289ac325fbd77983e7d355d48860bfe7a4f6f6396fdb24410a"},
+ {file = "rapidfuzz-1.9.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec67d79af5a2d7b0cf67b570a5579710e461cadda4120478e813b63491f394dd"},
+ {file = "rapidfuzz-1.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ebc0d3d15ed32f98f0052cf6e3e9c9b8010fb93c04fb74d2022e3c51ec540e2"},
+ {file = "rapidfuzz-1.9.1-cp38-cp38-win32.whl", hash = "sha256:477ab1a3044bab89db45caabc562b158f68765ecaa638b73ba17e92f09dfa5ff"},
+ {file = "rapidfuzz-1.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:8e872763dc0367d7544aa585d2e8b27af233323b8a7cd2f9b78cafa05bae5018"},
+ {file = "rapidfuzz-1.9.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8401c41e219ae36ca7a88762776a6270511650d4cc70d024ae61561e96d67e47"},
+ {file = "rapidfuzz-1.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ea10bd8e0436801c3264f7084a5ea194f12ba9fe1ba898aa4a2107d276501292"},
+ {file = "rapidfuzz-1.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:433737914b46c1ffa0c678eceae1c260dc6b7fb5b6cad4c725d3e3607c764b32"},
+ {file = "rapidfuzz-1.9.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8c3b08e90e45acbc469d1f456681643256e952bf84ec7714f58979baba0c8a1c"},
+ {file = "rapidfuzz-1.9.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bbcd265b3c86176e5db4cbba7b4364d7333c214ee80e2d259c7085929934ca9d"},
+ {file = "rapidfuzz-1.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d69fabcd635783cd842e7d5ee4b77164314c5124b82df5a0c436ab3d698f8a9"},
+ {file = "rapidfuzz-1.9.1-cp39-cp39-win32.whl", hash = "sha256:01f16b6f3fa5d1a26c12f5da5de0032f1e12c919d876005b57492a8ec9a5c043"},
+ {file = "rapidfuzz-1.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:0bcc5bbfdbe6068cc2cf0029ab6cde08dceac498d232fa3a61dd34fbfa0b3f36"},
+ {file = "rapidfuzz-1.9.1-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:de869c8f4e8edb9b2f7b8232a04896645501defcbd9d85bc0202ff3ec6285f6b"},
+ {file = "rapidfuzz-1.9.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:db5978e970fb0955974d51021da4b929e2e4890fef17792989ee32658e2b159c"},
+ {file = "rapidfuzz-1.9.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:33479f75f36ac3a1d8421365d4fa906e013490790730a89caba31d06e6f71738"},
+ {file = "rapidfuzz-1.9.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:af991cb333ec526d894923163050931b3a870b7694bf7687aaa6154d341a98f5"},
+ {file = "rapidfuzz-1.9.1.tar.gz", hash = "sha256:bd7a4fe33ba49db3417f0f57a8af02462554f1296dedcf35b026cd3525efef74"},
]
redis = [
- {file = "redis-3.5.3-py2.py3-none-any.whl", hash = "sha256:432b788c4530cfe16d8d943a09d40ca6c16149727e4afe8c2c9d5580c59d9f24"},
- {file = "redis-3.5.3.tar.gz", hash = "sha256:0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2"},
+ {file = "redis-4.0.2-py3-none-any.whl", hash = "sha256:c8481cf414474e3497ec7971a1ba9b998c8efad0f0d289a009a5bbef040894f9"},
+ {file = "redis-4.0.2.tar.gz", hash = "sha256:ccf692811f2c1fc7a92b466aa2599e4a6d2d73d5f736a2c70be600657c0da34a"},
]
regex = [
{file = "regex-2021.4.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000"},
@@ -1899,12 +1936,16 @@ regex = [
{file = "regex-2021.4.4.tar.gz", hash = "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb"},
]
requests = [
- {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"},
- {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"},
+ {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"},
+ {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"},
+]
+requests-file = [
+ {file = "requests-file-1.5.1.tar.gz", hash = "sha256:07d74208d3389d01c38ab89ef403af0cfec63957d53a0081d8eca738d0247d8e"},
+ {file = "requests_file-1.5.1-py2.py3-none-any.whl", hash = "sha256:dfe5dae75c12481f68ba353183c53a65e6044c923e64c24b2209f6c7570ca953"},
]
sentry-sdk = [
- {file = "sentry-sdk-1.4.3.tar.gz", hash = "sha256:b9844751e40710e84a457c5bc29b21c383ccb2b63d76eeaad72f7f1c808c8828"},
- {file = "sentry_sdk-1.4.3-py2.py3-none-any.whl", hash = "sha256:c091cc7115ff25fe3a0e410dbecd7a996f81a3f6137d2272daef32d6c3cfa6dc"},
+ {file = "sentry-sdk-1.5.1.tar.gz", hash = "sha256:2a1757d6611e4bec7d672c2b7ef45afef79fed201d064f53994753303944f5a8"},
+ {file = "sentry_sdk-1.5.1-py2.py3-none-any.whl", hash = "sha256:e4cb107e305b2c1b919414775fa73a9997f996447417d22b98e7610ded1e9eb5"},
]
sgmllib3k = [
{file = "sgmllib3k-1.0.0.tar.gz", hash = "sha256:7868fb1c8bfa764c1ac563d3cf369c381d1325d36124933a726f29fcdaa812e9"},
@@ -1914,16 +1955,16 @@ six = [
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
snowballstemmer = [
- {file = "snowballstemmer-2.1.0-py2.py3-none-any.whl", hash = "sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2"},
- {file = "snowballstemmer-2.1.0.tar.gz", hash = "sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914"},
+ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"},
+ {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"},
]
sortedcontainers = [
{file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"},
{file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"},
]
soupsieve = [
- {file = "soupsieve-2.2.1-py3-none-any.whl", hash = "sha256:c2c1c2d44f158cdbddab7824a9af8c4f83c76b1e23e049479aa432feb6c4c23b"},
- {file = "soupsieve-2.2.1.tar.gz", hash = "sha256:052774848f448cf19c7e959adf5566904d525f33a3f8b6ba6f6f8f26ec7de0cc"},
+ {file = "soupsieve-2.3.1-py3-none-any.whl", hash = "sha256:1a3cca2617c6b38c0343ed661b1fa5de5637f257d4fe22bd9f1338010a1efefb"},
+ {file = "soupsieve-2.3.1.tar.gz", hash = "sha256:b8d49b1cd4f037c7082a9683dfa1801aa2597fb11c3a1155b7a5b94829b4f1f9"},
]
statsd = [
{file = "statsd-3.3.0-py2.py3-none-any.whl", hash = "sha256:c610fb80347fca0ef62666d241bce64184bd7cc1efe582f9690e045c25535eaa"},
@@ -1937,94 +1978,150 @@ testfixtures = [
{file = "testfixtures-6.18.3-py2.py3-none-any.whl", hash = "sha256:6ddb7f56a123e1a9339f130a200359092bd0a6455e31838d6c477e8729bb7763"},
{file = "testfixtures-6.18.3.tar.gz", hash = "sha256:2600100ae96ffd082334b378e355550fef8b4a529a6fa4c34f47130905c7426d"},
]
+tldextract = [
+ {file = "tldextract-3.1.2-py2.py3-none-any.whl", hash = "sha256:f55e05f6bf4cc952a87d13594386d32ad2dd265630a8bdfc3df03bd60425c6b0"},
+ {file = "tldextract-3.1.2.tar.gz", hash = "sha256:d2034c3558651f7d8fdadea83fb681050b2d662dc67a00d950326dc902029444"},
+]
toml = [
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
]
typing-extensions = [
- {file = "typing_extensions-3.10.0.2-py2-none-any.whl", hash = "sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7"},
- {file = "typing_extensions-3.10.0.2-py3-none-any.whl", hash = "sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34"},
- {file = "typing_extensions-3.10.0.2.tar.gz", hash = "sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e"},
+ {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"},
+ {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"},
]
urllib3 = [
- {file = "urllib3-1.26.7-py2.py3-none-any.whl", hash = "sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"},
- {file = "urllib3-1.26.7.tar.gz", hash = "sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece"},
+ {file = "urllib3-1.26.8-py2.py3-none-any.whl", hash = "sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed"},
+ {file = "urllib3-1.26.8.tar.gz", hash = "sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"},
]
virtualenv = [
- {file = "virtualenv-20.8.1-py2.py3-none-any.whl", hash = "sha256:10062e34c204b5e4ec5f62e6ef2473f8ba76513a9a617e873f1f8fb4a519d300"},
- {file = "virtualenv-20.8.1.tar.gz", hash = "sha256:bcc17f0b3a29670dd777d6f0755a4c04f28815395bca279cdcb213b97199a6b8"},
+ {file = "virtualenv-20.13.0-py2.py3-none-any.whl", hash = "sha256:339f16c4a86b44240ba7223d0f93a7887c3ca04b5f9c8129da7958447d079b09"},
+ {file = "virtualenv-20.13.0.tar.gz", hash = "sha256:d8458cf8d59d0ea495ad9b34c2599487f8a7772d796f9910858376d1600dd2dd"},
+]
+wrapt = [
+ {file = "wrapt-1.13.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:e05e60ff3b2b0342153be4d1b597bbcfd8330890056b9619f4ad6b8d5c96a81a"},
+ {file = "wrapt-1.13.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:85148f4225287b6a0665eef08a178c15097366d46b210574a658c1ff5b377489"},
+ {file = "wrapt-1.13.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:2dded5496e8f1592ec27079b28b6ad2a1ef0b9296d270f77b8e4a3a796cf6909"},
+ {file = "wrapt-1.13.3-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:e94b7d9deaa4cc7bac9198a58a7240aaf87fe56c6277ee25fa5b3aa1edebd229"},
+ {file = "wrapt-1.13.3-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:498e6217523111d07cd67e87a791f5e9ee769f9241fcf8a379696e25806965af"},
+ {file = "wrapt-1.13.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:ec7e20258ecc5174029a0f391e1b948bf2906cd64c198a9b8b281b811cbc04de"},
+ {file = "wrapt-1.13.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:87883690cae293541e08ba2da22cacaae0a092e0ed56bbba8d018cc486fbafbb"},
+ {file = "wrapt-1.13.3-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:f99c0489258086308aad4ae57da9e8ecf9e1f3f30fa35d5e170b4d4896554d80"},
+ {file = "wrapt-1.13.3-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6a03d9917aee887690aa3f1747ce634e610f6db6f6b332b35c2dd89412912bca"},
+ {file = "wrapt-1.13.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:936503cb0a6ed28dbfa87e8fcd0a56458822144e9d11a49ccee6d9a8adb2ac44"},
+ {file = "wrapt-1.13.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f9c51d9af9abb899bd34ace878fbec8bf357b3194a10c4e8e0a25512826ef056"},
+ {file = "wrapt-1.13.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:220a869982ea9023e163ba915077816ca439489de6d2c09089b219f4e11b6785"},
+ {file = "wrapt-1.13.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0877fe981fd76b183711d767500e6b3111378ed2043c145e21816ee589d91096"},
+ {file = "wrapt-1.13.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:43e69ffe47e3609a6aec0fe723001c60c65305784d964f5007d5b4fb1bc6bf33"},
+ {file = "wrapt-1.13.3-cp310-cp310-win32.whl", hash = "sha256:78dea98c81915bbf510eb6a3c9c24915e4660302937b9ae05a0947164248020f"},
+ {file = "wrapt-1.13.3-cp310-cp310-win_amd64.whl", hash = "sha256:ea3e746e29d4000cd98d572f3ee2a6050a4f784bb536f4ac1f035987fc1ed83e"},
+ {file = "wrapt-1.13.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:8c73c1a2ec7c98d7eaded149f6d225a692caa1bd7b2401a14125446e9e90410d"},
+ {file = "wrapt-1.13.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:086218a72ec7d986a3eddb7707c8c4526d677c7b35e355875a0fe2918b059179"},
+ {file = "wrapt-1.13.3-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:e92d0d4fa68ea0c02d39f1e2f9cb5bc4b4a71e8c442207433d8db47ee79d7aa3"},
+ {file = "wrapt-1.13.3-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:d4a5f6146cfa5c7ba0134249665acd322a70d1ea61732723c7d3e8cc0fa80755"},
+ {file = "wrapt-1.13.3-cp35-cp35m-win32.whl", hash = "sha256:8aab36778fa9bba1a8f06a4919556f9f8c7b33102bd71b3ab307bb3fecb21851"},
+ {file = "wrapt-1.13.3-cp35-cp35m-win_amd64.whl", hash = "sha256:944b180f61f5e36c0634d3202ba8509b986b5fbaf57db3e94df11abee244ba13"},
+ {file = "wrapt-1.13.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2ebdde19cd3c8cdf8df3fc165bc7827334bc4e353465048b36f7deeae8ee0918"},
+ {file = "wrapt-1.13.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:610f5f83dd1e0ad40254c306f4764fcdc846641f120c3cf424ff57a19d5f7ade"},
+ {file = "wrapt-1.13.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5601f44a0f38fed36cc07db004f0eedeaadbdcec90e4e90509480e7e6060a5bc"},
+ {file = "wrapt-1.13.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:e6906d6f48437dfd80464f7d7af1740eadc572b9f7a4301e7dd3d65db285cacf"},
+ {file = "wrapt-1.13.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:766b32c762e07e26f50d8a3468e3b4228b3736c805018e4b0ec8cc01ecd88125"},
+ {file = "wrapt-1.13.3-cp36-cp36m-win32.whl", hash = "sha256:5f223101f21cfd41deec8ce3889dc59f88a59b409db028c469c9b20cfeefbe36"},
+ {file = "wrapt-1.13.3-cp36-cp36m-win_amd64.whl", hash = "sha256:f122ccd12fdc69628786d0c947bdd9cb2733be8f800d88b5a37c57f1f1d73c10"},
+ {file = "wrapt-1.13.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:46f7f3af321a573fc0c3586612db4decb7eb37172af1bc6173d81f5b66c2e068"},
+ {file = "wrapt-1.13.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:778fd096ee96890c10ce96187c76b3e99b2da44e08c9e24d5652f356873f6709"},
+ {file = "wrapt-1.13.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0cb23d36ed03bf46b894cfec777eec754146d68429c30431c99ef28482b5c1df"},
+ {file = "wrapt-1.13.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:96b81ae75591a795d8c90edc0bfaab44d3d41ffc1aae4d994c5aa21d9b8e19a2"},
+ {file = "wrapt-1.13.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7dd215e4e8514004c8d810a73e342c536547038fb130205ec4bba9f5de35d45b"},
+ {file = "wrapt-1.13.3-cp37-cp37m-win32.whl", hash = "sha256:47f0a183743e7f71f29e4e21574ad3fa95676136f45b91afcf83f6a050914829"},
+ {file = "wrapt-1.13.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fd76c47f20984b43d93de9a82011bb6e5f8325df6c9ed4d8310029a55fa361ea"},
+ {file = "wrapt-1.13.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b73d4b78807bd299b38e4598b8e7bd34ed55d480160d2e7fdaabd9931afa65f9"},
+ {file = "wrapt-1.13.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ec9465dd69d5657b5d2fa6133b3e1e989ae27d29471a672416fd729b429eb554"},
+ {file = "wrapt-1.13.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dd91006848eb55af2159375134d724032a2d1d13bcc6f81cd8d3ed9f2b8e846c"},
+ {file = "wrapt-1.13.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ae9de71eb60940e58207f8e71fe113c639da42adb02fb2bcbcaccc1ccecd092b"},
+ {file = "wrapt-1.13.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:51799ca950cfee9396a87f4a1240622ac38973b6df5ef7a41e7f0b98797099ce"},
+ {file = "wrapt-1.13.3-cp38-cp38-win32.whl", hash = "sha256:4b9c458732450ec42578b5642ac53e312092acf8c0bfce140ada5ca1ac556f79"},
+ {file = "wrapt-1.13.3-cp38-cp38-win_amd64.whl", hash = "sha256:7dde79d007cd6dfa65afe404766057c2409316135cb892be4b1c768e3f3a11cb"},
+ {file = "wrapt-1.13.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:981da26722bebb9247a0601e2922cedf8bb7a600e89c852d063313102de6f2cb"},
+ {file = "wrapt-1.13.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:705e2af1f7be4707e49ced9153f8d72131090e52be9278b5dbb1498c749a1e32"},
+ {file = "wrapt-1.13.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:25b1b1d5df495d82be1c9d2fad408f7ce5ca8a38085e2da41bb63c914baadff7"},
+ {file = "wrapt-1.13.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:77416e6b17926d953b5c666a3cb718d5945df63ecf922af0ee576206d7033b5e"},
+ {file = "wrapt-1.13.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:865c0b50003616f05858b22174c40ffc27a38e67359fa1495605f96125f76640"},
+ {file = "wrapt-1.13.3-cp39-cp39-win32.whl", hash = "sha256:0a017a667d1f7411816e4bf214646d0ad5b1da2c1ea13dec6c162736ff25a374"},
+ {file = "wrapt-1.13.3-cp39-cp39-win_amd64.whl", hash = "sha256:81bd7c90d28a4b2e1df135bfbd7c23aee3050078ca6441bead44c42483f9ebfb"},
+ {file = "wrapt-1.13.3.tar.gz", hash = "sha256:1fea9cd438686e6682271d36f3481a9f3636195578bab9ca3382e2f5f01fc185"},
]
yarl = [
- {file = "yarl-1.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e35d8230e4b08d86ea65c32450533b906a8267a87b873f2954adeaecede85169"},
- {file = "yarl-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eb4b3f277880c314e47720b4b6bb2c85114ab3c04c5442c9bc7006b3787904d8"},
- {file = "yarl-1.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7015dcedb91d90a138eebdc7e432aec8966e0147ab2a55f2df27b1904fa7291"},
- {file = "yarl-1.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb3e478175e15e00d659fb0354a6a8db71a7811a2a5052aed98048bc972e5d2b"},
- {file = "yarl-1.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b8c409aa3a7966647e7c1c524846b362a6bcbbe120bf8a176431f940d2b9a2e"},
- {file = "yarl-1.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b22ea41c7e98170474a01e3eded1377d46b2dfaef45888a0005c683eaaa49285"},
- {file = "yarl-1.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a7dfc46add4cfe5578013dbc4127893edc69fe19132d2836ff2f6e49edc5ecd6"},
- {file = "yarl-1.7.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:82ff6f85f67500a4f74885d81659cd270eb24dfe692fe44e622b8a2fd57e7279"},
- {file = "yarl-1.7.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f3cd2158b2ed0fb25c6811adfdcc47224efe075f2d68a750071dacc03a7a66e4"},
- {file = "yarl-1.7.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:59c0f13f9592820c51280d1cf811294d753e4a18baf90f0139d1dc93d4b6fc5f"},
- {file = "yarl-1.7.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7f7655ad83d1a8afa48435a449bf2f3009293da1604f5dd95b5ddcf5f673bd69"},
- {file = "yarl-1.7.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aa9f0d9b62d15182341b3e9816582f46182cab91c1a57b2d308b9a3c4e2c4f78"},
- {file = "yarl-1.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fdd1b90c225a653b1bd1c0cae8edf1957892b9a09c8bf7ee6321eeb8208eac0f"},
- {file = "yarl-1.7.0-cp310-cp310-win32.whl", hash = "sha256:7c8d0bb76eabc5299db203e952ec55f8f4c53f08e0df4285aac8c92bd9e12675"},
- {file = "yarl-1.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:622a36fa779efb4ff9eff5fe52730ff17521431379851a31e040958fc251670c"},
- {file = "yarl-1.7.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3d461b7a8e139b9e4b41f62eb417ffa0b98d1c46d4caf14c845e6a3b349c0bb1"},
- {file = "yarl-1.7.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81cfacdd1e40bc931b5519499342efa388d24d262c30a3d31187bfa04f4a7001"},
- {file = "yarl-1.7.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:821b978f2152be7695d4331ef0621d207aedf9bbd591ba23a63412a3efc29a01"},
- {file = "yarl-1.7.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b64bd24c8c9a487f4a12260dc26732bf41028816dbf0c458f17864fbebdb3131"},
- {file = "yarl-1.7.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:98c9ddb92b60a83c21be42c776d3d9d5ec632a762a094c41bda37b7dfbd2cd83"},
- {file = "yarl-1.7.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a532d75ca74431c053a88a802e161fb3d651b8bf5821a3440bc3616e38754583"},
- {file = "yarl-1.7.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:053e09817eafb892e94e172d05406c1b3a22a93bc68f6eff5198363a3d764459"},
- {file = "yarl-1.7.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:98c51f02d542945d306c8e934aa2c1e66ba5e9c1c86b5bf37f3a51c8a747067e"},
- {file = "yarl-1.7.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:15ec41a5a5fdb7bace6d7b16701f9440007a82734f69127c0fbf6d87e10f4a1e"},
- {file = "yarl-1.7.0-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:a7f08819dba1e1255d6991ed37448a1bf4b1352c004bcd899b9da0c47958513d"},
- {file = "yarl-1.7.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8e3ffab21db0542ffd1887f3b9575ddd58961f2cf61429cb6458afc00c4581e0"},
- {file = "yarl-1.7.0-cp36-cp36m-win32.whl", hash = "sha256:50127634f519b2956005891507e3aa4ac345f66a7ea7bbc2d7dcba7401f41898"},
- {file = "yarl-1.7.0-cp36-cp36m-win_amd64.whl", hash = "sha256:36ec44f15193f6d5288d42ebb8e751b967ebdfb72d6830983838d45ab18edb4f"},
- {file = "yarl-1.7.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ec1b5a25a25c880c976d0bb3d107def085bb08dbb3db7f4442e0a2b980359d24"},
- {file = "yarl-1.7.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b36f5a63c891f813c6f04ef19675b382efc190fd5ce7e10ab19386d2548bca06"},
- {file = "yarl-1.7.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38173b8c3a29945e7ecade9a3f6ff39581eee8201338ee6a2c8882db5df3e806"},
- {file = "yarl-1.7.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ba402f32184f0b405fb281b93bd0d8ab7e3257735b57b62a6ed2e94cdf4fe50"},
- {file = "yarl-1.7.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:be52bc5208d767cdd8308a9e93059b3b36d1e048fecbea0e0346d0d24a76adc0"},
- {file = "yarl-1.7.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:08c2044a956f4ef30405f2f433ce77f1f57c2c773bf81ae43201917831044d5a"},
- {file = "yarl-1.7.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:484d61c047c45670ef5967653a1d0783e232c54bf9dd786a7737036828fa8d54"},
- {file = "yarl-1.7.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b7de92a4af85cfcaf4081f8aa6165b1d63ee5de150af3ee85f954145f93105a7"},
- {file = "yarl-1.7.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:376e41775aab79c5575534924a386c8e0f1a5d91db69fc6133fd27a489bcaf10"},
- {file = "yarl-1.7.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:8a8b10d0e7bac154f959b709fcea593cda527b234119311eb950096653816a86"},
- {file = "yarl-1.7.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f46cd4c43e6175030e2a56def8f1d83b64e6706eeb2bb9ab0ef4756f65eab23f"},
- {file = "yarl-1.7.0-cp37-cp37m-win32.whl", hash = "sha256:b28cfb46140efe1a6092b8c5c4994a1fe70dc83c38fbcea4992401e0c6fb9cce"},
- {file = "yarl-1.7.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9624154ec9c02a776802da1086eed7f5034bd1971977f5146233869c2ac80297"},
- {file = "yarl-1.7.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:69945d13e1bbf81784a9bc48824feb9cd66491e6a503d4e83f6cd7c7cc861361"},
- {file = "yarl-1.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:46a742ed9e363bd01be64160ce7520e92e11989bd4cb224403cfd31c101cc83d"},
- {file = "yarl-1.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb4ff1ac7cb4500f43581b3f4cbd627d702143aa6be1fdc1fa3ebffaf4dc1be5"},
- {file = "yarl-1.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ad51e17cd65ea3debb0e10f0120cf8dd987c741fe423ed2285087368090b33d"},
- {file = "yarl-1.7.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7e37786ea89a5d3ffbbf318ea9790926f8dfda83858544f128553c347ad143c6"},
- {file = "yarl-1.7.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c63c1e208f800daad71715786bfeb1cecdc595d87e2e9b1cd234fd6e597fd71d"},
- {file = "yarl-1.7.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:91cbe24300c11835ef186436363352b3257db7af165e0a767f4f17aa25761388"},
- {file = "yarl-1.7.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e510dbec7c59d32eaa61ffa48173d5e3d7170a67f4a03e8f5e2e9e3971aca622"},
- {file = "yarl-1.7.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3def6e681cc02397e5d8141ee97b41d02932b2bcf0fb34532ad62855eab7c60e"},
- {file = "yarl-1.7.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:263c81b94e6431942b27f6f671fa62f430a0a5c14bb255f2ab69eeb9b2b66ff7"},
- {file = "yarl-1.7.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:e78c91faefe88d601ddd16e3882918dbde20577a2438e2320f8239c8b7507b8f"},
- {file = "yarl-1.7.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:22b2430c49713bfb2f0a0dd4a8d7aab218b28476ba86fd1c78ad8899462cbcf2"},
- {file = "yarl-1.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2e7ad9db939082f5d0b9269cfd92c025cb8f2fbbb1f1b9dc5a393c639db5bd92"},
- {file = "yarl-1.7.0-cp38-cp38-win32.whl", hash = "sha256:3a31e4a8dcb1beaf167b7e7af61b88cb961b220db8d3ba1c839723630e57eef7"},
- {file = "yarl-1.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:d579957439933d752358c6a300c93110f84aae67b63dd0c19dde6ecbf4056f6b"},
- {file = "yarl-1.7.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:87721b549505a546eb003252185103b5ec8147de6d3ad3714d148a5a67b6fe53"},
- {file = "yarl-1.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1fa866fa24d9f4108f9e58ea8a2135655419885cdb443e36b39a346e1181532"},
- {file = "yarl-1.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1d3b8449dfedfe94eaff2b77954258b09b24949f6818dfa444b05dbb05ae1b7e"},
- {file = "yarl-1.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db2372e350794ce8b9f810feb094c606b7e0e4aa6807141ac4fadfe5ddd75bb0"},
- {file = "yarl-1.7.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a06d9d0b9a97fa99b84fee71d9dd11e69e21ac8a27229089f07b5e5e50e8d63c"},
- {file = "yarl-1.7.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a3455c2456d6307bcfa80bc1157b8603f7d93573291f5bdc7144489ca0df4628"},
- {file = "yarl-1.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d30d67e3486aea61bb2cbf7cf81385364c2e4f7ce7469a76ed72af76a5cdfe6b"},
- {file = "yarl-1.7.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c18a4b286e8d780c3a40c31d7b79836aa93b720f71d5743f20c08b7e049ca073"},
- {file = "yarl-1.7.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d54c925396e7891666cabc0199366ca55b27d003393465acef63fd29b8b7aa92"},
- {file = "yarl-1.7.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:64773840952de17851a1c7346ad7f71688c77e74248d1f0bc230e96680f84028"},
- {file = "yarl-1.7.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:acbf1756d9dc7cd0ae943d883be72e84e04396f6c2ff93a6ddeca929d562039f"},
- {file = "yarl-1.7.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:2e48f27936aa838939c798f466c851ba4ae79e347e8dfce43b009c64b930df12"},
- {file = "yarl-1.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1beef4734ca1ad40a9d8c6b20a76ab46e3a2ed09f38561f01e4aa2ea82cafcef"},
- {file = "yarl-1.7.0-cp39-cp39-win32.whl", hash = "sha256:8ee78c9a5f3c642219d4607680a4693b59239c27a3aa608b64ef79ddc9698039"},
- {file = "yarl-1.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:d750503682605088a14d29a4701548c15c510da4f13c8b17409c4097d5b04c52"},
- {file = "yarl-1.7.0.tar.gz", hash = "sha256:8e7ebaf62e19c2feb097ffb7c94deb0f0c9fab52590784c8cd679d30ab009162"},
+ {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f2a8508f7350512434e41065684076f640ecce176d262a7d54f0da41d99c5a95"},
+ {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da6df107b9ccfe52d3a48165e48d72db0eca3e3029b5b8cb4fe6ee3cb870ba8b"},
+ {file = "yarl-1.7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1d0894f238763717bdcfea74558c94e3bc34aeacd3351d769460c1a586a8b05"},
+ {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe4b95b7e00c6635a72e2d00b478e8a28bfb122dc76349a06e20792eb53a523"},
+ {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c145ab54702334c42237a6c6c4cc08703b6aa9b94e2f227ceb3d477d20c36c63"},
+ {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ca56f002eaf7998b5fcf73b2421790da9d2586331805f38acd9997743114e98"},
+ {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1d3d5ad8ea96bd6d643d80c7b8d5977b4e2fb1bab6c9da7322616fd26203d125"},
+ {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:167ab7f64e409e9bdd99333fe8c67b5574a1f0495dcfd905bc7454e766729b9e"},
+ {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:95a1873b6c0dd1c437fb3bb4a4aaa699a48c218ac7ca1e74b0bee0ab16c7d60d"},
+ {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6152224d0a1eb254f97df3997d79dadd8bb2c1a02ef283dbb34b97d4f8492d23"},
+ {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bb7d54b8f61ba6eee541fba4b83d22b8a046b4ef4d8eb7f15a7e35db2e1e245"},
+ {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:9c1f083e7e71b2dd01f7cd7434a5f88c15213194df38bc29b388ccdf1492b739"},
+ {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f44477ae29025d8ea87ec308539f95963ffdc31a82f42ca9deecf2d505242e72"},
+ {file = "yarl-1.7.2-cp310-cp310-win32.whl", hash = "sha256:cff3ba513db55cc6a35076f32c4cdc27032bd075c9faef31fec749e64b45d26c"},
+ {file = "yarl-1.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:c9c6d927e098c2d360695f2e9d38870b2e92e0919be07dbe339aefa32a090265"},
+ {file = "yarl-1.7.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9b4c77d92d56a4c5027572752aa35082e40c561eec776048330d2907aead891d"},
+ {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c01a89a44bb672c38f42b49cdb0ad667b116d731b3f4c896f72302ff77d71656"},
+ {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c19324a1c5399b602f3b6e7db9478e5b1adf5cf58901996fc973fe4fccd73eed"},
+ {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3abddf0b8e41445426d29f955b24aeecc83fa1072be1be4e0d194134a7d9baee"},
+ {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6a1a9fe17621af43e9b9fcea8bd088ba682c8192d744b386ee3c47b56eaabb2c"},
+ {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8b0915ee85150963a9504c10de4e4729ae700af11df0dc5550e6587ed7891e92"},
+ {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:29e0656d5497733dcddc21797da5a2ab990c0cb9719f1f969e58a4abac66234d"},
+ {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:bf19725fec28452474d9887a128e98dd67eee7b7d52e932e6949c532d820dc3b"},
+ {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d6f3d62e16c10e88d2168ba2d065aa374e3c538998ed04996cd373ff2036d64c"},
+ {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ac10bbac36cd89eac19f4e51c032ba6b412b3892b685076f4acd2de18ca990aa"},
+ {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aa32aaa97d8b2ed4e54dc65d241a0da1c627454950f7d7b1f95b13985afd6c5d"},
+ {file = "yarl-1.7.2-cp36-cp36m-win32.whl", hash = "sha256:87f6e082bce21464857ba58b569370e7b547d239ca22248be68ea5d6b51464a1"},
+ {file = "yarl-1.7.2-cp36-cp36m-win_amd64.whl", hash = "sha256:ac35ccde589ab6a1870a484ed136d49a26bcd06b6a1c6397b1967ca13ceb3913"},
+ {file = "yarl-1.7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a467a431a0817a292121c13cbe637348b546e6ef47ca14a790aa2fa8cc93df63"},
+ {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ab0c3274d0a846840bf6c27d2c60ba771a12e4d7586bf550eefc2df0b56b3b4"},
+ {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d260d4dc495c05d6600264a197d9d6f7fc9347f21d2594926202fd08cf89a8ba"},
+ {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc4dd8b01a8112809e6b636b00f487846956402834a7fd59d46d4f4267181c41"},
+ {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c1164a2eac148d85bbdd23e07dfcc930f2e633220f3eb3c3e2a25f6148c2819e"},
+ {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:67e94028817defe5e705079b10a8438b8cb56e7115fa01640e9c0bb3edf67332"},
+ {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:89ccbf58e6a0ab89d487c92a490cb5660d06c3a47ca08872859672f9c511fc52"},
+ {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8cce6f9fa3df25f55521fbb5c7e4a736683148bcc0c75b21863789e5185f9185"},
+ {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:211fcd65c58bf250fb994b53bc45a442ddc9f441f6fec53e65de8cba48ded986"},
+ {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c10ea1e80a697cf7d80d1ed414b5cb8f1eec07d618f54637067ae3c0334133c4"},
+ {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:52690eb521d690ab041c3919666bea13ab9fbff80d615ec16fa81a297131276b"},
+ {file = "yarl-1.7.2-cp37-cp37m-win32.whl", hash = "sha256:695ba021a9e04418507fa930d5f0704edbce47076bdcfeeaba1c83683e5649d1"},
+ {file = "yarl-1.7.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c17965ff3706beedafd458c452bf15bac693ecd146a60a06a214614dc097a271"},
+ {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fce78593346c014d0d986b7ebc80d782b7f5e19843ca798ed62f8e3ba8728576"},
+ {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c2a1ac41a6aa980db03d098a5531f13985edcb451bcd9d00670b03129922cd0d"},
+ {file = "yarl-1.7.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:39d5493c5ecd75c8093fa7700a2fb5c94fe28c839c8e40144b7ab7ccba6938c8"},
+ {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eb6480ef366d75b54c68164094a6a560c247370a68c02dddb11f20c4c6d3c9d"},
+ {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ba63585a89c9885f18331a55d25fe81dc2d82b71311ff8bd378fc8004202ff6"},
+ {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e39378894ee6ae9f555ae2de332d513a5763276a9265f8e7cbaeb1b1ee74623a"},
+ {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c0910c6b6c31359d2f6184828888c983d54d09d581a4a23547a35f1d0b9484b1"},
+ {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6feca8b6bfb9eef6ee057628e71e1734caf520a907b6ec0d62839e8293e945c0"},
+ {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8300401dc88cad23f5b4e4c1226f44a5aa696436a4026e456fe0e5d2f7f486e6"},
+ {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:788713c2896f426a4e166b11f4ec538b5736294ebf7d5f654ae445fd44270832"},
+ {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fd547ec596d90c8676e369dd8a581a21227fe9b4ad37d0dc7feb4ccf544c2d59"},
+ {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:737e401cd0c493f7e3dd4db72aca11cfe069531c9761b8ea474926936b3c57c8"},
+ {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baf81561f2972fb895e7844882898bda1eef4b07b5b385bcd308d2098f1a767b"},
+ {file = "yarl-1.7.2-cp38-cp38-win32.whl", hash = "sha256:ede3b46cdb719c794427dcce9d8beb4abe8b9aa1e97526cc20de9bd6583ad1ef"},
+ {file = "yarl-1.7.2-cp38-cp38-win_amd64.whl", hash = "sha256:cc8b7a7254c0fc3187d43d6cb54b5032d2365efd1df0cd1749c0c4df5f0ad45f"},
+ {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:580c1f15500e137a8c37053e4cbf6058944d4c114701fa59944607505c2fe3a0"},
+ {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ec1d9a0d7780416e657f1e405ba35ec1ba453a4f1511eb8b9fbab81cb8b3ce1"},
+ {file = "yarl-1.7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3bf8cfe8856708ede6a73907bf0501f2dc4e104085e070a41f5d88e7faf237f3"},
+ {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1be4bbb3d27a4e9aa5f3df2ab61e3701ce8fcbd3e9846dbce7c033a7e8136746"},
+ {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:534b047277a9a19d858cde163aba93f3e1677d5acd92f7d10ace419d478540de"},
+ {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6ddcd80d79c96eb19c354d9dca95291589c5954099836b7c8d29278a7ec0bda"},
+ {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9bfcd43c65fbb339dc7086b5315750efa42a34eefad0256ba114cd8ad3896f4b"},
+ {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f64394bd7ceef1237cc604b5a89bf748c95982a84bcd3c4bbeb40f685c810794"},
+ {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044daf3012e43d4b3538562da94a88fb12a6490652dbc29fb19adfa02cf72eac"},
+ {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:368bcf400247318382cc150aaa632582d0780b28ee6053cd80268c7e72796dec"},
+ {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:bab827163113177aee910adb1f48ff7af31ee0289f434f7e22d10baf624a6dfe"},
+ {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0cba38120db72123db7c58322fa69e3c0efa933040ffb586c3a87c063ec7cae8"},
+ {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:59218fef177296451b23214c91ea3aba7858b4ae3306dde120224cfe0f7a6ee8"},
+ {file = "yarl-1.7.2-cp39-cp39-win32.whl", hash = "sha256:1edc172dcca3f11b38a9d5c7505c83c1913c0addc99cd28e993efeaafdfaa18d"},
+ {file = "yarl-1.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:797c2c412b04403d2da075fb93c123df35239cd7b4cc4e0cd9e5839b73f52c58"},
+ {file = "yarl-1.7.2.tar.gz", hash = "sha256:45399b46d60c253327a460e99856752009fcee5f5d3c80b2f7c0cae1c38d56dd"},
]
diff --git a/pyproject.toml b/pyproject.toml
index 563bf4a27..c764910c2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -8,6 +8,8 @@ license = "MIT"
[tool.poetry.dependencies]
python = "3.9.*"
"discord.py" = {url = "https://github.com/Rapptz/discord.py/archive/45d498c1b76deaf3b394d17ccf56112fa691d160.zip"}
+# See https://bot-core.pythondiscord.com/ for docs.
+bot-core = {url = "https://github.com/python-discord/bot-core/archive/511bcba1b0196cd498c707a525ea56921bd971db.zip"}
aio-pika = "~=6.1"
aiodns = "~=2.0"
aiohttp = "~=3.7"
@@ -21,7 +23,7 @@ deepdiff = "~=4.0"
emoji = "~=0.6"
feedparser = "~=6.0.2"
rapidfuzz = "~=1.4"
-lxml = "~=4.4"
+lxml = "~=4.6"
markdownify = "==0.6.1"
more_itertools = "~=8.2"
python-dateutil = "~=2.8"
@@ -30,10 +32,10 @@ pyyaml = "~=5.1"
regex = "==2021.4.4"
sentry-sdk = "~=1.3"
statsd = "~=3.3"
+tldextract = "^3.1.2"
[tool.poetry.dev-dependencies]
coverage = "~=5.0"
-coveralls = "~=2.1"
flake8 = "~=3.8"
flake8-annotations = "~=2.0"
flake8-bugbear = "~=20.1"
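
Since the dependency list above now pulls in tldextract, here is a minimal, hedged sketch (not taken from the bot's own code) of what that library provides: it splits a URL into subdomain, registered domain, and public suffix — the pieces a domain/URL filter typically needs. The URL below is purely illustrative.

import tldextract

# tldextract consults the Public Suffix List, so multi-part suffixes
# such as "co.uk" are split correctly rather than by naive dot-counting.
ext = tldextract.extract("https://forums.news.cnn.com/thread/42")
print(ext.subdomain)          # forums.news
print(ext.domain)             # cnn
print(ext.suffix)             # com
print(ext.registered_domain)  # cnn.com
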
diff --git a/tests/bot/exts/backend/test_error_handler.py b/tests/bot/exts/backend/test_error_handler.py
index 462f718e6..35fa0ee59 100644
--- a/tests/bot/exts/backend/test_error_handler.py
+++ b/tests/bot/exts/backend/test_error_handler.py
@@ -337,14 +337,12 @@ class TryGetTagTests(unittest.IsolatedAsyncioTestCase):
async def test_try_get_tag_get_command(self):
"""Should call `Bot.get_command` with `tags get` argument."""
self.bot.get_command.reset_mock()
- self.ctx.invoked_with = "foo"
await self.cog.try_get_tag(self.ctx)
self.bot.get_command.assert_called_once_with("tags get")
async def test_try_get_tag_invoked_from_error_handler(self):
"""`self.ctx` should have `invoked_from_error_handler` `True`."""
self.ctx.invoked_from_error_handler = False
- self.ctx.invoked_with = "foo"
await self.cog.try_get_tag(self.ctx)
self.assertTrue(self.ctx.invoked_from_error_handler)
@@ -359,38 +357,12 @@ class TryGetTagTests(unittest.IsolatedAsyncioTestCase):
err = errors.CommandError()
self.tag.get_command.can_run = AsyncMock(side_effect=err)
self.cog.on_command_error = AsyncMock()
- self.ctx.invoked_with = "foo"
self.assertIsNone(await self.cog.try_get_tag(self.ctx))
self.cog.on_command_error.assert_awaited_once_with(self.ctx, err)
- @patch("bot.exts.backend.error_handler.TagNameConverter")
- async def test_try_get_tag_convert_success(self, tag_converter):
- """Converting tag should successful."""
- self.ctx.invoked_with = "foo"
- tag_converter.convert = AsyncMock(return_value="foo")
- self.assertIsNone(await self.cog.try_get_tag(self.ctx))
- tag_converter.convert.assert_awaited_once_with(self.ctx, "foo")
- self.ctx.invoke.assert_awaited_once()
-
- @patch("bot.exts.backend.error_handler.TagNameConverter")
- async def test_try_get_tag_convert_fail(self, tag_converter):
- """Converting tag should raise `BadArgument`."""
- self.ctx.reset_mock()
- self.ctx.invoked_with = "bar"
- tag_converter.convert = AsyncMock(side_effect=errors.BadArgument())
- self.assertIsNone(await self.cog.try_get_tag(self.ctx))
- self.ctx.invoke.assert_not_awaited()
-
- async def test_try_get_tag_ctx_invoke(self):
- """Should call `ctx.invoke` with proper args/kwargs."""
- self.ctx.reset_mock()
- self.ctx.invoked_with = "foo"
- self.assertIsNone(await self.cog.try_get_tag(self.ctx))
- self.ctx.invoke.assert_awaited_once_with(self.tag.get_command, tag_name="foo")
-
async def test_dont_call_suggestion_tag_sent(self):
"""Should never call command suggestion if tag is already sent."""
- self.ctx.invoked_with = "foo"
+ self.ctx.message = MagicMock(content="foo")
self.ctx.invoke = AsyncMock(return_value=True)
self.cog.send_command_suggestion = AsyncMock()
@@ -572,38 +544,6 @@ class IndividualErrorHandlerTests(unittest.IsolatedAsyncioTestCase):
push_scope_mock.set_extra.has_calls(set_extra_calls)
-class OtherErrorHandlerTests(unittest.IsolatedAsyncioTestCase):
- """Other `ErrorHandler` tests."""
-
- def setUp(self):
- self.bot = MockBot()
- self.ctx = MockContext()
-
- async def test_get_help_command_command_specified(self):
- """Should return coroutine of help command of specified command."""
- self.ctx.command = "foo"
- result = ErrorHandler.get_help_command(self.ctx)
- expected = self.ctx.send_help("foo")
- self.assertEqual(result.__qualname__, expected.__qualname__)
- self.assertEqual(result.cr_frame.f_locals, expected.cr_frame.f_locals)
-
- # Await coroutines to avoid warnings
- await result
- await expected
-
- async def test_get_help_command_no_command_specified(self):
- """Should return coroutine of help command."""
- self.ctx.command = None
- result = ErrorHandler.get_help_command(self.ctx)
- expected = self.ctx.send_help()
- self.assertEqual(result.__qualname__, expected.__qualname__)
- self.assertEqual(result.cr_frame.f_locals, expected.cr_frame.f_locals)
-
- # Await coroutines to avoid warnings
- await result
- await expected
-
-
class ErrorHandlerSetupTests(unittest.TestCase):
"""Tests for `ErrorHandler` `setup` function."""
diff --git a/tests/bot/exts/info/test_information.py b/tests/bot/exts/info/test_information.py
index 632287322..d896b7652 100644
--- a/tests/bot/exts/info/test_information.py
+++ b/tests/bot/exts/info/test_information.py
@@ -1,6 +1,7 @@
import textwrap
import unittest
import unittest.mock
+from datetime import datetime
import discord
@@ -276,6 +277,10 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
f"{COG_PATH}.basic_user_infraction_counts",
new=unittest.mock.AsyncMock(return_value=("Infractions", "basic infractions"))
)
+ @unittest.mock.patch(
+ f"{COG_PATH}.user_messages",
+ new=unittest.mock.AsyncMock(return_value=("Messsages", "user message count"))
+ )
async def test_create_user_embed_uses_string_representation_of_user_in_title_if_nick_is_not_available(self):
"""The embed should use the string representation of the user if they don't have a nick."""
ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=1))
@@ -284,8 +289,9 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
user.nick = None
user.__str__ = unittest.mock.Mock(return_value="Mr. Hemlock")
user.colour = 0
+ user.created_at = user.joined_at = datetime.utcnow()
- embed = await self.cog.create_user_embed(ctx, user)
+ embed = await self.cog.create_user_embed(ctx, user, False)
self.assertEqual(embed.title, "Mr. Hemlock")
@@ -293,6 +299,10 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
f"{COG_PATH}.basic_user_infraction_counts",
new=unittest.mock.AsyncMock(return_value=("Infractions", "basic infractions"))
)
+ @unittest.mock.patch(
+ f"{COG_PATH}.user_messages",
+ new=unittest.mock.AsyncMock(return_value=("Messsages", "user message count"))
+ )
async def test_create_user_embed_uses_nick_in_title_if_available(self):
"""The embed should use the nick if it's available."""
ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=1))
@@ -301,8 +311,9 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
user.nick = "Cat lover"
user.__str__ = unittest.mock.Mock(return_value="Mr. Hemlock")
user.colour = 0
+ user.created_at = user.joined_at = datetime.utcnow()
- embed = await self.cog.create_user_embed(ctx, user)
+ embed = await self.cog.create_user_embed(ctx, user, False)
self.assertEqual(embed.title, "Cat lover (Mr. Hemlock)")
@@ -310,6 +321,10 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
f"{COG_PATH}.basic_user_infraction_counts",
new=unittest.mock.AsyncMock(return_value=("Infractions", "basic infractions"))
)
+ @unittest.mock.patch(
+ f"{COG_PATH}.user_messages",
+ new=unittest.mock.AsyncMock(return_value=("Messsages", "user message count"))
+ )
async def test_create_user_embed_ignores_everyone_role(self):
"""Created `!user` embeds should not contain mention of the @everyone-role."""
ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=1))
@@ -317,14 +332,19 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
# A `MockMember` has the @Everyone role by default; we add the Admins to that.
user = helpers.MockMember(roles=[admins_role], colour=100)
+ user.created_at = user.joined_at = datetime.utcnow()
- embed = await self.cog.create_user_embed(ctx, user)
+ embed = await self.cog.create_user_embed(ctx, user, False)
self.assertIn("&Admins", embed.fields[1].value)
self.assertNotIn("&Everyone", embed.fields[1].value)
@unittest.mock.patch(f"{COG_PATH}.expanded_user_infraction_counts", new_callable=unittest.mock.AsyncMock)
@unittest.mock.patch(f"{COG_PATH}.user_nomination_counts", new_callable=unittest.mock.AsyncMock)
+ @unittest.mock.patch(
+ f"{COG_PATH}.user_messages",
+ new=unittest.mock.AsyncMock(return_value=("Messsages", "user message count"))
+ )
async def test_create_user_embed_expanded_information_in_moderation_channels(
self,
nomination_counts,
@@ -339,7 +359,8 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
nomination_counts.return_value = ("Nominations", "nomination info")
user = helpers.MockMember(id=314, roles=[moderators_role], colour=100)
- embed = await self.cog.create_user_embed(ctx, user)
+ user.created_at = user.joined_at = datetime.utcfromtimestamp(1)
+ embed = await self.cog.create_user_embed(ctx, user, False)
infraction_counts.assert_called_once_with(user)
nomination_counts.assert_called_once_with(user)
@@ -363,16 +384,23 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
)
@unittest.mock.patch(f"{COG_PATH}.basic_user_infraction_counts", new_callable=unittest.mock.AsyncMock)
- async def test_create_user_embed_basic_information_outside_of_moderation_channels(self, infraction_counts):
+ @unittest.mock.patch(f"{COG_PATH}.user_messages", new_callable=unittest.mock.AsyncMock)
+ async def test_create_user_embed_basic_information_outside_of_moderation_channels(
+ self,
+ user_messages,
+ infraction_counts,
+ ):
"""The embed should contain only basic infraction data outside of mod channels."""
ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=100))
moderators_role = helpers.MockRole(name='Moderators')
infraction_counts.return_value = ("Infractions", "basic infractions info")
+ user_messages.return_value = ("Messages", "user message counts")
user = helpers.MockMember(id=314, roles=[moderators_role], colour=100)
- embed = await self.cog.create_user_embed(ctx, user)
+ user.created_at = user.joined_at = datetime.utcfromtimestamp(1)
+ embed = await self.cog.create_user_embed(ctx, user, False)
infraction_counts.assert_called_once_with(user)
@@ -394,14 +422,23 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
)
self.assertEqual(
- "basic infractions info",
+ "user message counts",
embed.fields[2].value
)
+ self.assertEqual(
+ "basic infractions info",
+ embed.fields[3].value
+ )
+
@unittest.mock.patch(
f"{COG_PATH}.basic_user_infraction_counts",
new=unittest.mock.AsyncMock(return_value=("Infractions", "basic infractions"))
)
+ @unittest.mock.patch(
+ f"{COG_PATH}.user_messages",
+ new=unittest.mock.AsyncMock(return_value=("Messsages", "user message count"))
+ )
async def test_create_user_embed_uses_top_role_colour_when_user_has_roles(self):
"""The embed should be created with the colour of the top role, if a top role is available."""
ctx = helpers.MockContext()
@@ -409,7 +446,8 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
moderators_role = helpers.MockRole(name='Moderators')
user = helpers.MockMember(id=314, roles=[moderators_role], colour=100)
- embed = await self.cog.create_user_embed(ctx, user)
+ user.created_at = user.joined_at = datetime.utcnow()
+ embed = await self.cog.create_user_embed(ctx, user, False)
self.assertEqual(embed.colour, discord.Colour(100))
@@ -417,12 +455,17 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
f"{COG_PATH}.basic_user_infraction_counts",
new=unittest.mock.AsyncMock(return_value=("Infractions", "basic infractions"))
)
+ @unittest.mock.patch(
+ f"{COG_PATH}.user_messages",
+ new=unittest.mock.AsyncMock(return_value=("Messsages", "user message count"))
+ )
async def test_create_user_embed_uses_og_blurple_colour_when_user_has_no_roles(self):
"""The embed should be created with the og blurple colour if the user has no assigned roles."""
ctx = helpers.MockContext()
user = helpers.MockMember(id=217, colour=discord.Colour.default())
- embed = await self.cog.create_user_embed(ctx, user)
+ user.created_at = user.joined_at = datetime.utcnow()
+ embed = await self.cog.create_user_embed(ctx, user, False)
self.assertEqual(embed.colour, discord.Colour.og_blurple())
@@ -430,13 +473,18 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
f"{COG_PATH}.basic_user_infraction_counts",
new=unittest.mock.AsyncMock(return_value=("Infractions", "basic infractions"))
)
+ @unittest.mock.patch(
+ f"{COG_PATH}.user_messages",
+ new=unittest.mock.AsyncMock(return_value=("Messsages", "user message count"))
+ )
async def test_create_user_embed_uses_png_format_of_user_avatar_as_thumbnail(self):
"""The embed thumbnail should be set to the user's avatar in `png` format."""
ctx = helpers.MockContext()
user = helpers.MockMember(id=217, colour=0)
+ user.created_at = user.joined_at = datetime.utcnow()
user.display_avatar.url = "avatar url"
- embed = await self.cog.create_user_embed(ctx, user)
+ embed = await self.cog.create_user_embed(ctx, user, False)
self.assertEqual(embed.thumbnail.url, "avatar url")
@@ -489,7 +537,7 @@ class UserCommandTests(unittest.IsolatedAsyncioTestCase):
await self.cog.user_info(self.cog, ctx)
- create_embed.assert_called_once_with(ctx, self.author)
+ create_embed.assert_called_once_with(ctx, self.author, False)
ctx.send.assert_called_once()
@unittest.mock.patch("bot.exts.info.information.Information.create_user_embed")
@@ -500,7 +548,7 @@ class UserCommandTests(unittest.IsolatedAsyncioTestCase):
await self.cog.user_info(self.cog, ctx, self.author)
- create_embed.assert_called_once_with(ctx, self.author)
+ create_embed.assert_called_once_with(ctx, self.author, False)
ctx.send.assert_called_once()
@unittest.mock.patch("bot.exts.info.information.Information.create_user_embed")
@@ -511,7 +559,7 @@ class UserCommandTests(unittest.IsolatedAsyncioTestCase):
await self.cog.user_info(self.cog, ctx)
- create_embed.assert_called_once_with(ctx, self.moderator)
+ create_embed.assert_called_once_with(ctx, self.moderator, False)
ctx.send.assert_called_once()
@unittest.mock.patch("bot.exts.info.information.Information.create_user_embed")
@@ -523,5 +571,5 @@ class UserCommandTests(unittest.IsolatedAsyncioTestCase):
await self.cog.user_info(self.cog, ctx, self.target)
- create_embed.assert_called_once_with(ctx, self.target)
+ create_embed.assert_called_once_with(ctx, self.target, False)
ctx.send.assert_called_once()
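(Aside: the updated UserEmbedTests above stack a second `unittest.mock.patch(..., new=AsyncMock(...))` decorator for `user_messages` and pass an extra boolean to `create_user_embed`. A minimal, self-contained sketch of that `new=`-style patching follows; `_FakeCog` and its method names are hypothetical stand-ins, not the real Information cog.)

import unittest
from unittest.mock import AsyncMock, patch


class _FakeCog:
    # Hypothetical stand-in for the cog under test.
    async def user_messages(self, user):
        raise AssertionError("patched in tests")

    async def create_user_embed(self, ctx, user, on_moderation_channel):
        # Message counts come before infraction info, mirroring the
        # field-order assertions above.
        return [await self.user_messages(user)]


class PatchStyleSketch(unittest.IsolatedAsyncioTestCase):
    # Passing `new=` replaces the attribute outright, so no extra mock
    # argument is injected into the test method signature.
    @patch.object(_FakeCog, "user_messages", new=AsyncMock(return_value=("Messages", "user message count")))
    async def test_new_replaces_target_without_injecting_an_argument(self):
        fields = await _FakeCog().create_user_embed(None, None, False)
        self.assertEqual(fields, [("Messages", "user message count")])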
diff --git a/tests/bot/exts/moderation/infraction/test_infractions.py b/tests/bot/exts/moderation/infraction/test_infractions.py
index 4d01e18a5..052048053 100644
--- a/tests/bot/exts/moderation/infraction/test_infractions.py
+++ b/tests/bot/exts/moderation/infraction/test_infractions.py
@@ -1,13 +1,15 @@
import inspect
import textwrap
import unittest
-from unittest.mock import ANY, AsyncMock, MagicMock, Mock, patch
+from unittest.mock import ANY, AsyncMock, DEFAULT, MagicMock, Mock, patch
from discord.errors import NotFound
from bot.constants import Event
+from bot.exts.moderation.clean import Clean
from bot.exts.moderation.infraction import _utils
from bot.exts.moderation.infraction.infractions import Infractions
+from bot.exts.moderation.infraction.management import ModManagement
from tests.helpers import MockBot, MockContext, MockGuild, MockMember, MockRole, MockUser, autospec
@@ -62,8 +64,8 @@ class TruncationTests(unittest.IsolatedAsyncioTestCase):
@patch("bot.exts.moderation.infraction.infractions.constants.Roles.voice_verified", new=123456)
-class VoiceBanTests(unittest.IsolatedAsyncioTestCase):
- """Tests for voice ban related functions and commands."""
+class VoiceMuteTests(unittest.IsolatedAsyncioTestCase):
+ """Tests for voice mute related functions and commands."""
def setUp(self):
self.bot = MockBot()
@@ -73,59 +75,59 @@ class VoiceBanTests(unittest.IsolatedAsyncioTestCase):
self.ctx = MockContext(bot=self.bot, author=self.mod)
self.cog = Infractions(self.bot)
- async def test_permanent_voice_ban(self):
- """Should call voice ban applying function without expiry."""
- self.cog.apply_voice_ban = AsyncMock()
- self.assertIsNone(await self.cog.voiceban(self.cog, self.ctx, self.user, reason="foobar"))
- self.cog.apply_voice_ban.assert_awaited_once_with(self.ctx, self.user, "foobar", expires_at=None)
+ async def test_permanent_voice_mute(self):
+ """Should call voice mute applying function without expiry."""
+ self.cog.apply_voice_mute = AsyncMock()
+ self.assertIsNone(await self.cog.voicemute(self.cog, self.ctx, self.user, reason="foobar"))
+ self.cog.apply_voice_mute.assert_awaited_once_with(self.ctx, self.user, "foobar", expires_at=None)
- async def test_temporary_voice_ban(self):
- """Should call voice ban applying function with expiry."""
- self.cog.apply_voice_ban = AsyncMock()
- self.assertIsNone(await self.cog.tempvoiceban(self.cog, self.ctx, self.user, "baz", reason="foobar"))
- self.cog.apply_voice_ban.assert_awaited_once_with(self.ctx, self.user, "foobar", expires_at="baz")
+ async def test_temporary_voice_mute(self):
+ """Should call voice mute applying function with expiry."""
+ self.cog.apply_voice_mute = AsyncMock()
+ self.assertIsNone(await self.cog.tempvoicemute(self.cog, self.ctx, self.user, "baz", reason="foobar"))
+ self.cog.apply_voice_mute.assert_awaited_once_with(self.ctx, self.user, "foobar", expires_at="baz")
- async def test_voice_unban(self):
+ async def test_voice_unmute(self):
"""Should call infraction pardoning function."""
self.cog.pardon_infraction = AsyncMock()
- self.assertIsNone(await self.cog.unvoiceban(self.cog, self.ctx, self.user))
- self.cog.pardon_infraction.assert_awaited_once_with(self.ctx, "voice_ban", self.user)
+ self.assertIsNone(await self.cog.unvoicemute(self.cog, self.ctx, self.user))
+ self.cog.pardon_infraction.assert_awaited_once_with(self.ctx, "voice_mute", self.user)
@patch("bot.exts.moderation.infraction.infractions._utils.post_infraction")
@patch("bot.exts.moderation.infraction.infractions._utils.get_active_infraction")
- async def test_voice_ban_user_have_active_infraction(self, get_active_infraction, post_infraction_mock):
- """Should return early when user already have Voice Ban infraction."""
+ async def test_voice_mute_user_have_active_infraction(self, get_active_infraction, post_infraction_mock):
+ """Should return early when user already have Voice Mute infraction."""
get_active_infraction.return_value = {"foo": "bar"}
- self.assertIsNone(await self.cog.apply_voice_ban(self.ctx, self.user, "foobar"))
- get_active_infraction.assert_awaited_once_with(self.ctx, self.user, "voice_ban")
+ self.assertIsNone(await self.cog.apply_voice_mute(self.ctx, self.user, "foobar"))
+ get_active_infraction.assert_awaited_once_with(self.ctx, self.user, "voice_mute")
post_infraction_mock.assert_not_awaited()
@patch("bot.exts.moderation.infraction.infractions._utils.post_infraction")
@patch("bot.exts.moderation.infraction.infractions._utils.get_active_infraction")
- async def test_voice_ban_infraction_post_failed(self, get_active_infraction, post_infraction_mock):
+ async def test_voice_mute_infraction_post_failed(self, get_active_infraction, post_infraction_mock):
"""Should return early when posting infraction fails."""
self.cog.mod_log.ignore = MagicMock()
get_active_infraction.return_value = None
post_infraction_mock.return_value = None
- self.assertIsNone(await self.cog.apply_voice_ban(self.ctx, self.user, "foobar"))
+ self.assertIsNone(await self.cog.apply_voice_mute(self.ctx, self.user, "foobar"))
post_infraction_mock.assert_awaited_once()
self.cog.mod_log.ignore.assert_not_called()
@patch("bot.exts.moderation.infraction.infractions._utils.post_infraction")
@patch("bot.exts.moderation.infraction.infractions._utils.get_active_infraction")
- async def test_voice_ban_infraction_post_add_kwargs(self, get_active_infraction, post_infraction_mock):
- """Should pass all kwargs passed to apply_voice_ban to post_infraction."""
+ async def test_voice_mute_infraction_post_add_kwargs(self, get_active_infraction, post_infraction_mock):
+ """Should pass all kwargs passed to apply_voice_mute to post_infraction."""
get_active_infraction.return_value = None
# We don't want that this continue yet
post_infraction_mock.return_value = None
- self.assertIsNone(await self.cog.apply_voice_ban(self.ctx, self.user, "foobar", my_kwarg=23))
+ self.assertIsNone(await self.cog.apply_voice_mute(self.ctx, self.user, "foobar", my_kwarg=23))
post_infraction_mock.assert_awaited_once_with(
- self.ctx, self.user, "voice_ban", "foobar", active=True, my_kwarg=23
+ self.ctx, self.user, "voice_mute", "foobar", active=True, my_kwarg=23
)
@patch("bot.exts.moderation.infraction.infractions._utils.post_infraction")
@patch("bot.exts.moderation.infraction.infractions._utils.get_active_infraction")
- async def test_voice_ban_mod_log_ignore(self, get_active_infraction, post_infraction_mock):
+ async def test_voice_mute_mod_log_ignore(self, get_active_infraction, post_infraction_mock):
"""Should ignore Voice Verified role removing."""
self.cog.mod_log.ignore = MagicMock()
self.cog.apply_infraction = AsyncMock()
@@ -134,11 +136,11 @@ class VoiceBanTests(unittest.IsolatedAsyncioTestCase):
get_active_infraction.return_value = None
post_infraction_mock.return_value = {"foo": "bar"}
- self.assertIsNone(await self.cog.apply_voice_ban(self.ctx, self.user, "foobar"))
+ self.assertIsNone(await self.cog.apply_voice_mute(self.ctx, self.user, "foobar"))
self.cog.mod_log.ignore.assert_called_once_with(Event.member_update, self.user.id)
async def action_tester(self, action, reason: str) -> None:
- """Helper method to test voice ban action."""
+ """Helper method to test voice mute action."""
self.assertTrue(inspect.iscoroutine(action))
await action
@@ -147,7 +149,7 @@ class VoiceBanTests(unittest.IsolatedAsyncioTestCase):
@patch("bot.exts.moderation.infraction.infractions._utils.post_infraction")
@patch("bot.exts.moderation.infraction.infractions._utils.get_active_infraction")
- async def test_voice_ban_apply_infraction(self, get_active_infraction, post_infraction_mock):
+ async def test_voice_mute_apply_infraction(self, get_active_infraction, post_infraction_mock):
"""Should ignore Voice Verified role removing."""
self.cog.mod_log.ignore = MagicMock()
self.cog.apply_infraction = AsyncMock()
@@ -156,22 +158,22 @@ class VoiceBanTests(unittest.IsolatedAsyncioTestCase):
post_infraction_mock.return_value = {"foo": "bar"}
reason = "foobar"
- self.assertIsNone(await self.cog.apply_voice_ban(self.ctx, self.user, reason))
+ self.assertIsNone(await self.cog.apply_voice_mute(self.ctx, self.user, reason))
self.cog.apply_infraction.assert_awaited_once_with(self.ctx, {"foo": "bar"}, self.user, ANY)
await self.action_tester(self.cog.apply_infraction.call_args[0][-1], reason)
@patch("bot.exts.moderation.infraction.infractions._utils.post_infraction")
@patch("bot.exts.moderation.infraction.infractions._utils.get_active_infraction")
- async def test_voice_ban_truncate_reason(self, get_active_infraction, post_infraction_mock):
- """Should truncate reason for voice ban."""
+ async def test_voice_mute_truncate_reason(self, get_active_infraction, post_infraction_mock):
+ """Should truncate reason for voice mute."""
self.cog.mod_log.ignore = MagicMock()
self.cog.apply_infraction = AsyncMock()
get_active_infraction.return_value = None
post_infraction_mock.return_value = {"foo": "bar"}
- self.assertIsNone(await self.cog.apply_voice_ban(self.ctx, self.user, "foobar" * 3000))
+ self.assertIsNone(await self.cog.apply_voice_mute(self.ctx, self.user, "foobar" * 3000))
self.cog.apply_infraction.assert_awaited_once_with(self.ctx, {"foo": "bar"}, self.user, ANY)
# Test action
@@ -180,14 +182,14 @@ class VoiceBanTests(unittest.IsolatedAsyncioTestCase):
@autospec(_utils, "post_infraction", "get_active_infraction", return_value=None)
@autospec(Infractions, "apply_infraction")
- async def test_voice_ban_user_left_guild(self, apply_infraction_mock, post_infraction_mock, _):
- """Should voice ban user that left the guild without throwing an error."""
+ async def test_voice_mute_user_left_guild(self, apply_infraction_mock, post_infraction_mock, _):
+ """Should voice mute user that left the guild without throwing an error."""
infraction = {"foo": "bar"}
post_infraction_mock.return_value = {"foo": "bar"}
user = MockUser()
- await self.cog.voiceban(self.cog, self.ctx, user, reason=None)
- post_infraction_mock.assert_called_once_with(self.ctx, user, "voice_ban", None, active=True, expires_at=None)
+ await self.cog.voicemute(self.cog, self.ctx, user, reason=None)
+ post_infraction_mock.assert_called_once_with(self.ctx, user, "voice_mute", None, active=True, expires_at=None)
apply_infraction_mock.assert_called_once_with(self.cog, self.ctx, infraction, user, ANY)
# Test action
@@ -195,22 +197,22 @@ class VoiceBanTests(unittest.IsolatedAsyncioTestCase):
self.assertTrue(inspect.iscoroutine(action))
await action
- async def test_voice_unban_user_not_found(self):
+ async def test_voice_unmute_user_not_found(self):
"""Should include info to return dict when user was not found from guild."""
self.guild.get_member.return_value = None
self.guild.fetch_member.side_effect = NotFound(Mock(status=404), "Not found")
- result = await self.cog.pardon_voice_ban(self.user.id, self.guild)
+ result = await self.cog.pardon_voice_mute(self.user.id, self.guild)
self.assertEqual(result, {"Info": "User was not found in the guild."})
@patch("bot.exts.moderation.infraction.infractions._utils.notify_pardon")
@patch("bot.exts.moderation.infraction.infractions.format_user")
- async def test_voice_unban_user_found(self, format_user_mock, notify_pardon_mock):
+ async def test_voice_unmute_user_found(self, format_user_mock, notify_pardon_mock):
"""Should add role back with ignoring, notify user and return log dictionary.."""
self.guild.get_member.return_value = self.user
notify_pardon_mock.return_value = True
format_user_mock.return_value = "my-user"
- result = await self.cog.pardon_voice_ban(self.user.id, self.guild)
+ result = await self.cog.pardon_voice_mute(self.user.id, self.guild)
self.assertEqual(result, {
"Member": "my-user",
"DM": "Sent"
@@ -219,15 +221,100 @@ class VoiceBanTests(unittest.IsolatedAsyncioTestCase):
@patch("bot.exts.moderation.infraction.infractions._utils.notify_pardon")
@patch("bot.exts.moderation.infraction.infractions.format_user")
- async def test_voice_unban_dm_fail(self, format_user_mock, notify_pardon_mock):
+ async def test_voice_unmute_dm_fail(self, format_user_mock, notify_pardon_mock):
"""Should add role back with ignoring, notify user and return log dictionary.."""
self.guild.get_member.return_value = self.user
notify_pardon_mock.return_value = False
format_user_mock.return_value = "my-user"
- result = await self.cog.pardon_voice_ban(self.user.id, self.guild)
+ result = await self.cog.pardon_voice_mute(self.user.id, self.guild)
self.assertEqual(result, {
"Member": "my-user",
"DM": "**Failed**"
})
notify_pardon_mock.assert_awaited_once()
+
+
+class CleanBanTests(unittest.IsolatedAsyncioTestCase):
+ """Tests for cleanban functionality."""
+
+ def setUp(self):
+ self.bot = MockBot()
+ self.mod = MockMember(roles=[MockRole(id=7890123, position=10)])
+ self.user = MockMember(roles=[MockRole(id=123456, position=1)])
+ self.guild = MockGuild()
+ self.ctx = MockContext(bot=self.bot, author=self.mod)
+ self.cog = Infractions(self.bot)
+ self.clean_cog = Clean(self.bot)
+ self.management_cog = ModManagement(self.bot)
+
+ self.cog.apply_ban = AsyncMock(return_value={"id": 42})
+ self.log_url = "https://www.youtube.com/watch?v=dQw4w9WgXcQ"
+ self.clean_cog._clean_messages = AsyncMock(return_value=self.log_url)
+
+ def mock_get_cog(self, enable_clean, enable_manage):
+ """Mock get cog factory that allows the user to specify whether clean and manage cogs are enabled."""
+ def inner(name):
+ if name == "ModManagement":
+ return self.management_cog if enable_manage else None
+ elif name == "Clean":
+ return self.clean_cog if enable_clean else None
+ else:
+ return DEFAULT
+ return inner
+
+ async def test_cleanban_falls_back_to_native_purge_without_clean_cog(self):
+ """Should fallback to native purge if the Clean cog is not available."""
+ self.bot.get_cog.side_effect = self.mock_get_cog(False, False)
+
+ self.assertIsNone(await self.cog.cleanban(self.cog, self.ctx, self.user, None, reason="FooBar"))
+ self.cog.apply_ban.assert_awaited_once_with(
+ self.ctx,
+ self.user,
+ "FooBar",
+ purge_days=1,
+ expires_at=None,
+ )
+
+ async def test_cleanban_doesnt_purge_messages_if_clean_cog_available(self):
+ """Cleanban command should use the native purge messages if the clean cog is available."""
+ self.bot.get_cog.side_effect = self.mock_get_cog(True, False)
+
+ self.assertIsNone(await self.cog.cleanban(self.cog, self.ctx, self.user, None, reason="FooBar"))
+ self.cog.apply_ban.assert_awaited_once_with(
+ self.ctx,
+ self.user,
+ "FooBar",
+ expires_at=None,
+ )
+
+ @patch("bot.exts.moderation.infraction.infractions.Age")
+ async def test_cleanban_uses_clean_cog_when_available(self, mocked_age_converter):
+ """Test cleanban uses the clean cog to clean messages if it's available."""
+ self.bot.api_client.patch = AsyncMock()
+ self.bot.get_cog.side_effect = self.mock_get_cog(True, False)
+
+ mocked_age_converter.return_value.convert = AsyncMock(return_value="81M")
+ self.assertIsNone(await self.cog.cleanban(self.cog, self.ctx, self.user, None, reason="FooBar"))
+
+ self.clean_cog._clean_messages.assert_awaited_once_with(
+ self.ctx,
+ users=[self.user],
+ channels="*",
+ first_limit="81M",
+ attempt_delete_invocation=False,
+ )
+
+ async def test_cleanban_edits_infraction_reason(self):
+ """Ensure cleanban edits the ban reason with a link to the clean log."""
+ self.bot.get_cog.side_effect = self.mock_get_cog(True, True)
+
+ self.management_cog.infraction_append = AsyncMock()
+ self.assertIsNone(await self.cog.cleanban(self.cog, self.ctx, self.user, None, reason="FooBar"))
+
+ self.management_cog.infraction_append.assert_awaited_once_with(
+ self.ctx,
+ {"id": 42},
+ None,
+ reason=f"[Clean log]({self.log_url})"
+ )
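(Aside: `mock_get_cog` above returns `unittest.mock.DEFAULT` for unknown cog names. When a callable `side_effect` returns `DEFAULT`, the mock falls back to its configured `return_value`, which is what lets unrelated `get_cog` lookups keep working. A standalone illustration, unrelated to the bot's own cogs:)

from unittest.mock import DEFAULT, MagicMock

get_cog = MagicMock(return_value="fallback cog")

def router(name):
    # Names handled explicitly get specific objects; anything else falls
    # through to the mock's configured return_value via DEFAULT.
    return {"Clean": "clean cog", "ModManagement": None}.get(name, DEFAULT)

get_cog.side_effect = router

assert get_cog("Clean") == "clean cog"
assert get_cog("ModManagement") is None
assert get_cog("Snekbox") == "fallback cog"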
diff --git a/tests/bot/exts/moderation/infraction/test_utils.py b/tests/bot/exts/moderation/infraction/test_utils.py
index 72eebb254..ff81ddd65 100644
--- a/tests/bot/exts/moderation/infraction/test_utils.py
+++ b/tests/bot/exts/moderation/infraction/test_utils.py
@@ -15,7 +15,10 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase):
"""Tests Moderation utils."""
def setUp(self):
- self.bot = MockBot()
+ patcher = patch("bot.instance", new=MockBot())
+ self.bot = patcher.start()
+ self.addCleanup(patcher.stop)
+
self.member = MockMember(id=1234)
self.user = MockUser(id=1234)
self.ctx = MockContext(bot=self.bot, author=self.member)
@@ -123,8 +126,9 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase):
else:
self.ctx.send.assert_not_awaited()
+ @unittest.skip("Current time needs to be patched so infraction duration is correct.")
@patch("bot.exts.moderation.infraction._utils.send_private_embed")
- async def test_notify_infraction(self, send_private_embed_mock):
+ async def test_send_infraction_embed(self, send_private_embed_mock):
"""
Should send an embed of a certain format as a DM and return `True` if DM successful.
@@ -132,7 +136,7 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase):
"""
test_cases = [
{
- "args": (self.user, "ban", "2020-02-26 09:20 (23 hours and 59 minutes)"),
+ "args": (dict(id=0, type="ban", reason=None, expires_at=datetime(2020, 2, 26, 9, 20)), self.user),
"expected_output": Embed(
title=utils.INFRACTION_TITLE,
description=utils.INFRACTION_DESCRIPTION_TEMPLATE.format(
@@ -145,12 +149,12 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase):
).set_author(
name=utils.INFRACTION_AUTHOR_NAME,
url=utils.RULES_URL,
- icon_url=Icons.token_removed
+ icon_url=Icons.user_ban
),
"send_result": True
},
{
- "args": (self.user, "warning", None, "Test reason."),
+ "args": (dict(id=0, type="warning", reason="Test reason.", expires_at=None), self.user),
"expected_output": Embed(
title=utils.INFRACTION_TITLE,
description=utils.INFRACTION_DESCRIPTION_TEMPLATE.format(
@@ -163,14 +167,14 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase):
).set_author(
name=utils.INFRACTION_AUTHOR_NAME,
url=utils.RULES_URL,
- icon_url=Icons.token_removed
+ icon_url=Icons.user_warn
),
"send_result": False
},
# Note that this test case asserts that the DM that *would* get sent to the user is formatted
# correctly, even though that message is deliberately never sent.
{
- "args": (self.user, "note", None, None, Icons.defcon_denied),
+ "args": (dict(id=0, type="note", reason=None, expires_at=None), self.user),
"expected_output": Embed(
title=utils.INFRACTION_TITLE,
description=utils.INFRACTION_DESCRIPTION_TEMPLATE.format(
@@ -183,12 +187,12 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase):
).set_author(
name=utils.INFRACTION_AUTHOR_NAME,
url=utils.RULES_URL,
- icon_url=Icons.defcon_denied
+ icon_url=Icons.user_warn
),
"send_result": False
},
{
- "args": (self.user, "mute", "2020-02-26 09:20 (23 hours and 59 minutes)", "Test", Icons.defcon_denied),
+ "args": (dict(id=0, type="mute", reason="Test", expires_at=datetime(2020, 2, 26, 9, 20)), self.user),
"expected_output": Embed(
title=utils.INFRACTION_TITLE,
description=utils.INFRACTION_DESCRIPTION_TEMPLATE.format(
@@ -201,12 +205,12 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase):
).set_author(
name=utils.INFRACTION_AUTHOR_NAME,
url=utils.RULES_URL,
- icon_url=Icons.defcon_denied
+ icon_url=Icons.user_mute
),
"send_result": False
},
{
- "args": (self.user, "mute", None, "foo bar" * 4000, Icons.defcon_denied),
+ "args": (dict(id=0, type="mute", reason="foo bar" * 4000, expires_at=None), self.user),
"expected_output": Embed(
title=utils.INFRACTION_TITLE,
description=utils.INFRACTION_DESCRIPTION_TEMPLATE.format(
@@ -219,7 +223,7 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase):
).set_author(
name=utils.INFRACTION_AUTHOR_NAME,
url=utils.RULES_URL,
- icon_url=Icons.defcon_denied
+ icon_url=Icons.user_mute
),
"send_result": True
}
@@ -238,7 +242,7 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase):
self.assertEqual(embed.to_dict(), case["expected_output"].to_dict())
- send_private_embed_mock.assert_awaited_once_with(case["args"][0], embed)
+ send_private_embed_mock.assert_awaited_once_with(case["args"][1], embed)
@patch("bot.exts.moderation.infraction._utils.send_private_embed")
async def test_notify_pardon(self, send_private_embed_mock):
@@ -313,7 +317,8 @@ class TestPostInfraction(unittest.IsolatedAsyncioTestCase):
"type": "ban",
"user": self.member.id,
"active": False,
- "expires_at": now.isoformat()
+ "expires_at": now.isoformat(),
+ "dm_sent": False
}
self.ctx.bot.api_client.post.return_value = "foo"
@@ -350,7 +355,8 @@ class TestPostInfraction(unittest.IsolatedAsyncioTestCase):
"reason": "Test reason",
"type": "mute",
"user": self.user.id,
- "active": True
+ "active": True,
+ "dm_sent": False
}
self.bot.api_client.post.side_effect = [ResponseCodeError(MagicMock(status=400), {"user": "foo"}), "foo"]
diff --git a/tests/bot/exts/moderation/test_clean.py b/tests/bot/exts/moderation/test_clean.py
new file mode 100644
index 000000000..d7647fa48
--- /dev/null
+++ b/tests/bot/exts/moderation/test_clean.py
@@ -0,0 +1,104 @@
+import unittest
+from unittest.mock import AsyncMock, MagicMock, patch
+
+from bot.exts.moderation.clean import Clean
+from tests.helpers import MockBot, MockContext, MockGuild, MockMember, MockMessage, MockRole, MockTextChannel
+
+
+class CleanTests(unittest.IsolatedAsyncioTestCase):
+ """Tests for clean cog functionality."""
+
+ def setUp(self):
+ self.bot = MockBot()
+ self.mod = MockMember(roles=[MockRole(id=7890123, position=10)])
+ self.user = MockMember(roles=[MockRole(id=123456, position=1)])
+ self.guild = MockGuild()
+ self.ctx = MockContext(bot=self.bot, author=self.mod)
+ self.cog = Clean(self.bot)
+
+ self.log_url = "https://www.youtube.com/watch?v=dQw4w9WgXcQ"
+ self.cog._modlog_cleaned_messages = AsyncMock(return_value=self.log_url)
+
+ self.cog._use_cache = MagicMock(return_value=True)
+ self.cog._delete_found = AsyncMock(return_value=[42, 84])
+
+ @patch("bot.exts.moderation.clean.is_mod_channel")
+ async def test_clean_deletes_invocation_in_non_mod_channel(self, mod_channel_check):
+ """Clean command should delete the invocation message if ran in a non mod channel."""
+ mod_channel_check.return_value = False
+ self.ctx.message.delete = AsyncMock()
+
+ self.assertIsNone(await self.cog._delete_invocation(self.ctx))
+
+ self.ctx.message.delete.assert_awaited_once()
+
+ @patch("bot.exts.moderation.clean.is_mod_channel")
+ async def test_clean_doesnt_delete_invocation_in_mod_channel(self, mod_channel_check):
+ """Clean command should not delete the invocation message if ran in a mod channel."""
+ mod_channel_check.return_value = True
+ self.ctx.message.delete = AsyncMock()
+
+ self.assertIsNone(await self.cog._delete_invocation(self.ctx))
+
+ self.ctx.message.delete.assert_not_awaited()
+
+ async def test_clean_doesnt_attempt_deletion_when_attempt_delete_invocation_is_false(self):
+ """Clean command should not attempt to delete the invocation message if attempt_delete_invocation is false."""
+ self.cog._delete_invocation = AsyncMock()
+ self.bot.get_channel = MagicMock(return_value=False)
+
+ self.assertEqual(
+ await self.cog._clean_messages(
+ self.ctx,
+ None,
+ first_limit=MockMessage(),
+ attempt_delete_invocation=False,
+ ),
+ self.log_url,
+ )
+
+ self.cog._delete_invocation.assert_not_awaited()
+
+ @patch("bot.exts.moderation.clean.is_mod_channel")
+ async def test_clean_replies_with_success_message_when_ran_in_mod_channel(self, mod_channel_check):
+ """Clean command should reply to the message with a confirmation message if invoked in a mod channel."""
+ mod_channel_check.return_value = True
+ self.ctx.reply = AsyncMock()
+
+ self.assertEqual(
+ await self.cog._clean_messages(
+ self.ctx,
+ None,
+ first_limit=MockMessage(),
+ attempt_delete_invocation=False,
+ ),
+ self.log_url,
+ )
+
+ self.ctx.reply.assert_awaited_once()
+ sent_message = self.ctx.reply.await_args[0][0]
+ self.assertIn(self.log_url, sent_message)
+ self.assertIn("2 messages", sent_message)
+
+ @patch("bot.exts.moderation.clean.is_mod_channel")
+ async def test_clean_send_success_message_to_mods_when_ran_in_non_mod_channel(self, mod_channel_check):
+ """Clean command should send a confirmation message to #mods if invoked in a non-mod channel."""
+ mod_channel_check.return_value = False
+ mocked_mods = MockTextChannel(id=1234567)
+ mocked_mods.send = AsyncMock()
+ self.bot.get_channel = MagicMock(return_value=mocked_mods)
+
+ self.assertEqual(
+ await self.cog._clean_messages(
+ self.ctx,
+ None,
+ first_limit=MockMessage(),
+ attempt_delete_invocation=False,
+ ),
+ self.log_url,
+ )
+
+ mocked_mods.send.assert_awaited_once()
+ sent_message = mocked_mods.send.await_args[0][0]
+ self.assertIn(self.log_url, sent_message)
+ self.assertIn("2 messages", sent_message)
diff --git a/tests/bot/exts/moderation/test_incidents.py b/tests/bot/exts/moderation/test_incidents.py
index ccc842050..cfe0c4b03 100644
--- a/tests/bot/exts/moderation/test_incidents.py
+++ b/tests/bot/exts/moderation/test_incidents.py
@@ -3,13 +3,16 @@ import enum
import logging
import typing as t
import unittest
+from unittest import mock
from unittest.mock import AsyncMock, MagicMock, Mock, call, patch
import aiohttp
import discord
+from async_rediscache import RedisSession
from bot.constants import Colours
from bot.exts.moderation import incidents
+from bot.utils.messages import format_user
from tests.helpers import (
MockAsyncWebhook, MockAttachment, MockBot, MockMember, MockMessage, MockReaction, MockRole, MockTextChannel,
MockUser
@@ -276,6 +279,22 @@ class TestIncidents(unittest.IsolatedAsyncioTestCase):
the instance as they wish.
"""
+ session = None
+
+ async def flush(self):
+ """Flush everything from the database to prevent carry-overs between tests."""
+ with await self.session.pool as connection:
+ await connection.flushall()
+
+ async def asyncSetUp(self): # noqa: N802
+ self.session = RedisSession(use_fakeredis=True)
+ await self.session.connect()
+ await self.flush()
+
+ async def asyncTearDown(self): # noqa: N802
+ if self.session:
+ await self.session.close()
+
def setUp(self):
"""
Prepare a fresh `Incidents` instance for each test.
@@ -506,7 +525,7 @@ class TestProcessEvent(TestIncidents):
with patch("bot.exts.moderation.incidents.Incidents.make_confirmation_task", mock_task):
await self.cog_instance.process_event(
reaction=incidents.Signal.ACTIONED.value,
- incident=MockMessage(),
+ incident=MockMessage(id=123),
member=MockMember(roles=[MockRole(id=1)])
)
@@ -526,7 +545,7 @@ class TestProcessEvent(TestIncidents):
with patch("bot.exts.moderation.incidents.Incidents.make_confirmation_task", mock_task):
await self.cog_instance.process_event(
reaction=incidents.Signal.ACTIONED.value,
- incident=MockMessage(),
+ incident=MockMessage(id=123),
member=MockMember(roles=[MockRole(id=1)])
)
except asyncio.TimeoutError:
@@ -761,3 +780,74 @@ class TestOnMessage(TestIncidents):
await self.cog_instance.on_message(MockMessage())
mock_add_signals.assert_not_called()
+
+
+class TestMessageLinkEmbeds(TestIncidents):
+ """Tests for `extract_message_links` coroutine."""
+
+ async def test_shorten_text(self):
+ """Test all cases of text shortening by mocking messages."""
+ tests = {
+ "thisisasingleword"*10: "thisisasinglewordthisisasinglewordthisisasinglewor...",
+
+ "\n".join("Lets make a new line test".split()): "Lets\nmake\na...",
+
+ 'Hello, World!' * 300: (
+ "Hello, World!Hello, World!Hello, World!Hello, World!Hello, World!Hello, World!Hello, World!"
+ "Hello, World!Hello, World!Hello, World!Hello, World!Hello, World!Hello, World!Hello, World!"
+ "Hello, World!Hello, World!Hello, World!Hello, World!Hello, World!Hello, World!Hello, World!"
+ "Hello, World!Hello, World!H..."
+ )
+ }
+
+ for content, expected_conversion in tests.items():
+ with self.subTest(content=content, expected_conversion=expected_conversion):
+ conversion = incidents.shorten_text(content)
+ self.assertEqual(conversion, expected_conversion)
+
+ async def extract_and_form_message_link_embeds(self):
+ """
+ Extract message links from a mocked message and form the message link embed.
+
+ Considers all types of message links, discord supports.
+ """
+ self.guild_id_patcher = mock.patch("bot.exts.backend.sync._cog.constants.Guild.id", 5)
+ self.guild_id = self.guild_id_patcher.start()
+
+ msg = MockMessage(id=555, content="Hello, World!" * 3000)
+ msg.channel.mention = "#lemonade-stand"
+
+ msg_links = [
+ # Valid Message links
+ f"https://discord.com/channels/{self.guild_id}/{msg.channel.discord_id}/{msg.discord_id}",
+ f"http://canary.discord.com/channels/{self.guild_id}/{msg.channel.discord_id}/{msg.discord_id}",
+
+ # Invalid Message links
+ f"https://discord.com/channels/{msg.channel.discord_id}/{msg.discord_id}",
+ f"https://discord.com/channels/{self.guild_id}/{msg.channel.discord_id}000/{msg.discord_id}",
+ ]
+
+ incident_msg = MockMessage(
+ id=777,
+ content=(
+ f"I would like to report the following messages, "
+ f"as they break our rules: \n{', '.join(msg_links)}"
+ )
+ )
+
+ with patch(
+ "bot.exts.moderation.incidents.Incidents.extract_message_links", AsyncMock()
+ ) as mock_extract_message_links:
+ embeds = mock_extract_message_links(incident_msg)
+ description = (
+ f"**Author:** {format_user(msg.author)}\n"
+ f"**Channel:** {msg.channel.mention} ({msg.channel.category}/#{msg.channel.name})\n"
+ f"**Content:** {('Hello, World!' * 3000)[:300] + '...'}\n"
+ )
+
+ # Check number of embeds returned with number of valid links
+ self.assertEqual(len(embeds), 2)
+
+ # Check for the embed descriptions
+ for embed in embeds:
+ self.assertEqual(embed.description, description)
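(Aside: the new `asyncSetUp`/`asyncTearDown` hooks above give each test a fresh fakeredis-backed cache. Stripped of the cog specifics, the skeleton looks like this; it assumes only the `RedisSession` calls already used above and is illustrative rather than the cog's actual fixture.)

import unittest

from async_rediscache import RedisSession


class FakeRedisTestCase(unittest.IsolatedAsyncioTestCase):
    """Illustrative skeleton of the fakeredis-backed setup used above."""

    async def asyncSetUp(self):
        # use_fakeredis=True keeps the tests self-contained: no real
        # Redis server is required.
        self.session = RedisSession(use_fakeredis=True)
        await self.session.connect()
        with await self.session.pool as connection:
            await connection.flushall()  # start each test from a clean slate

    async def asyncTearDown(self):
        await self.session.close()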
diff --git a/tests/bot/exts/utils/test_snekbox.py b/tests/bot/exts/utils/test_snekbox.py
index 321a92445..f68a20089 100644
--- a/tests/bot/exts/utils/test_snekbox.py
+++ b/tests/bot/exts/utils/test_snekbox.py
@@ -2,6 +2,7 @@ import asyncio
import unittest
from unittest.mock import AsyncMock, MagicMock, Mock, call, create_autospec, patch
+from discord import AllowedMentions
from discord.ext import commands
from bot import constants
@@ -16,7 +17,7 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
self.bot = MockBot()
self.cog = Snekbox(bot=self.bot)
- async def test_post_eval(self):
+ async def test_post_job(self):
"""Post the eval code to the URLs.snekbox_eval_api endpoint."""
resp = MagicMock()
resp.json = AsyncMock(return_value="return")
@@ -25,7 +26,7 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
context_manager.__aenter__.return_value = resp
self.bot.http_session.post.return_value = context_manager
- self.assertEqual(await self.cog.post_eval("import random"), "return")
+ self.assertEqual(await self.cog.post_job("import random"), "return")
self.bot.http_session.post.assert_called_with(
constants.URLs.snekbox_eval_api,
json={"input": "import random"},
@@ -44,7 +45,8 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
await self.cog.upload_output("Test output.")
mock_paste_util.assert_called_once_with("Test output.", extension="txt")
- def test_prepare_input(self):
+ async def test_codeblock_converter(self):
+ ctx = MockContext()
cases = (
('print("Hello world!")', 'print("Hello world!")', 'non-formatted'),
('`print("Hello world!")`', 'print("Hello world!")', 'one line code block'),
@@ -60,7 +62,24 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
)
for case, expected, testname in cases:
with self.subTest(msg=f'Extract code from {testname}.'):
- self.assertEqual(self.cog.prepare_input(case), expected)
+ self.assertEqual(
+ '\n'.join(await snekbox.CodeblockConverter.convert(ctx, case)), expected
+ )
+
+ def test_prepare_timeit_input(self):
+ """Test the prepare_timeit_input codeblock detection."""
+ base_args = ('-m', 'timeit', '-s')
+ cases = (
+ (['print("Hello World")'], '', 'single block of code'),
+ (['x = 1', 'print(x)'], 'x = 1', 'two blocks of code'),
+ (['x = 1', 'print(x)', 'print("Some other code.")'], 'x = 1', 'three blocks of code')
+ )
+
+ for case, setup_code, testname in cases:
+ setup = snekbox.TIMEIT_SETUP_WRAPPER.format(setup=setup_code)
+ expected = ('\n'.join(case[1:] if setup_code else case), [*base_args, setup])
+ with self.subTest(msg=f'Test with {testname} and expected return {expected}'):
+ self.assertEqual(self.cog.prepare_timeit_input(case), expected)
def test_get_results_message(self):
"""Return error and message according to the eval result."""
@@ -71,13 +90,13 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
)
for stdout, returncode, expected in cases:
with self.subTest(stdout=stdout, returncode=returncode, expected=expected):
- actual = self.cog.get_results_message({'stdout': stdout, 'returncode': returncode})
+ actual = self.cog.get_results_message({'stdout': stdout, 'returncode': returncode}, 'eval')
self.assertEqual(actual, expected)
@patch('bot.exts.utils.snekbox.Signals', side_effect=ValueError)
def test_get_results_message_invalid_signal(self, mock_signals: Mock):
self.assertEqual(
- self.cog.get_results_message({'stdout': '', 'returncode': 127}),
+ self.cog.get_results_message({'stdout': '', 'returncode': 127}, 'eval'),
('Your eval job has completed with return code 127', '')
)
@@ -85,7 +104,7 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
def test_get_results_message_valid_signal(self, mock_signals: Mock):
mock_signals.return_value.name = 'SIGTEST'
self.assertEqual(
- self.cog.get_results_message({'stdout': '', 'returncode': 127}),
+ self.cog.get_results_message({'stdout': '', 'returncode': 127}, 'eval'),
('Your eval job has completed with return code 127 (SIGTEST)', '')
)
@@ -155,28 +174,29 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
"""Test the eval command procedure."""
ctx = MockContext()
response = MockMessage()
- self.cog.prepare_input = MagicMock(return_value='MyAwesomeFormattedCode')
- self.cog.send_eval = AsyncMock(return_value=response)
- self.cog.continue_eval = AsyncMock(return_value=None)
+ ctx.command = MagicMock()
- await self.cog.eval_command(self.cog, ctx=ctx, code='MyAwesomeCode')
- self.cog.prepare_input.assert_called_once_with('MyAwesomeCode')
- self.cog.send_eval.assert_called_once_with(ctx, 'MyAwesomeFormattedCode')
- self.cog.continue_eval.assert_called_once_with(ctx, response)
+ self.cog.send_job = AsyncMock(return_value=response)
+ self.cog.continue_job = AsyncMock(return_value=(None, None))
+
+ await self.cog.eval_command(self.cog, ctx=ctx, code=['MyAwesomeCode'])
+ self.cog.send_job.assert_called_once_with(ctx, 'MyAwesomeCode', args=None, job_name='eval')
+ self.cog.continue_job.assert_called_once_with(ctx, response, ctx.command)
async def test_eval_command_evaluate_twice(self):
"""Test the eval and re-eval command procedure."""
ctx = MockContext()
response = MockMessage()
- self.cog.prepare_input = MagicMock(return_value='MyAwesomeFormattedCode')
- self.cog.send_eval = AsyncMock(return_value=response)
- self.cog.continue_eval = AsyncMock()
- self.cog.continue_eval.side_effect = ('MyAwesomeCode-2', None)
-
- await self.cog.eval_command(self.cog, ctx=ctx, code='MyAwesomeCode')
- self.cog.prepare_input.has_calls(call('MyAwesomeCode'), call('MyAwesomeCode-2'))
- self.cog.send_eval.assert_called_with(ctx, 'MyAwesomeFormattedCode')
- self.cog.continue_eval.assert_called_with(ctx, response)
+ ctx.command = MagicMock()
+ self.cog.send_job = AsyncMock(return_value=response)
+ self.cog.continue_job = AsyncMock()
+ self.cog.continue_job.side_effect = (('MyAwesomeFormattedCode', None), (None, None))
+
+ await self.cog.eval_command(self.cog, ctx=ctx, code=['MyAwesomeCode'])
+ self.cog.send_job.assert_called_with(
+ ctx, 'MyAwesomeFormattedCode', args=None, job_name='eval'
+ )
+ self.cog.continue_job.assert_called_with(ctx, response, ctx.command)
async def test_eval_command_reject_two_eval_at_the_same_time(self):
"""Test if the eval command rejects an eval if the author already have a running eval."""
@@ -190,90 +210,99 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
"@LemonLemonishBeard#0042 You've already got a job running - please wait for it to finish!"
)
- async def test_eval_command_call_help(self):
- """Test if the eval command call the help command if no code is provided."""
- ctx = MockContext(command="sentinel")
- await self.cog.eval_command(self.cog, ctx=ctx, code='')
- ctx.send_help.assert_called_once_with(ctx.command)
-
- async def test_send_eval(self):
- """Test the send_eval function."""
+ async def test_send_job(self):
+ """Test the send_job function."""
ctx = MockContext()
ctx.message = MockMessage()
ctx.send = AsyncMock()
- ctx.author.mention = '@LemonLemonishBeard#0042'
+ ctx.author = MockUser(mention='@LemonLemonishBeard#0042')
- self.cog.post_eval = AsyncMock(return_value={'stdout': '', 'returncode': 0})
+ self.cog.post_job = AsyncMock(return_value={'stdout': '', 'returncode': 0})
self.cog.get_results_message = MagicMock(return_value=('Return code 0', ''))
self.cog.get_status_emoji = MagicMock(return_value=':yay!:')
self.cog.format_output = AsyncMock(return_value=('[No output]', None))
mocked_filter_cog = MagicMock()
- mocked_filter_cog.filter_eval = AsyncMock(return_value=False)
+ mocked_filter_cog.filter_snekbox_output = AsyncMock(return_value=False)
self.bot.get_cog.return_value = mocked_filter_cog
- await self.cog.send_eval(ctx, 'MyAwesomeCode')
- ctx.send.assert_called_once_with(
+ await self.cog.send_job(ctx, 'MyAwesomeCode', job_name='eval')
+
+ ctx.send.assert_called_once()
+ self.assertEqual(
+ ctx.send.call_args.args[0],
'@LemonLemonishBeard#0042 :yay!: Return code 0.\n\n```\n[No output]\n```'
)
- self.cog.post_eval.assert_called_once_with('MyAwesomeCode')
+ allowed_mentions = ctx.send.call_args.kwargs['allowed_mentions']
+ expected_allowed_mentions = AllowedMentions(everyone=False, roles=False, users=[ctx.author])
+ self.assertEqual(allowed_mentions.to_dict(), expected_allowed_mentions.to_dict())
+
+ self.cog.post_job.assert_called_once_with('MyAwesomeCode', args=None)
self.cog.get_status_emoji.assert_called_once_with({'stdout': '', 'returncode': 0})
- self.cog.get_results_message.assert_called_once_with({'stdout': '', 'returncode': 0})
+ self.cog.get_results_message.assert_called_once_with({'stdout': '', 'returncode': 0}, 'eval')
self.cog.format_output.assert_called_once_with('')
- async def test_send_eval_with_paste_link(self):
- """Test the send_eval function with a too long output that generate a paste link."""
+ async def test_send_job_with_paste_link(self):
+ """Test the send_job function with a too long output that generate a paste link."""
ctx = MockContext()
ctx.message = MockMessage()
ctx.send = AsyncMock()
ctx.author.mention = '@LemonLemonishBeard#0042'
- self.cog.post_eval = AsyncMock(return_value={'stdout': 'Way too long beard', 'returncode': 0})
+ self.cog.post_job = AsyncMock(return_value={'stdout': 'Way too long beard', 'returncode': 0})
self.cog.get_results_message = MagicMock(return_value=('Return code 0', ''))
self.cog.get_status_emoji = MagicMock(return_value=':yay!:')
self.cog.format_output = AsyncMock(return_value=('Way too long beard', 'lookatmybeard.com'))
mocked_filter_cog = MagicMock()
- mocked_filter_cog.filter_eval = AsyncMock(return_value=False)
+ mocked_filter_cog.filter_snekbox_output = AsyncMock(return_value=False)
self.bot.get_cog.return_value = mocked_filter_cog
- await self.cog.send_eval(ctx, 'MyAwesomeCode')
- ctx.send.assert_called_once_with(
+ await self.cog.send_job(ctx, 'MyAwesomeCode', job_name='eval')
+
+ ctx.send.assert_called_once()
+ self.assertEqual(
+ ctx.send.call_args.args[0],
'@LemonLemonishBeard#0042 :yay!: Return code 0.'
'\n\n```\nWay too long beard\n```\nFull output: lookatmybeard.com'
)
- self.cog.post_eval.assert_called_once_with('MyAwesomeCode')
+
+ self.cog.post_job.assert_called_once_with('MyAwesomeCode', args=None)
self.cog.get_status_emoji.assert_called_once_with({'stdout': 'Way too long beard', 'returncode': 0})
- self.cog.get_results_message.assert_called_once_with({'stdout': 'Way too long beard', 'returncode': 0})
+ self.cog.get_results_message.assert_called_once_with({'stdout': 'Way too long beard', 'returncode': 0}, 'eval')
self.cog.format_output.assert_called_once_with('Way too long beard')
- async def test_send_eval_with_non_zero_eval(self):
- """Test the send_eval function with a code returning a non-zero code."""
+ async def test_send_job_with_non_zero_eval(self):
+ """Test the send_job function with a code returning a non-zero code."""
ctx = MockContext()
ctx.message = MockMessage()
ctx.send = AsyncMock()
ctx.author.mention = '@LemonLemonishBeard#0042'
- self.cog.post_eval = AsyncMock(return_value={'stdout': 'ERROR', 'returncode': 127})
+ self.cog.post_job = AsyncMock(return_value={'stdout': 'ERROR', 'returncode': 127})
self.cog.get_results_message = MagicMock(return_value=('Return code 127', 'Beard got stuck in the eval'))
self.cog.get_status_emoji = MagicMock(return_value=':nope!:')
self.cog.format_output = AsyncMock() # This function isn't called
mocked_filter_cog = MagicMock()
- mocked_filter_cog.filter_eval = AsyncMock(return_value=False)
+ mocked_filter_cog.filter_snekbox_output = AsyncMock(return_value=False)
self.bot.get_cog.return_value = mocked_filter_cog
- await self.cog.send_eval(ctx, 'MyAwesomeCode')
- ctx.send.assert_called_once_with(
+ await self.cog.send_job(ctx, 'MyAwesomeCode', job_name='eval')
+
+ ctx.send.assert_called_once()
+ self.assertEqual(
+ ctx.send.call_args.args[0],
'@LemonLemonishBeard#0042 :nope!: Return code 127.\n\n```\nBeard got stuck in the eval\n```'
)
- self.cog.post_eval.assert_called_once_with('MyAwesomeCode')
+
+ self.cog.post_job.assert_called_once_with('MyAwesomeCode', args=None)
self.cog.get_status_emoji.assert_called_once_with({'stdout': 'ERROR', 'returncode': 127})
- self.cog.get_results_message.assert_called_once_with({'stdout': 'ERROR', 'returncode': 127})
+ self.cog.get_results_message.assert_called_once_with({'stdout': 'ERROR', 'returncode': 127}, 'eval')
self.cog.format_output.assert_not_called()
@patch("bot.exts.utils.snekbox.partial")
- async def test_continue_eval_does_continue(self, partial_mock):
- """Test that the continue_eval function does continue if required conditions are met."""
+ async def test_continue_job_does_continue(self, partial_mock):
+ """Test that the continue_job function does continue if required conditions are met."""
ctx = MockContext(message=MockMessage(add_reaction=AsyncMock(), clear_reactions=AsyncMock()))
response = MockMessage(delete=AsyncMock())
new_msg = MockMessage()
@@ -281,30 +310,30 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
expected = "NewCode"
self.cog.get_code = create_autospec(self.cog.get_code, spec_set=True, return_value=expected)
- actual = await self.cog.continue_eval(ctx, response)
- self.cog.get_code.assert_awaited_once_with(new_msg)
- self.assertEqual(actual, expected)
+ actual = await self.cog.continue_job(ctx, response, self.cog.eval_command)
+ self.cog.get_code.assert_awaited_once_with(new_msg, ctx.command)
+ self.assertEqual(actual, (expected, None))
self.bot.wait_for.assert_has_awaits(
(
call(
'message_edit',
- check=partial_mock(snekbox.predicate_eval_message_edit, ctx),
- timeout=snekbox.REEVAL_TIMEOUT,
+ check=partial_mock(snekbox.predicate_message_edit, ctx),
+ timeout=snekbox.REDO_TIMEOUT,
),
- call('reaction_add', check=partial_mock(snekbox.predicate_eval_emoji_reaction, ctx), timeout=10)
+ call('reaction_add', check=partial_mock(snekbox.predicate_emoji_reaction, ctx), timeout=10)
)
)
- ctx.message.add_reaction.assert_called_once_with(snekbox.REEVAL_EMOJI)
- ctx.message.clear_reaction.assert_called_once_with(snekbox.REEVAL_EMOJI)
+ ctx.message.add_reaction.assert_called_once_with(snekbox.REDO_EMOJI)
+ ctx.message.clear_reaction.assert_called_once_with(snekbox.REDO_EMOJI)
response.delete.assert_called_once()
- async def test_continue_eval_does_not_continue(self):
+ async def test_continue_job_does_not_continue(self):
ctx = MockContext(message=MockMessage(clear_reactions=AsyncMock()))
self.bot.wait_for.side_effect = asyncio.TimeoutError
- actual = await self.cog.continue_eval(ctx, MockMessage())
- self.assertEqual(actual, None)
- ctx.message.clear_reaction.assert_called_once_with(snekbox.REEVAL_EMOJI)
+ actual = await self.cog.continue_job(ctx, MockMessage(), self.cog.eval_command)
+ self.assertEqual(actual, (None, None))
+ ctx.message.clear_reaction.assert_called_once_with(snekbox.REDO_EMOJI)
async def test_get_code(self):
"""Should return 1st arg (or None) if eval cmd in message, otherwise return full content."""
@@ -327,13 +356,13 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
self.bot.get_context.return_value = MockContext(command=command)
message = MockMessage(content=content)
- actual_code = await self.cog.get_code(message)
+ actual_code = await self.cog.get_code(message, self.cog.eval_command)
self.bot.get_context.assert_awaited_once_with(message)
self.assertEqual(actual_code, expected_code)
- def test_predicate_eval_message_edit(self):
- """Test the predicate_eval_message_edit function."""
+ def test_predicate_message_edit(self):
+ """Test the predicate_message_edit function."""
msg0 = MockMessage(id=1, content='abc')
msg1 = MockMessage(id=2, content='abcdef')
msg2 = MockMessage(id=1, content='abcdef')
@@ -346,18 +375,18 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
for ctx_msg, new_msg, expected, testname in cases:
with self.subTest(msg=f'Messages with {testname} return {expected}'):
ctx = MockContext(message=ctx_msg)
- actual = snekbox.predicate_eval_message_edit(ctx, ctx_msg, new_msg)
+ actual = snekbox.predicate_message_edit(ctx, ctx_msg, new_msg)
self.assertEqual(actual, expected)
- def test_predicate_eval_emoji_reaction(self):
- """Test the predicate_eval_emoji_reaction function."""
+ def test_predicate_emoji_reaction(self):
+ """Test the predicate_emoji_reaction function."""
valid_reaction = MockReaction(message=MockMessage(id=1))
- valid_reaction.__str__.return_value = snekbox.REEVAL_EMOJI
+ valid_reaction.__str__.return_value = snekbox.REDO_EMOJI
valid_ctx = MockContext(message=MockMessage(id=1), author=MockUser(id=2))
valid_user = MockUser(id=2)
invalid_reaction_id = MockReaction(message=MockMessage(id=42))
- invalid_reaction_id.__str__.return_value = snekbox.REEVAL_EMOJI
+ invalid_reaction_id.__str__.return_value = snekbox.REDO_EMOJI
invalid_user_id = MockUser(id=42)
invalid_reaction_str = MockReaction(message=MockMessage(id=1))
invalid_reaction_str.__str__.return_value = ':longbeard:'
@@ -370,7 +399,7 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
)
for reaction, user, expected, testname in cases:
with self.subTest(msg=f'Test with {testname} and expected return {expected}'):
- actual = snekbox.predicate_eval_emoji_reaction(valid_ctx, reaction, user)
+ actual = snekbox.predicate_emoji_reaction(valid_ctx, reaction, user)
self.assertEqual(actual, expected)
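(Aside: the send_job assertions above read keyword arguments from `call_args.kwargs` and compare `AllowedMentions` objects via `to_dict()` rather than directly. A compact standalone version of that check, assuming only the discord.py API already imported above:)

import asyncio
from unittest.mock import AsyncMock

from discord import AllowedMentions

send = AsyncMock()
mentions = AllowedMentions(everyone=False, roles=False, users=[])
asyncio.run(send("output", allowed_mentions=mentions))

# Comparing the dict forms avoids depending on AllowedMentions equality,
# and call_args.kwargs exposes the keyword arguments of the last call.
sent = send.call_args.kwargs["allowed_mentions"]
assert sent.to_dict() == mentions.to_dict()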
diff --git a/tests/bot/test_converters.py b/tests/bot/test_converters.py
index 988b3857b..1bb678db2 100644
--- a/tests/bot/test_converters.py
+++ b/tests/bot/test_converters.py
@@ -6,7 +6,7 @@ from unittest.mock import MagicMock, patch
from dateutil.relativedelta import relativedelta
from discord.ext.commands import BadArgument
-from bot.converters import Duration, HushDurationConverter, ISODateTime, PackageName, TagNameConverter
+from bot.converters import Duration, HushDurationConverter, ISODateTime, PackageName
class ConverterTests(unittest.IsolatedAsyncioTestCase):
@@ -19,21 +19,6 @@ class ConverterTests(unittest.IsolatedAsyncioTestCase):
cls.fixed_utc_now = datetime.fromisoformat('2019-01-01T00:00:00+00:00')
- async def test_tag_name_converter_for_invalid(self):
- """TagNameConverter should raise the correct exception for invalid tag names."""
- test_values = (
- ('👋', "Don't be ridiculous, you can't use that character!"),
- ('', "Tag names should not be empty, or filled with whitespace."),
- (' ', "Tag names should not be empty, or filled with whitespace."),
- ('42', "Tag names must contain at least one letter."),
- ('x' * 128, "Are you insane? That's way too long!"),
- )
-
- for invalid_name, exception_message in test_values:
- with self.subTest(invalid_name=invalid_name, exception_message=exception_message):
- with self.assertRaisesRegex(BadArgument, re.escape(exception_message)):
- await TagNameConverter.convert(self.context, invalid_name)
-
async def test_package_name_for_valid(self):
"""PackageName returns valid package names unchanged."""
test_values = ('foo', 'le_mon', 'num83r')
diff --git a/tests/bot/utils/test_time.py b/tests/bot/utils/test_time.py
index a3dcbfc0a..120d65176 100644
--- a/tests/bot/utils/test_time.py
+++ b/tests/bot/utils/test_time.py
@@ -13,13 +13,15 @@ class TimeTests(unittest.TestCase):
"""humanize_delta should be able to handle unknown units, and will not abort."""
# Does not abort for unknown units, as the unit name is checked
# against the attribute of the relativedelta instance.
- self.assertEqual(time.humanize_delta(relativedelta(days=2, hours=2), 'elephants', 2), '2 days and 2 hours')
+ actual = time.humanize_delta(relativedelta(days=2, hours=2), precision='elephants', max_units=2)
+ self.assertEqual(actual, '2 days and 2 hours')
def test_humanize_delta_handle_high_units(self):
"""humanize_delta should be able to handle very high units."""
# Very high maximum units, but it only ever iterates over
# each value the relativedelta might have.
- self.assertEqual(time.humanize_delta(relativedelta(days=2, hours=2), 'hours', 20), '2 days and 2 hours')
+ actual = time.humanize_delta(relativedelta(days=2, hours=2), precision='hours', max_units=20)
+ self.assertEqual(actual, '2 days and 2 hours')
def test_humanize_delta_should_normal_usage(self):
"""Testing humanize delta."""
@@ -32,7 +34,8 @@ class TimeTests(unittest.TestCase):
for delta, precision, max_units, expected in test_cases:
with self.subTest(delta=delta, precision=precision, max_units=max_units, expected=expected):
- self.assertEqual(time.humanize_delta(delta, precision, max_units), expected)
+ actual = time.humanize_delta(delta, precision=precision, max_units=max_units)
+ self.assertEqual(actual, expected)
def test_humanize_delta_raises_for_invalid_max_units(self):
"""humanize_delta should raises ValueError('max_units must be positive') for invalid max_units."""
@@ -40,22 +43,11 @@ class TimeTests(unittest.TestCase):
for max_units in test_cases:
with self.subTest(max_units=max_units), self.assertRaises(ValueError) as error:
- time.humanize_delta(relativedelta(days=2, hours=2), 'hours', max_units)
- self.assertEqual(str(error.exception), 'max_units must be positive')
-
- def test_parse_rfc1123(self):
- """Testing parse_rfc1123."""
- self.assertEqual(
- time.parse_rfc1123('Sun, 15 Sep 2019 12:00:00 GMT'),
- datetime(2019, 9, 15, 12, 0, 0, tzinfo=timezone.utc)
- )
-
- def test_format_infraction(self):
- """Testing format_infraction."""
- self.assertEqual(time.format_infraction('2019-12-12T00:01:00Z'), '<t:1576108860:f>')
+ time.humanize_delta(relativedelta(days=2, hours=2), precision='hours', max_units=max_units)
+ self.assertEqual(str(error.exception), 'max_units must be positive.')
- def test_format_infraction_with_duration_none_expiry(self):
- """format_infraction_with_duration should work for None expiry."""
+ def test_format_with_duration_none_expiry(self):
+ """format_with_duration should work for None expiry."""
test_cases = (
(None, None, None, None),
@@ -67,10 +59,10 @@ class TimeTests(unittest.TestCase):
for expiry, date_from, max_units, expected in test_cases:
with self.subTest(expiry=expiry, date_from=date_from, max_units=max_units, expected=expected):
- self.assertEqual(time.format_infraction_with_duration(expiry, date_from, max_units), expected)
+ self.assertEqual(time.format_with_duration(expiry, date_from, max_units), expected)
- def test_format_infraction_with_duration_custom_units(self):
- """format_infraction_with_duration should work for custom max_units."""
+ def test_format_with_duration_custom_units(self):
+ """format_with_duration should work for custom max_units."""
test_cases = (
('3000-12-12T00:01:00Z', datetime(3000, 12, 11, 12, 5, 5, tzinfo=timezone.utc), 6,
'<t:32533488060:f> (11 hours, 55 minutes and 55 seconds)'),
@@ -80,10 +72,10 @@ class TimeTests(unittest.TestCase):
for expiry, date_from, max_units, expected in test_cases:
with self.subTest(expiry=expiry, date_from=date_from, max_units=max_units, expected=expected):
- self.assertEqual(time.format_infraction_with_duration(expiry, date_from, max_units), expected)
+ self.assertEqual(time.format_with_duration(expiry, date_from, max_units), expected)
- def test_format_infraction_with_duration_normal_usage(self):
- """format_infraction_with_duration should work for normal usage, across various durations."""
+ def test_format_with_duration_normal_usage(self):
+ """format_with_duration should work for normal usage, across various durations."""
utc = timezone.utc
test_cases = (
('2019-12-12T00:01:00Z', datetime(2019, 12, 11, 12, 0, 5, tzinfo=utc), 2,
@@ -105,11 +97,11 @@ class TimeTests(unittest.TestCase):
for expiry, date_from, max_units, expected in test_cases:
with self.subTest(expiry=expiry, date_from=date_from, max_units=max_units, expected=expected):
- self.assertEqual(time.format_infraction_with_duration(expiry, date_from, max_units), expected)
+ self.assertEqual(time.format_with_duration(expiry, date_from, max_units), expected)
def test_until_expiration_with_duration_none_expiry(self):
- """until_expiration should work for None expiry."""
- self.assertEqual(time.until_expiration(None), None)
+ """until_expiration should return "Permanent" is expiry is None."""
+ self.assertEqual(time.until_expiration(None), "Permanent")
def test_until_expiration_with_duration_custom_units(self):
"""until_expiration should work for custom max_units."""
@@ -130,7 +122,6 @@ class TimeTests(unittest.TestCase):
('3000-12-12T00:00:00Z', '<t:32533488000:R>'),
('3000-11-23T20:09:00Z', '<t:32531918940:R>'),
('3000-11-23T20:09:00Z', '<t:32531918940:R>'),
- (None, None),
)
for expiry, expected in test_cases: