author | 2021-10-19 05:33:18 +0530
---|---
committer | 2021-10-19 05:33:18 +0530
commit | 16fe94c94858d2238ff71c199ac1fcbf966ab505
tree | bd815cc0170343c87b74ed2a5570a21c048b425c
parent | Fix bugs when scheduling from cache
parent | Merge pull request #1890 from python-discord/Migrate-to-socket_event_type
Merge remote-tracking branch 'upstream/main' into modpings-schedule
159 files changed, 5369 insertions, 2584 deletions
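A recurring change throughout the patch below is the replacement of bare `asyncio.create_task(...)` and `loop.create_task(...)` calls with `scheduling.create_task(..., event_loop=...)` from `bot.utils.scheduling`. The wrapper itself is not part of this diff, so the following is only a minimal sketch of what such a helper typically does — hold a strong reference so the task is not garbage-collected mid-run and surface unhandled exceptions — and may differ from the project's actual implementation.

```python
# Illustrative sketch only: bot/utils/scheduling.create_task is not shown in this
# diff, so the behaviour below (reference-keeping plus exception reporting) is an
# assumption, not the project's implementation.
import asyncio
from typing import Coroutine, Optional

_background_tasks: set = set()  # Keep references so tasks aren't garbage-collected mid-run.


def create_task(coro: Coroutine, *, event_loop: Optional[asyncio.AbstractEventLoop] = None) -> asyncio.Task:
    """Schedule `coro` on `event_loop` (or the current loop) and watch it for unhandled exceptions."""
    loop = event_loop or asyncio.get_event_loop()
    task = loop.create_task(coro)
    _background_tasks.add(task)
    task.add_done_callback(_background_tasks.discard)
    task.add_done_callback(_report_exception)
    return task


def _report_exception(task: asyncio.Task) -> None:
    """Report the exception of a finished task instead of letting it disappear silently."""
    if not task.cancelled() and (exc := task.exception()) is not None:
        print(f"Background task {task.get_name()!r} failed: {exc!r}")
```

Usage then mirrors the call sites in the diff, e.g. `scheduling.create_task(self._async_send(data), event_loop=self._loop)`.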
diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index d572bd705..000000000 --- a/.coveragerc +++ /dev/null @@ -1,5 +0,0 @@ -[run] -branch = true -source = - bot - tests diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 84a671917..f8f2c8888 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -8,6 +8,10 @@ on: types: - completed +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + jobs: build: if: github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.event == 'push' diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 8b809b777..88abe6fb6 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -8,6 +8,10 @@ on: types: - completed +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + jobs: build: environment: production @@ -38,6 +42,6 @@ jobs: uses: Azure/k8s-deploy@v1 with: manifests: | - bot/deployment.yaml + namespaces/default/bot/deployment.yaml images: 'ghcr.io/python-discord/bot:${{ steps.sha_tag.outputs.tag }}' kubectl-version: 'latest' diff --git a/.github/workflows/lint-test.yml b/.github/workflows/lint-test.yml index d96f324ec..f2c9dfb6c 100644 --- a/.github/workflows/lint-test.yml +++ b/.github/workflows/lint-test.yml @@ -6,11 +6,25 @@ on: - main pull_request: +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true jobs: lint-test: runs-on: ubuntu-latest env: + # List of licenses that are compatible with the MIT License and + # can be used in our project + ALLOWED_LICENSE: Apache Software License; + BSD License; + GNU Library or Lesser General Public License (LGPL); + ISC License (ISCL); + MIT License; + Mozilla Public License 2.0 (MPL 2.0); + Public Domain; + Python Software Foundation License + # Dummy values for required bot environment variables BOT_API_KEY: foo BOT_SENTRY_DSN: blah @@ -67,6 +81,15 @@ jobs: pip install poetry poetry install + # Check all of our non-dev dependencies are compatible with the MIT license. + # If you added a new dependencies that is being rejected, + # please make sure it is compatible with the license for this project, + # and add it to the ALLOWED_LICENSE variable + - name: Check Dependencies License + run: | + pip-licenses --allow-only="$ALLOWED_LICENSE" \ + --package $(poetry export -f requirements.txt --without-hashes | sed "s/==.*//g" | tr "\n" " ") + # This step caches our pre-commit environment. To make sure we # do create a new environment when our pre-commit setup changes, # we create a cache key based on relevant factors. @@ -97,19 +120,8 @@ jobs: --format='::error file=%(path)s,line=%(row)d,col=%(col)d::\ [flake8] %(code)s: %(text)s'" - # We run `coverage` using the `python` command so we can suppress - # irrelevant warnings in our CI output. - name: Run tests and generate coverage report - run: | - python -Wignore -m coverage run -m unittest - coverage report -m - - # This step will publish the coverage reports coveralls.io and - # print a "job" link in the output of the GitHub Action - - name: Publish coverage report to coveralls.io - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: coveralls + run: pytest -n auto --cov --disable-warnings -q # Prepare the Pull Request Payload artifact. If this fails, we # we fail silently using the `continue-on-error` option. 
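In the bot modules later in this patch, `logging.getLogger(__name__)` is swapped for `get_logger(__name__)` from `bot.log`, and several call sites use `log.trace(...)`. `bot/log.py` itself is not included in this excerpt, so the sketch below is only an assumption of its shape — a logger subclass that adds a TRACE level below DEBUG; the real module may do more (Sentry setup, handlers, formatting).

```python
# Hypothetical sketch of bot.log.get_logger -- the real bot/log.py is not in this diff;
# the TRACE level value and the CustomLogger name are assumptions for illustration.
import logging
import typing

TRACE_LEVEL = 5  # Assumed: a custom severity below DEBUG.
logging.addLevelName(TRACE_LEVEL, "TRACE")


class CustomLogger(logging.Logger):
    """Logger subclass that adds a `trace()` convenience method."""

    def trace(self, msg: str, *args, **kwargs) -> None:
        """Log `msg` with severity TRACE, if that level is enabled."""
        if self.isEnabledFor(TRACE_LEVEL):
            self._log(TRACE_LEVEL, msg, args, **kwargs)


logging.setLoggerClass(CustomLogger)


def get_logger(name: typing.Optional[str] = None) -> CustomLogger:
    """Drop-in replacement for `logging.getLogger` that returns the subclass above."""
    return typing.cast(CustomLogger, logging.getLogger(name))
```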
It's diff --git a/.github/workflows/sentry_release.yml b/.github/workflows/sentry_release.yml index f6a1e1f0e..48f5e50f4 100644 --- a/.github/workflows/sentry_release.yml +++ b/.github/workflows/sentry_release.yml @@ -5,6 +5,10 @@ on: branches: - main +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + jobs: create_sentry_release: runs-on: ubuntu-latest diff --git a/.github/workflows/status_embed.yaml b/.github/workflows/status_embed.yaml index b6a71b887..4178c366d 100644 --- a/.github/workflows/status_embed.yaml +++ b/.github/workflows/status_embed.yaml @@ -9,6 +9,10 @@ on: types: - completed +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + jobs: status_embed: # We need to send a status embed whenever the workflow diff --git a/.gitignore b/.gitignore index f74a142f3..177345908 100644 --- a/.gitignore +++ b/.gitignore @@ -116,6 +116,7 @@ log.* # Custom user configuration config.yml docker-compose.override.yml +metricity-config.toml # xmlrunner unittest XML reports TEST-**.xml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a9412f07d..d8a90ac00 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -13,6 +13,11 @@ repos: rev: v1.5.1 hooks: - id: python-check-blanket-noqa + - repo: https://github.com/pycqa/isort + rev: 5.8.0 + hooks: + - id: isort + name: isort (python) - repo: local hooks: - id: flake8 diff --git a/Dockerfile b/Dockerfile index c285898dc..30bf8a361 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.9.5-slim +FROM --platform=linux/amd64 python:3.9-slim # Set pip to have no saved cache ENV PIP_NO_CACHE_DIR=false \ diff --git a/bot/__init__.py b/bot/__init__.py index 8f880b8e6..a1c4466f1 100644 --- a/bot/__init__.py +++ b/bot/__init__.py @@ -5,8 +5,7 @@ from typing import TYPE_CHECKING from discord.ext import commands -from bot import log -from bot.command import Command +from bot import log, monkey_patches if TYPE_CHECKING: from bot.bot import Bot @@ -17,9 +16,11 @@ log.setup() if os.name == "nt": asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) +monkey_patches.patch_typing() + # Monkey-patch discord.py decorators to use the Command subclass which supports root aliases. # Must be patched before any cogs are added. -commands.command = partial(commands.command, cls=Command) -commands.GroupMixin.command = partialmethod(commands.GroupMixin.command, cls=Command) +commands.command = partial(commands.command, cls=monkey_patches.Command) +commands.GroupMixin.command = partialmethod(commands.GroupMixin.command, cls=monkey_patches.Command) instance: "Bot" = None # Global Bot instance. diff --git a/bot/__main__.py b/bot/__main__.py index 9317563c8..0d3fce180 100644 --- a/bot/__main__.py +++ b/bot/__main__.py @@ -1,11 +1,9 @@ -import logging - import aiohttp import bot from bot import constants from bot.bot import Bot, StartupError -from bot.log import setup_sentry +from bot.log import get_logger, setup_sentry setup_sentry() @@ -21,7 +19,7 @@ except StartupError as e: message = "Could not connect to Redis. Is it running?" 
# The exception is logged with an empty message so the actual message is visible at the bottom - log = logging.getLogger("bot") + log = get_logger("bot") log.fatal("", exc_info=e.exception) log.fatal(message) diff --git a/bot/api.py b/bot/api.py index 6ce9481f4..856f7c865 100644 --- a/bot/api.py +++ b/bot/api.py @@ -1,13 +1,14 @@ import asyncio -import logging from typing import Optional from urllib.parse import quote as quote_url import aiohttp +from bot.log import get_logger + from .constants import Keys, URLs -log = logging.getLogger(__name__) +log = get_logger(__name__) class ResponseCodeError(ValueError): diff --git a/bot/async_stats.py b/bot/async_stats.py index 58a80f528..2af832e5b 100644 --- a/bot/async_stats.py +++ b/bot/async_stats.py @@ -3,6 +3,8 @@ import socket from statsd.client.base import StatsClientBase +from bot.utils import scheduling + class AsyncStatsClient(StatsClientBase): """An async transport method for statsd communication.""" @@ -32,7 +34,7 @@ class AsyncStatsClient(StatsClientBase): def _send(self, data: str) -> None: """Start an async task to send data to statsd.""" - self._loop.create_task(self._async_send(data)) + scheduling.create_task(self._async_send(data), event_loop=self._loop) async def _async_send(self, data: str) -> None: """Send data to the statsd server using the async transport.""" diff --git a/bot/bot.py b/bot/bot.py index 914da9c98..94783a466 100644 --- a/bot/bot.py +++ b/bot/bot.py @@ -1,5 +1,4 @@ import asyncio -import logging import socket import warnings from collections import defaultdict @@ -14,8 +13,9 @@ from sentry_sdk import push_scope from bot import api, constants from bot.async_stats import AsyncStatsClient +from bot.log import get_logger -log = logging.getLogger('bot') +log = get_logger('bot') LOCALHOST = "127.0.0.1" @@ -109,7 +109,7 @@ class Bot(commands.Bot): def create(cls) -> "Bot": """Create and return an instance of a Bot.""" loop = asyncio.get_event_loop() - allowed_roles = [discord.Object(id_) for id_ in constants.MODERATION_ROLES] + allowed_roles = list({discord.Object(id_) for id_ in constants.MODERATION_ROLES}) intents = discord.Intents.all() intents.presences = False diff --git a/bot/command.py b/bot/command.py deleted file mode 100644 index 0fb900f7b..000000000 --- a/bot/command.py +++ /dev/null @@ -1,18 +0,0 @@ -from discord.ext import commands - - -class Command(commands.Command): - """ - A `discord.ext.commands.Command` subclass which supports root aliases. - - A `root_aliases` keyword argument is added, which is a sequence of alias names that will act as - top-level commands rather than being aliases of the command's group. It's stored as an attribute - also named `root_aliases`. - """ - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.root_aliases = kwargs.get("root_aliases", []) - - if not isinstance(self.root_aliases, (list, tuple)): - raise TypeError("Root aliases of a command must be a list or a tuple of strings.") diff --git a/bot/constants.py b/bot/constants.py index 3d960f22b..f704c9e6a 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -9,8 +9,6 @@ the custom configuration. Any settings left out in the custom user configuration will stay their default values from `config-default.yml`. 
""" - -import logging import os from collections.abc import Mapping from enum import Enum @@ -25,8 +23,6 @@ try: except ModuleNotFoundError: pass -log = logging.getLogger(__name__) - def _env_var_constructor(loader, node): """ @@ -104,7 +100,7 @@ def _recursive_update(original, new): if Path("config.yml").exists(): - log.info("Found `config.yml` file, loading constants from it.") + print("Found `config.yml` file, loading constants from it.") with open("config.yml", encoding="UTF-8") as f: user_config = yaml.safe_load(f) _recursive_update(_CONFIG_YAML, user_config) @@ -123,11 +119,10 @@ def check_required_keys(keys): if lookup is None: raise KeyError(key) except KeyError: - log.critical( + raise KeyError( f"A configuration for `{key_path}` is required, but was not found. " "Please set it in `config.yml` or setup an environment variable and try again." ) - raise try: @@ -186,8 +181,7 @@ class YAMLGetter(type): (cls.section, cls.subsection, name) if cls.subsection is not None else (cls.section, name) ) - # Only an INFO log since this can be caught through `hasattr` or `getattr`. - log.info(f"Tried accessing configuration variable at `{dotted_path}`, but it could not be found.") + print(f"Tried accessing configuration variable at `{dotted_path}`, but it could not be found.") raise AttributeError(repr(name)) from e def __getitem__(cls, name): @@ -400,6 +394,8 @@ class Categories(metaclass=YAMLGetter): modmail: int voice: int + # 2021 Summer Code Jam + summer_code_jam: int class Channels(metaclass=YAMLGetter): section = "guild" @@ -439,6 +435,7 @@ class Channels(metaclass=YAMLGetter): discord_py: int esoteric: int voice_gate: int + code_jam_planning: int admins: int admin_spam: int @@ -456,18 +453,19 @@ class Channels(metaclass=YAMLGetter): staff_announcements: int admins_voice: int + code_help_voice_0: int code_help_voice_1: int - code_help_voice_2: int - general_voice: int + general_voice_0: int + general_voice_1: int staff_voice: int + code_help_chat_0: int code_help_chat_1: int - code_help_chat_2: int staff_voice_chat: int - voice_chat: int + voice_chat_0: int + voice_chat_1: int big_brother_logs: int - talent_pool: int class Webhooks(metaclass=YAMLGetter): @@ -478,7 +476,6 @@ class Webhooks(metaclass=YAMLGetter): dev_log: int duck_pond: int incidents_archive: int - talent_pool: int class Roles(metaclass=YAMLGetter): @@ -497,8 +494,10 @@ class Roles(metaclass=YAMLGetter): admins: int core_developers: int + code_jam_event_team: int devops: int domain_leads: int + events_lead: int helpers: int moderators: int mod_team: int @@ -506,7 +505,6 @@ class Roles(metaclass=YAMLGetter): project_leads: int jammers: int - team_leaders: int class Guild(metaclass=YAMLGetter): @@ -562,13 +560,16 @@ class Metabase(metaclass=YAMLGetter): username: Optional[str] password: Optional[str] - url: str + base_url: str + public_url: str max_session_age: int class AntiSpam(metaclass=YAMLGetter): section = 'anti_spam' + cache_size: int + clean_offending: bool ping_everyone: bool @@ -681,7 +682,7 @@ class VideoPermission(metaclass=YAMLGetter): # Debug mode -DEBUG_MODE = 'local' in os.environ.get("SITE_URL", "local") +DEBUG_MODE: bool = _CONFIG_YAML["debug"] == "true" # Paths BOT_DIR = os.path.dirname(__file__) @@ -690,6 +691,7 @@ PROJECT_ROOT = os.path.abspath(os.path.join(BOT_DIR, os.pardir)) # Default role combinations MODERATION_ROLES = Guild.moderation_roles STAFF_ROLES = Guild.staff_roles +STAFF_PARTNERS_COMMUNITY_ROLES = STAFF_ROLES + [Roles.partners, Roles.python_community] # Channel combinations 
MODERATION_CHANNELS = Guild.moderation_channels diff --git a/bot/converters.py b/bot/converters.py index 2a3943831..dd02f6ae6 100644 --- a/bot/converters.py +++ b/bot/converters.py @@ -1,8 +1,8 @@ -import logging +from __future__ import annotations + import re import typing as t from datetime import datetime -from functools import partial from ssl import CertificateError import dateutil.parser @@ -10,16 +10,23 @@ import dateutil.tz import discord from aiohttp import ClientConnectorError from dateutil.relativedelta import relativedelta -from discord.ext.commands import BadArgument, Bot, Context, Converter, IDConverter, UserConverter -from discord.utils import DISCORD_EPOCH, snowflake_time +from discord.ext.commands import BadArgument, Bot, Context, Converter, IDConverter, MemberConverter, UserConverter +from discord.utils import DISCORD_EPOCH, escape_markdown, snowflake_time +from bot import exts from bot.api import ResponseCodeError from bot.constants import URLs +from bot.errors import InvalidInfraction from bot.exts.info.doc import _inventory_parser +from bot.log import get_logger +from bot.utils.extensions import EXTENSIONS, unqualify from bot.utils.regex import INVITE_RE from bot.utils.time import parse_duration_string -log = logging.getLogger(__name__) +if t.TYPE_CHECKING: + from bot.exts.info.source import SourceType + +log = get_logger(__name__) DISCORD_EPOCH_DT = datetime.utcfromtimestamp(DISCORD_EPOCH / 1000) RE_USER_MENTION = re.compile(r"<@!?([0-9]+)>$") @@ -64,10 +71,10 @@ class ValidDiscordServerInvite(Converter): async def convert(self, ctx: Context, server_invite: str) -> dict: """Check whether the string is a valid Discord server invite.""" - invite_code = INVITE_RE.search(server_invite) + invite_code = INVITE_RE.match(server_invite) if invite_code: response = await ctx.bot.http_session.get( - f"{URLs.discord_invite_api}/{invite_code[1]}" + f"{URLs.discord_invite_api}/{invite_code.group('invite')}" ) if response.status != 404: invite_data = await response.json() @@ -128,6 +135,44 @@ class ValidFilterListType(Converter): return list_type +class Extension(Converter): + """ + Fully qualify the name of an extension and ensure it exists. + + The * and ** values bypass this when used with the reload command. + """ + + async def convert(self, ctx: Context, argument: str) -> str: + """Fully qualify the name of an extension and ensure it exists.""" + # Special values to reload all extensions + if argument == "*" or argument == "**": + return argument + + argument = argument.lower() + + if argument in EXTENSIONS: + return argument + elif (qualified_arg := f"{exts.__name__}.{argument}") in EXTENSIONS: + return qualified_arg + + matches = [] + for ext in EXTENSIONS: + if argument == unqualify(ext): + matches.append(ext) + + if len(matches) > 1: + matches.sort() + names = "\n".join(matches) + raise BadArgument( + f":x: `{argument}` is an ambiguous extension name. " + f"Please use one of the following fully-qualified names.```\n{names}```" + ) + elif matches: + return matches[0] + else: + raise BadArgument(f":x: Could not find the extension `{argument}`.") + + class PackageName(Converter): """ A converter that checks whether the given string is a valid package name. 
@@ -191,11 +236,16 @@ class Inventory(Converter): async def convert(ctx: Context, url: str) -> t.Tuple[str, _inventory_parser.InventoryDict]: """Convert url to Intersphinx inventory URL.""" await ctx.trigger_typing() - if (inventory := await _inventory_parser.fetch_inventory(url)) is None: - raise BadArgument( - f"Failed to fetch inventory file after {_inventory_parser.FAILED_REQUEST_ATTEMPTS} attempts." - ) - return url, inventory + try: + inventory = await _inventory_parser.fetch_inventory(url) + except _inventory_parser.InvalidHeaderError: + raise BadArgument("Unable to parse inventory because of invalid header, check if URL is correct.") + else: + if inventory is None: + raise BadArgument( + f"Failed to fetch inventory file after {_inventory_parser.FAILED_REQUEST_ATTEMPTS} attempts." + ) + return url, inventory class Snowflake(IDConverter): @@ -223,7 +273,7 @@ class Snowflake(IDConverter): snowflake = int(arg) try: - time = snowflake_time(snowflake) + time = snowflake_time(snowflake).replace(tzinfo=None) except (OverflowError, OSError) as e: # Not sure if this can ever even happen, but let's be safe. raise BadArgument(f"{error}: {e}") @@ -271,23 +321,36 @@ class TagNameConverter(Converter): return tag_name -class TagContentConverter(Converter): - """Ensure proposed tag content is not empty and contains at least one non-whitespace character.""" +class SourceConverter(Converter): + """Convert an argument into a help command, tag, command, or cog.""" @staticmethod - async def convert(ctx: Context, tag_content: str) -> str: - """ - Ensure tag_content is non-empty and contains at least one non-whitespace character. + async def convert(ctx: Context, argument: str) -> SourceType: + """Convert argument into source object.""" + if argument.lower() == "help": + return ctx.bot.help_command - If tag_content is valid, return the stripped version. - """ - tag_content = tag_content.strip() + cog = ctx.bot.get_cog(argument) + if cog: + return cog + + cmd = ctx.bot.get_command(argument) + if cmd: + return cmd + + tags_cog = ctx.bot.get_cog("Tags") + show_tag = True - # The tag contents should not be empty, or filled with whitespace. - if not tag_content: - raise BadArgument("Tag contents should not be empty, or filled with whitespace.") + if not tags_cog: + show_tag = False + elif argument.lower() in tags_cog._cache: + return argument.lower() - return tag_content + escaped_arg = escape_markdown(argument) + + raise BadArgument( + f"Unable to convert '{escaped_arg}' to valid command{', tag,' if show_tag else ''} or Cog." + ) class DurationDelta(Converter): @@ -335,7 +398,8 @@ class Duration(DurationDelta): class OffTopicName(Converter): """A converter that ensures an added off-topic name is valid.""" - ALLOWED_CHARACTERS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ!?'`-" + ALLOWED_CHARACTERS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ!?'`-<>" + TRANSLATED_CHARACTERS = "𝖠𝖡𝖢𝖣𝖤𝖥𝖦𝖧𝖨𝖩𝖪𝖫𝖬𝖭𝖮𝖯𝖰𝖱𝖲𝖳𝖴𝖵𝖶𝖷𝖸𝖹ǃ?’’-<>" @classmethod def translate_name(cls, name: str, *, from_unicode: bool = True) -> str: @@ -345,9 +409,9 @@ class OffTopicName(Converter): If `from_unicode` is True, the name is translated from a discord-safe format, back to normalized text. 
""" if from_unicode: - table = str.maketrans(cls.ALLOWED_CHARACTERS, '𝖠𝖡𝖢𝖣𝖤𝖥𝖦𝖧𝖨𝖩𝖪𝖫𝖬𝖭𝖮𝖯𝖰𝖱𝖲𝖳𝖴𝖵𝖶𝖷𝖸𝖹ǃ?’’-') + table = str.maketrans(cls.ALLOWED_CHARACTERS, cls.TRANSLATED_CHARACTERS) else: - table = str.maketrans('𝖠𝖡𝖢𝖣𝖤𝖥𝖦𝖧𝖨𝖩𝖪𝖫𝖬𝖭𝖮𝖯𝖰𝖱𝖲𝖳𝖴𝖵𝖶𝖷𝖸𝖹ǃ?’’-', cls.ALLOWED_CHARACTERS) + table = str.maketrans(cls.TRANSLATED_CHARACTERS, cls.ALLOWED_CHARACTERS) return name.translate(table) @@ -416,11 +480,11 @@ class HushDurationConverter(Converter): MINUTES_RE = re.compile(r"(\d+)(?:M|m|$)") - async def convert(self, ctx: Context, argument: str) -> t.Optional[int]: + async def convert(self, ctx: Context, argument: str) -> int: """ Convert `argument` to a duration that's max 15 minutes or None. - If `"forever"` is passed, None is returned; otherwise an int of the extracted time. + If `"forever"` is passed, -1 is returned; otherwise an int of the extracted time. Accepted formats are: * <duration>, * <duration>m, @@ -428,7 +492,7 @@ class HushDurationConverter(Converter): * forever. """ if argument == "forever": - return None + return -1 match = self.MINUTES_RE.match(argument) if not match: raise BadArgument(f"{argument} is not a valid minutes duration.") @@ -439,103 +503,51 @@ class HushDurationConverter(Converter): return duration -def proxy_user(user_id: str) -> discord.Object: - """ - Create a proxy user object from the given id. +def _is_an_unambiguous_user_argument(argument: str) -> bool: + """Check if the provided argument is a user mention, user id, or username (name#discrim).""" + has_id_or_mention = bool(IDConverter()._get_id_match(argument) or RE_USER_MENTION.match(argument)) - Used when a Member or User object cannot be resolved. - """ - log.trace(f"Attempting to create a proxy user for the user id {user_id}.") + # Check to see if the author passed a username (a discriminator exists) + argument = argument.removeprefix('@') + has_username = len(argument) > 5 and argument[-5] == '#' - try: - user_id = int(user_id) - except ValueError: - log.debug(f"Failed to create proxy user {user_id}: could not convert to int.") - raise BadArgument(f"User ID `{user_id}` is invalid - could not convert to an integer.") + return has_id_or_mention or has_username - user = discord.Object(user_id) - user.mention = user.id - user.display_name = f"<@{user.id}>" - user.avatar_url_as = lambda static_format: None - user.bot = False - return user +AMBIGUOUS_ARGUMENT_MSG = ("`{argument}` is not a User mention, a User ID or a Username in the format" + " `name#discriminator`.") -class UserMentionOrID(UserConverter): +class UnambiguousUser(UserConverter): """ - Converts to a `discord.User`, but only if a mention or userID is provided. + Converts to a `discord.User`, but only if a mention, userID or a username (name#discrim) is provided. - Unlike the default `UserConverter`, it doesn't allow conversion from a name or name#descrim. - This is useful in cases where that lookup strategy would lead to ambiguity. + Unlike the default `UserConverter`, it doesn't allow conversion from a name. + This is useful in cases where that lookup strategy would lead to too much ambiguity. 
""" async def convert(self, ctx: Context, argument: str) -> discord.User: - """Convert the `arg` to a `discord.User`.""" - match = self._get_id_match(argument) or RE_USER_MENTION.match(argument) - - if match is not None: + """Convert the `argument` to a `discord.User`.""" + if _is_an_unambiguous_user_argument(argument): return await super().convert(ctx, argument) else: - raise BadArgument(f"`{argument}` is not a User mention or a User ID.") - - -class FetchedUser(UserConverter): - """ - Converts to a `discord.User` or, if it fails, a `discord.Object`. - - Unlike the default `UserConverter`, which only does lookups via the global user cache, this - converter attempts to fetch the user via an API call to Discord when the using the cache is - unsuccessful. - - If the fetch also fails and the error doesn't imply the user doesn't exist, then a - `discord.Object` is returned via the `user_proxy` converter. - - The lookup strategy is as follows (in order): - - 1. Lookup by ID. - 2. Lookup by mention. - 3. Lookup by name#discrim - 4. Lookup by name - 5. Lookup via API - 6. Create a proxy user with discord.Object - """ - - async def convert(self, ctx: Context, arg: str) -> t.Union[discord.User, discord.Object]: - """Convert the `arg` to a `discord.User` or `discord.Object`.""" - try: - return await super().convert(ctx, arg) - except BadArgument: - pass - - try: - user_id = int(arg) - log.trace(f"Fetching user {user_id}...") - return await ctx.bot.fetch_user(user_id) - except ValueError: - log.debug(f"Failed to fetch user {arg}: could not convert to int.") - raise BadArgument(f"The provided argument can't be turned into integer: `{arg}`") - except discord.HTTPException as e: - # If the Discord error isn't `Unknown user`, return a proxy instead - if e.code != 10013: - log.info(f"Failed to fetch user, returning a proxy instead: status {e.status}") - return proxy_user(arg) + raise BadArgument(AMBIGUOUS_ARGUMENT_MSG.format(argument=argument)) - log.debug(f"Failed to fetch user {arg}: user does not exist.") - raise BadArgument(f"User `{arg}` does not exist") - -def _snowflake_from_regex(pattern: t.Pattern, arg: str) -> int: +class UnambiguousMember(MemberConverter): """ - Extract the snowflake from `arg` using a regex `pattern` and return it as an int. + Converts to a `discord.Member`, but only if a mention, userID or a username (name#discrim) is provided. - The snowflake is expected to be within the first capture group in `pattern`. + Unlike the default `MemberConverter`, it doesn't allow conversion from a name or nickname. + This is useful in cases where that lookup strategy would lead to too much ambiguity. 
""" - match = pattern.match(arg) - if not match: - raise BadArgument(f"Mention {str!r} is invalid.") - return int(match.group(1)) + async def convert(self, ctx: Context, argument: str) -> discord.Member: + """Convert the `argument` to a `discord.Member`.""" + if _is_an_unambiguous_user_argument(argument): + return await super().convert(ctx, argument) + else: + raise BadArgument(AMBIGUOUS_ARGUMENT_MSG.format(argument=argument)) class Infraction(Converter): @@ -554,7 +566,7 @@ class Infraction(Converter): "ordering": "-inserted_at" } - infractions = await ctx.bot.api_client.get("bot/infractions", params=params) + infractions = await ctx.bot.api_client.get("bot/infractions/expanded", params=params) if not infractions: raise BadArgument( @@ -564,9 +576,37 @@ class Infraction(Converter): return infractions[0] else: - return await ctx.bot.api_client.get(f"bot/infractions/{arg}") - + try: + return await ctx.bot.api_client.get(f"bot/infractions/{arg}/expanded") + except ResponseCodeError as e: + if e.status == 404: + raise InvalidInfraction( + converter=Infraction, + original=e, + infraction_arg=arg + ) + raise e + + +if t.TYPE_CHECKING: + ValidDiscordServerInvite = dict # noqa: F811 + ValidFilterListType = str # noqa: F811 + Extension = str # noqa: F811 + PackageName = str # noqa: F811 + ValidURL = str # noqa: F811 + Inventory = t.Tuple[str, _inventory_parser.InventoryDict] # noqa: F811 + Snowflake = int # noqa: F811 + TagNameConverter = str # noqa: F811 + SourceConverter = SourceType # noqa: F811 + DurationDelta = relativedelta # noqa: F811 + Duration = datetime # noqa: F811 + OffTopicName = str # noqa: F811 + ISODateTime = datetime # noqa: F811 + HushDurationConverter = int # noqa: F811 + UnambiguousUser = discord.User # noqa: F811 + UnambiguousMember = discord.Member # noqa: F811 + Infraction = t.Optional[dict] # noqa: F811 Expiry = t.Union[Duration, ISODateTime] -FetchedMember = t.Union[discord.Member, FetchedUser] -UserMention = partial(_snowflake_from_regex, RE_USER_MENTION) +MemberOrUser = t.Union[discord.Member, discord.User] +UnambiguousMemberOrUser = t.Union[UnambiguousMember, UnambiguousUser] diff --git a/bot/decorators.py b/bot/decorators.py index f65ec4103..048a2a09a 100644 --- a/bot/decorators.py +++ b/bot/decorators.py @@ -1,6 +1,5 @@ import asyncio import functools -import logging import types import typing as t from contextlib import suppress @@ -10,11 +9,12 @@ from discord.ext import commands from discord.ext.commands import Cog, Context from bot.constants import Channels, DEBUG_MODE, RedirectOutput -from bot.utils import function +from bot.log import get_logger +from bot.utils import function, scheduling from bot.utils.checks import ContextCheckFailure, in_whitelist_check from bot.utils.function import command_wraps -log = logging.getLogger(__name__) +log = get_logger(__name__) def in_whitelist( @@ -154,7 +154,7 @@ def redirect_output( if ping_user: await ctx.send(f"Here's the output of your command, {ctx.author.mention}") - asyncio.create_task(func(self, ctx, *args, **kwargs)) + scheduling.create_task(func(self, ctx, *args, **kwargs)) message = await old_channel.send( f"Hey, {ctx.author.mention}, you can find the output of your command here: " diff --git a/bot/errors.py b/bot/errors.py index 3544c6320..078b645f1 100644 --- a/bot/errors.py +++ b/bot/errors.py @@ -1,6 +1,11 @@ -from typing import Hashable, Union +from __future__ import annotations -from discord import Member, User +from typing import Hashable, TYPE_CHECKING, Union + +from discord.ext.commands import 
ConversionError, Converter + +if TYPE_CHECKING: + from bot.converters import MemberOrUser class LockedResourceError(RuntimeError): @@ -22,7 +27,7 @@ class LockedResourceError(RuntimeError): ) -class InvalidInfractedUser(Exception): +class InvalidInfractedUserError(Exception): """ Exception raised upon attempt of infracting an invalid user. @@ -30,14 +35,43 @@ class InvalidInfractedUser(Exception): `user` -- User or Member which is invalid """ - def __init__(self, user: Union[Member, User], reason: str = "User infracted is a bot."): + def __init__(self, user: MemberOrUser, reason: str = "User infracted is a bot."): + self.user = user self.reason = reason super().__init__(reason) +class InvalidInfraction(ConversionError): + """ + Raised by the Infraction converter when trying to fetch an invalid infraction id. + + Attributes: + `infraction_arg` -- the value that we attempted to convert into an Infraction + """ + + def __init__(self, converter: Converter, original: Exception, infraction_arg: Union[int, str]): + + self.infraction_arg = infraction_arg + super().__init__(converter, original) + + class BrandingMisconfiguration(RuntimeError): """Raised by the Branding cog when a misconfigured event is encountered.""" pass + + +class NonExistentRoleError(ValueError): + """ + Raised by the Information Cog when encountering a Role that does not exist. + + Attributes: + `role_id` -- the ID of the role that does not exist + """ + + def __init__(self, role_id: int): + super().__init__(f"Could not fetch data for role {role_id}") + + self.role_id = role_id diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index 47c379a34..9c5bdbb4e 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -1,6 +1,5 @@ import asyncio import contextlib -import logging import random import typing as t from datetime import timedelta @@ -17,8 +16,10 @@ from bot.bot import Bot from bot.constants import Branding as BrandingConfig, Channels, Colours, Guild, MODERATION_ROLES from bot.decorators import mock_in_debug from bot.exts.backend.branding._repository import BrandingRepository, Event, RemoteObject +from bot.log import get_logger +from bot.utils import scheduling -log = logging.getLogger(__name__) +log = get_logger(__name__) class AssetType(Enum): @@ -50,7 +51,7 @@ def make_embed(title: str, description: str, *, success: bool) -> discord.Embed: For both `title` and `description`, empty string are valid values ~ fields will be empty. """ colour = Colours.soft_green if success else Colours.soft_red - return discord.Embed(title=title[:256], description=description[:2048], colour=colour) + return discord.Embed(title=title[:256], description=description[:4096], colour=colour) def extract_event_duration(event: Event) -> str: @@ -126,7 +127,7 @@ class Branding(commands.Cog): self.bot = bot self.repository = BrandingRepository(bot) - self.bot.loop.create_task(self.maybe_start_daemon()) # Start depending on cache. + scheduling.create_task(self.maybe_start_daemon(), event_loop=self.bot.loop) # Start depending on cache. # region: Internal logic & state management @@ -293,8 +294,8 @@ class Branding(commands.Cog): else: content = "Python Discord is entering a new event!" 
if is_notification else None - embed = discord.Embed(description=description[:2048], colour=discord.Colour.blurple()) - embed.set_footer(text=duration[:2048]) + embed = discord.Embed(description=description[:4096], colour=discord.Colour.blurple()) + embed.set_footer(text=duration[:4096]) await channel.send(content=content, embed=embed) diff --git a/bot/exts/backend/branding/_repository.py b/bot/exts/backend/branding/_repository.py index 7b09d4641..d88ea67f3 100644 --- a/bot/exts/backend/branding/_repository.py +++ b/bot/exts/backend/branding/_repository.py @@ -1,4 +1,3 @@ -import logging import typing as t from datetime import date, datetime @@ -7,6 +6,7 @@ import frontmatter from bot.bot import Bot from bot.constants import Keys from bot.errors import BrandingMisconfiguration +from bot.log import get_logger # Base URL for requests into the branding repository. BRANDING_URL = "https://api.github.com/repos/python-discord/branding/contents" @@ -25,7 +25,7 @@ ARBITRARY_YEAR = 2020 # Format used to parse date strings after we inject `ARBITRARY_YEAR` at the end. DATE_FMT = "%B %d %Y" # Ex: July 10 2020 -log = logging.getLogger(__name__) +log = get_logger(__name__) class RemoteObject: diff --git a/bot/exts/backend/config_verifier.py b/bot/exts/backend/config_verifier.py index d72c6c22e..dc85a65a2 100644 --- a/bot/exts/backend/config_verifier.py +++ b/bot/exts/backend/config_verifier.py @@ -1,12 +1,11 @@ -import logging - from discord.ext.commands import Cog from bot import constants from bot.bot import Bot +from bot.log import get_logger +from bot.utils import scheduling - -log = logging.getLogger(__name__) +log = get_logger(__name__) class ConfigVerifier(Cog): @@ -14,7 +13,7 @@ class ConfigVerifier(Cog): def __init__(self, bot: Bot): self.bot = bot - self.channel_verify_task = self.bot.loop.create_task(self.verify_channels()) + self.channel_verify_task = scheduling.create_task(self.verify_channels(), event_loop=self.bot.loop) async def verify_channels(self) -> None: """ diff --git a/bot/exts/backend/error_handler.py b/bot/exts/backend/error_handler.py index d8de177f5..6ab6634a6 100644 --- a/bot/exts/backend/error_handler.py +++ b/bot/exts/backend/error_handler.py @@ -1,19 +1,19 @@ import difflib -import logging import typing as t from discord import Embed -from discord.ext.commands import Cog, Context, errors +from discord.ext.commands import ChannelNotFound, Cog, Context, TextChannelConverter, VoiceChannelConverter, errors from sentry_sdk import push_scope from bot.api import ResponseCodeError from bot.bot import Bot from bot.constants import Colours, Icons, MODERATION_ROLES from bot.converters import TagNameConverter -from bot.errors import InvalidInfractedUser, LockedResourceError +from bot.errors import InvalidInfractedUserError, LockedResourceError +from bot.log import get_logger from bot.utils.checks import ContextCheckFailure -log = logging.getLogger(__name__) +log = get_logger(__name__) class ErrorHandler(Cog): @@ -59,43 +59,43 @@ class ErrorHandler(Cog): log.trace(f"Command {command} had its error already handled locally; ignoring.") return + debug_message = ( + f"Command {command} invoked by {ctx.message.author} with error " + f"{e.__class__.__name__}: {e}" + ) + if isinstance(e, errors.CommandNotFound) and not getattr(ctx, "invoked_from_error_handler", False): if await self.try_silence(ctx): return - # Try to look for a tag with the command's name - await self.try_get_tag(ctx) - return # Exit early to avoid logging. 
+ await self.try_get_tag(ctx) # Try to look for a tag with the command's name elif isinstance(e, errors.UserInputError): + log.debug(debug_message) await self.handle_user_input_error(ctx, e) elif isinstance(e, errors.CheckFailure): + log.debug(debug_message) await self.handle_check_failure(ctx, e) elif isinstance(e, errors.CommandOnCooldown): + log.debug(debug_message) await ctx.send(e) elif isinstance(e, errors.CommandInvokeError): if isinstance(e.original, ResponseCodeError): await self.handle_api_error(ctx, e.original) elif isinstance(e.original, LockedResourceError): await ctx.send(f"{e.original} Please wait for it to finish and try again later.") - elif isinstance(e.original, InvalidInfractedUser): + elif isinstance(e.original, InvalidInfractedUserError): await ctx.send(f"Cannot infract that user. {e.original.reason}") else: await self.handle_unexpected_error(ctx, e.original) - return # Exit early to avoid logging. elif isinstance(e, errors.ConversionError): if isinstance(e.original, ResponseCodeError): await self.handle_api_error(ctx, e.original) else: await self.handle_unexpected_error(ctx, e.original) - return # Exit early to avoid logging. - elif not isinstance(e, errors.DisabledCommand): + elif isinstance(e, errors.DisabledCommand): + log.debug(debug_message) + else: # MaxConcurrencyReached, ExtensionError await self.handle_unexpected_error(ctx, e) - return # Exit early to avoid logging. - - log.debug( - f"Command {command} invoked by {ctx.message.author} with error " - f"{e.__class__.__name__}: {e}" - ) @staticmethod def get_help_command(ctx: Context) -> t.Coroutine: @@ -115,8 +115,10 @@ class ErrorHandler(Cog): Return bool depending on success of command. """ command = ctx.invoked_with.lower() + args = ctx.message.content.lower().split(" ") silence_command = self.bot.get_command("silence") ctx.invoked_from_error_handler = True + try: if not await silence_command.can_run(ctx): log.debug("Cancelling attempt to invoke silence/unsilence due to failed checks.") @@ -124,11 +126,30 @@ class ErrorHandler(Cog): except errors.CommandError: log.debug("Cancelling attempt to invoke silence/unsilence due to failed checks.") return False + + # Parse optional args + channel = None + duration = min(command.count("h") * 2, 15) + kick = False + + if len(args) > 1: + # Parse channel + for converter in (TextChannelConverter(), VoiceChannelConverter()): + try: + channel = await converter.convert(ctx, args[1]) + break + except ChannelNotFound: + continue + + if len(args) > 2 and channel is not None: + # Parse kick + kick = args[2].lower() == "true" + if command.startswith("shh"): - await ctx.invoke(silence_command, duration=min(command.count("h")*2, 15)) + await ctx.invoke(silence_command, duration_or_channel=channel, duration=duration, kick=kick) return True elif command.startswith("unshh"): - await ctx.invoke(self.bot.get_command("unsilence")) + await ctx.invoke(self.bot.get_command("unsilence"), channel=channel) return True return False @@ -167,9 +188,6 @@ class ErrorHandler(Cog): if not any(role.id in MODERATION_ROLES for role in ctx.author.roles): await self.send_command_suggestion(ctx, ctx.invoked_with) - # Return to not raise the exception - return - async def send_command_suggestion(self, ctx: Context, command_name: str) -> None: """Sends user similar commands if any can be found.""" # No similar tag found, or tag on cooldown - @@ -214,38 +232,32 @@ class ErrorHandler(Cog): """ if isinstance(e, errors.MissingRequiredArgument): embed = self._get_error_embed("Missing required argument", 
e.param.name) - await ctx.send(embed=embed) - await self.get_help_command(ctx) self.bot.stats.incr("errors.missing_required_argument") elif isinstance(e, errors.TooManyArguments): embed = self._get_error_embed("Too many arguments", str(e)) - await ctx.send(embed=embed) - await self.get_help_command(ctx) self.bot.stats.incr("errors.too_many_arguments") elif isinstance(e, errors.BadArgument): embed = self._get_error_embed("Bad argument", str(e)) - await ctx.send(embed=embed) - await self.get_help_command(ctx) self.bot.stats.incr("errors.bad_argument") elif isinstance(e, errors.BadUnionArgument): embed = self._get_error_embed("Bad argument", f"{e}\n{e.errors[-1]}") - await ctx.send(embed=embed) - await self.get_help_command(ctx) self.bot.stats.incr("errors.bad_union_argument") elif isinstance(e, errors.ArgumentParsingError): embed = self._get_error_embed("Argument parsing error", str(e)) await ctx.send(embed=embed) self.get_help_command(ctx).close() self.bot.stats.incr("errors.argument_parsing_error") + return else: embed = self._get_error_embed( "Input error", "Something about your input seems off. Check the arguments and try again." ) - await ctx.send(embed=embed) - await self.get_help_command(ctx) self.bot.stats.incr("errors.other_user_input_error") + await ctx.send(embed=embed) + await self.get_help_command(ctx) + @staticmethod async def handle_check_failure(ctx: Context, e: errors.CheckFailure) -> None: """ @@ -278,8 +290,8 @@ class ErrorHandler(Cog): async def handle_api_error(ctx: Context, e: ResponseCodeError) -> None: """Send an error message in `ctx` for ResponseCodeError and log it.""" if e.status == 404: - await ctx.send("There does not seem to be anything matching your query.") log.debug(f"API responded with 404 for command {ctx.command}") + await ctx.send("There does not seem to be anything matching your query.") ctx.bot.stats.incr("errors.api_error_404") elif e.status == 400: content = await e.response.json() @@ -287,12 +299,12 @@ class ErrorHandler(Cog): await ctx.send("According to the API, your request is malformed.") ctx.bot.stats.incr("errors.api_error_400") elif 500 <= e.status < 600: - await ctx.send("Sorry, there seems to be an internal issue with the API.") log.warning(f"API responded with {e.status} for command {ctx.command}") + await ctx.send("Sorry, there seems to be an internal issue with the API.") ctx.bot.stats.incr("errors.api_internal_server_error") else: - await ctx.send(f"Got an unexpected status code from the API (`{e.status}`).") log.warning(f"Unexpected API response for command {ctx.command}: {e.status}") + await ctx.send(f"Got an unexpected status code from the API (`{e.status}`).") ctx.bot.stats.incr(f"errors.api_error_{e.status}") @staticmethod diff --git a/bot/exts/backend/logging.py b/bot/exts/backend/logging.py index 823f14ea4..2d03cd580 100644 --- a/bot/exts/backend/logging.py +++ b/bot/exts/backend/logging.py @@ -1,13 +1,12 @@ -import logging - from discord import Embed from discord.ext.commands import Cog from bot.bot import Bot from bot.constants import Channels, DEBUG_MODE +from bot.log import get_logger +from bot.utils import scheduling - -log = logging.getLogger(__name__) +log = get_logger(__name__) class Logging(Cog): @@ -16,7 +15,7 @@ class Logging(Cog): def __init__(self, bot: Bot): self.bot = bot - self.bot.loop.create_task(self.startup_greeting()) + scheduling.create_task(self.startup_greeting(), event_loop=self.bot.loop) async def startup_greeting(self) -> None: """Announce our presence to the configured devlog channel.""" diff --git 
a/bot/exts/backend/sync/_cog.py b/bot/exts/backend/sync/_cog.py index 48d2b6f02..80f5750bc 100644 --- a/bot/exts/backend/sync/_cog.py +++ b/bot/exts/backend/sync/_cog.py @@ -1,4 +1,3 @@ -import logging from typing import Any, Dict from discord import Member, Role, User @@ -9,8 +8,10 @@ from bot import constants from bot.api import ResponseCodeError from bot.bot import Bot from bot.exts.backend.sync import _syncers +from bot.log import get_logger +from bot.utils import scheduling -log = logging.getLogger(__name__) +log = get_logger(__name__) class Sync(Cog): @@ -18,7 +19,7 @@ class Sync(Cog): def __init__(self, bot: Bot) -> None: self.bot = bot - self.bot.loop.create_task(self.sync_guild()) + scheduling.create_task(self.sync_guild(), event_loop=self.bot.loop) async def sync_guild(self) -> None: """Syncs the roles/users of the guild with the database.""" diff --git a/bot/exts/backend/sync/_syncers.py b/bot/exts/backend/sync/_syncers.py index c9f2d2da8..45301b098 100644 --- a/bot/exts/backend/sync/_syncers.py +++ b/bot/exts/backend/sync/_syncers.py @@ -1,5 +1,4 @@ import abc -import logging import typing as t from collections import namedtuple @@ -9,8 +8,10 @@ from more_itertools import chunked import bot from bot.api import ResponseCodeError +from bot.log import get_logger +from bot.utils.members import get_or_fetch_member -log = logging.getLogger(__name__) +log = get_logger(__name__) CHUNK_SIZE = 1000 @@ -156,7 +157,7 @@ class UserSyncer(Syncer): if db_user[db_field] != guild_value: updated_fields[db_field] = guild_value - if guild_user := guild.get_member(db_user["id"]): + if guild_user := await get_or_fetch_member(guild, db_user["id"]): seen_guild_users.add(guild_user.id) maybe_update("name", guild_user.name) diff --git a/bot/exts/events/__init__.py b/bot/exts/events/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/bot/exts/events/__init__.py diff --git a/bot/exts/events/code_jams/__init__.py b/bot/exts/events/code_jams/__init__.py new file mode 100644 index 000000000..16e81e365 --- /dev/null +++ b/bot/exts/events/code_jams/__init__.py @@ -0,0 +1,8 @@ +from bot.bot import Bot + + +def setup(bot: Bot) -> None: + """Load the CodeJams cog.""" + from bot.exts.events.code_jams._cog import CodeJams + + bot.add_cog(CodeJams(bot)) diff --git a/bot/exts/events/code_jams/_channels.py b/bot/exts/events/code_jams/_channels.py new file mode 100644 index 000000000..e8cf5f7bf --- /dev/null +++ b/bot/exts/events/code_jams/_channels.py @@ -0,0 +1,113 @@ +import typing as t + +import discord + +from bot.constants import Categories, Channels, Roles +from bot.log import get_logger + +log = get_logger(__name__) + +MAX_CHANNELS = 50 +CATEGORY_NAME = "Code Jam" + + +async def _get_category(guild: discord.Guild) -> discord.CategoryChannel: + """ + Return a code jam category. + + If all categories are full or none exist, create a new category. 
+ """ + for category in guild.categories: + if category.name == CATEGORY_NAME and len(category.channels) < MAX_CHANNELS: + return category + + return await _create_category(guild) + + +async def _create_category(guild: discord.Guild) -> discord.CategoryChannel: + """Create a new code jam category and return it.""" + log.info("Creating a new code jam category.") + + category_overwrites = { + guild.default_role: discord.PermissionOverwrite(read_messages=False), + guild.me: discord.PermissionOverwrite(read_messages=True) + } + + category = await guild.create_category_channel( + CATEGORY_NAME, + overwrites=category_overwrites, + reason="It's code jam time!" + ) + + await _send_status_update( + guild, f"Created a new category with the ID {category.id} for this Code Jam's team channels." + ) + + return category + + +def _get_overwrites( + members: list[tuple[discord.Member, bool]], + guild: discord.Guild, +) -> dict[t.Union[discord.Member, discord.Role], discord.PermissionOverwrite]: + """Get code jam team channels permission overwrites.""" + team_channel_overwrites = { + guild.default_role: discord.PermissionOverwrite(read_messages=False), + guild.get_role(Roles.code_jam_event_team): discord.PermissionOverwrite(read_messages=True) + } + + for member, _ in members: + team_channel_overwrites[member] = discord.PermissionOverwrite( + read_messages=True + ) + + return team_channel_overwrites + + +async def create_team_channel( + guild: discord.Guild, + team_name: str, + members: list[tuple[discord.Member, bool]], + team_leaders: discord.Role +) -> None: + """Create the team's text channel.""" + await _add_team_leader_roles(members, team_leaders) + + # Get permission overwrites and category + team_channel_overwrites = _get_overwrites(members, guild) + code_jam_category = await _get_category(guild) + + # Create a text channel for the team + await code_jam_category.create_text_channel( + team_name, + overwrites=team_channel_overwrites, + ) + + +async def create_team_leader_channel(guild: discord.Guild, team_leaders: discord.Role) -> None: + """Create the Team Leader Chat channel for the Code Jam team leaders.""" + category: discord.CategoryChannel = guild.get_channel(Categories.summer_code_jam) + + team_leaders_chat = await category.create_text_channel( + name="team-leaders-chat", + overwrites={ + guild.default_role: discord.PermissionOverwrite(read_messages=False), + team_leaders: discord.PermissionOverwrite(read_messages=True) + } + ) + + await _send_status_update(guild, f"Created {team_leaders_chat.mention} in the {category} category.") + + +async def _send_status_update(guild: discord.Guild, message: str) -> None: + """Inform the events lead with a status update when the command is ran.""" + channel: discord.TextChannel = guild.get_channel(Channels.code_jam_planning) + + await channel.send(f"<@&{Roles.events_lead}>\n\n{message}") + + +async def _add_team_leader_roles(members: list[tuple[discord.Member, bool]], team_leaders: discord.Role) -> None: + """Assign the team leader role to the team leaders.""" + for member, is_leader in members: + if is_leader: + await member.add_roles(team_leaders) diff --git a/bot/exts/events/code_jams/_cog.py b/bot/exts/events/code_jams/_cog.py new file mode 100644 index 000000000..b31d628d5 --- /dev/null +++ b/bot/exts/events/code_jams/_cog.py @@ -0,0 +1,236 @@ +import asyncio +import csv +import typing as t +from collections import defaultdict + +import discord +from discord import Colour, Embed, Guild, Member +from discord.ext import commands + +from bot.bot import 
Bot +from bot.constants import Emojis, Roles +from bot.exts.events.code_jams import _channels +from bot.log import get_logger +from bot.utils.members import get_or_fetch_member +from bot.utils.services import send_to_paste_service + +log = get_logger(__name__) + +TEAM_LEADERS_COLOUR = 0x11806a +DELETION_REACTION = "\U0001f4a5" + + +class CodeJams(commands.Cog): + """Manages the code-jam related parts of our server.""" + + def __init__(self, bot: Bot): + self.bot = bot + + @commands.group(aliases=("cj", "jam")) + @commands.has_any_role(Roles.admins) + async def codejam(self, ctx: commands.Context) -> None: + """A Group of commands for managing Code Jams.""" + if ctx.invoked_subcommand is None: + await ctx.send_help(ctx.command) + + @codejam.command() + async def create(self, ctx: commands.Context, csv_file: t.Optional[str] = None) -> None: + """ + Create code-jam teams from a CSV file or a link to one, specifying the team names, leaders and members. + + The CSV file must have 3 columns: 'Team Name', 'Team Member Discord ID', and 'Team Leader'. + + This will create the text channels for the teams, and give the team leaders their roles. + """ + async with ctx.typing(): + if csv_file: + async with self.bot.http_session.get(csv_file) as response: + if response.status != 200: + await ctx.send(f"Got a bad response from the URL: {response.status}") + return + + csv_file = await response.text() + + elif ctx.message.attachments: + csv_file = (await ctx.message.attachments[0].read()).decode("utf8") + else: + raise commands.BadArgument("You must include either a CSV file or a link to one.") + + teams = defaultdict(list) + reader = csv.DictReader(csv_file.splitlines()) + + for row in reader: + member = await get_or_fetch_member(ctx.guild, int(row["Team Member Discord ID"])) + + if member is None: + log.trace(f"Got an invalid member ID: {row['Team Member Discord ID']}") + continue + + teams[row["Team Name"]].append((member, row["Team Leader"].upper() == "Y")) + + team_leaders = await ctx.guild.create_role(name="Code Jam Team Leaders", colour=TEAM_LEADERS_COLOUR) + + for team_name, team_members in teams.items(): + await _channels.create_team_channel(ctx.guild, team_name, team_members, team_leaders) + + await _channels.create_team_leader_channel(ctx.guild, team_leaders) + await ctx.send(f"{Emojis.check_mark} Created Code Jam with {len(teams)} teams.") + + @codejam.command() + @commands.has_any_role(Roles.admins) + async def end(self, ctx: commands.Context) -> None: + """ + Delete all code jam channels. + + A confirmation message is displayed with the categories and channels to be deleted.. Pressing the added reaction + deletes those channels. + """ + def predicate_deletion_emoji_reaction(reaction: discord.Reaction, user: discord.User) -> bool: + """Return True if the reaction :boom: was added by the context message author on this message.""" + return ( + reaction.message.id == message.id + and user.id == ctx.author.id + and str(reaction) == DELETION_REACTION + ) + + # A copy of the list of channels is stored. This is to make sure that we delete precisely the channels displayed + # in the confirmation message. 
+ categories = self.jam_categories(ctx.guild) + category_channels = {category: category.channels.copy() for category in categories} + + confirmation_message = await self._build_confirmation_message(category_channels) + message = await ctx.send(confirmation_message) + await message.add_reaction(DELETION_REACTION) + try: + await self.bot.wait_for( + 'reaction_add', + check=predicate_deletion_emoji_reaction, + timeout=10 + ) + + except asyncio.TimeoutError: + await message.clear_reaction(DELETION_REACTION) + await ctx.send("Command timed out.", reference=message) + return + + else: + await message.clear_reaction(DELETION_REACTION) + for category, channels in category_channels.items(): + for channel in channels: + await channel.delete(reason="Code jam ended.") + await category.delete(reason="Code jam ended.") + + await message.add_reaction(Emojis.check_mark) + + @staticmethod + async def _build_confirmation_message( + categories: dict[discord.CategoryChannel, list[discord.abc.GuildChannel]] + ) -> str: + """Sends details of the channels to be deleted to the pasting service, and formats the confirmation message.""" + def channel_repr(channel: discord.abc.GuildChannel) -> str: + """Formats the channel name and ID and a readable format.""" + return f"{channel.name} ({channel.id})" + + def format_category_info(category: discord.CategoryChannel, channels: list[discord.abc.GuildChannel]) -> str: + """Displays the category and the channels within it in a readable format.""" + return f"{channel_repr(category)}:\n" + "\n".join(" - " + channel_repr(channel) for channel in channels) + + deletion_details = "\n\n".join( + format_category_info(category, channels) for category, channels in categories.items() + ) + + url = await send_to_paste_service(deletion_details) + if url is None: + url = "**Unable to send deletion details to the pasting service.**" + + return f"Are you sure you want to delete all code jam channels?\n\nThe channels to be deleted: {url}" + + @codejam.command() + @commands.has_any_role(Roles.admins, Roles.code_jam_event_team) + async def info(self, ctx: commands.Context, member: Member) -> None: + """ + Send an info embed about the member with the team they're in. + + The team is found by searching the permissions of the team channels. 
+ """ + channel = self.team_channel(ctx.guild, member) + if not channel: + await ctx.send(":x: I can't find the team channel for this member.") + return + + embed = Embed( + title=str(member), + colour=Colour.blurple() + ) + embed.add_field(name="Team", value=self.team_name(channel), inline=True) + + await ctx.send(embed=embed) + + @codejam.command() + @commands.has_any_role(Roles.admins) + async def move(self, ctx: commands.Context, member: Member, new_team_name: str) -> None: + """Move participant from one team to another by changing the user's permissions for the relevant channels.""" + old_team_channel = self.team_channel(ctx.guild, member) + if not old_team_channel: + await ctx.send(":x: I can't find the team channel for this member.") + return + + if old_team_channel.name == new_team_name or self.team_name(old_team_channel) == new_team_name: + await ctx.send(f"`{member}` is already in `{new_team_name}`.") + return + + new_team_channel = self.team_channel(ctx.guild, new_team_name) + if not new_team_channel: + await ctx.send(f":x: I can't find a team channel named `{new_team_name}`.") + return + + await old_team_channel.set_permissions(member, overwrite=None, reason=f"Participant moved to {new_team_name}") + await new_team_channel.set_permissions( + member, + overwrite=discord.PermissionOverwrite(read_messages=True), + reason=f"Participant moved from {old_team_channel.name}" + ) + + await ctx.send( + f"Participant moved from `{self.team_name(old_team_channel)}` to `{self.team_name(new_team_channel)}`." + ) + + @codejam.command() + @commands.has_any_role(Roles.admins) + async def remove(self, ctx: commands.Context, member: Member) -> None: + """Remove the participant from their team. Does not remove the participants or leader roles.""" + channel = self.team_channel(ctx.guild, member) + if not channel: + await ctx.send(":x: I can't find the team channel for this member.") + return + + await channel.set_permissions( + member, + overwrite=None, + reason=f"Participant removed from the team {self.team_name(channel)}." + ) + await ctx.send(f"Removed the participant from `{self.team_name(channel)}`.") + + @staticmethod + def jam_categories(guild: Guild) -> list[discord.CategoryChannel]: + """Get all the code jam team categories.""" + return [category for category in guild.categories if category.name == _channels.CATEGORY_NAME] + + @staticmethod + def team_channel(guild: Guild, criterion: t.Union[str, Member]) -> t.Optional[discord.TextChannel]: + """Get a team channel through either a participant or the team name.""" + for category in CodeJams.jam_categories(guild): + for channel in category.channels: + if isinstance(channel, discord.TextChannel): + if ( + # If it's a string. + criterion == channel.name or criterion == CodeJams.team_name(channel) + # If it's a member. 
+ or criterion in channel.overwrites + ): + return channel + + @staticmethod + def team_name(channel: discord.TextChannel) -> str: + """Retrieves the team name from the given channel.""" + return channel.name.replace("-", " ").title() diff --git a/bot/exts/filters/antimalware.py b/bot/exts/filters/antimalware.py index 89e539e7b..d727f7940 100644 --- a/bot/exts/filters/antimalware.py +++ b/bot/exts/filters/antimalware.py @@ -1,4 +1,3 @@ -import logging import typing as t from os.path import splitext @@ -7,8 +6,10 @@ from discord.ext.commands import Cog from bot.bot import Bot from bot.constants import Channels, Filter, URLs +from bot.exts.events.code_jams._channels import CATEGORY_NAME as JAM_CATEGORY_NAME +from bot.log import get_logger -log = logging.getLogger(__name__) +log = get_logger(__name__) PY_EMBED_DESCRIPTION = ( "It looks like you tried to attach a Python file - " @@ -61,6 +62,10 @@ class AntiMalware(Cog): if message.webhook_id or message.author.bot: return + # Ignore code jam channels + if getattr(message.channel, "category", None) and message.channel.category.name == JAM_CATEGORY_NAME: + return + # Check if user is staff, if is, return # Since we only care that roles exist to iterate over, check for the attr rather than a User/Member instance if hasattr(message.author, "roles") and any(role.id in Filter.role_whitelist for role in message.author.roles): diff --git a/bot/exts/filters/antispam.py b/bot/exts/filters/antispam.py index 7555e25a2..37ac70508 100644 --- a/bot/exts/filters/antispam.py +++ b/bot/exts/filters/antispam.py @@ -1,8 +1,9 @@ import asyncio -import logging +from collections import defaultdict from collections.abc import Mapping from dataclasses import dataclass, field from datetime import datetime, timedelta +from itertools import takewhile from operator import attrgetter, itemgetter from typing import Dict, Iterable, List, Set @@ -12,17 +13,17 @@ from discord.ext.commands import Cog from bot import rules from bot.bot import Bot from bot.constants import ( - AntiSpam as AntiSpamConfig, Channels, - Colours, DEBUG_MODE, Event, Filter, - Guild as GuildConfig, Icons, + AntiSpam as AntiSpamConfig, Channels, Colours, DEBUG_MODE, Event, Filter, Guild as GuildConfig, Icons ) from bot.converters import Duration +from bot.exts.events.code_jams._channels import CATEGORY_NAME as JAM_CATEGORY_NAME from bot.exts.moderation.modlog import ModLog +from bot.log import get_logger from bot.utils import lock, scheduling +from bot.utils.message_cache import MessageCache from bot.utils.messages import format_user, send_attachments - -log = logging.getLogger(__name__) +log = get_logger(__name__) RULE_FUNCTION_MAPPING = { 'attachments': rules.apply_attachments, @@ -43,19 +44,18 @@ RULE_FUNCTION_MAPPING = { class DeletionContext: """Represents a Deletion Context for a single spam event.""" - channel: TextChannel - members: Dict[int, Member] = field(default_factory=dict) + members: frozenset[Member] + triggered_in: TextChannel + channels: set[TextChannel] = field(default_factory=set) rules: Set[str] = field(default_factory=set) messages: Dict[int, Message] = field(default_factory=dict) attachments: List[List[str]] = field(default_factory=list) - async def add(self, rule_name: str, members: Iterable[Member], messages: Iterable[Message]) -> None: + async def add(self, rule_name: str, channels: Iterable[TextChannel], messages: Iterable[Message]) -> None: """Adds new rule violation events to the deletion context.""" self.rules.add(rule_name) - for member in members: - if member.id not in 
self.members: - self.members[member.id] = member + self.channels.update(channels) for message in messages: if message.id not in self.messages: @@ -68,36 +68,45 @@ class DeletionContext: async def upload_messages(self, actor_id: int, modlog: ModLog) -> None: """Method that takes care of uploading the queue and posting modlog alert.""" - triggered_by_users = ", ".join(format_user(m) for m in self.members.values()) + triggered_by_users = ", ".join(format_user(m) for m in self.members) + triggered_in_channel = f"**Triggered in:** {self.triggered_in.mention}\n" if len(self.channels) > 1 else "" + channels_description = ", ".join(channel.mention for channel in self.channels) mod_alert_message = ( f"**Triggered by:** {triggered_by_users}\n" - f"**Channel:** {self.channel.mention}\n" + f"{triggered_in_channel}" + f"**Channels:** {channels_description}\n" f"**Rules:** {', '.join(rule for rule in self.rules)}\n" ) - # For multiple messages or those with excessive newlines, use the logs API - if len(self.messages) > 1 or 'newlines' in self.rules: + messages_as_list = list(self.messages.values()) + first_message = messages_as_list[0] + # For multiple messages and those with attachments or excessive newlines, use the logs API + if any(( + len(messages_as_list) > 1, + len(first_message.attachments) > 0, + first_message.content.count('\n') > 15 + )): url = await modlog.upload_log(self.messages.values(), actor_id, self.attachments) mod_alert_message += f"A complete log of the offending messages can be found [here]({url})" else: mod_alert_message += "Message:\n" - [message] = self.messages.values() - content = message.clean_content - remaining_chars = 2040 - len(mod_alert_message) + content = first_message.clean_content + remaining_chars = 4080 - len(mod_alert_message) if len(content) > remaining_chars: - content = content[:remaining_chars] + "..." + url = await modlog.upload_log([first_message], actor_id, self.attachments) + log_site_msg = f"The full message can be found [here]({url})" + content = content[:remaining_chars - (3 + len(log_site_msg))] + "..." - mod_alert_message += f"{content}" + mod_alert_message += content - *_, last_message = self.messages.values() await modlog.send_log_message( icon_url=Icons.filtering, colour=Colour(Colours.soft_red), title="Spam detected!", text=mod_alert_message, - thumbnail=last_message.author.avatar_url_as(static_format="png"), + thumbnail=first_message.author.display_avatar.url, channel_id=Channels.mod_alerts, ping_everyone=AntiSpamConfig.ping_everyone ) @@ -115,7 +124,19 @@ class AntiSpam(Cog): self.message_deletion_queue = dict() - self.bot.loop.create_task(self.alert_on_validation_error(), name="AntiSpam.alert_on_validation_error") + # Fetch the rule configuration with the highest rule interval. 
+ max_interval_config = max( + AntiSpamConfig.rules.values(), + key=itemgetter('interval') + ) + self.max_interval = max_interval_config['interval'] + self.cache = MessageCache(AntiSpamConfig.cache_size, newest_first=True) + + scheduling.create_task( + self.alert_on_validation_error(), + name="AntiSpam.alert_on_validation_error", + event_loop=self.bot.loop, + ) @property def mod_log(self) -> ModLog: @@ -148,24 +169,18 @@ class AntiSpam(Cog): not message.guild or message.guild.id != GuildConfig.id or message.author.bot + or (getattr(message.channel, "category", None) and message.channel.category.name == JAM_CATEGORY_NAME) or (message.channel.id in Filter.channel_whitelist and not DEBUG_MODE) or (any(role.id in Filter.role_whitelist for role in message.author.roles) and not DEBUG_MODE) ): return - # Fetch the rule configuration with the highest rule interval. - max_interval_config = max( - AntiSpamConfig.rules.values(), - key=itemgetter('interval') - ) - max_interval = max_interval_config['interval'] + self.cache.append(message) - # Store history messages since `interval` seconds ago in a list to prevent unnecessary API calls. - earliest_relevant_at = datetime.utcnow() - timedelta(seconds=max_interval) - relevant_messages = [ - msg async for msg in message.channel.history(after=earliest_relevant_at, oldest_first=False) - if not msg.author.bot - ] + earliest_relevant_at = datetime.utcnow() - timedelta(seconds=self.max_interval) + relevant_messages = list( + takewhile(lambda msg: msg.created_at.replace(tzinfo=None) > earliest_relevant_at, self.cache) + ) for rule_name in AntiSpamConfig.rules: rule_config = AntiSpamConfig.rules[rule_name] @@ -173,9 +188,12 @@ class AntiSpam(Cog): # Create a list of messages that were sent in the interval that the rule cares about. latest_interesting_stamp = datetime.utcnow() - timedelta(seconds=rule_config['interval']) - messages_for_rule = [ - msg for msg in relevant_messages if msg.created_at > latest_interesting_stamp - ] + messages_for_rule = list( + takewhile( + lambda msg: msg.created_at.replace(tzinfo=None) > latest_interesting_stamp, relevant_messages + ) + ) + result = await rule_function(message, messages_for_rule, rule_config) # If the rule returns `None`, that means the message didn't violate it. 
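The hunk above replaces per-event channel-history fetches with a shared, newest-first MessageCache (the project's own bot.utils.message_cache utility) that gets sliced with itertools.takewhile. A minimal standalone sketch of just that slicing idea, using plain datetimes as stand-ins for cached messages (illustrative values only, not the real cache API):

from datetime import datetime
from itertools import takewhile

# Newest-first timestamps stand in for cached Message objects (illustrative values).
cache = [
    datetime(2021, 10, 19, 12, 0, 50),
    datetime(2021, 10, 19, 12, 0, 30),
    datetime(2021, 10, 19, 11, 58, 0),
]
cutoff = datetime(2021, 10, 19, 12, 0, 0)

# Because the sequence is ordered newest-first, takewhile can stop at the first
# entry older than the cutoff instead of scanning (or re-fetching) everything.
recent = list(takewhile(lambda created_at: created_at > cutoff, cache))
print(len(recent))  # 2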
@@ -188,19 +206,19 @@ class AntiSpam(Cog): full_reason = f"`{rule_name}` rule: {reason}" # If there's no spam event going on for this channel, start a new Message Deletion Context - channel = message.channel - if channel.id not in self.message_deletion_queue: - log.trace(f"Creating queue for channel `{channel.id}`") - self.message_deletion_queue[message.channel.id] = DeletionContext(channel) + authors_set = frozenset(members) + if authors_set not in self.message_deletion_queue: + log.trace(f"Creating queue for members `{authors_set}`") + self.message_deletion_queue[authors_set] = DeletionContext(authors_set, message.channel) scheduling.create_task( - self._process_deletion_context(message.channel.id), - name=f"AntiSpam._process_deletion_context({message.channel.id})" + self._process_deletion_context(authors_set), + name=f"AntiSpam._process_deletion_context({authors_set})" ) # Add the relevant of this trigger to the Deletion Context - await self.message_deletion_queue[message.channel.id].add( + await self.message_deletion_queue[authors_set].add( rule_name=rule_name, - members=members, + channels=set(message.channel for message in relevant_messages), messages=relevant_messages ) @@ -210,7 +228,7 @@ class AntiSpam(Cog): name=f"AntiSpam.punish(message={message.id}, member={member.id}, rule={rule_name})" ) - await self.maybe_delete_messages(channel, relevant_messages) + await self.maybe_delete_messages(relevant_messages) break @lock.lock_arg("antispam.punish", "member", attrgetter("id")) @@ -232,14 +250,31 @@ class AntiSpam(Cog): reason=reason ) - async def maybe_delete_messages(self, channel: TextChannel, messages: List[Message]) -> None: + async def maybe_delete_messages(self, messages: List[Message]) -> None: """Cleans the messages if cleaning is configured.""" if AntiSpamConfig.clean_offending: # If we have more than one message, we can use bulk delete. if len(messages) > 1: message_ids = [message.id for message in messages] self.mod_log.ignore(Event.message_delete, *message_ids) - await channel.delete_messages(messages) + channel_messages = defaultdict(list) + for message in messages: + channel_messages[message.channel].append(message) + for channel, messages in channel_messages.items(): + try: + await channel.delete_messages(messages) + except NotFound: + # In the rare case where we found messages matching the + # spam filter across multiple channels, it is possible + # that a single channel will only contain a single message + # to delete. If that should be the case, discord.py will + # use the "delete single message" endpoint instead of the + # bulk delete endpoint, and the single message deletion + # endpoint will complain if you give it that does not exist. + # As this means that we have no other message to delete in + # this channel (and message deletes work per-channel), + # we can just log an exception and carry on with business. + log.info(f"Tried to delete message `{messages[0].id}`, but message could not be found.") # Otherwise, the bulk delete endpoint will throw up. # Delete the message directly instead. 
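Since a single spam event can now span several channels, the deletion step above groups the offending messages by channel before bulk-deleting each group. A rough sketch of that pattern, assuming only discord.py's TextChannel.delete_messages and NotFound (this is not the cog's actual helper):

from collections import defaultdict

import discord


async def delete_per_channel(messages: list[discord.Message]) -> None:
    """Group messages by channel and bulk-delete each group, tolerating already-deleted messages."""
    by_channel = defaultdict(list)
    for message in messages:
        by_channel[message.channel].append(message)

    for channel, channel_messages in by_channel.items():
        try:
            # discord.py falls back to the single-delete endpoint when only one message is passed.
            await channel.delete_messages(channel_messages)
        except discord.NotFound:
            # The message was already gone; there is nothing else to delete in this channel.
            pass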
@@ -250,7 +285,7 @@ class AntiSpam(Cog): except NotFound: log.info(f"Tried to delete message `{messages[0].id}`, but message could not be found.") - async def _process_deletion_context(self, context_id: int) -> None: + async def _process_deletion_context(self, context_id: frozenset) -> None: """Processes the Deletion Context queue.""" log.trace("Sleeping before processing message deletion queue.") await asyncio.sleep(10) @@ -262,6 +297,11 @@ class AntiSpam(Cog): deletion_context = self.message_deletion_queue.pop(context_id) await deletion_context.upload_messages(self.bot.user.id, self.mod_log) + @Cog.listener() + async def on_message_edit(self, before: Message, after: Message) -> None: + """Updates the message in the cache, if it's cached.""" + self.cache.update(after) + def validate_config(rules_: Mapping = AntiSpamConfig.rules) -> Dict[str, str]: """Validates the antispam configs.""" diff --git a/bot/exts/filters/filter_lists.py b/bot/exts/filters/filter_lists.py index 232c1e48b..4b5200684 100644 --- a/bot/exts/filters/filter_lists.py +++ b/bot/exts/filters/filter_lists.py @@ -1,4 +1,3 @@ -import logging from typing import Optional from discord import Colour, Embed @@ -8,9 +7,11 @@ from bot import constants from bot.api import ResponseCodeError from bot.bot import Bot from bot.converters import ValidDiscordServerInvite, ValidFilterListType +from bot.log import get_logger from bot.pagination import LinePaginator +from bot.utils import scheduling -log = logging.getLogger(__name__) +log = get_logger(__name__) class FilterLists(Cog): @@ -27,7 +28,7 @@ class FilterLists(Cog): def __init__(self, bot: Bot) -> None: self.bot = bot - self.bot.loop.create_task(self._amend_docstrings()) + scheduling.create_task(self._amend_docstrings(), event_loop=self.bot.loop) async def _amend_docstrings(self) -> None: """Add the valid FilterList types to the docstrings, so they'll appear in !help invocations.""" diff --git a/bot/exts/filters/filtering.py b/bot/exts/filters/filtering.py index 464732453..a151db1f0 100644 --- a/bot/exts/filters/filtering.py +++ b/bot/exts/filters/filtering.py @@ -1,5 +1,4 @@ import asyncio -import logging import re from datetime import datetime, timedelta from typing import Any, Dict, List, Mapping, NamedTuple, Optional, Tuple, Union @@ -15,16 +14,15 @@ from discord.utils import escape_markdown from bot.api import ResponseCodeError from bot.bot import Bot -from bot.constants import ( - Channels, Colours, Filter, - Guild, Icons, URLs -) +from bot.constants import Channels, Colours, Filter, Guild, Icons, URLs +from bot.exts.events.code_jams._channels import CATEGORY_NAME as JAM_CATEGORY_NAME from bot.exts.moderation.modlog import ModLog +from bot.log import get_logger +from bot.utils import scheduling from bot.utils.messages import format_user from bot.utils.regex import INVITE_RE -from bot.utils.scheduling import Scheduler -log = logging.getLogger(__name__) +log = get_logger(__name__) # Regular expressions CODE_BLOCK_RE = re.compile( @@ -63,7 +61,7 @@ class Filtering(Cog): def __init__(self, bot: Bot): self.bot = bot - self.scheduler = Scheduler(self.__class__.__name__) + self.scheduler = scheduling.Scheduler(self.__class__.__name__) self.name_lock = asyncio.Lock() staff_mistake_str = "If you believe this was a mistake, please let staff know!" 
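The deletion queue is now keyed by the frozenset of offending members rather than by a channel ID, which works because frozensets are hashable and order-insensitive. A small illustration of that dict-key behaviour (integer IDs stand in for Member objects):

queue = {}
authors = frozenset({111, 222})          # member IDs stand in for Member objects here
if authors not in queue:
    queue[authors] = []                  # one deletion context per unique set of authors
queue[authors].append("first offending message")
assert frozenset({222, 111}) in queue    # lookup is independent of ordering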
@@ -103,19 +101,6 @@ class Filtering(Cog): ), "schedule_deletion": False }, - "filter_everyone_ping": { - "enabled": Filter.filter_everyone_ping, - "function": self._has_everyone_ping, - "type": "filter", - "content_only": True, - "user_notification": Filter.notify_user_everyone_ping, - "notification_msg": ( - "Please don't try to ping `@everyone` or `@here`. " - f"Your message has been removed. {staff_mistake_str}" - ), - "schedule_deletion": False, - "ping_everyone": False - }, "watch_regex": { "enabled": Filter.watch_regex, "function": self._has_watch_regex_match, @@ -129,10 +114,23 @@ class Filtering(Cog): "type": "watchlist", "content_only": False, "schedule_deletion": False - } + }, + "filter_everyone_ping": { + "enabled": Filter.filter_everyone_ping, + "function": self._has_everyone_ping, + "type": "filter", + "content_only": True, + "user_notification": Filter.notify_user_everyone_ping, + "notification_msg": ( + "Please don't try to ping `@everyone` or `@here`. " + f"Your message has been removed. {staff_mistake_str}" + ), + "schedule_deletion": False, + "ping_everyone": False + }, } - self.bot.loop.create_task(self.reschedule_offensive_msg_deletion()) + scheduling.create_task(self.reschedule_offensive_msg_deletion(), event_loop=self.bot.loop) def cog_unload(self) -> None: """Cancel scheduled tasks.""" @@ -225,7 +223,7 @@ class Filtering(Cog): title="Username filtering alert", text=log_string, channel_id=Channels.mod_alerts, - thumbnail=member.avatar_url + thumbnail=member.display_avatar.url ) # Update time when alert sent @@ -281,6 +279,12 @@ class Filtering(Cog): if delta is not None and delta < 100: continue + if filter_name in ("filter_invites", "filter_everyone_ping"): + # Disable invites filter in codejam team channels + category = getattr(msg.channel, "category", None) + if category and category.name == JAM_CATEGORY_NAME: + continue + # Does the filter only need the message content or the full message? if _filter["content_only"]: payload = msg.content @@ -379,7 +383,7 @@ class Filtering(Cog): colour=Colour(Colours.soft_red), title=f"{_filter['type'].title()} triggered!", text=message, - thumbnail=msg.author.avatar_url_as(static_format="png"), + thumbnail=msg.author.display_avatar.url, channel_id=Channels.mod_alerts, ping_everyone=ping_everyone, additional_embeds=stats.additional_embeds, @@ -471,16 +475,12 @@ class Filtering(Cog): Second return value is a reason of URL blacklisting (can be None). 
""" text = self.clean_input(text) - if not URL_RE.search(text): - return False, None - text = text.lower() domain_blacklist = self._get_filterlist_items("domain_name", allowed=False) - - for url in domain_blacklist: - if url.lower() in text: - return True, self._get_filterlist_value("domain_name", url, allowed=False)["comment"] - + for match in URL_RE.finditer(text): + for url in domain_blacklist: + if url.lower() in match.group(1).lower(): + return True, self._get_filterlist_value("domain_name", url, allowed=False)["comment"] return False, None @staticmethod @@ -507,7 +507,7 @@ class Filtering(Cog): # discord\.gg/gdudes-pony-farm text = text.replace("\\", "") - invites = INVITE_RE.findall(text) + invites = [m.group("invite") for m in INVITE_RE.finditer(text)] invite_data = dict() for invite in invites: if invite in invite_data: diff --git a/bot/exts/filters/pixels_token_remover.py b/bot/exts/filters/pixels_token_remover.py deleted file mode 100644 index 2356491e5..000000000 --- a/bot/exts/filters/pixels_token_remover.py +++ /dev/null @@ -1,108 +0,0 @@ -import logging -import re -import typing as t - -from discord import Colour, Message, NotFound -from discord.ext.commands import Cog - -from bot.bot import Bot -from bot.constants import Channels, Colours, Event, Icons -from bot.exts.moderation.modlog import ModLog -from bot.utils.messages import format_user - -log = logging.getLogger(__name__) - -LOG_MESSAGE = "Censored a valid Pixels token sent by {author} in {channel}, token was `{token}`" -DELETION_MESSAGE_TEMPLATE = ( - "Hey {mention}! I noticed you posted a valid Pixels API " - "token in your message and have removed your message. " - "This means that your token has been **compromised**. " - "I have taken the liberty of invalidating the token for you. " - "You can go to <https://pixels.pythondiscord.com/authorize> to get a new key." -) - -PIXELS_TOKEN_RE = re.compile(r"[A-Za-z0-9-_=]{30,}\.[A-Za-z0-9-_=]{50,}\.[A-Za-z0-9-_.+\=]{30,}") - - -class PixelsTokenRemover(Cog): - """Scans messages for Pixels API tokens, removes and invalidates them.""" - - def __init__(self, bot: Bot): - self.bot = bot - - @property - def mod_log(self) -> ModLog: - """Get currently loaded ModLog cog instance.""" - return self.bot.get_cog("ModLog") - - @Cog.listener() - async def on_message(self, msg: Message) -> None: - """Check each message for a string that matches the RS-256 token pattern.""" - # Ignore DMs; can't delete messages in there anyway. 
- if not msg.guild or msg.author.bot: - return - - found_token = await self.find_token_in_message(msg) - if found_token: - await self.take_action(msg, found_token) - - @Cog.listener() - async def on_message_edit(self, before: Message, after: Message) -> None: - """Check each edit for a string that matches the RS-256 token pattern.""" - await self.on_message(after) - - async def take_action(self, msg: Message, found_token: str) -> None: - """Remove the `msg` containing the `found_token` and send a mod log message.""" - self.mod_log.ignore(Event.message_delete, msg.id) - - try: - await msg.delete() - except NotFound: - log.debug(f"Failed to remove token in message {msg.id}: message already deleted.") - return - - await msg.channel.send(DELETION_MESSAGE_TEMPLATE.format(mention=msg.author.mention)) - - log_message = self.format_log_message(msg, found_token) - log.debug(log_message) - - # Send pretty mod log embed to mod-alerts - await self.mod_log.send_log_message( - icon_url=Icons.token_removed, - colour=Colour(Colours.soft_red), - title="Token removed!", - text=log_message, - thumbnail=msg.author.avatar_url_as(static_format="png"), - channel_id=Channels.mod_alerts, - ping_everyone=False, - ) - - self.bot.stats.incr("tokens.removed_pixels_tokens") - - @staticmethod - def format_log_message(msg: Message, token: str) -> str: - """Return the generic portion of the log message to send for `token` being censored in `msg`.""" - return LOG_MESSAGE.format( - author=format_user(msg.author), - channel=msg.channel.mention, - token=token - ) - - async def find_token_in_message(self, msg: Message) -> t.Optional[str]: - """Return a seemingly valid token found in `msg` or `None` if no token is found.""" - # Use finditer rather than search to guard against method calls prematurely returning the - # token check (e.g. `message.channel.send` also matches our token pattern) - for match in PIXELS_TOKEN_RE.finditer(msg.content): - auth_header = {"Authorization": f"Bearer {match[0]}"} - async with self.bot.http_session.delete("https://pixels.pythondiscord.com/token", headers=auth_header) as r: - if r.status == 204: - # Short curcuit on first match. 
- return match[0] - - # No matching substring - return - - -def setup(bot: Bot) -> None: - """Load the PixelsTokenRemover cog.""" - bot.add_cog(PixelsTokenRemover(bot)) diff --git a/bot/exts/filters/security.py b/bot/exts/filters/security.py index c680c5e27..fe3918423 100644 --- a/bot/exts/filters/security.py +++ b/bot/exts/filters/security.py @@ -1,10 +1,9 @@ -import logging - from discord.ext.commands import Cog, Context, NoPrivateMessage from bot.bot import Bot +from bot.log import get_logger -log = logging.getLogger(__name__) +log = get_logger(__name__) class Security(Cog): diff --git a/bot/exts/filters/token_remover.py b/bot/exts/filters/token_remover.py index 93f1f3c33..520283ba3 100644 --- a/bot/exts/filters/token_remover.py +++ b/bot/exts/filters/token_remover.py @@ -1,6 +1,5 @@ import base64 import binascii -import logging import re import typing as t @@ -11,9 +10,11 @@ from bot import utils from bot.bot import Bot from bot.constants import Channels, Colours, Event, Icons from bot.exts.moderation.modlog import ModLog +from bot.log import get_logger +from bot.utils.members import get_or_fetch_member from bot.utils.messages import format_user -log = logging.getLogger(__name__) +log = get_logger(__name__) LOG_MESSAGE = ( "Censored a seemingly valid token sent by {author} in {channel}, " @@ -99,7 +100,7 @@ class TokenRemover(Cog): await msg.channel.send(DELETION_MESSAGE_TEMPLATE.format(mention=msg.author.mention)) log_message = self.format_log_message(msg, found_token) - userid_message, mention_everyone = self.format_userid_log_message(msg, found_token) + userid_message, mention_everyone = await self.format_userid_log_message(msg, found_token) log.debug(log_message) # Send pretty mod log embed to mod-alerts @@ -108,7 +109,7 @@ class TokenRemover(Cog): colour=Colour(Colours.soft_red), title="Token removed!", text=log_message + "\n" + userid_message, - thumbnail=msg.author.avatar_url_as(static_format="png"), + thumbnail=msg.author.display_avatar.url, channel_id=Channels.mod_alerts, ping_everyone=mention_everyone, ) @@ -116,7 +117,7 @@ class TokenRemover(Cog): self.bot.stats.incr("tokens.removed_tokens") @classmethod - def format_userid_log_message(cls, msg: Message, token: Token) -> t.Tuple[str, bool]: + async def format_userid_log_message(cls, msg: Message, token: Token) -> t.Tuple[str, bool]: """ Format the portion of the log message that includes details about the detected user ID. 
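token_remover now resolves the offending user through bot.utils.members.get_or_fetch_member (imported above and awaited in the next hunk) instead of relying on the member cache alone. The helper itself is not part of this diff; a rough sketch of the cache-then-API pattern it presumably follows, using only standard discord.py calls:

import typing as t

import discord


async def get_or_fetch_member(guild: discord.Guild, member_id: int) -> t.Optional[discord.Member]:
    """Look the member up in the local cache first, falling back to a REST fetch."""
    member = guild.get_member(member_id)                 # cache only, no API call
    if member is None:
        try:
            member = await guild.fetch_member(member_id)  # hits the API
        except discord.HTTPException:
            return None                                   # unknown member, or the fetch failed
    return member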
@@ -128,7 +129,7 @@ class TokenRemover(Cog): Returns a tuple of (log_message, mention_everyone) """ user_id = cls.extract_user_id(token.user_id) - user = msg.guild.get_member(user_id) + user = await get_or_fetch_member(msg.guild, user_id) if user: return KNOWN_USER_LOG_MESSAGE.format( diff --git a/bot/exts/filters/webhook_remover.py b/bot/exts/filters/webhook_remover.py index f11fc8912..96334317c 100644 --- a/bot/exts/filters/webhook_remover.py +++ b/bot/exts/filters/webhook_remover.py @@ -1,4 +1,3 @@ -import logging import re from discord import Colour, Message, NotFound @@ -7,18 +6,22 @@ from discord.ext.commands import Cog from bot.bot import Bot from bot.constants import Channels, Colours, Event, Icons from bot.exts.moderation.modlog import ModLog +from bot.log import get_logger from bot.utils.messages import format_user -WEBHOOK_URL_RE = re.compile(r"((?:https?://)?discord(?:app)?\.com/api/webhooks/\d+/)\S+/?", re.IGNORECASE) +WEBHOOK_URL_RE = re.compile( + r"((?:https?:\/\/)?(?:ptb\.|canary\.)?discord(?:app)?\.com\/api\/webhooks\/\d+\/)\S+\/?", + re.IGNORECASE +) ALERT_MESSAGE_TEMPLATE = ( "{user}, looks like you posted a Discord webhook URL. Therefore, your " - "message has been removed. Your webhook may have been **compromised** so " - "please re-create the webhook **immediately**. If you believe this was a " + "message has been removed, and your webhook has been deleted. " + "You can re-create it if you wish to. If you believe this was a " "mistake, please let us know." ) -log = logging.getLogger(__name__) +log = get_logger(__name__) class WebhookRemover(Cog): @@ -32,7 +35,7 @@ class WebhookRemover(Cog): """Get current instance of `ModLog`.""" return self.bot.get_cog("ModLog") - async def delete_and_respond(self, msg: Message, redacted_url: str) -> None: + async def delete_and_respond(self, msg: Message, redacted_url: str, *, webhook_deleted: bool) -> None: """Delete `msg` and send a warning that it contained the Discord webhook `redacted_url`.""" # Don't log this, due internal delete, not by user. Will make different entry. self.mod_log.ignore(Event.message_delete, msg.id) @@ -44,9 +47,12 @@ class WebhookRemover(Cog): return await msg.channel.send(ALERT_MESSAGE_TEMPLATE.format(user=msg.author.mention)) - + if webhook_deleted: + delete_state = "The webhook was successfully deleted." + else: + delete_state = "There was an error when deleting the webhook, it might have already been removed." message = ( - f"{format_user(msg.author)} posted a Discord webhook URL to {msg.channel.mention}. " + f"{format_user(msg.author)} posted a Discord webhook URL to {msg.channel.mention}. {delete_state} " f"Webhook URL was `{redacted_url}`" ) log.debug(message) @@ -57,7 +63,7 @@ class WebhookRemover(Cog): colour=Colour(Colours.soft_red), title="Discord webhook URL removed!", text=message, - thumbnail=msg.author.avatar_url_as(static_format="png"), + thumbnail=msg.author.display_avatar.url, channel_id=Channels.mod_alerts ) @@ -72,7 +78,10 @@ class WebhookRemover(Cog): matches = WEBHOOK_URL_RE.search(msg.content) if matches: - await self.delete_and_respond(msg, matches[1] + "xxx") + async with self.bot.http_session.delete(matches[0]) as resp: + # The Discord API Returns a 204 NO CONTENT response on success. 
+ deleted_successfully = resp.status == 204 + await self.delete_and_respond(msg, matches[1] + "xxx", webhook_deleted=deleted_successfully) @Cog.listener() async def on_message_edit(self, before: Message, after: Message) -> None: diff --git a/bot/exts/fun/duck_pond.py b/bot/exts/fun/duck_pond.py index c78b9c141..c51656343 100644 --- a/bot/exts/fun/duck_pond.py +++ b/bot/exts/fun/duck_pond.py @@ -1,18 +1,20 @@ import asyncio -import logging from typing import Union import discord -from discord import Color, Embed, Member, Message, RawReactionActionEvent, TextChannel, User, errors +from discord import Color, Embed, Message, RawReactionActionEvent, TextChannel, errors from discord.ext.commands import Cog, Context, command from bot import constants from bot.bot import Bot +from bot.converters import MemberOrUser +from bot.log import get_logger +from bot.utils import scheduling from bot.utils.checks import has_any_role from bot.utils.messages import count_unique_users_reaction, send_attachments from bot.utils.webhooks import send_webhook -log = logging.getLogger(__name__) +log = get_logger(__name__) class DuckPond(Cog): @@ -23,7 +25,7 @@ class DuckPond(Cog): self.webhook_id = constants.Webhooks.duck_pond self.webhook = None self.ducked_messages = [] - self.bot.loop.create_task(self.fetch_webhook()) + scheduling.create_task(self.fetch_webhook(), event_loop=self.bot.loop) self.relay_lock = None async def fetch_webhook(self) -> None: @@ -36,7 +38,7 @@ class DuckPond(Cog): log.exception(f"Failed to fetch webhook with id `{self.webhook_id}`") @staticmethod - def is_staff(member: Union[User, Member]) -> bool: + def is_staff(member: MemberOrUser) -> bool: """Check if a specific member or user is staff.""" if hasattr(member, "roles"): for role in member.roles: @@ -92,7 +94,7 @@ class DuckPond(Cog): webhook=self.webhook, content=message.clean_content, username=message.author.display_name, - avatar_url=message.author.avatar_url + avatar_url=message.author.display_avatar.url ) if message.attachments: @@ -107,7 +109,7 @@ class DuckPond(Cog): webhook=self.webhook, embed=e, username=message.author.display_name, - avatar_url=message.author.avatar_url + avatar_url=message.author.display_avatar.url ) except discord.HTTPException: log.exception("Failed to send an attachment to the webhook") @@ -171,8 +173,14 @@ class DuckPond(Cog): if not self.is_helper_viewable(channel): return - message = await channel.fetch_message(payload.message_id) + try: + message = await channel.fetch_message(payload.message_id) + except discord.NotFound: + return # Message was deleted. + member = discord.utils.get(message.guild.members, id=payload.user_id) + if not member: + return # Member left or wasn't in the cache. # Was the message sent by a human staff member? 
if not self.is_staff(message.author) or message.author.bot: diff --git a/bot/exts/fun/off_topic_names.py b/bot/exts/fun/off_topic_names.py index 845b8175c..427667c66 100644 --- a/bot/exts/fun/off_topic_names.py +++ b/bot/exts/fun/off_topic_names.py @@ -1,5 +1,4 @@ import difflib -import logging from datetime import datetime, timedelta from discord import Colour, Embed @@ -10,10 +9,12 @@ from bot.api import ResponseCodeError from bot.bot import Bot from bot.constants import Channels, MODERATION_ROLES from bot.converters import OffTopicName +from bot.log import get_logger from bot.pagination import LinePaginator +from bot.utils import scheduling CHANNELS = (Channels.off_topic_0, Channels.off_topic_1, Channels.off_topic_2) -log = logging.getLogger(__name__) +log = get_logger(__name__) async def update_names(bot: Bot) -> None: @@ -50,7 +51,7 @@ class OffTopicNames(Cog): self.bot = bot self.updater_task = None - self.bot.loop.create_task(self.init_offtopic_updater()) + scheduling.create_task(self.init_offtopic_updater(), event_loop=self.bot.loop) def cog_unload(self) -> None: """Cancel any running updater tasks on cog unload.""" @@ -62,7 +63,7 @@ class OffTopicNames(Cog): await self.bot.wait_until_guild_available() if self.updater_task is None: coro = update_names(self.bot) - self.updater_task = self.bot.loop.create_task(coro) + self.updater_task = scheduling.create_task(coro, event_loop=self.bot.loop) @group(name='otname', aliases=('otnames', 'otn'), invoke_without_command=True) @has_any_role(*MODERATION_ROLES) diff --git a/bot/exts/help_channels/__init__.py b/bot/exts/help_channels/__init__.py index 781f40449..beba18aa6 100644 --- a/bot/exts/help_channels/__init__.py +++ b/bot/exts/help_channels/__init__.py @@ -1,10 +1,9 @@ -import logging - from bot import constants from bot.bot import Bot from bot.exts.help_channels._channel import MAX_CHANNELS_PER_CATEGORY +from bot.log import get_logger -log = logging.getLogger(__name__) +log = get_logger(__name__) def validate_config() -> None: diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py index 0846b28c8..e43c1e789 100644 --- a/bot/exts/help_channels/_channel.py +++ b/bot/exts/help_channels/_channel.py @@ -1,4 +1,3 @@ -import logging import typing as t from datetime import timedelta from enum import Enum @@ -10,9 +9,10 @@ from arrow import Arrow import bot from bot import constants from bot.exts.help_channels import _caches, _message -from bot.utils.channel import try_get_channel +from bot.log import get_logger +from bot.utils.channel import get_or_fetch_channel -log = logging.getLogger(__name__) +log = get_logger(__name__) MAX_CHANNELS_PER_CATEGORY = 50 EXCLUDED_CHANNELS = (constants.Channels.cooldown,) @@ -133,7 +133,7 @@ async def move_to_bottom(channel: discord.TextChannel, category_id: int, **optio options should be avoided, as it may interfere with the category move we perform. """ # Get a fresh copy of the category from the bot to avoid the cache mismatch issue we had. 
- category = await try_get_channel(category_id) + category = await get_or_fetch_channel(category_id) payload = [{"id": c.id, "position": c.position} for c in category.channels] diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index 7fb72d7ef..498305b47 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -1,5 +1,4 @@ import asyncio -import logging import random import typing as t from datetime import timedelta @@ -12,10 +11,12 @@ from discord.ext import commands from bot import constants from bot.bot import Bot +from bot.constants import Channels, RedirectOutput from bot.exts.help_channels import _caches, _channel, _message, _name, _stats -from bot.utils import channel as channel_utils, lock, scheduling +from bot.log import get_logger +from bot.utils import channel as channel_utils, lock, members, scheduling -log = logging.getLogger(__name__) +log = get_logger(__name__) NAMESPACE = "help" HELP_CHANNEL_TOPIC = """ @@ -81,7 +82,7 @@ class HelpChannels(commands.Cog): # Asyncio stuff self.queue_tasks: t.List[asyncio.Task] = [] - self.init_task = self.bot.loop.create_task(self.init_cog()) + self.init_task = scheduling.create_task(self.init_cog(), event_loop=self.bot.loop) def cog_unload(self) -> None: """Cancel the init task and scheduled tasks when the cog unloads.""" @@ -266,6 +267,8 @@ class HelpChannels(commands.Cog): for channel in channels[:abs(missing)]: await self.unclaim_channel(channel, closed_on=_channel.ClosingReason.CLEANUP) + self.available_help_channels = set(_channel.get_category_channels(self.available_category)) + # Getting channels that need to be included in the dynamic message. await self.update_available_help_channels() log.trace("Dynamic available help message updated.") @@ -275,13 +278,13 @@ class HelpChannels(commands.Cog): log.trace("Getting the CategoryChannel objects for the help categories.") try: - self.available_category = await channel_utils.try_get_channel( + self.available_category = await channel_utils.get_or_fetch_channel( constants.Categories.help_available ) - self.in_use_category = await channel_utils.try_get_channel( + self.in_use_category = await channel_utils.get_or_fetch_channel( constants.Categories.help_in_use ) - self.dormant_category = await channel_utils.try_get_channel( + self.dormant_category = await channel_utils.get_or_fetch_channel( constants.Categories.help_dormant ) except discord.HTTPException: @@ -386,7 +389,12 @@ class HelpChannels(commands.Cog): ) log.trace(f"Sending dormant message for #{channel} ({channel.id}).") - embed = discord.Embed(description=_message.DORMANT_MSG) + embed = discord.Embed( + description=_message.DORMANT_MSG.format( + dormant=self.dormant_category.name, + available=self.available_category.name, + ) + ) await channel.send(embed=embed) log.trace(f"Pushing #{channel} ({channel.id}) into the channel queue.") @@ -426,7 +434,7 @@ class HelpChannels(commands.Cog): await _caches.claimants.delete(channel.id) await _caches.session_participants.delete(channel.id) - claimant = self.bot.get_guild(constants.Guild.id).get_member(claimant_id) + claimant = await members.get_or_fetch_member(self.bot.get_guild(constants.Guild.id), claimant_id) if claimant is None: log.info(f"{claimant_id} left the guild during their help session; the cooldown role won't be removed") else: @@ -499,7 +507,7 @@ class HelpChannels(commands.Cog): """Wait for a dormant channel to become available in the queue and return it.""" log.trace("Waiting for a dormant channel.") - task = 
asyncio.create_task(self.channel_queue.get()) + task = scheduling.create_task(self.channel_queue.get()) self.queue_tasks.append(task) channel = await task @@ -510,11 +518,6 @@ class HelpChannels(commands.Cog): async def update_available_help_channels(self) -> None: """Updates the dynamic message within #how-to-get-help for available help channels.""" - if not self.available_help_channels: - self.available_help_channels = set( - c for c in self.available_category.channels if not _channel.is_excluded_channel(c) - ) - available_channels = AVAILABLE_HELP_CHANNELS.format( available=", ".join( c.mention for c in sorted(self.available_help_channels, key=attrgetter("position")) @@ -580,7 +583,22 @@ class HelpChannels(commands.Cog): timestamp=message.created_at ) embed.add_field(name="Conversation", value=f"[Jump to message]({message.jump_url})") - await message.author.send(embed=embed) + + try: + await message.author.send(embed=embed) + except discord.Forbidden: + log.trace( + f"Failed to send helpdm message to {message.author.id}. DMs Closed/Blocked. " + "Removing user from helpdm." + ) + bot_commands_channel = self.bot.get_channel(Channels.bot_commands) + await _caches.help_dm.delete(message.author.id) + await bot_commands_channel.send( + f"{message.author.mention} {constants.Emojis.cross_mark} " + "To receive updates on help channels you're active in, enable your DMs.", + delete_after=RedirectOutput.delete_delay + ) + return await _caches.session_participants.set( message.channel.id, diff --git a/bot/exts/help_channels/_message.py b/bot/exts/help_channels/_message.py index 4c7c39764..a52c67570 100644 --- a/bot/exts/help_channels/_message.py +++ b/bot/exts/help_channels/_message.py @@ -1,4 +1,3 @@ -import logging import textwrap import typing as t @@ -9,21 +8,21 @@ from arrow import Arrow import bot from bot import constants from bot.exts.help_channels import _caches +from bot.log import get_logger -log = logging.getLogger(__name__) +log = get_logger(__name__) ASKING_GUIDE_URL = "https://pythondiscord.com/pages/asking-good-questions/" AVAILABLE_MSG = f""" -**Send your question here to claim the channel** -This channel will be dedicated to answering your question only. Others will try to answer and help you solve the issue. +Send your question here to claim the channel. -**Keep in mind:** -• It's always ok to just ask your question. You don't need permission. -• Explain what you expect to happen and what actually happens. -• Include a code sample and error message, if you got any. +**Remember to:** +• **Ask** your Python question, not if you can ask or if there's an expert who can help. +• **Show** a code sample as text (rather than a screenshot) and the error message, if you got one. +• **Explain** what you expect to happen and what actually happens. -For more tips, check out our guide on **[asking good questions]({ASKING_GUIDE_URL})**. +For more tips, check out our guide on [asking good questions]({ASKING_GUIDE_URL}). """ AVAILABLE_TITLE = "Available help channel" @@ -31,12 +30,12 @@ AVAILABLE_TITLE = "Available help channel" AVAILABLE_FOOTER = "Closes after a period of inactivity, or when you send !close." DORMANT_MSG = f""" -This help channel has been marked as **dormant**, and has been moved into the **Help: Dormant** \ +This help channel has been marked as **dormant**, and has been moved into the **{{dormant}}** \ category at the bottom of the channel list. It is no longer possible to send messages in this \ channel until it becomes available again. 
If your question wasn't answered yet, you can claim a new help channel from the \ -**Help: Available** category by simply asking your question again. Consider rephrasing the \ +**{{available}}** category by simply asking your question again. Consider rephrasing the \ question to maximize your chance of getting a good answer. If you're not sure how, have a look \ through our guide for **[asking a good question]({ASKING_GUIDE_URL})**. """ diff --git a/bot/exts/help_channels/_name.py b/bot/exts/help_channels/_name.py index 061f855ae..a9d9b2df1 100644 --- a/bot/exts/help_channels/_name.py +++ b/bot/exts/help_channels/_name.py @@ -1,5 +1,4 @@ import json -import logging import typing as t from collections import deque from pathlib import Path @@ -8,8 +7,9 @@ import discord from bot import constants from bot.exts.help_channels._channel import MAX_CHANNELS_PER_CATEGORY, get_category_channels +from bot.log import get_logger -log = logging.getLogger(__name__) +log = get_logger(__name__) def create_name_queue(*categories: discord.CategoryChannel) -> deque: diff --git a/bot/exts/help_channels/_stats.py b/bot/exts/help_channels/_stats.py index eb34e75e1..4698c26de 100644 --- a/bot/exts/help_channels/_stats.py +++ b/bot/exts/help_channels/_stats.py @@ -1,12 +1,11 @@ -import logging - from more_itertools import ilen import bot from bot import constants from bot.exts.help_channels import _caches, _channel +from bot.log import get_logger -log = logging.getLogger(__name__) +log = get_logger(__name__) def report_counts() -> None: diff --git a/bot/exts/info/code_snippets.py b/bot/exts/info/code_snippets.py index 24a9ae28a..07b1b8a2d 100644 --- a/bot/exts/info/code_snippets.py +++ b/bot/exts/info/code_snippets.py @@ -4,15 +4,16 @@ import textwrap from typing import Any from urllib.parse import quote_plus +import discord from aiohttp import ClientResponseError -from discord import Message from discord.ext.commands import Cog from bot.bot import Bot from bot.constants import Channels +from bot.log import get_logger from bot.utils.messages import wait_for_deletion -log = logging.getLogger(__name__) +log = get_logger(__name__) GITHUB_RE = re.compile( r'https://github\.com/(?P<repo>[a-zA-Z0-9-]+/[\w.-]+)/blob/' @@ -45,6 +46,17 @@ class CodeSnippets(Cog): Matches each message against a regex and prints the contents of all matched snippets. 
""" + def __init__(self, bot: Bot): + """Initializes the cog's bot.""" + self.bot = bot + + self.pattern_handlers = [ + (GITHUB_RE, self._fetch_github_snippet), + (GITHUB_GIST_RE, self._fetch_github_gist_snippet), + (GITLAB_RE, self._fetch_gitlab_snippet), + (BITBUCKET_RE, self._fetch_bitbucket_snippet) + ] + async def _fetch_response(self, url: str, response_format: str, **kwargs) -> Any: """Makes http requests using aiohttp.""" async with self.bot.http_session.get(url, raise_for_status=True, **kwargs) as response: @@ -208,56 +220,56 @@ class CodeSnippets(Cog): # Returns an empty codeblock if the snippet is empty return f'{ret}``` ```' - def __init__(self, bot: Bot): - """Initializes the cog's bot.""" - self.bot = bot + async def _parse_snippets(self, content: str) -> str: + """Parse message content and return a string with a code block for each URL found.""" + all_snippets = [] + + for pattern, handler in self.pattern_handlers: + for match in pattern.finditer(content): + try: + snippet = await handler(**match.groupdict()) + all_snippets.append((match.start(), snippet)) + except ClientResponseError as error: + error_message = error.message # noqa: B306 + log.log( + logging.DEBUG if error.status == 404 else logging.ERROR, + f'Failed to fetch code snippet from {match[0]!r}: {error.status} ' + f'{error_message} for GET {error.request_info.real_url.human_repr()}' + ) - self.pattern_handlers = [ - (GITHUB_RE, self._fetch_github_snippet), - (GITHUB_GIST_RE, self._fetch_github_gist_snippet), - (GITLAB_RE, self._fetch_gitlab_snippet), - (BITBUCKET_RE, self._fetch_bitbucket_snippet) - ] + # Sorts the list of snippets by their match index and joins them into a single message + return '\n'.join(map(lambda x: x[1], sorted(all_snippets))) @Cog.listener() - async def on_message(self, message: Message) -> None: + async def on_message(self, message: discord.Message) -> None: """Checks if the message has a snippet link, removes the embed, then sends the snippet contents.""" - if not message.author.bot: - all_snippets = [] - - for pattern, handler in self.pattern_handlers: - for match in pattern.finditer(message.content): - try: - snippet = await handler(**match.groupdict()) - all_snippets.append((match.start(), snippet)) - except ClientResponseError as error: - error_message = error.message # noqa: B306 - log.log( - logging.DEBUG if error.status == 404 else logging.ERROR, - f'Failed to fetch code snippet from {match[0]!r}: {error.status} ' - f'{error_message} for GET {error.request_info.real_url.human_repr()}' - ) - - # Sorts the list of snippets by their match index and joins them into a single message - message_to_send = '\n'.join(map(lambda x: x[1], sorted(all_snippets))) - - if 0 < len(message_to_send) <= 2000 and message_to_send.count('\n') <= 15: + if message.author.bot: + return + + message_to_send = await self._parse_snippets(message.content) + destination = message.channel + + if 0 < len(message_to_send) <= 2000 and message_to_send.count('\n') <= 15: + try: await message.edit(suppress=True) - if len(message_to_send) > 1000 and message.channel.id != Channels.bot_commands: - # Redirects to #bot-commands if the snippet contents are too long - await self.bot.wait_until_guild_available() - await message.channel.send(('The snippet you tried to send was too long. 
Please ' - f'see <#{Channels.bot_commands}> for the full snippet.')) - bot_commands_channel = self.bot.get_channel(Channels.bot_commands) - await wait_for_deletion( - await bot_commands_channel.send(message_to_send), - (message.author.id,) - ) - else: - await wait_for_deletion( - await message.channel.send(message_to_send), - (message.author.id,) - ) + except discord.NotFound: + # Don't send snippets if the original message was deleted. + return + + if len(message_to_send) > 1000 and message.channel.id != Channels.bot_commands: + # Redirects to #bot-commands if the snippet contents are too long + await self.bot.wait_until_guild_available() + destination = self.bot.get_channel(Channels.bot_commands) + + await message.channel.send( + 'The snippet you tried to send was too long. ' + f'Please see {destination.mention} for the full snippet.' + ) + + await wait_for_deletion( + await destination.send(message_to_send), + (message.author.id,) + ) def setup(bot: Bot) -> None: diff --git a/bot/exts/info/codeblock/_cog.py b/bot/exts/info/codeblock/_cog.py index 9094d9d15..a859d8cef 100644 --- a/bot/exts/info/codeblock/_cog.py +++ b/bot/exts/info/codeblock/_cog.py @@ -1,4 +1,3 @@ -import logging import time from typing import Optional @@ -11,11 +10,12 @@ from bot.bot import Bot from bot.exts.filters.token_remover import TokenRemover from bot.exts.filters.webhook_remover import WEBHOOK_URL_RE from bot.exts.info.codeblock._instructions import get_instructions -from bot.utils import has_lines +from bot.log import get_logger +from bot.utils import has_lines, scheduling from bot.utils.channel import is_help_channel from bot.utils.messages import wait_for_deletion -log = logging.getLogger(__name__) +log = get_logger(__name__) class CodeBlockCog(Cog, name="Code Block"): @@ -114,7 +114,7 @@ class CodeBlockCog(Cog, name="Code Block"): bot_message = await message.channel.send(f"Hey {message.author.mention}!", embed=embed) self.codeblock_message_ids[message.id] = bot_message.id - self.bot.loop.create_task(wait_for_deletion(bot_message, (message.author.id,))) + scheduling.create_task(wait_for_deletion(bot_message, (message.author.id,)), event_loop=self.bot.loop) # Increase amount of codeblock correction in stats self.bot.stats.incr("codeblock_corrections") @@ -177,10 +177,13 @@ class CodeBlockCog(Cog, name="Code Block"): if not bot_message: return - if not instructions: - log.info("User's incorrect code block has been fixed. Removing instructions message.") - await bot_message.delete() - del self.codeblock_message_ids[payload.message_id] - else: - log.info("Message edited but still has invalid code blocks; editing the instructions.") - await bot_message.edit(embed=self.create_embed(instructions)) + try: + if not instructions: + log.info("User's incorrect code block was fixed. 
Removing instructions message.") + await bot_message.delete() + del self.codeblock_message_ids[payload.message_id] + else: + log.info("Message edited but still has invalid code blocks; editing instructions.") + await bot_message.edit(embed=self.create_embed(instructions)) + except discord.NotFound: + log.debug("Could not find instructions message; it was probably deleted.") diff --git a/bot/exts/info/codeblock/_instructions.py b/bot/exts/info/codeblock/_instructions.py index dadb5e1ef..8fcadeec2 100644 --- a/bot/exts/info/codeblock/_instructions.py +++ b/bot/exts/info/codeblock/_instructions.py @@ -1,11 +1,11 @@ """This module generates and formats instructional messages about fixing Markdown code blocks.""" -import logging from typing import Optional from bot.exts.info.codeblock import _parsing +from bot.log import get_logger -log = logging.getLogger(__name__) +log = get_logger(__name__) _EXAMPLE_PY = "{lang}\nprint('Hello, world!')" # Make sure to escape any Markdown symbols here. _EXAMPLE_CODE_BLOCKS = ( diff --git a/bot/exts/info/codeblock/_parsing.py b/bot/exts/info/codeblock/_parsing.py index 73fd11b94..3c193d6c5 100644 --- a/bot/exts/info/codeblock/_parsing.py +++ b/bot/exts/info/codeblock/_parsing.py @@ -1,15 +1,15 @@ """This module provides functions for parsing Markdown code blocks.""" import ast -import logging import re import textwrap from typing import NamedTuple, Optional, Sequence from bot import constants +from bot.log import get_logger from bot.utils import has_lines -log = logging.getLogger(__name__) +log = get_logger(__name__) BACKTICK = "`" PY_LANG_CODES = ("python-repl", "python", "pycon", "py") # Order is important; "py" is last cause it's a subset. diff --git a/bot/exts/info/doc/__init__.py b/bot/exts/info/doc/__init__.py index 38a8975c0..facdf4d0b 100644 --- a/bot/exts/info/doc/__init__.py +++ b/bot/exts/info/doc/__init__.py @@ -1,4 +1,5 @@ from bot.bot import Bot + from ._redis_cache import DocRedisCache MAX_SIGNATURE_AMOUNT = 3 diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py index 369bb462c..c27f28eac 100644 --- a/bot/exts/info/doc/_batch_parser.py +++ b/bot/exts/info/doc/_batch_parser.py @@ -2,7 +2,6 @@ from __future__ import annotations import asyncio import collections -import logging from collections import defaultdict from contextlib import suppress from operator import attrgetter @@ -13,20 +12,26 @@ from bs4 import BeautifulSoup import bot from bot.constants import Channels +from bot.log import get_logger from bot.utils import scheduling + from . 
import _cog, doc_cache from ._parsing import get_symbol_markdown +from ._redis_cache import StaleItemCounter -log = logging.getLogger(__name__) +log = get_logger(__name__) class StaleInventoryNotifier: """Handle sending notifications about stale inventories through `DocItem`s to dev log.""" + symbol_counter = StaleItemCounter() + def __init__(self): - self._init_task = bot.instance.loop.create_task( + self._init_task = scheduling.create_task( self._init_channel(), - name="StaleInventoryNotifier channel init" + name="StaleInventoryNotifier channel init", + event_loop=bot.instance.loop, ) self._warned_urls = set() @@ -38,13 +43,16 @@ class StaleInventoryNotifier: async def send_warning(self, doc_item: _cog.DocItem) -> None: """Send a warning to dev log if one wasn't already sent for `item`'s url.""" if doc_item.url not in self._warned_urls: - self._warned_urls.add(doc_item.url) - await self._init_task - embed = discord.Embed( - description=f"Doc item `{doc_item.symbol_id=}` present in loaded documentation inventories " - f"not found on [site]({doc_item.url}), inventories may need to be refreshed." - ) - await self._dev_log.send(embed=embed) + # Only warn if the item got less than 3 warnings + # or if it has been more than 3 weeks since the last warning + if await self.symbol_counter.increment_for(doc_item) < 3: + self._warned_urls.add(doc_item.url) + await self._init_task + embed = discord.Embed( + description=f"Doc item `{doc_item.symbol_id=}` present in loaded documentation inventories " + f"not found on [site]({doc_item.url}), inventories may need to be refreshed." + ) + await self._dev_log.send(embed=embed) class QueueItem(NamedTuple): @@ -101,7 +109,7 @@ class BatchParser: if doc_item not in self._item_futures and doc_item not in self._queue: self._item_futures[doc_item].user_requested = True - async with bot.instance.http_session.get(doc_item.url) as response: + async with bot.instance.http_session.get(doc_item.url, raise_for_status=True) as response: soup = await bot.instance.loop.run_in_executor( None, BeautifulSoup, diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py index c54a3ee1c..ebf5f5932 100644 --- a/bot/exts/info/doc/_cog.py +++ b/bot/exts/info/doc/_cog.py @@ -1,7 +1,6 @@ from __future__ import annotations import asyncio -import logging import sys import textwrap from collections import defaultdict @@ -13,17 +12,21 @@ import aiohttp import discord from discord.ext import commands +from bot.api import ResponseCodeError from bot.bot import Bot from bot.constants import MODERATION_ROLES, RedirectOutput from bot.converters import Inventory, PackageName, ValidURL, allowed_strings +from bot.log import get_logger from bot.pagination import LinePaginator +from bot.utils import scheduling from bot.utils.lock import SharedEvent, lock from bot.utils.messages import send_denial, wait_for_deletion from bot.utils.scheduling import Scheduler + from . 
import NAMESPACE, PRIORITY_PACKAGES, _batch_parser, doc_cache -from ._inventory_parser import InventoryDict, fetch_inventory +from ._inventory_parser import InvalidHeaderError, InventoryDict, fetch_inventory -log = logging.getLogger(__name__) +log = get_logger(__name__) # symbols with a group contained here will get the group prefixed on duplicates FORCE_PREFIX_GROUPS = ( @@ -75,9 +78,10 @@ class DocCog(commands.Cog): self.refresh_event.set() self.symbol_get_event = SharedEvent() - self.init_refresh_task = self.bot.loop.create_task( + self.init_refresh_task = scheduling.create_task( self.init_refresh_inventory(), - name="Doc inventory init" + name="Doc inventory init", + event_loop=self.bot.loop, ) @lock(NAMESPACE, COMMAND_LOCK_SINGLETON, raise_error=True) @@ -135,7 +139,12 @@ class DocCog(commands.Cog): The first attempt is rescheduled to execute in `FETCH_RESCHEDULE_DELAY.first` minutes, the subsequent attempts in `FETCH_RESCHEDULE_DELAY.repeated` minutes. """ - package = await fetch_inventory(inventory_url) + try: + package = await fetch_inventory(inventory_url) + except InvalidHeaderError as e: + # Do not reschedule if the header is invalid, as the request went through but the contents are invalid. + log.warning(f"Invalid inventory header at {inventory_url}. Reason: {e}") + return if not package: if api_package_name in self.inventory_scheduler: @@ -150,6 +159,8 @@ class DocCog(commands.Cog): self.update_or_reschedule_inventory(api_package_name, base_url, inventory_url), ) else: + if not base_url: + base_url = self.base_url_from_inventory_url(inventory_url) self.update_single(api_package_name, base_url, package) def ensure_unique_symbol_name(self, package_name: str, group_name: str, symbol_name: str) -> str: @@ -341,14 +352,22 @@ class DocCog(commands.Cog): if doc_embed is None: error_message = await send_denial(ctx, "No documentation found for the requested symbol.") await wait_for_deletion(error_message, (ctx.author.id,), timeout=NOT_FOUND_DELETE_DELAY) - with suppress(discord.NotFound): - await ctx.message.delete() - with suppress(discord.NotFound): - await error_message.delete() + + # Make sure that we won't cause a ghost-ping by deleting the message + if not (ctx.message.mentions or ctx.message.role_mentions): + with suppress(discord.NotFound): + await ctx.message.delete() + await error_message.delete() + else: msg = await ctx.send(embed=doc_embed) await wait_for_deletion(msg, (ctx.author.id,)) + @staticmethod + def base_url_from_inventory_url(inventory_url: str) -> str: + """Get a base url from the url to an objects inventory by removing the last path segment.""" + return inventory_url.removesuffix("/").rsplit("/", maxsplit=1)[0] + "/" + @docs_group.command(name="setdoc", aliases=("s",)) @commands.has_any_role(*MODERATION_ROLES) @lock(NAMESPACE, COMMAND_LOCK_SINGLETON, raise_error=True) @@ -356,21 +375,21 @@ class DocCog(commands.Cog): self, ctx: commands.Context, package_name: PackageName, - base_url: ValidURL, inventory: Inventory, + base_url: ValidURL = "", ) -> None: """ Adds a new documentation metadata object to the site's database. The database will update the object, should an existing item with the specified `package_name` already exist. + If the base url is not specified, a default created by removing the last segment of the inventory url is used. 
Example: !docs setdoc \ python \ - https://docs.python.org/3/ \ https://docs.python.org/3/objects.inv """ - if not base_url.endswith("/"): + if base_url and not base_url.endswith("/"): raise commands.BadArgument("The base url must end with a slash.") inventory_url, inventory_dict = inventory body = { @@ -378,13 +397,22 @@ class DocCog(commands.Cog): "base_url": base_url, "inventory_url": inventory_url } - await self.bot.api_client.post("bot/documentation-links", json=body) + try: + await self.bot.api_client.post("bot/documentation-links", json=body) + except ResponseCodeError as err: + if err.status == 400 and "already exists" in err.response_json.get("package", [""])[0]: + log.info(f"Ignoring HTTP 400 as package {package_name} has already been added.") + await ctx.send(f"Package {package_name} has already been added.") + return + raise log.info( f"User @{ctx.author} ({ctx.author.id}) added a new documentation package:\n" + "\n".join(f"{key}: {value}" for key, value in body.items()) ) + if not base_url: + base_url = self.base_url_from_inventory_url(inventory_url) self.update_single(package_name, base_url, inventory_dict) await ctx.send(f"Added the package `{package_name}` to the database and updated the inventories.") @@ -436,6 +464,7 @@ class DocCog(commands.Cog): ) -> None: """Clear the persistent redis cache for `package`.""" if await doc_cache.delete(package_name): + await self.item_fetcher.stale_inventory_notifier.symbol_counter.delete() await ctx.send(f"Successfully cleared the cache for `{package_name}`.") else: await ctx.send("No keys matching the package found.") @@ -444,4 +473,4 @@ class DocCog(commands.Cog): """Clear scheduled inventories, queued symbols and cleanup task on cog unload.""" self.inventory_scheduler.cancel_all() self.init_refresh_task.cancel() - asyncio.create_task(self.item_fetcher.clear(), name="DocCog.item_fetcher unload clear") + scheduling.create_task(self.item_fetcher.clear(), name="DocCog.item_fetcher unload clear") diff --git a/bot/exts/info/doc/_html.py b/bot/exts/info/doc/_html.py index 94efd81b7..ca0a0ac4a 100644 --- a/bot/exts/info/doc/_html.py +++ b/bot/exts/info/doc/_html.py @@ -1,4 +1,3 @@ -import logging import re from functools import partial from typing import Callable, Container, Iterable, List, Union @@ -6,9 +5,11 @@ from typing import Callable, Container, Iterable, List, Union from bs4 import BeautifulSoup from bs4.element import NavigableString, PageElement, SoupStrainer, Tag +from bot.log import get_logger + from . 
import MAX_SIGNATURE_AMOUNT -log = logging.getLogger(__name__) +log = get_logger(__name__) _UNWANTED_SIGNATURE_SYMBOLS_RE = re.compile(r"\[source]|\\\\|¶") _SEARCH_END_TAG_ATTRS = ( diff --git a/bot/exts/info/doc/_inventory_parser.py b/bot/exts/info/doc/_inventory_parser.py index 80d5841a0..e69246d47 100644 --- a/bot/exts/info/doc/_inventory_parser.py +++ b/bot/exts/info/doc/_inventory_parser.py @@ -1,4 +1,3 @@ -import logging import re import zlib from collections import defaultdict @@ -7,8 +6,9 @@ from typing import AsyncIterator, DefaultDict, List, Optional, Tuple import aiohttp import bot +from bot.log import get_logger -log = logging.getLogger(__name__) +log = get_logger(__name__) FAILED_REQUEST_ATTEMPTS = 3 _V2_LINE_RE = re.compile(r'(?x)(.+?)\s+(\S*:\S*)\s+(-?\d+)\s+?(\S*)\s+(.*)') @@ -16,6 +16,10 @@ _V2_LINE_RE = re.compile(r'(?x)(.+?)\s+(\S*:\S*)\s+(-?\d+)\s+?(\S*)\s+(.*)') InventoryDict = DefaultDict[str, List[Tuple[str, str]]] +class InvalidHeaderError(Exception): + """Raised when an inventory file has an invalid header.""" + + class ZlibStreamReader: """Class used for decoding zlib data of a stream line by line.""" @@ -80,19 +84,25 @@ async def _fetch_inventory(url: str) -> InventoryDict: stream = response.content inventory_header = (await stream.readline()).decode().rstrip() - inventory_version = int(inventory_header[-1:]) - await stream.readline() # skip project name - await stream.readline() # skip project version + try: + inventory_version = int(inventory_header[-1:]) + except ValueError: + raise InvalidHeaderError("Unable to convert inventory version header.") + + has_project_header = (await stream.readline()).startswith(b"# Project") + has_version_header = (await stream.readline()).startswith(b"# Version") + if not (has_project_header and has_version_header): + raise InvalidHeaderError("Inventory missing project or version header.") if inventory_version == 1: return await _load_v1(stream) elif inventory_version == 2: if b"zlib" not in await stream.readline(): - raise ValueError(f"Invalid inventory file at url {url}.") + raise InvalidHeaderError("'zlib' not found in header of compressed inventory.") return await _load_v2(stream) - raise ValueError(f"Invalid inventory file at url {url}.") + raise InvalidHeaderError("Incompatible inventory version.") async def fetch_inventory(url: str) -> Optional[InventoryDict]: @@ -115,6 +125,8 @@ async def fetch_inventory(url: str) -> Optional[InventoryDict]: f"Failed to get inventory from {url}; " f"trying again ({attempt}/{FAILED_REQUEST_ATTEMPTS})." ) + except InvalidHeaderError: + raise except Exception: log.exception( f"An unexpected error has occurred during fetching of {url}; " diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py index bf840b96f..6ab38eb3d 100644 --- a/bot/exts/info/doc/_parsing.py +++ b/bot/exts/info/doc/_parsing.py @@ -1,6 +1,5 @@ from __future__ import annotations -import logging import re import string import textwrap @@ -10,14 +9,17 @@ from typing import Collection, Iterable, Iterator, List, Optional, TYPE_CHECKING from bs4 import BeautifulSoup from bs4.element import NavigableString, Tag +from bot.log import get_logger from bot.utils.helpers import find_nth_occurrence + from . 
import MAX_SIGNATURE_AMOUNT from ._html import get_dd_description, get_general_description, get_signatures from ._markdown import DocMarkdownConverter + if TYPE_CHECKING: from ._cog import DocItem -log = logging.getLogger(__name__) +log = get_logger(__name__) _WHITESPACE_AFTER_NEWLINES_RE = re.compile(r"(?<=\n\n)(\s+)") _PARAMETERS_RE = re.compile(r"\((.+)\)") @@ -34,7 +36,7 @@ _EMBED_CODE_BLOCK_LINE_LENGTH = 61 # _MAX_SIGNATURE_AMOUNT code block wrapped lines with py syntax highlight _MAX_SIGNATURES_LENGTH = (_EMBED_CODE_BLOCK_LINE_LENGTH + 8) * MAX_SIGNATURE_AMOUNT # Maximum embed description length - signatures on top -_MAX_DESCRIPTION_LENGTH = 2048 - _MAX_SIGNATURES_LENGTH +_MAX_DESCRIPTION_LENGTH = 4096 - _MAX_SIGNATURES_LENGTH _TRUNCATE_STRIP_CHARACTERS = "!?:;." + string.whitespace BracketPair = namedtuple("BracketPair", ["opening_bracket", "closing_bracket"]) diff --git a/bot/exts/info/doc/_redis_cache.py b/bot/exts/info/doc/_redis_cache.py index ad764816f..107f2344f 100644 --- a/bot/exts/info/doc/_redis_cache.py +++ b/bot/exts/info/doc/_redis_cache.py @@ -4,6 +4,7 @@ import datetime from typing import Optional, TYPE_CHECKING from async_rediscache.types.base import RedisObject, namespace_lock + if TYPE_CHECKING: from ._cog import DocItem @@ -24,8 +25,7 @@ class DocRedisCache(RedisObject): All keys from a single page are stored together, expiring a week after the first set. """ - url_key = remove_suffix(item.relative_url_path, ".html") - redis_key = f"{self.namespace}:{item.package}:{url_key}" + redis_key = f"{self.namespace}:{item_key(item)}" needs_expire = False with await self._get_pool_connection() as connection: @@ -43,10 +43,36 @@ class DocRedisCache(RedisObject): @namespace_lock async def get(self, item: DocItem) -> Optional[str]: """Return the Markdown content of the symbol `item` if it exists.""" - url_key = remove_suffix(item.relative_url_path, ".html") + with await self._get_pool_connection() as connection: + return await connection.hget(f"{self.namespace}:{item_key(item)}", item.symbol_id, encoding="utf8") + + @namespace_lock + async def delete(self, package: str) -> bool: + """Remove all values for `package`; return True if at least one key was deleted, False otherwise.""" + with await self._get_pool_connection() as connection: + package_keys = [ + package_key async for package_key in connection.iscan(match=f"{self.namespace}:{package}:*") + ] + if package_keys: + await connection.delete(*package_keys) + return True + return False + +class StaleItemCounter(RedisObject): + """Manage increment counters for stale `DocItem`s.""" + + @namespace_lock + async def increment_for(self, item: DocItem) -> int: + """ + Increment the counter for `item` by 1, set it to expire in 3 weeks and return the new value. + + If the counter didn't exist, initialize it with 1. 
+ """ + key = f"{self.namespace}:{item_key(item)}:{item.symbol_id}" with await self._get_pool_connection() as connection: - return await connection.hget(f"{self.namespace}:{item.package}:{url_key}", item.symbol_id, encoding="utf8") + await connection.expire(key, WEEK_SECONDS * 3) + return int(await connection.incr(key)) @namespace_lock async def delete(self, package: str) -> bool: @@ -61,10 +87,6 @@ class DocRedisCache(RedisObject): return False -def remove_suffix(string: str, suffix: str) -> str: - """Remove `suffix` from end of `string`.""" - # TODO replace usages with str.removesuffix on 3.9 - if string.endswith(suffix): - return string[:-len(suffix)] - else: - return string +def item_key(item: DocItem) -> str: + """Get the redis redis key string from `item`.""" + return f"{item.package}:{item.relative_url_path.removesuffix('.html')}" diff --git a/bot/exts/info/help.py b/bot/exts/info/help.py index 3a05b2c8a..743dfdd3f 100644 --- a/bot/exts/info/help.py +++ b/bot/exts/info/help.py @@ -1,21 +1,22 @@ import itertools -import logging +import re from collections import namedtuple from contextlib import suppress from typing import List, Union from discord import Colour, Embed from discord.ext.commands import Bot, Cog, Command, CommandError, Context, DisabledCommand, Group, HelpCommand -from fuzzywuzzy import fuzz, process -from fuzzywuzzy.utils import full_process +from rapidfuzz import fuzz, process +from rapidfuzz.utils import default_process from bot import constants -from bot.constants import Channels, STAFF_ROLES +from bot.constants import Channels, STAFF_PARTNERS_COMMUNITY_ROLES from bot.decorators import redirect_output +from bot.log import get_logger from bot.pagination import LinePaginator from bot.utils.messages import wait_for_deletion -log = logging.getLogger(__name__) +log = get_logger(__name__) COMMANDS_PER_PAGE = 8 PREFIX = constants.Bot.prefix @@ -54,7 +55,7 @@ class CustomHelpCommand(HelpCommand): def __init__(self): super().__init__(command_attrs={"help": "Shows help for bot commands"}) - @redirect_output(destination_channel=Channels.bot_commands, bypass_roles=STAFF_ROLES) + @redirect_output(destination_channel=Channels.bot_commands, bypass_roles=STAFF_PARTNERS_COMMUNITY_ROLES) async def command_callback(self, ctx: Context, *, command: str = None) -> None: """Attempts to match the provided query with a valid command or cog.""" # the only reason we need to tamper with this is because d.py does not support "categories", @@ -125,16 +126,9 @@ class CustomHelpCommand(HelpCommand): Will return an instance of the `HelpQueryNotFound` exception with the error message and possible matches. 
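# [Editor's illustration, not part of the diff] StaleItemCounter.increment_for above boils down to an
# "increment a per-symbol counter with a rolling three-week expiry" pattern; send_warning only posts to
# the dev log while that counter stays below 3. A minimal synchronous sketch with redis-py (the key name
# and client are stand-ins; the bot itself goes through async_rediscache with a namespace lock):
import redis

WEEK_SECONDS = 7 * 24 * 60 * 60

def increment_with_expiry(client: redis.Redis, key: str) -> int:
    count = client.incr(key)              # creates the key at 1 if it did not exist yet
    client.expire(key, WEEK_SECONDS * 3)  # (re)arm the three-week expiry window
    return count

# Incrementing before setting the expiry guarantees the key exists when EXPIRE is applied.
# Hypothetical usage:
#     if increment_with_expiry(redis.Redis(), "doc:stale:python:print") < 3:
#         ...send the stale-inventory embed...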
""" - choices = await self.get_all_help_choices() - - # Run fuzzywuzzy's processor beforehand, and avoid matching if processed string is empty - # This avoids fuzzywuzzy from raising a warning on inputs with only non-alphanumeric characters - if (processed := full_process(string)): - result = process.extractBests(processed, choices, scorer=fuzz.ratio, score_cutoff=60, processor=None) - else: - result = [] - - return HelpQueryNotFound(f'Query "{string}" not found.', dict(result)) + choices = list(await self.get_all_help_choices()) + result = process.extract(default_process(string), choices, scorer=fuzz.ratio, score_cutoff=60, processor=None) + return HelpQueryNotFound(f'Query "{string}" not found.', {choice[0]: choice[1] for choice in result}) async def subcommand_not_found(self, command: Command, string: str) -> "HelpQueryNotFound": """ @@ -186,7 +180,10 @@ class CustomHelpCommand(HelpCommand): except CommandError: command_details += NOT_ALLOWED_TO_RUN_MESSAGE - command_details += f"*{command.help or 'No details provided.'}*\n" + # Remove line breaks from docstrings, if not used to separate paragraphs. + # Allow overriding this behaviour via putting \u2003 at the start of a line. + formatted_doc = re.sub("(?<!\n)\n(?![\n\u2003])", " ", command.help) + command_details += f"*{formatted_doc or 'No details provided.'}*\n" embed.description = command_details return embed diff --git a/bot/exts/info/information.py b/bot/exts/info/information.py index 834fee1b4..0dcb8de11 100644 --- a/bot/exts/info/information.py +++ b/bot/exts/info/information.py @@ -1,25 +1,28 @@ import colorsys -import logging import pprint import textwrap from collections import defaultdict -from typing import Any, DefaultDict, Dict, Mapping, Optional, Tuple, Union +from typing import Any, DefaultDict, Mapping, Optional, Tuple, Union -import fuzzywuzzy +import rapidfuzz from discord import AllowedMentions, Colour, Embed, Guild, Message, Role from discord.ext.commands import BucketType, Cog, Context, Paginator, command, group, has_any_role +from discord.utils import escape_markdown from bot import constants from bot.api import ResponseCodeError from bot.bot import Bot -from bot.converters import FetchedMember +from bot.converters import MemberOrUser from bot.decorators import in_whitelist +from bot.errors import NonExistentRoleError +from bot.log import get_logger from bot.pagination import LinePaginator from bot.utils.channel import is_mod_channel, is_staff_channel from bot.utils.checks import cooldown_with_role_bypass, has_no_roles_check, in_whitelist_check -from bot.utils.time import humanize_delta, time_since +from bot.utils.members import get_or_fetch_member +from bot.utils.time import TimestampFormats, discord_timestamp, humanize_delta -log = logging.getLogger(__name__) +log = get_logger(__name__) class Information(Cog): @@ -42,21 +45,36 @@ class Information(Cog): return channel_counter @staticmethod - def get_member_counts(guild: Guild) -> Dict[str, int]: + def join_role_stats(role_ids: list[int], guild: Guild, name: Optional[str] = None) -> dict[str, int]: + """Return a dictionary with the number of `members` of each role given, and the `name` for this joined group.""" + member_count = 0 + for role_id in role_ids: + if (role := guild.get_role(role_id)) is not None: + member_count += len(role.members) + else: + raise NonExistentRoleError(role_id) + return {name or role.name.title(): member_count} + + @staticmethod + def get_member_counts(guild: Guild) -> dict[str, int]: """Return the total number of members for certain 
roles in `guild`.""" - roles = ( - guild.get_role(role_id) for role_id in ( - constants.Roles.helpers, constants.Roles.moderators, constants.Roles.admins, - constants.Roles.owners, constants.Roles.contributors, - ) + role_ids = [constants.Roles.helpers, constants.Roles.mod_team, constants.Roles.admins, + constants.Roles.owners, constants.Roles.contributors] + + role_stats = {} + for role_id in role_ids: + role_stats.update(Information.join_role_stats([role_id], guild)) + role_stats.update( + Information.join_role_stats([constants.Roles.project_leads, constants.Roles.domain_leads], guild, "Leads") ) - return {role.name.title(): len(role.members) for role in roles} + return role_stats def get_extended_server_info(self, ctx: Context) -> str: """Return additional server info only visible in moderation channels.""" talentpool_info = "" if cog := self.bot.get_cog("Talentpool"): - talentpool_info = f"Nominated: {len(cog.watched_users)}\n" + num_nominated = len(cog.cache) if cog.cache else "-" + talentpool_info = f"Nominated: {num_nominated}\n" bb_info = "" if cog := self.bot.get_cog("Big Brother"): @@ -79,7 +97,7 @@ class Information(Cog): {python_general.mention} cooldown: {python_general.slowmode_delay}s """) - @has_any_role(*constants.STAFF_ROLES) + @has_any_role(*constants.STAFF_PARTNERS_COMMUNITY_ROLES) @command(name="roles") async def roles_info(self, ctx: Context) -> None: """Returns a list of all roles and their corresponding IDs.""" @@ -99,7 +117,7 @@ class Information(Cog): await LinePaginator.paginate(role_list, ctx, embed, empty=False) - @has_any_role(*constants.STAFF_ROLES) + @has_any_role(*constants.STAFF_PARTNERS_COMMUNITY_ROLES) @command(name="role") async def role_info(self, ctx: Context, *roles: Union[Role, str]) -> None: """ @@ -117,9 +135,9 @@ class Information(Cog): parsed_roles.add(role_name) continue - match = fuzzywuzzy.process.extractOne( + match = rapidfuzz.process.extractOne( role_name, all_roles, score_cutoff=80, - scorer=fuzzywuzzy.fuzz.ratio + scorer=rapidfuzz.fuzz.ratio ) if not match: @@ -154,7 +172,7 @@ class Information(Cog): """Returns an embed full of server information.""" embed = Embed(colour=Colour.blurple(), title="Server Information") - created = time_since(ctx.guild.created_at, precision="days") + created = discord_timestamp(ctx.guild.created_at, TimestampFormats.RELATIVE) region = ctx.guild.region num_roles = len(ctx.guild.roles) - 1 # Exclude @everyone @@ -171,21 +189,21 @@ class Information(Cog): online_presences = py_invite.approximate_presence_count offline_presences = py_invite.approximate_member_count - online_presences member_status = ( - f"{constants.Emojis.status_online} {online_presences} " - f"{constants.Emojis.status_offline} {offline_presences}" + f"{constants.Emojis.status_online} {online_presences:,} " + f"{constants.Emojis.status_offline} {offline_presences:,}" ) - embed.description = textwrap.dedent(f""" - Created: {created} - Voice region: {region}\ - {features} - Roles: {num_roles} - Member status: {member_status} - """) - embed.set_thumbnail(url=ctx.guild.icon_url) + embed.description = ( + f"Created: {created}" + f"\nVoice region: {region}" + f"{features}" + f"\nRoles: {num_roles}" + f"\nMember status: {member_status}" + ) + embed.set_thumbnail(url=ctx.guild.icon.url) # Members - total_members = ctx.guild.member_count + total_members = f"{ctx.guild.member_count:,}" member_counts = self.get_member_counts(ctx.guild) member_info = "\n".join(f"{role}: {count}" for role, count in member_counts.items()) embed.add_field(name=f"Members: 
{total_members}", value=member_info) @@ -205,8 +223,13 @@ class Information(Cog): await ctx.send(embed=embed) @command(name="user", aliases=["user_info", "member", "member_info", "u"]) - async def user_info(self, ctx: Context, user: FetchedMember = None) -> None: + async def user_info(self, ctx: Context, user_or_message: Union[MemberOrUser, Message] = None) -> None: """Returns info about a user.""" + if isinstance(user_or_message, Message): + user = user_or_message.author + else: + user = user_or_message + if user is None: user = ctx.author @@ -216,19 +239,20 @@ class Information(Cog): return # Will redirect to #bot-commands if it fails. - if in_whitelist_check(ctx, roles=constants.STAFF_ROLES): + if in_whitelist_check(ctx, roles=constants.STAFF_PARTNERS_COMMUNITY_ROLES): embed = await self.create_user_embed(ctx, user) await ctx.send(embed=embed) - async def create_user_embed(self, ctx: Context, user: FetchedMember) -> Embed: + async def create_user_embed(self, ctx: Context, user: MemberOrUser) -> Embed: """Creates an embed containing information on the `user`.""" - on_server = bool(ctx.guild.get_member(user.id)) + on_server = bool(await get_or_fetch_member(ctx.guild, user.id)) - created = time_since(user.created_at, max_units=3) + created = discord_timestamp(user.created_at, TimestampFormats.RELATIVE) name = str(user) if on_server and user.nick: name = f"{user.nick} ({name})" + name = escape_markdown(name) if user.public_flags.verified_bot: name += f" {constants.Emojis.verified_bot}" @@ -241,11 +265,15 @@ class Information(Cog): if is_set and (emoji := getattr(constants.Emojis, f"badge_{badge}", None)): badges.append(emoji) - activity = await self.user_messages(user) - if on_server: - joined = time_since(user.joined_at, max_units=3) - roles = ", ".join(role.mention for role in user.roles[1:]) + if user.joined_at: + joined = discord_timestamp(user.joined_at, TimestampFormats.RELATIVE) + else: + joined = "Unable to get join date" + + # The 0 is for excluding the default @everyone role, + # and the -1 is for reversing the order of the roles to highest to lowest in hierarchy. 
+ roles = ", ".join(role.mention for role in user.roles[:0:-1]) membership = {"Joined": joined, "Verified": not user.pending, "Roles": roles or None} if not is_mod_channel(ctx.channel): membership.pop("Verified") @@ -272,8 +300,7 @@ class Information(Cog): # Show more verbose output in moderation channels for infractions and nominations if is_mod_channel(ctx.channel): - fields.append(activity) - + fields.append(await self.user_messages(user)) fields.append(await self.expanded_user_infraction_counts(user)) fields.append(await self.user_nomination_counts(user)) else: @@ -288,12 +315,12 @@ class Information(Cog): for field_name, field_content in fields: embed.add_field(name=field_name, value=field_content, inline=False) - embed.set_thumbnail(url=user.avatar_url_as(static_format="png")) + embed.set_thumbnail(url=user.display_avatar.url) embed.colour = user.colour if user.colour != Colour.default() else Colour.blurple() return embed - async def basic_user_infraction_counts(self, user: FetchedMember) -> Tuple[str, str]: + async def basic_user_infraction_counts(self, user: MemberOrUser) -> Tuple[str, str]: """Gets the total and active infraction counts for the given `member`.""" infractions = await self.bot.api_client.get( 'bot/infractions', @@ -310,7 +337,7 @@ class Information(Cog): return "Infractions", infraction_output - async def expanded_user_infraction_counts(self, user: FetchedMember) -> Tuple[str, str]: + async def expanded_user_infraction_counts(self, user: MemberOrUser) -> Tuple[str, str]: """ Gets expanded infraction counts for the given `member`. @@ -351,7 +378,7 @@ class Information(Cog): return "Infractions", "\n".join(infraction_output) - async def user_nomination_counts(self, user: FetchedMember) -> Tuple[str, str]: + async def user_nomination_counts(self, user: MemberOrUser) -> Tuple[str, str]: """Gets the active and historical nomination counts for the given `member`.""" nominations = await self.bot.api_client.get( 'bot/nominations', @@ -376,7 +403,7 @@ class Information(Cog): return "Nominations", "\n".join(output) - async def user_messages(self, user: FetchedMember) -> Tuple[Union[bool, str], Tuple[str, str]]: + async def user_messages(self, user: MemberOrUser) -> Tuple[Union[bool, str], Tuple[str, str]]: """ Gets the amount of messages for `member`. @@ -435,11 +462,12 @@ class Information(Cog): # remove trailing whitespace return out.rstrip() - @cooldown_with_role_bypass(2, 60 * 3, BucketType.member, bypass_roles=constants.STAFF_ROLES) - @group(invoke_without_command=True) - @in_whitelist(channels=(constants.Channels.bot_commands,), roles=constants.STAFF_ROLES) - async def raw(self, ctx: Context, *, message: Message, json: bool = False) -> None: - """Shows information about the raw API response.""" + async def send_raw_content(self, ctx: Context, message: Message, json: bool = False) -> None: + """ + Send information about the raw API response for a `discord.Message`. + + If `json` is True, send the information in a copy-pasteable Python format. 
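# [Editor's illustration, not part of the diff] A quick check of the reverse slice used in the
# create_user_embed change above: roles[:0:-1] walks the role list from the end down to (but excluding)
# index 0, i.e. highest role first with @everyone dropped.
roles = ["@everyone", "Helpers", "Moderators", "Admins"]  # stand-ins for Role objects
assert roles[:0:-1] == ["Admins", "Moderators", "Helpers"]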
+ """ if ctx.author not in message.channel.members: await ctx.send(":x: You do not have permissions to see the channel this message is in.") return @@ -475,10 +503,17 @@ class Information(Cog): for page in paginator.pages: await ctx.send(page, allowed_mentions=AllowedMentions.none()) + @cooldown_with_role_bypass(2, 60 * 3, BucketType.member, bypass_roles=constants.STAFF_PARTNERS_COMMUNITY_ROLES) + @group(invoke_without_command=True) + @in_whitelist(channels=(constants.Channels.bot_commands,), roles=constants.STAFF_PARTNERS_COMMUNITY_ROLES) + async def raw(self, ctx: Context, message: Message) -> None: + """Shows information about the raw API response.""" + await self.send_raw_content(ctx, message) + @raw.command() async def json(self, ctx: Context, message: Message) -> None: """Shows information about the raw API response in a copy-pasteable Python format.""" - await ctx.invoke(self.raw, message=message, json=True) + await self.send_raw_content(ctx, message, json=True) def setup(bot: Bot) -> None: diff --git a/bot/exts/info/pep.py b/bot/exts/info/pep.py index 8ac96bbdb..259095b50 100644 --- a/bot/exts/info/pep.py +++ b/bot/exts/info/pep.py @@ -1,4 +1,3 @@ -import logging from datetime import datetime, timedelta from email.parser import HeaderParser from io import StringIO @@ -9,9 +8,11 @@ from discord.ext.commands import Cog, Context, command from bot.bot import Bot from bot.constants import Keys -from bot.utils.cache import AsyncCache +from bot.log import get_logger +from bot.utils import scheduling +from bot.utils.caching import AsyncCache -log = logging.getLogger(__name__) +log = get_logger(__name__) ICON_URL = "https://www.python.org/static/opengraph-icon-200x200.png" BASE_PEP_URL = "http://www.python.org/dev/peps/pep-" @@ -32,7 +33,7 @@ class PythonEnhancementProposals(Cog): self.peps: Dict[int, str] = {} # To avoid situations where we don't have last datetime, set this to now. 
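# [Editor's illustration, not part of the diff] A recurring change in this merge is swapping
# bot.loop.create_task(...) for scheduling.create_task(..., event_loop=...). The sketch below is an
# assumption about what such a wrapper typically adds (a done-callback so fire-and-forget tasks do not
# silently swallow exceptions); it is not the actual bot.utils.scheduling implementation.
import asyncio
import logging

def _log_task_exception(task: asyncio.Task) -> None:
    if not task.cancelled() and task.exception() is not None:
        logging.getLogger(__name__).error("Task %r failed", task.get_name(), exc_info=task.exception())

def create_task(coro, *, event_loop: asyncio.AbstractEventLoop, name: str = None) -> asyncio.Task:
    task = event_loop.create_task(coro, name=name)
    task.add_done_callback(_log_task_exception)
    return task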
self.last_refreshed_peps: datetime = datetime.now() - self.bot.loop.create_task(self.refresh_peps_urls()) + scheduling.create_task(self.refresh_peps_urls(), event_loop=self.bot.loop) async def refresh_peps_urls(self) -> None: """Refresh PEP URLs listing in every 3 hours.""" diff --git a/bot/exts/info/pypi.py b/bot/exts/info/pypi.py index 2e42e7d6b..c3d2e2a3c 100644 --- a/bot/exts/info/pypi.py +++ b/bot/exts/info/pypi.py @@ -1,14 +1,16 @@ import itertools -import logging import random import re +from contextlib import suppress -from discord import Embed +from discord import Embed, NotFound from discord.ext.commands import Cog, Context, command from discord.utils import escape_markdown from bot.bot import Bot from bot.constants import Colours, NEGATIVE_REPLIES, RedirectOutput +from bot.log import get_logger +from bot.utils.messages import wait_for_deletion URL = "https://pypi.org/pypi/{package}/json" PYPI_ICON = "https://cdn.discordapp.com/emojis/766274397257334814.png" @@ -18,7 +20,7 @@ PYPI_COLOURS = itertools.cycle((Colours.yellow, Colours.blue, Colours.white)) ILLEGAL_CHARACTERS = re.compile(r"[^-_.a-zA-Z0-9]+") INVALID_INPUT_DELETE_DELAY = RedirectOutput.delete_delay -log = logging.getLogger(__name__) +log = get_logger(__name__) class PyPi(Cog): @@ -67,8 +69,15 @@ class PyPi(Cog): log.trace(f"Error when fetching PyPi package: {response.status}.") if error: - await ctx.send(embed=embed, delete_after=INVALID_INPUT_DELETE_DELAY) - await ctx.message.delete(delay=INVALID_INPUT_DELETE_DELAY) + error_message = await ctx.send(embed=embed) + await wait_for_deletion(error_message, (ctx.author.id,), timeout=INVALID_INPUT_DELETE_DELAY) + + # Make sure that we won't cause a ghost-ping by deleting the message + if not (ctx.message.mentions or ctx.message.role_mentions): + with suppress(NotFound): + await ctx.message.delete() + await error_message.delete() + else: await ctx.send(embed=embed) diff --git a/bot/exts/info/python_news.py b/bot/exts/info/python_news.py index 0ab5738a4..2fad9d2ab 100644 --- a/bot/exts/info/python_news.py +++ b/bot/exts/info/python_news.py @@ -1,4 +1,4 @@ -import logging +import re import typing as t from datetime import date, datetime @@ -10,6 +10,8 @@ from discord.ext.tasks import loop from bot import constants from bot.bot import Bot +from bot.log import get_logger +from bot.utils import scheduling from bot.utils.webhooks import send_webhook PEPS_RSS_URL = "https://www.python.org/dev/peps/peps.rss/" @@ -21,7 +23,15 @@ THREAD_URL = "https://mail.python.org/archives/list/{list}@python.org/thread/{id AVATAR_URL = "https://www.python.org/static/opengraph-icon-200x200.png" -log = logging.getLogger(__name__) +# By first matching everything within a codeblock, +# when matching markdown it won't be within a codeblock +MARKDOWN_REGEX = re.compile( + r"(?P<codeblock>`.*?`)" # matches everything within a codeblock + r"|(?P<markdown>(?<!\\)[_|])", # matches unescaped `_` and `|` + re.DOTALL # required to support multi-line codeblocks +) + +log = get_logger(__name__) class PythonNews(Cog): @@ -32,8 +42,8 @@ class PythonNews(Cog): self.webhook_names = {} self.webhook: t.Optional[discord.Webhook] = None - self.bot.loop.create_task(self.get_webhook_names()) - self.bot.loop.create_task(self.get_webhook_and_channel()) + scheduling.create_task(self.get_webhook_names(), event_loop=self.bot.loop) + scheduling.create_task(self.get_webhook_and_channel(), event_loop=self.bot.loop) async def start_tasks(self) -> None: """Start the tasks for fetching new PEPs and mailing list messages.""" @@ -72,6 
+82,14 @@ class PythonNews(Cog): if mail["name"].split("@")[0] in constants.PythonNews.mail_lists: self.webhook_names[mail["name"].split("@")[0]] = mail["display_name"] + @staticmethod + def escape_markdown(content: str) -> str: + """Escape the markdown underlines and spoilers that aren't in codeblocks.""" + return MARKDOWN_REGEX.sub( + lambda match: match.group("codeblock") or "\\" + match.group("markdown"), + content + ) + async def post_pep_news(self) -> None: """Fetch new PEPs and when they don't have announcement in #python-news, create it.""" # Wait until everything is ready and http_session available @@ -102,8 +120,8 @@ class PythonNews(Cog): # Build an embed and send a webhook embed = discord.Embed( - title=new["title"], - description=new["summary"], + title=self.escape_markdown(new["title"]), + description=self.escape_markdown(new["summary"]), timestamp=new_datetime, url=new["link"], colour=constants.Colours.soft_green @@ -122,7 +140,7 @@ class PythonNews(Cog): self.bot.stats.incr("python_news.posted.pep") if msg.channel.is_news(): - log.trace("Publishing PEP annnouncement because it was in a news channel") + log.trace("Publishing PEP announcement because it was in a news channel") await msg.publish() # Apply new sent news to DB to avoid duplicate sending @@ -167,13 +185,13 @@ class PythonNews(Cog): ): continue - content = email_information["content"] + content = self.escape_markdown(email_information["content"]) link = THREAD_URL.format(id=thread["href"].split("/")[-2], list=maillist) # Build an embed and send a message to the webhook embed = discord.Embed( - title=thread_information["subject"], - description=content[:500] + f"... [continue reading]({link})" if len(content) > 500 else content, + title=self.escape_markdown(thread_information["subject"]), + description=content[:1000] + f"... 
[continue reading]({link})" if len(content) > 1000 else content, timestamp=new_date, url=link, colour=constants.Colours.soft_green diff --git a/bot/exts/info/site.py b/bot/exts/info/site.py index fb5b99086..e1f2f5153 100644 --- a/bot/exts/info/site.py +++ b/bot/exts/info/site.py @@ -1,15 +1,14 @@ -import logging - from discord import Colour, Embed from discord.ext.commands import Cog, Context, Greedy, group from bot.bot import Bot from bot.constants import URLs +from bot.log import get_logger from bot.pagination import LinePaginator -log = logging.getLogger(__name__) +log = get_logger(__name__) -PAGES_URL = f"{URLs.site_schema}{URLs.site}/pages" +BASE_URL = f"{URLs.site_schema}{URLs.site}" class Site(Cog): @@ -43,7 +42,7 @@ class Site(Cog): @site_group.command(name="resources", root_aliases=("resources", "resource")) async def site_resources(self, ctx: Context) -> None: """Info about the site's Resources page.""" - learning_url = f"{PAGES_URL}/resources" + learning_url = f"{BASE_URL}/resources" embed = Embed(title="Resources") embed.set_footer(text=f"{learning_url}") @@ -59,7 +58,7 @@ class Site(Cog): @site_group.command(name="tools", root_aliases=("tools",)) async def site_tools(self, ctx: Context) -> None: """Info about the site's Tools page.""" - tools_url = f"{PAGES_URL}/resources/tools" + tools_url = f"{BASE_URL}/resources/tools" embed = Embed(title="Tools") embed.set_footer(text=f"{tools_url}") @@ -74,7 +73,7 @@ class Site(Cog): @site_group.command(name="help") async def site_help(self, ctx: Context) -> None: """Info about the site's Getting Help page.""" - url = f"{PAGES_URL}/resources/guides/asking-good-questions" + url = f"{BASE_URL}/pages/guides/pydis-guides/asking-good-questions/" embed = Embed(title="Asking Good Questions") embed.set_footer(text=url) @@ -90,7 +89,7 @@ class Site(Cog): @site_group.command(name="faq", root_aliases=("faq",)) async def site_faq(self, ctx: Context) -> None: """Info about the site's FAQ page.""" - url = f"{PAGES_URL}/frequently-asked-questions" + url = f"{BASE_URL}/pages/frequently-asked-questions" embed = Embed(title="FAQ") embed.set_footer(text=url) @@ -107,13 +106,13 @@ class Site(Cog): @site_group.command(name="rules", aliases=("r", "rule"), root_aliases=("rules", "rule")) async def site_rules(self, ctx: Context, rules: Greedy[int]) -> None: """Provides a link to all rules or, if specified, displays specific rule(s).""" - rules_embed = Embed(title='Rules', color=Colour.blurple(), url=f'{PAGES_URL}/rules') + rules_embed = Embed(title='Rules', color=Colour.blurple(), url=f'{BASE_URL}/pages/rules') if not rules: # Rules were not submitted. Return the default description. rules_embed.description = ( "The rules and guidelines that apply to this community can be found on" - f" our [rules page]({PAGES_URL}/rules). We expect" + f" our [rules page]({BASE_URL}/pages/rules). We expect" " all members of the community to have read and understood these." 
) diff --git a/bot/exts/info/source.py b/bot/exts/info/source.py index ef07c77a1..8ce25b4e8 100644 --- a/bot/exts/info/source.py +++ b/bot/exts/info/source.py @@ -2,47 +2,16 @@ import inspect from pathlib import Path from typing import Optional, Tuple, Union -from discord import Embed, utils +from discord import Embed from discord.ext import commands from bot.bot import Bot from bot.constants import URLs +from bot.converters import SourceConverter SourceType = Union[commands.HelpCommand, commands.Command, commands.Cog, str, commands.ExtensionNotLoaded] -class SourceConverter(commands.Converter): - """Convert an argument into a help command, tag, command, or cog.""" - - @staticmethod - async def convert(ctx: commands.Context, argument: str) -> SourceType: - """Convert argument into source object.""" - if argument.lower() == "help": - return ctx.bot.help_command - - cog = ctx.bot.get_cog(argument) - if cog: - return cog - - cmd = ctx.bot.get_command(argument) - if cmd: - return cmd - - tags_cog = ctx.bot.get_cog("Tags") - show_tag = True - - if not tags_cog: - show_tag = False - elif argument.lower() in tags_cog._cache: - return argument.lower() - - escaped_arg = utils.escape_markdown(argument) - - raise commands.BadArgument( - f"Unable to convert '{escaped_arg}' to valid command{', tag,' if show_tag else ''} or Cog." - ) - - class BotSource(commands.Cog): """Displays information about the bot's source code.""" diff --git a/bot/exts/info/tags.py b/bot/exts/info/tags.py index bb91a8563..842647555 100644 --- a/bot/exts/info/tags.py +++ b/bot/exts/info/tags.py @@ -1,4 +1,3 @@ -import logging import re import time from pathlib import Path @@ -10,10 +9,11 @@ from discord.ext.commands import Cog, Context, group from bot import constants from bot.bot import Bot from bot.converters import TagNameConverter +from bot.log import get_logger from bot.pagination import LinePaginator from bot.utils.messages import wait_for_deletion -log = logging.getLogger(__name__) +log = get_logger(__name__) TEST_CHANNELS = ( constants.Channels.bot_commands, diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index dfb1afd19..80ba10112 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -1,5 +1,3 @@ -import asyncio -import logging import traceback from collections import namedtuple from datetime import datetime @@ -9,7 +7,7 @@ from typing import Optional, Union from aioredis import RedisError from async_rediscache import RedisCache from dateutil.relativedelta import relativedelta -from discord import Colour, Embed, Member, User +from discord import Colour, Embed, Forbidden, Member, TextChannel, User from discord.ext import tasks from discord.ext.commands import Cog, Context, group, has_any_role @@ -17,11 +15,15 @@ from bot.bot import Bot from bot.constants import Channels, Colours, Emojis, Event, Icons, MODERATION_ROLES, Roles from bot.converters import DurationDelta, Expiry from bot.exts.moderation.modlog import ModLog +from bot.log import get_logger +from bot.utils import scheduling from bot.utils.messages import format_user from bot.utils.scheduling import Scheduler -from bot.utils.time import humanize_delta, parse_duration_string, relativedelta_to_timedelta +from bot.utils.time import ( + TimestampFormats, discord_timestamp, humanize_delta, parse_duration_string, relativedelta_to_timedelta +) -log = logging.getLogger(__name__) +log = get_logger(__name__) REJECTION_MESSAGE = """ Hi, {user} - Thanks for your interest in our server! 
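# [Editor's illustration, not part of the diff] Several embeds in this merge switch from humanized
# deltas (time_since/humanize_delta) to discord_timestamp(..., TimestampFormats.RELATIVE), e.g. in
# information.py above. That helper lives in bot.utils.time; the underlying mechanism is Discord's
# timestamp markdown, <t:UNIX:STYLE>, rendered client-side. A hedged sketch of the idea, assuming naive
# datetimes are UTC (not the helper's actual code):
from datetime import datetime, timezone

def relative_timestamp(dt: datetime) -> str:
    unix = int(dt.replace(tzinfo=timezone.utc).timestamp())
    return f"<t:{unix}:R>"  # "R" renders as e.g. "2 years ago" in the Discord client

# relative_timestamp(datetime(2021, 10, 19)) == "<t:1634601600:R>"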
@@ -67,7 +69,7 @@ class Defcon(Cog): self.scheduler = Scheduler(self.__class__.__name__) - self.bot.loop.create_task(self._sync_settings()) + scheduling.create_task(self._sync_settings(), event_loop=self.bot.loop) @property def mod_log(self) -> ModLog: @@ -109,17 +111,19 @@ class Defcon(Cog): if self.threshold: now = datetime.utcnow() - if now - member.created_at < relativedelta_to_timedelta(self.threshold): + if now - member.created_at.replace(tzinfo=None) < relativedelta_to_timedelta(self.threshold): log.info(f"Rejecting user {member}: Account is too new") message_sent = False try: await member.send(REJECTION_MESSAGE.format(user=member.mention)) - message_sent = True + except Forbidden: + log.debug(f"Cannot send DEFCON rejection DM to {member}: DMs disabled") except Exception: - log.exception(f"Unable to send rejection message to user: {member}") + # Broadly catch exceptions because DM isn't critical, but it's imperative to kick them. + log.exception(f"Error sending DEFCON rejection message to {member}") await member.kick(reason="DEFCON active, user is too new") self.bot.stats.incr("defcon.leaves") @@ -133,7 +137,7 @@ class Defcon(Cog): await self.mod_log.send_log_message( Icons.defcon_denied, Colours.soft_red, "Entry denied", - message, member.avatar_url_as(static_format="png") + message, member.display_avatar.url ) @group(name='defcon', aliases=('dc',), invoke_without_command=True) @@ -150,7 +154,7 @@ class Defcon(Cog): colour=Colour.blurple(), title="DEFCON Status", description=f""" **Threshold:** {humanize_delta(self.threshold) if self.threshold else "-"} - **Expires in:** {humanize_delta(relativedelta(self.expiry, datetime.utcnow())) if self.expiry else "-"} + **Expires:** {discord_timestamp(self.expiry, TimestampFormats.RELATIVE) if self.expiry else "-"} **Verification level:** {ctx.guild.verification_level.name} """ ) @@ -172,7 +176,7 @@ class Defcon(Cog): """ if isinstance(threshold, int): threshold = relativedelta(days=threshold) - await self._update_threshold(ctx.author, threshold=threshold, expiry=expiry) + await self._update_threshold(ctx.author, ctx.channel, threshold, expiry) @defcon_group.command() @has_any_role(Roles.admins) @@ -181,7 +185,12 @@ class Defcon(Cog): role = ctx.guild.default_role permissions = role.permissions - permissions.update(send_messages=False, add_reactions=False, connect=False) + permissions.update( + send_messages=False, + add_reactions=False, + send_messages_in_threads=False, + connect=False + ) await role.edit(reason="DEFCON shutdown", permissions=permissions) await ctx.send(f"{Action.SERVER_SHUTDOWN.value.emoji} Server shut down.") @@ -192,7 +201,12 @@ class Defcon(Cog): role = ctx.guild.default_role permissions = role.permissions - permissions.update(send_messages=True, add_reactions=True, connect=True) + permissions.update( + send_messages=True, + add_reactions=True, + send_messages_in_threads=True, + connect=True + ) await role.edit(reason="DEFCON unshutdown", permissions=permissions) await ctx.send(f"{Action.SERVER_OPEN.value.emoji} Server reopened.") @@ -201,10 +215,16 @@ class Defcon(Cog): new_topic = f"{BASE_CHANNEL_TOPIC}\n(Threshold: {humanize_delta(self.threshold) if self.threshold else '-'})" self.mod_log.ignore(Event.guild_channel_update, Channels.defcon) - asyncio.create_task(self.channel.edit(topic=new_topic)) + scheduling.create_task(self.channel.edit(topic=new_topic)) @defcon_settings.atomic_transaction - async def _update_threshold(self, author: User, threshold: relativedelta, expiry: Optional[Expiry] = None) -> None: + async 
def _update_threshold( + self, + author: User, + channel: TextChannel, + threshold: relativedelta, + expiry: Optional[Expiry] = None + ) -> None: """Update the new threshold in the cog, cache, defcon channel, and logs, and additionally schedule expiry.""" self.threshold = threshold if threshold == relativedelta(days=0): # If the threshold is 0, we don't need to schedule anything @@ -244,9 +264,13 @@ class Defcon(Cog): else: channel_message = "removed" - await self.channel.send( - f"{action.value.emoji} DEFCON threshold {channel_message}{error}." - ) + message = f"{action.value.emoji} DEFCON threshold {channel_message}{error}." + await self.channel.send(message) + + # If invoked outside of #defcon send to `ctx.channel` too + if channel != self.channel: + await channel.send(message) + await self._send_defcon_log(action, author) self._update_channel_topic() @@ -254,7 +278,7 @@ class Defcon(Cog): async def _remove_threshold(self) -> None: """Resets the threshold back to 0.""" - await self._update_threshold(self.bot.user, relativedelta(days=0)) + await self._update_threshold(self.bot.user, self.channel, relativedelta(days=0)) @staticmethod def _stringify_relativedelta(delta: relativedelta) -> str: diff --git a/bot/exts/moderation/dm_relay.py b/bot/exts/moderation/dm_relay.py index 1d2206e27..566422e29 100644 --- a/bot/exts/moderation/dm_relay.py +++ b/bot/exts/moderation/dm_relay.py @@ -1,13 +1,13 @@ -import logging - import discord from discord.ext.commands import Cog, Context, command, has_any_role from bot.bot import Bot from bot.constants import Emojis, MODERATION_ROLES +from bot.log import get_logger +from bot.utils.channel import is_mod_channel from bot.utils.services import send_to_paste_service -log = logging.getLogger(__name__) +log = get_logger(__name__) class DMRelay(Cog): @@ -63,8 +63,9 @@ class DMRelay(Cog): await ctx.send(paste_link) async def cog_check(self, ctx: Context) -> bool: - """Only allow moderators to invoke the commands in this cog.""" - return await has_any_role(*MODERATION_ROLES).predicate(ctx) + """Only allow moderators to invoke the commands in this cog in mod channels.""" + return (await has_any_role(*MODERATION_ROLES).predicate(ctx) + and is_mod_channel(ctx.channel)) def setup(bot: Bot) -> None: diff --git a/bot/exts/moderation/incidents.py b/bot/exts/moderation/incidents.py index 0e479d33f..e265e29d3 100644 --- a/bot/exts/moderation/incidents.py +++ b/bot/exts/moderation/incidents.py @@ -1,5 +1,4 @@ import asyncio -import logging import typing as t from datetime import datetime from enum import Enum @@ -9,9 +8,11 @@ from discord.ext.commands import Cog from bot.bot import Bot from bot.constants import Channels, Colours, Emojis, Guild, Webhooks +from bot.log import get_logger +from bot.utils import scheduling from bot.utils.messages import sub_clyde -log = logging.getLogger(__name__) +log = get_logger(__name__) # Amount of messages for `crawl_task` to process at most on start-up - limited to 50 # as in practice, there should never be this many messages, and if there are, @@ -93,7 +94,7 @@ async def make_embed(incident: discord.Message, outcome: Signal, actioned_by: di timestamp=datetime.utcnow(), colour=colour, ) - embed.set_footer(text=footer, icon_url=actioned_by.avatar_url) + embed.set_footer(text=footer, icon_url=actioned_by.display_avatar.url) if incident.attachments: attachment = incident.attachments[0] # User-sent messages can only contain one attachment @@ -104,7 +105,7 @@ async def make_embed(incident: discord.Message, outcome: Signal, actioned_by: di 
else: embed.set_author(name="[Failed to relay attachment]", url=attachment.proxy_url) # Embed links the file else: - file = None + file = discord.utils.MISSING return embed, file @@ -143,7 +144,14 @@ async def add_signals(incident: discord.Message) -> None: log.trace(f"Skipping emoji as it's already been placed: {signal_emoji}") else: log.trace(f"Adding reaction: {signal_emoji}") - await incident.add_reaction(signal_emoji.value) + try: + await incident.add_reaction(signal_emoji.value) + except discord.NotFound as e: + if e.code != 10008: + raise + + log.trace(f"Couldn't react with signal because message {incident.id} was deleted; skipping incident") + return class Incidents(Cog): @@ -183,7 +191,7 @@ class Incidents(Cog): self.bot = bot self.event_lock = asyncio.Lock() - self.crawl_task = self.bot.loop.create_task(self.crawl_incidents()) + self.crawl_task = scheduling.create_task(self.crawl_incidents(), event_loop=self.bot.loop) async def crawl_incidents(self) -> None: """ @@ -245,7 +253,7 @@ class Incidents(Cog): await webhook.send( embed=embed, username=sub_clyde(incident.author.name), - avatar_url=incident.author.avatar_url, + avatar_url=incident.author.display_avatar.url, file=attachment_file, ) except Exception: @@ -268,7 +276,7 @@ class Incidents(Cog): return payload.message_id == incident.id coroutine = self.bot.wait_for(event="raw_message_delete", check=check, timeout=timeout) - return self.bot.loop.create_task(coroutine) + return scheduling.create_task(coroutine, event_loop=self.bot.loop) async def process_event(self, reaction: str, incident: discord.Message, member: discord.Member) -> None: """ @@ -288,14 +296,20 @@ class Incidents(Cog): members_roles: t.Set[int] = {role.id for role in member.roles} if not members_roles & ALLOWED_ROLES: # Intersection is truthy on at least 1 common element log.debug(f"Removing invalid reaction: user {member} is not permitted to send signals") - await incident.remove_reaction(reaction, member) + try: + await incident.remove_reaction(reaction, member) + except discord.NotFound: + log.trace("Couldn't remove reaction because the reaction or its message was deleted") return try: signal = Signal(reaction) except ValueError: log.debug(f"Removing invalid reaction: emoji {reaction} is not a valid signal") - await incident.remove_reaction(reaction, member) + try: + await incident.remove_reaction(reaction, member) + except discord.NotFound: + log.trace("Couldn't remove reaction because the reaction or its message was deleted") return log.trace(f"Received signal: {signal}") @@ -313,7 +327,10 @@ class Incidents(Cog): confirmation_task = self.make_confirmation_task(incident, timeout) log.trace("Deleting original message") - await incident.delete() + try: + await incident.delete() + except discord.NotFound: + log.trace("Couldn't delete message because it was already deleted") log.trace(f"Awaiting deletion confirmation: {timeout=} seconds") try: diff --git a/bot/exts/moderation/infraction/_scheduler.py b/bot/exts/moderation/infraction/_scheduler.py index 8286d3635..d4e96b10b 100644 --- a/bot/exts/moderation/infraction/_scheduler.py +++ b/bot/exts/moderation/infraction/_scheduler.py @@ -1,4 +1,3 @@ -import logging import textwrap import typing as t from abc import abstractmethod @@ -13,13 +12,14 @@ from bot import constants from bot.api import ResponseCodeError from bot.bot import Bot from bot.constants import Colours +from bot.converters import MemberOrUser from bot.exts.moderation.infraction import _utils -from bot.exts.moderation.infraction._utils import 
UserSnowflake from bot.exts.moderation.modlog import ModLog +from bot.log import get_logger from bot.utils import messages, scheduling, time from bot.utils.channel import is_mod_channel -log = logging.getLogger(__name__) +log = get_logger(__name__) class InfractionScheduler: @@ -29,7 +29,7 @@ class InfractionScheduler: self.bot = bot self.scheduler = scheduling.Scheduler(self.__class__.__name__) - self.bot.loop.create_task(self.reschedule_infractions(supported_infractions)) + scheduling.create_task(self.reschedule_infractions(supported_infractions), event_loop=self.bot.loop) def cog_unload(self) -> None: """Cancel scheduled tasks.""" @@ -81,12 +81,16 @@ class InfractionScheduler: apply_coro: t.Optional[t.Awaitable] ) -> None: """Reapply an infraction if it's still active or deactivate it if less than 60 sec left.""" - # Calculate the time remaining, in seconds, for the mute. - expiry = dateutil.parser.isoparse(infraction["expires_at"]).replace(tzinfo=None) - delta = (expiry - datetime.utcnow()).total_seconds() + if infraction["expires_at"] is not None: + # Calculate the time remaining, in seconds, for the mute. + expiry = dateutil.parser.isoparse(infraction["expires_at"]).replace(tzinfo=None) + delta = (expiry - datetime.utcnow()).total_seconds() + else: + # If the infraction is permanent, it is not possible to get the time remaining. + delta = None - # Mark as inactive if less than a minute remains. - if delta < 60: + # Mark as inactive if the infraction is not permanent and less than a minute remains. + if delta is not None and delta < 60: log.info( "Infraction will be deactivated instead of re-applied " "because less than 1 minute remains." @@ -115,7 +119,7 @@ class InfractionScheduler: self, ctx: Context, infraction: _utils.Infraction, - user: UserSnowflake, + user: MemberOrUser, action_coro: t.Optional[t.Awaitable] = None, user_reason: t.Optional[str] = None, additional_info: str = "", @@ -161,21 +165,14 @@ class InfractionScheduler: # send DMs to user that it doesn't share a guild with. If we were to # apply kick/ban infractions first, this would mean that we'd make it # impossible for us to deliver a DM. See python-discord/bot#982. - if not infraction["hidden"]: + if not infraction["hidden"] and infr_type in {"ban", "kick"}: dm_result = f"{constants.Emojis.failmail} " dm_log_text = "\nDM: **Failed**" - # Sometimes user is a discord.Object; make it a proper user. - try: - if not isinstance(user, (discord.Member, discord.User)): - user = await self.bot.fetch_user(user.id) - except discord.HTTPException as e: - log.error(f"Failed to DM {user.id}: could not fetch user (status {e.status})") - else: - # Accordingly display whether the user was successfully notified via DM. - if await _utils.notify_infraction(user, infr_type.replace("_", " ").title(), expiry, user_reason, icon): - dm_result = ":incoming_envelope: " - dm_log_text = "\nDM: Sent" + # Accordingly update whether the user was successfully notified via DM. 
+ if await _utils.notify_infraction(user, infr_type.replace("_", " ").title(), expiry, user_reason, icon): + dm_result = ":incoming_envelope: " + dm_log_text = "\nDM: Sent" end_msg = "" if infraction["actor"] == self.bot.user.id: @@ -235,6 +232,16 @@ class InfractionScheduler: else: infr_message = f" **{purge}{' '.join(infr_type.split('_'))}** to {user.mention}{expiry_msg}{end_msg}" + # If we need to DM and haven't already tried to + if not infraction["hidden"] and infr_type not in {"ban", "kick"}: + dm_result = f"{constants.Emojis.failmail} " + dm_log_text = "\nDM: **Failed**" + + # Accordingly update whether the user was successfully notified via DM. + if await _utils.notify_infraction(user, infr_type.replace("_", " ").title(), expiry, user_reason, icon): + dm_result = ":incoming_envelope: " + dm_log_text = "\nDM: Sent" + # Send a confirmation message to the invoking context. log.trace(f"Sending infraction #{id_} confirmation message.") await ctx.send(f"{dm_result}{confirm_msg}{infr_message}.") @@ -246,7 +253,7 @@ class InfractionScheduler: icon_url=icon, colour=Colours.soft_red, title=f"Infraction {log_title}: {' '.join(infr_type.split('_'))}", - thumbnail=user.avatar_url_as(static_format="png"), + thumbnail=user.display_avatar.url, text=textwrap.dedent(f""" Member: {messages.format_user(user)} Actor: {ctx.author.mention}{dm_log_text}{expiry_log_text} @@ -264,14 +271,18 @@ class InfractionScheduler: self, ctx: Context, infr_type: str, - user: UserSnowflake, - send_msg: bool = True + user: MemberOrUser, + *, + send_msg: bool = True, + notify: bool = True ) -> None: """ Prematurely end an infraction for a user and log the action in the mod log. If `send_msg` is True, then a pardoning confirmation message will be sent to - the context channel. Otherwise, no such message will be sent. + the context channel. Otherwise, no such message will be sent. + + If `notify` is True, notify the user of the pardon via DM where applicable. """ log.trace(f"Pardoning {infr_type} infraction for {user}.") @@ -292,7 +303,7 @@ class InfractionScheduler: return # Deactivate the infraction and cancel its scheduled expiration task. - log_text = await self.deactivate_infraction(response[0], send_log=False) + log_text = await self.deactivate_infraction(response[0], send_log=False, notify=notify) log_text["Member"] = messages.format_user(user) log_text["Actor"] = ctx.author.mention @@ -336,7 +347,7 @@ class InfractionScheduler: icon_url=_utils.INFRACTION_ICONS[infr_type][1], colour=Colours.soft_green, title=f"Infraction {log_title}: {' '.join(infr_type.split('_'))}", - thumbnail=user.avatar_url_as(static_format="png"), + thumbnail=user.display_avatar.url, text="\n".join(f"{k}: {v}" for k, v in log_text.items()), footer=footer, content=log_content, @@ -345,7 +356,9 @@ class InfractionScheduler: async def deactivate_infraction( self, infraction: _utils.Infraction, - send_log: bool = True + *, + send_log: bool = True, + notify: bool = True ) -> t.Dict[str, str]: """ Deactivate an active infraction and return a dictionary of lines to send in a mod log. @@ -354,6 +367,8 @@ class InfractionScheduler: expiration task cancelled. If `send_log` is True, a mod log is sent for the deactivation of the infraction. + If `notify` is True, notify the user of the pardon via DM where applicable. + Infractions of unsupported types will raise a ValueError. 
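# [Editor's illustration, not part of the diff] The rescheduling change above now treats a null
# "expires_at" as a permanent infraction. Condensed sketch of that remaining-time computation,
# mirroring the diff (naive UTC datetimes, as in the surrounding code):
import typing as t
from datetime import datetime

import dateutil.parser

def seconds_remaining(expires_at: t.Optional[str]) -> t.Optional[float]:
    if expires_at is None:
        return None  # permanent: never auto-deactivate
    expiry = dateutil.parser.isoparse(expires_at).replace(tzinfo=None)
    return (expiry - datetime.utcnow()).total_seconds()

# The scheduler only deactivates early when this is not None and under 60 seconds.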
""" guild = self.bot.get_guild(constants.Guild.id) @@ -380,7 +395,7 @@ class InfractionScheduler: try: log.trace("Awaiting the pardon action coroutine.") - returned_log = await self._pardon_action(infraction) + returned_log = await self._pardon_action(infraction, notify) if returned_log is not None: log_text = {**log_text, **returned_log} # Merge the logs together @@ -449,7 +464,7 @@ class InfractionScheduler: log_title = "expiration failed" if "Failure" in log_text else "expired" user = self.bot.get_user(user_id) - avatar = user.avatar_url_as(static_format="png") if user else None + avatar = user.display_avatar.url if user else None # Move reason to end so when reason is too long, this is not gonna cut out required items. log_text["Reason"] = log_text.pop("Reason") @@ -468,10 +483,15 @@ class InfractionScheduler: return log_text @abstractmethod - async def _pardon_action(self, infraction: _utils.Infraction) -> t.Optional[t.Dict[str, str]]: + async def _pardon_action( + self, + infraction: _utils.Infraction, + notify: bool + ) -> t.Optional[t.Dict[str, str]]: """ Execute deactivation steps specific to the infraction's type and return a log dict. + If `notify` is True, notify the user of the pardon via DM where applicable. If an infraction type is unsupported, return None instead. """ raise NotImplementedError diff --git a/bot/exts/moderation/infraction/_utils.py b/bot/exts/moderation/infraction/_utils.py index e4eb7f79c..c0ef80e3d 100644 --- a/bot/exts/moderation/infraction/_utils.py +++ b/bot/exts/moderation/infraction/_utils.py @@ -1,4 +1,3 @@ -import logging import typing as t from datetime import datetime @@ -7,9 +6,11 @@ from discord.ext.commands import Context from bot.api import ResponseCodeError from bot.constants import Colours, Icons -from bot.errors import InvalidInfractedUser +from bot.converters import MemberOrUser +from bot.errors import InvalidInfractedUserError +from bot.log import get_logger -log = logging.getLogger(__name__) +log = get_logger(__name__) # apply icon, pardon icon INFRACTION_ICONS = { @@ -24,20 +25,20 @@ INFRACTION_ICONS = { RULES_URL = "https://pythondiscord.com/pages/rules" # Type aliases -UserObject = t.Union[discord.Member, discord.User] -UserSnowflake = t.Union[UserObject, discord.Object] Infraction = t.Dict[str, t.Union[str, int, bool]] -APPEAL_EMAIL = "[email protected]" +APPEAL_SERVER_INVITE = "https://discord.gg/WXrCJxWBnm" INFRACTION_TITLE = "Please review our rules" -INFRACTION_APPEAL_EMAIL_FOOTER = f"To appeal this infraction, send an e-mail to {APPEAL_EMAIL}" +INFRACTION_APPEAL_SERVER_FOOTER = f"\n\nTo appeal this infraction, join our [appeals server]({APPEAL_SERVER_INVITE})." INFRACTION_APPEAL_MODMAIL_FOOTER = ( - 'If you would like to discuss or appeal this infraction, ' - 'send a message to the ModMail bot' + '\n\nIf you would like to discuss or appeal this infraction, ' + 'send a message to the ModMail bot.' ) INFRACTION_AUTHOR_NAME = "Infraction information" +LONGEST_EXTRAS = max(len(INFRACTION_APPEAL_SERVER_FOOTER), len(INFRACTION_APPEAL_MODMAIL_FOOTER)) + INFRACTION_DESCRIPTION_TEMPLATE = ( "**Type:** {type}\n" "**Expires:** {expires}\n" @@ -45,7 +46,7 @@ INFRACTION_DESCRIPTION_TEMPLATE = ( ) -async def post_user(ctx: Context, user: UserSnowflake) -> t.Optional[dict]: +async def post_user(ctx: Context, user: MemberOrUser) -> t.Optional[dict]: """ Create a new user in the database. 
@@ -53,14 +54,11 @@ async def post_user(ctx: Context, user: UserSnowflake) -> t.Optional[dict]: """ log.trace(f"Attempting to add user {user.id} to the database.") - if not isinstance(user, (discord.Member, discord.User)): - log.debug("The user being added to the DB is not a Member or User object.") - payload = { - 'discriminator': int(getattr(user, 'discriminator', 0)), + 'discriminator': int(user.discriminator), 'id': user.id, 'in_guild': False, - 'name': getattr(user, 'name', 'Name unknown'), + 'name': user.name, 'roles': [] } @@ -75,7 +73,7 @@ async def post_user(ctx: Context, user: UserSnowflake) -> t.Optional[dict]: async def post_infraction( ctx: Context, - user: UserSnowflake, + user: MemberOrUser, infr_type: str, reason: str, expires_at: datetime = None, @@ -85,7 +83,7 @@ async def post_infraction( """Posts an infraction to the API.""" if isinstance(user, (discord.Member, discord.User)) and user.bot: log.trace(f"Posting of {infr_type} infraction for {user} to the API aborted. User is a bot.") - raise InvalidInfractedUser(user) + raise InvalidInfractedUserError(user) log.trace(f"Posting {infr_type} infraction for {user} to the API.") @@ -118,7 +116,7 @@ async def post_infraction( async def get_active_infraction( ctx: Context, - user: UserSnowflake, + user: MemberOrUser, infr_type: str, send_msg: bool = True ) -> t.Optional[dict]: @@ -143,17 +141,22 @@ async def get_active_infraction( # Checks to see if the moderator should be told there is an active infraction if send_msg: log.trace(f"{user} has active infractions of type {infr_type}.") - await ctx.send( - f":x: According to my records, this user already has a {infr_type} infraction. " - f"See infraction **#{active_infractions[0]['id']}**." - ) + await send_active_infraction_message(ctx, active_infractions[0]) return active_infractions[0] else: log.trace(f"{user} does not have active infractions of type {infr_type}.") +async def send_active_infraction_message(ctx: Context, infraction: Infraction) -> None: + """Send a message stating that the given infraction is active.""" + await ctx.send( + f":x: According to my records, this user already has a {infraction['type']} infraction. " + f"See infraction **#{infraction['id']}**." + ) + + async def notify_infraction( - user: UserObject, + user: MemberOrUser, infr_type: str, expires_at: t.Optional[str] = None, reason: t.Optional[str] = None, @@ -164,13 +167,15 @@ async def notify_infraction( text = INFRACTION_DESCRIPTION_TEMPLATE.format( type=infr_type.title(), - expires=f"{expires_at} UTC" if expires_at else "N/A", + expires=expires_at or "N/A", reason=reason or "No reason provided." ) # For case when other fields than reason is too long and this reach limit, then force-shorten string - if len(text) > 2048: - text = f"{text[:2045]}..." + if len(text) > 4096 - LONGEST_EXTRAS: + text = f"{text[:4093-LONGEST_EXTRAS]}..." 
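
The length check just above reserves room for whichever appeal footer gets appended on the next added lines: 4096 is Discord's embed-description limit and LONGEST_EXTRAS is the longer of the two footers, so the combined string can never overflow. A standalone restatement of that arithmetic (illustrative only, not lines from the commit):

    def truncate_for_footer(text: str, footer: str, longest_extras: int) -> str:
        """Trim `text` so appending any appeal footer stays within Discord's 4096-char description limit."""
        budget = 4096 - longest_extras
        if len(text) > budget:
            text = text[:budget - 3] + "..."  # the ellipsis brings len(text) back up to exactly `budget`
        return text + footer                  # therefore never longer than 4096
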
+ + text += INFRACTION_APPEAL_SERVER_FOOTER if infr_type.lower() == 'ban' else INFRACTION_APPEAL_MODMAIL_FOOTER embed = discord.Embed( description=text, @@ -181,15 +186,11 @@ async def notify_infraction( embed.title = INFRACTION_TITLE embed.url = RULES_URL - embed.set_footer( - text=INFRACTION_APPEAL_EMAIL_FOOTER if infr_type == 'Ban' else INFRACTION_APPEAL_MODMAIL_FOOTER - ) - return await send_private_embed(user, embed) async def notify_pardon( - user: UserObject, + user: MemberOrUser, title: str, content: str, icon_url: str = Icons.user_verified @@ -207,7 +208,7 @@ async def notify_pardon( return await send_private_embed(user, embed) -async def send_private_embed(user: UserObject, embed: discord.Embed) -> bool: +async def send_private_embed(user: MemberOrUser, embed: discord.Embed) -> bool: """ A helper method for sending an embed to a user's DMs. diff --git a/bot/exts/moderation/infraction/infractions.py b/bot/exts/moderation/infraction/infractions.py index f19323c7c..e495a94b3 100644 --- a/bot/exts/moderation/infraction/infractions.py +++ b/bot/exts/moderation/infraction/infractions.py @@ -1,4 +1,3 @@ -import logging import textwrap import typing as t @@ -10,14 +9,15 @@ from discord.ext.commands import Context, command from bot import constants from bot.bot import Bot from bot.constants import Event -from bot.converters import Duration, Expiry, FetchedMember +from bot.converters import Duration, Expiry, MemberOrUser, UnambiguousMemberOrUser from bot.decorators import respect_role_hierarchy from bot.exts.moderation.infraction import _utils from bot.exts.moderation.infraction._scheduler import InfractionScheduler -from bot.exts.moderation.infraction._utils import UserSnowflake +from bot.log import get_logger +from bot.utils.members import get_or_fetch_member from bot.utils.messages import format_user -log = logging.getLogger(__name__) +log = get_logger(__name__) class Infractions(InfractionScheduler, commands.Cog): @@ -54,7 +54,7 @@ class Infractions(InfractionScheduler, commands.Cog): # region: Permanent infractions @command() - async def warn(self, ctx: Context, user: FetchedMember, *, reason: t.Optional[str] = None) -> None: + async def warn(self, ctx: Context, user: UnambiguousMemberOrUser, *, reason: t.Optional[str] = None) -> None: """Warn a user for the given reason.""" if not isinstance(user, Member): await ctx.send(":x: The user doesn't appear to be on the server.") @@ -67,7 +67,7 @@ class Infractions(InfractionScheduler, commands.Cog): await self.apply_infraction(ctx, infraction, user) @command() - async def kick(self, ctx: Context, user: FetchedMember, *, reason: t.Optional[str] = None) -> None: + async def kick(self, ctx: Context, user: UnambiguousMemberOrUser, *, reason: t.Optional[str] = None) -> None: """Kick a user for the given reason.""" if not isinstance(user, Member): await ctx.send(":x: The user doesn't appear to be on the server.") @@ -79,7 +79,7 @@ class Infractions(InfractionScheduler, commands.Cog): async def ban( self, ctx: Context, - user: FetchedMember, + user: UnambiguousMemberOrUser, duration: t.Optional[Expiry] = None, *, reason: t.Optional[str] = None @@ -95,7 +95,7 @@ class Infractions(InfractionScheduler, commands.Cog): async def purgeban( self, ctx: Context, - user: FetchedMember, + user: UnambiguousMemberOrUser, duration: t.Optional[Expiry] = None, *, reason: t.Optional[str] = None @@ -111,7 +111,7 @@ class Infractions(InfractionScheduler, commands.Cog): async def voiceban( self, ctx: Context, - user: FetchedMember, + user: UnambiguousMemberOrUser, 
duration: t.Optional[Expiry] = None, *, reason: t.Optional[str] @@ -129,7 +129,7 @@ class Infractions(InfractionScheduler, commands.Cog): @command(aliases=["mute"]) async def tempmute( self, ctx: Context, - user: FetchedMember, + user: UnambiguousMemberOrUser, duration: t.Optional[Expiry] = None, *, reason: t.Optional[str] = None @@ -163,7 +163,7 @@ class Infractions(InfractionScheduler, commands.Cog): async def tempban( self, ctx: Context, - user: FetchedMember, + user: UnambiguousMemberOrUser, duration: Expiry, *, reason: t.Optional[str] = None @@ -189,7 +189,7 @@ class Infractions(InfractionScheduler, commands.Cog): async def tempvoiceban( self, ctx: Context, - user: FetchedMember, + user: UnambiguousMemberOrUser, duration: Expiry, *, reason: t.Optional[str] @@ -215,7 +215,7 @@ class Infractions(InfractionScheduler, commands.Cog): # region: Permanent shadow infractions @command(hidden=True) - async def note(self, ctx: Context, user: FetchedMember, *, reason: t.Optional[str] = None) -> None: + async def note(self, ctx: Context, user: UnambiguousMemberOrUser, *, reason: t.Optional[str] = None) -> None: """Create a private note for a user with the given reason without notifying the user.""" infraction = await _utils.post_infraction(ctx, user, "note", reason, hidden=True, active=False) if infraction is None: @@ -224,7 +224,7 @@ class Infractions(InfractionScheduler, commands.Cog): await self.apply_infraction(ctx, infraction, user) @command(hidden=True, aliases=['shadowban', 'sban']) - async def shadow_ban(self, ctx: Context, user: FetchedMember, *, reason: t.Optional[str] = None) -> None: + async def shadow_ban(self, ctx: Context, user: UnambiguousMemberOrUser, *, reason: t.Optional[str] = None) -> None: """Permanently ban a user for the given reason without notifying the user.""" await self.apply_ban(ctx, user, reason, hidden=True) @@ -235,7 +235,7 @@ class Infractions(InfractionScheduler, commands.Cog): async def shadow_tempban( self, ctx: Context, - user: FetchedMember, + user: UnambiguousMemberOrUser, duration: Expiry, *, reason: t.Optional[str] = None @@ -261,17 +261,17 @@ class Infractions(InfractionScheduler, commands.Cog): # region: Remove infractions (un- commands) @command() - async def unmute(self, ctx: Context, user: FetchedMember) -> None: + async def unmute(self, ctx: Context, user: UnambiguousMemberOrUser) -> None: """Prematurely end the active mute infraction for the user.""" await self.pardon_infraction(ctx, "mute", user) @command() - async def unban(self, ctx: Context, user: FetchedMember) -> None: + async def unban(self, ctx: Context, user: UnambiguousMemberOrUser) -> None: """Prematurely end the active ban infraction for the user.""" await self.pardon_infraction(ctx, "ban", user) @command(aliases=("uvban",)) - async def unvoiceban(self, ctx: Context, user: FetchedMember) -> None: + async def unvoiceban(self, ctx: Context, user: UnambiguousMemberOrUser) -> None: """Prematurely end the active voice ban infraction for the user.""" await self.pardon_infraction(ctx, "voice_ban", user) @@ -280,8 +280,19 @@ class Infractions(InfractionScheduler, commands.Cog): async def apply_mute(self, ctx: Context, user: Member, reason: t.Optional[str], **kwargs) -> None: """Apply a mute infraction with kwargs passed to `post_infraction`.""" - if await _utils.get_active_infraction(ctx, user, "mute"): - return + if active := await _utils.get_active_infraction(ctx, user, "mute", send_msg=False): + if active["actor"] != self.bot.user.id: + await _utils.send_active_infraction_message(ctx, active) 
+ return + + # Allow the current mute attempt to override an automatically triggered mute. + log_text = await self.deactivate_infraction(active, notify=False) + if "Failure" in log_text: + await ctx.send( + f":x: can't override infraction **mute** for {user.mention}: " + f"failed to deactivate. {log_text['Failure']}" + ) + return infraction = await _utils.post_infraction(ctx, user, "mute", reason, active=True, **kwargs) if infraction is None: @@ -304,6 +315,10 @@ class Infractions(InfractionScheduler, commands.Cog): @respect_role_hierarchy(member_arg=2) async def apply_kick(self, ctx: Context, user: Member, reason: t.Optional[str], **kwargs) -> None: """Apply a kick infraction with kwargs passed to `post_infraction`.""" + if user.top_role >= ctx.me.top_role: + await ctx.send(":x: I can't kick users above or equal to me in the role hierarchy.") + return + infraction = await _utils.post_infraction(ctx, user, "kick", reason, active=False, **kwargs) if infraction is None: return @@ -320,7 +335,7 @@ class Infractions(InfractionScheduler, commands.Cog): async def apply_ban( self, ctx: Context, - user: UserSnowflake, + user: MemberOrUser, reason: t.Optional[str], purge_days: t.Optional[int] = 0, **kwargs @@ -330,6 +345,10 @@ class Infractions(InfractionScheduler, commands.Cog): Will also remove the banned user from the Big Brother watch list if applicable. """ + if isinstance(user, Member) and user.top_role >= ctx.me.top_role: + await ctx.send(":x: I can't ban users above or equal to me in the role hierarchy.") + return + # In the case of a permanent ban, we don't need get_active_infractions to tell us if one is active is_temporary = kwargs.get("expires_at") is not None active_infraction = await _utils.get_active_infraction(ctx, user, "ban", is_temporary) @@ -345,7 +364,7 @@ class Infractions(InfractionScheduler, commands.Cog): return log.trace("Old tempban is being replaced by new permaban.") - await self.pardon_infraction(ctx, "ban", user, is_temporary) + await self.pardon_infraction(ctx, "ban", user, send_msg=is_temporary) infraction = await _utils.post_infraction(ctx, user, "ban", reason, active=True, **kwargs) if infraction is None: @@ -376,7 +395,7 @@ class Infractions(InfractionScheduler, commands.Cog): await bb_cog.apply_unwatch(ctx, user, bb_reason, send_message=False) @respect_role_hierarchy(member_arg=2) - async def apply_voice_ban(self, ctx: Context, user: UserSnowflake, reason: t.Optional[str], **kwargs) -> None: + async def apply_voice_ban(self, ctx: Context, user: MemberOrUser, reason: t.Optional[str], **kwargs) -> None: """Apply a voice ban infraction with kwargs passed to `post_infraction`.""" if await _utils.get_active_infraction(ctx, user, "voice_ban"): return @@ -403,9 +422,16 @@ class Infractions(InfractionScheduler, commands.Cog): # endregion # region: Base pardon functions - async def pardon_mute(self, user_id: int, guild: discord.Guild, reason: t.Optional[str]) -> t.Dict[str, str]: - """Remove a user's muted role, DM them a notification, and return a log dict.""" - user = guild.get_member(user_id) + async def pardon_mute( + self, + user_id: int, + guild: discord.Guild, + reason: t.Optional[str], + *, + notify: bool = True + ) -> t.Dict[str, str]: + """Remove a user's muted role, optionally DM them a notification, and return a log dict.""" + user = await get_or_fetch_member(guild, user_id) log_text = {} if user: @@ -413,16 +439,17 @@ class Infractions(InfractionScheduler, commands.Cog): self.mod_log.ignore(Event.member_update, user.id) await 
user.remove_roles(self._muted_role, reason=reason) - # DM the user about the expiration. - notified = await _utils.notify_pardon( - user=user, - title="You have been unmuted", - content="You may now send messages in the server.", - icon_url=_utils.INFRACTION_ICONS["mute"][1] - ) + if notify: + # DM the user about the expiration. + notified = await _utils.notify_pardon( + user=user, + title="You have been unmuted", + content="You may now send messages in the server.", + icon_url=_utils.INFRACTION_ICONS["mute"][1] + ) + log_text["DM"] = "Sent" if notified else "**Failed**" log_text["Member"] = format_user(user) - log_text["DM"] = "Sent" if notified else "**Failed**" else: log.info(f"Failed to unmute user {user_id}: user not found") log_text["Failure"] = "User was not found in the guild." @@ -444,31 +471,39 @@ class Infractions(InfractionScheduler, commands.Cog): return log_text - async def pardon_voice_ban(self, user_id: int, guild: discord.Guild, reason: t.Optional[str]) -> t.Dict[str, str]: - """Add Voice Verified role back to user, DM them a notification, and return a log dict.""" - user = guild.get_member(user_id) + async def pardon_voice_ban( + self, + user_id: int, + guild: discord.Guild, + *, + notify: bool = True + ) -> t.Dict[str, str]: + """Optionally DM the user a pardon notification and return a log dict.""" + user = await get_or_fetch_member(guild, user_id) log_text = {} if user: - # DM user about infraction expiration - notified = await _utils.notify_pardon( - user=user, - title="Voice ban ended", - content="You have been unbanned and can verify yourself again in the server.", - icon_url=_utils.INFRACTION_ICONS["voice_ban"][1] - ) + if notify: + # DM user about infraction expiration + notified = await _utils.notify_pardon( + user=user, + title="Voice ban ended", + content="You have been unbanned and can verify yourself again in the server.", + icon_url=_utils.INFRACTION_ICONS["voice_ban"][1] + ) + log_text["DM"] = "Sent" if notified else "**Failed**" log_text["Member"] = format_user(user) - log_text["DM"] = "Sent" if notified else "**Failed**" else: log_text["Info"] = "User was not found in the guild." return log_text - async def _pardon_action(self, infraction: _utils.Infraction) -> t.Optional[t.Dict[str, str]]: + async def _pardon_action(self, infraction: _utils.Infraction, notify: bool) -> t.Optional[t.Dict[str, str]]: """ Execute deactivation steps specific to the infraction's type and return a log dict. + If `notify` is True, notify the user of the pardon via DM where applicable. If an infraction type is unsupported, return None instead. """ guild = self.bot.get_guild(constants.Guild.id) @@ -476,11 +511,11 @@ class Infractions(InfractionScheduler, commands.Cog): reason = f"Infraction #{infraction['id']} expired or was pardoned." 
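
Both pardon handlers above now resolve the member with bot.utils.members.get_or_fetch_member instead of the cache-only guild.get_member, and only attempt the pardon DM when `notify` is set. The helper's body is not part of this diff; a plausible sketch of what such a cache-then-fetch helper does (an assumption, not the repository's code):

    import typing as t

    import discord


    async def get_or_fetch_member(guild: discord.Guild, member_id: int) -> t.Optional[discord.Member]:
        """Assumed shape of the helper: try the guild cache first, then fall back to a REST fetch."""
        if member := guild.get_member(member_id):        # cache hit
            return member
        try:
            return await guild.fetch_member(member_id)   # API call for members missing from the cache
        except discord.NotFound:
            return None                                   # the user is no longer in the guild
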
if infraction["type"] == "mute": - return await self.pardon_mute(user_id, guild, reason) + return await self.pardon_mute(user_id, guild, reason, notify=notify) elif infraction["type"] == "ban": return await self.pardon_ban(user_id, guild, reason) elif infraction["type"] == "voice_ban": - return await self.pardon_voice_ban(user_id, guild, reason) + return await self.pardon_voice_ban(user_id, guild, notify=notify) # endregion @@ -493,7 +528,7 @@ class Infractions(InfractionScheduler, commands.Cog): async def cog_command_error(self, ctx: Context, error: Exception) -> None: """Send a notification to the invoking context on a Union failure.""" if isinstance(error, commands.BadUnionArgument): - if discord.User in error.converters or discord.Member in error.converters: + if discord.User in error.converters or Member in error.converters: await ctx.send(str(error.errors[0])) error.handled = True diff --git a/bot/exts/moderation/infraction/management.py b/bot/exts/moderation/infraction/management.py index b3783cd60..b1c8b64dc 100644 --- a/bot/exts/moderation/infraction/management.py +++ b/bot/exts/moderation/infraction/management.py @@ -1,23 +1,28 @@ -import logging import textwrap import typing as t from datetime import datetime +import dateutil.parser import discord +from dateutil.relativedelta import relativedelta from discord.ext import commands from discord.ext.commands import Context from discord.utils import escape_markdown from bot import constants from bot.bot import Bot -from bot.converters import Expiry, Infraction, Snowflake, UserMention, allowed_strings, proxy_user +from bot.converters import Expiry, Infraction, MemberOrUser, Snowflake, UnambiguousUser, allowed_strings +from bot.errors import InvalidInfraction from bot.exts.moderation.infraction.infractions import Infractions from bot.exts.moderation.modlog import ModLog +from bot.log import get_logger from bot.pagination import LinePaginator from bot.utils import messages, time from bot.utils.channel import is_mod_channel +from bot.utils.members import get_or_fetch_member +from bot.utils.time import humanize_delta, until_expiration -log = logging.getLogger(__name__) +log = get_logger(__name__) class ModManagement(commands.Cog): @@ -41,9 +46,22 @@ class ModManagement(commands.Cog): # region: Edit infraction commands @commands.group(name='infraction', aliases=('infr', 'infractions', 'inf', 'i'), invoke_without_command=True) - async def infraction_group(self, ctx: Context) -> None: - """Infraction manipulation commands.""" - await ctx.send_help(ctx.command) + async def infraction_group(self, ctx: Context, infraction: Infraction = None) -> None: + """ + Infraction manipulation commands. + + If `infraction` is passed then this command fetches that infraction. The `Infraction` converter + supports 'l', 'last' and 'recent' to get the most recent infraction made by `ctx.author`. + """ + if infraction is None: + await ctx.send_help(ctx.command) + return + + embed = discord.Embed( + title=f"Infraction #{infraction['id']}", + colour=discord.Colour.orange() + ) + await self.send_infraction_list(ctx, embed, [infraction]) @infraction_group.command(name="append", aliases=("amend", "add", "a")) async def infraction_append( @@ -78,7 +96,7 @@ class ModManagement(commands.Cog): """ old_reason = infraction["reason"] - if old_reason is not None: + if old_reason is not None and reason is not None: add_period = not old_reason.endswith((".", "!", "?")) reason = old_reason + (". 
" if add_period else " ") + reason @@ -123,10 +141,11 @@ class ModManagement(commands.Cog): log_text = "" if duration is not None and not infraction['active']: - if reason is None: + if (infr_type := infraction['type']) in ('note', 'warning'): + await ctx.send(f":x: Cannot edit the expiration of a {infr_type}.") + else: await ctx.send(":x: Cannot edit the expiration of an expired infraction.") - return - confirm_messages.append("expiry unchanged (infraction already expired)") + return elif isinstance(duration, str): request_data['expires_at'] = None confirm_messages.append("marked as permanent") @@ -164,8 +183,8 @@ class ModManagement(commands.Cog): self.infractions_cog.schedule_expiration(new_infraction) log_text += f""" - Previous expiry: {infraction['expires_at'] or "Permanent"} - New expiry: {new_infraction['expires_at'] or "Permanent"} + Previous expiry: {until_expiration(infraction['expires_at']) or "Permanent"} + New expiry: {until_expiration(new_infraction['expires_at']) or "Permanent"} """.rstrip() changes = ' & '.join(confirm_messages) @@ -173,11 +192,11 @@ class ModManagement(commands.Cog): # Get information about the infraction's user user_id = new_infraction['user'] - user = ctx.guild.get_member(user_id) + user = await get_or_fetch_member(ctx.guild, user_id) if user: user_text = messages.format_user(user) - thumbnail = user.avatar_url_as(static_format="png") + thumbnail = user.display_avatar.url else: user_text = f"<@{user_id}>" thumbnail = None @@ -198,29 +217,34 @@ class ModManagement(commands.Cog): # region: Search infractions @infraction_group.group(name="search", aliases=('s',), invoke_without_command=True) - async def infraction_search_group(self, ctx: Context, query: t.Union[UserMention, Snowflake, str]) -> None: + async def infraction_search_group(self, ctx: Context, query: t.Union[UnambiguousUser, Snowflake, str]) -> None: """Searches for infractions in the database.""" if isinstance(query, int): await self.search_user(ctx, discord.Object(query)) - else: + elif isinstance(query, str): await self.search_reason(ctx, query) + else: + await self.search_user(ctx, query) - @infraction_search_group.command(name="user", aliases=("member", "id")) - async def search_user(self, ctx: Context, user: t.Union[discord.User, proxy_user]) -> None: + @infraction_search_group.command(name="user", aliases=("member", "userid")) + async def search_user(self, ctx: Context, user: t.Union[MemberOrUser, discord.Object]) -> None: """Search for infractions by member.""" infraction_list = await self.bot.api_client.get( 'bot/infractions/expanded', params={'user__id': str(user.id)} ) - user = self.bot.get_user(user.id) - if not user and infraction_list: - # Use the user data retrieved from the DB for the username. 
- user = infraction_list[0]["user"] - user = escape_markdown(user["name"]) + f"#{user['discriminator']:04}" + if isinstance(user, (discord.Member, discord.User)): + user_str = escape_markdown(str(user)) + else: + if infraction_list: + user = infraction_list[0]["user"] + user_str = escape_markdown(user["name"]) + f"#{user['discriminator']:04}" + else: + user_str = str(user.id) embed = discord.Embed( - title=f"Infractions for {user} ({len(infraction_list)} total)", + title=f"Infractions for {user_str} ({len(infraction_list)} total)", colour=discord.Colour.orange() ) await self.send_infraction_list(ctx, embed, infraction_list) @@ -288,10 +312,11 @@ class ModManagement(commands.Cog): remaining = "Inactive" if expires_at is None: - expires = "*Permanent*" + duration = "*Permanent*" else: - date_from = datetime.strptime(created, time.INFRACTION_FORMAT) - expires = time.format_infraction_with_duration(expires_at, date_from) + date_from = datetime.fromtimestamp(float(time.DISCORD_TIMESTAMP_REGEX.match(created).group(1))) + date_to = dateutil.parser.isoparse(expires_at).replace(tzinfo=None) + duration = humanize_delta(relativedelta(date_to, date_from)) lines = textwrap.dedent(f""" {"**===============**" if active else "==============="} @@ -300,8 +325,8 @@ class ModManagement(commands.Cog): Type: **{infraction["type"]}** Shadow: {infraction["hidden"]} Created: {created} - Expires: {expires} - Remaining: {remaining} + Expires: {remaining} + Duration: {duration} Actor: <@{infraction["actor"]["id"]}> ID: `{infraction["id"]}` Reason: {infraction["reason"] or "*None*"} @@ -322,13 +347,20 @@ class ModManagement(commands.Cog): return all(checks) # This cannot be static (must have a __func__ attribute). - async def cog_command_error(self, ctx: Context, error: Exception) -> None: - """Send a notification to the invoking context on a Union failure.""" + async def cog_command_error(self, ctx: Context, error: commands.CommandError) -> None: + """Handles errors for commands within this cog.""" if isinstance(error, commands.BadUnionArgument): if discord.User in error.converters: await ctx.send(str(error.errors[0])) error.handled = True + elif isinstance(error, InvalidInfraction): + if error.infraction_arg.isdigit(): + await ctx.send(f":x: Could not find an infraction with id `{error.infraction_arg}`.") + else: + await ctx.send(f":x: `{error.infraction_arg}` is not a valid integer infraction id.") + error.handled = True + def setup(bot: Bot) -> None: """Load the ModManagement cog.""" diff --git a/bot/exts/moderation/infraction/superstarify.py b/bot/exts/moderation/infraction/superstarify.py index 07e79b9fe..08c92b8f3 100644 --- a/bot/exts/moderation/infraction/superstarify.py +++ b/bot/exts/moderation/infraction/superstarify.py @@ -1,5 +1,4 @@ import json -import logging import random import textwrap import typing as t @@ -14,10 +13,12 @@ from bot.bot import Bot from bot.converters import Duration, Expiry from bot.exts.moderation.infraction import _utils from bot.exts.moderation.infraction._scheduler import InfractionScheduler +from bot.log import get_logger +from bot.utils.members import get_or_fetch_member from bot.utils.messages import format_user from bot.utils.time import format_infraction -log = logging.getLogger(__name__) +log = get_logger(__name__) NICKNAME_POLICY_URL = "https://pythondiscord.com/pages/rules/#nickname-policy" SUPERSTARIFY_DEFAULT_DURATION = "1h" @@ -132,6 +133,10 @@ class Superstarify(InfractionScheduler, Cog): An optional reason can be provided, which would be added to a message stating 
their old nickname and linking to the nickname policy. """ + if member.top_role >= ctx.me.top_role: + await ctx.send(":x: I can't starify users above or equal to me in the role hierarchy.") + return + if await _utils.get_active_infraction(ctx, member, "superstar"): return @@ -192,13 +197,13 @@ class Superstarify(InfractionScheduler, Cog): """Remove the superstarify infraction and allow the user to change their nickname.""" await self.pardon_infraction(ctx, "superstar", member) - async def _pardon_action(self, infraction: _utils.Infraction) -> t.Optional[t.Dict[str, str]]: - """Pardon a superstar infraction and return a log dict.""" + async def _pardon_action(self, infraction: _utils.Infraction, notify: bool) -> t.Optional[t.Dict[str, str]]: + """Pardon a superstar infraction, optionally notify the user via DM, and return a log dict.""" if infraction["type"] != "superstar": return guild = self.bot.get_guild(constants.Guild.id) - user = guild.get_member(infraction["user"]) + user = await get_or_fetch_member(guild, infraction["user"]) # Don't bother sending a notification if the user left the guild. if not user: @@ -208,18 +213,19 @@ class Superstarify(InfractionScheduler, Cog): ) return {} + log_text = {"Member": format_user(user)} + # DM the user about the expiration. - notified = await _utils.notify_pardon( - user=user, - title="You are no longer superstarified", - content="You may now change your nickname on the server.", - icon_url=_utils.INFRACTION_ICONS["superstar"][1] - ) + if notify: + notified = await _utils.notify_pardon( + user=user, + title="You are no longer superstarified", + content="You may now change your nickname on the server.", + icon_url=_utils.INFRACTION_ICONS["superstar"][1] + ) + log_text["DM"] = "Sent" if notified else "**Failed**" - return { - "Member": format_user(user), - "DM": "Sent" if notified else "**Failed**" - } + return log_text @staticmethod def get_nick(infraction_id: int, member_id: int) -> str: diff --git a/bot/exts/moderation/metabase.py b/bot/exts/moderation/metabase.py index db5f04d83..ce9c220b3 100644 --- a/bot/exts/moderation/metabase.py +++ b/bot/exts/moderation/metabase.py @@ -1,6 +1,5 @@ import csv import json -import logging from datetime import timedelta from io import StringIO from typing import Dict, List, Optional @@ -14,11 +13,12 @@ from discord.ext.commands import Cog, Context, group, has_any_role from bot.bot import Bot from bot.constants import Metabase as MetabaseConfig, Roles from bot.converters import allowed_strings -from bot.utils import send_to_paste_service +from bot.log import get_logger +from bot.utils import scheduling, send_to_paste_service from bot.utils.channel import is_mod_channel from bot.utils.scheduling import Scheduler -log = logging.getLogger(__name__) +log = get_logger(__name__) BASE_HEADERS = { "Content-Type": "application/json" @@ -40,7 +40,26 @@ class Metabase(Cog): self.exports: Dict[int, List[Dict]] = {} # Saves the output of each question, so internal eval can access it - self.init_task = self.bot.loop.create_task(self.init_cog()) + self.init_task = scheduling.create_task(self.init_cog(), event_loop=self.bot.loop) + + async def cog_command_error(self, ctx: Context, error: Exception) -> None: + """Handle ClientResponseError errors locally to invalidate token if needed.""" + if not isinstance(error.original, ClientResponseError): + return + + if error.original.status == 403: + # User doesn't have access to the given question + log.warning(f"Failed to auth with Metabase for {error.original.url}.") + await 
ctx.send(f":x: {ctx.author.mention} Failed to auth with Metabase for that question.") + elif error.original.status == 404: + await ctx.send(f":x: {ctx.author.mention} That question could not be found.") + else: + # User credentials are invalid, or the refresh failed. + # Delete the expiry time, to force a refresh on next startup. + await self.session_info.delete("session_expiry") + log.exception("Session token is invalid or refresh failed.") + await ctx.send(f":x: {ctx.author.mention} Session token is invalid or refresh failed.") + error.handled = True async def init_cog(self) -> None: """Initialise the metabase session.""" @@ -65,7 +84,7 @@ class Metabase(Cog): "username": MetabaseConfig.username, "password": MetabaseConfig.password } - async with self.bot.http_session.post(f"{MetabaseConfig.url}/session", json=data) as resp: + async with self.bot.http_session.post(f"{MetabaseConfig.base_url}/api/session", json=data) as resp: json_data = await resp.json() self.session_token = json_data.get("id") @@ -86,7 +105,7 @@ class Metabase(Cog): """A group of commands for interacting with metabase.""" await ctx.send_help(ctx.command) - @metabase_group.command(name="extract") + @metabase_group.command(name="extract", aliases=("export",)) async def metabase_extract( self, ctx: Context, @@ -106,48 +125,50 @@ class Metabase(Cog): Valid extensions are: csv and json. """ - async with ctx.typing(): - - # Make sure we have a session token before running anything - await self.init_task - - url = f"{MetabaseConfig.url}/card/{question_id}/query/{extension}" - try: - async with self.bot.http_session.post(url, headers=self.headers, raise_for_status=True) as resp: - if extension == "csv": - out = await resp.text() - # Save the output for use with int e - self.exports[question_id] = list(csv.DictReader(StringIO(out))) - - elif extension == "json": - out = await resp.json() - # Save the output for use with int e - self.exports[question_id] = out - - # Format it nicely for human eyes - out = json.dumps(out, indent=4, sort_keys=True) - except ClientResponseError as e: - if e.status == 403: - # User doesn't have access to the given question - log.warning(f"Failed to auth with Metabase for question {question_id}.") - await ctx.send(f":x: {ctx.author.mention} Failed to auth with Metabase for that question.") - else: - # User credentials are invalid, or the refresh failed. - # Delete the expiry time, to force a refresh on next startup. - await self.session_info.delete("session_expiry") - log.exception("Session token is invalid or refresh failed.") - await ctx.send(f":x: {ctx.author.mention} Session token is invalid or refresh failed.") - return - - paste_link = await send_to_paste_service(out, extension=extension) - if paste_link: - message = f":+1: {ctx.author.mention} Here's your link: {paste_link}" - else: - message = f":x: {ctx.author.mention} Link service is unavailible." 
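
The inline except ClientResponseError block being deleted here is what moved into the cog-wide cog_command_error handler added near the top of this file's diff, now with a separate 404 branch. A rough sketch of that pattern, assuming the original exception reaches the handler wrapped in commands.CommandInvokeError:

    from aiohttp import ClientResponseError
    from discord.ext import commands


    class MetabaseSketch(commands.Cog):
        async def cog_command_error(self, ctx: commands.Context, error: commands.CommandError) -> None:
            """Turn HTTP failures from any command in this cog into a single user-facing reply."""
            if not isinstance(error, commands.CommandInvokeError):
                return
            if not isinstance(error.original, ClientResponseError):
                return

            if error.original.status == 403:
                await ctx.send(":x: Failed to auth with Metabase for that question.")
            elif error.original.status == 404:
                await ctx.send(":x: That question could not be found.")
            else:
                await ctx.send(":x: Session token is invalid or refresh failed.")
            error.handled = True  # flag checked elsewhere, as in the hunks above
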
- await ctx.send( - f"{message}\nYou can also access this data within internal eval by doing: " - f"`bot.get_cog('Metabase').exports[{question_id}]`" - ) + await ctx.trigger_typing() + + # Make sure we have a session token before running anything + await self.init_task + + url = f"{MetabaseConfig.base_url}/api/card/{question_id}/query/{extension}" + + async with self.bot.http_session.post(url, headers=self.headers, raise_for_status=True) as resp: + if extension == "csv": + out = await resp.text(encoding="utf-8") + # Save the output for use with int e + self.exports[question_id] = list(csv.DictReader(StringIO(out))) + + elif extension == "json": + out = await resp.json(encoding="utf-8") + # Save the output for use with int e + self.exports[question_id] = out + + # Format it nicely for human eyes + out = json.dumps(out, indent=4, sort_keys=True) + + paste_link = await send_to_paste_service(out, extension=extension) + if paste_link: + message = f":+1: {ctx.author.mention} Here's your link: {paste_link}" + else: + message = f":x: {ctx.author.mention} Link service is unavailible." + await ctx.send( + f"{message}\nYou can also access this data within internal eval by doing: " + f"`bot.get_cog('Metabase').exports[{question_id}]`" + ) + + @metabase_group.command(name="publish", aliases=("share",)) + async def metabase_publish(self, ctx: Context, question_id: int) -> None: + """Publically shares the given question and posts the link.""" + await ctx.trigger_typing() + # Make sure we have a session token before running anything + await self.init_task + + url = f"{MetabaseConfig.base_url}/api/card/{question_id}/public_link" + + async with self.bot.http_session.post(url, headers=self.headers, raise_for_status=True) as resp: + response_json = await resp.json(encoding="utf-8") + sharing_url = f"{MetabaseConfig.public_url}/public/question/{response_json['uuid']}" + await ctx.send(f":+1: {ctx.author.mention} Here's your sharing link: {sharing_url}") # This cannot be static (must have a __func__ attribute). async def cog_check(self, ctx: Context) -> bool: diff --git a/bot/exts/moderation/modlog.py b/bot/exts/moderation/modlog.py index be65ade6e..b90480f0d 100644 --- a/bot/exts/moderation/modlog.py +++ b/bot/exts/moderation/modlog.py @@ -1,7 +1,6 @@ import asyncio import difflib import itertools -import logging import typing as t from datetime import datetime from itertools import zip_longest @@ -9,17 +8,18 @@ from itertools import zip_longest import discord from dateutil.relativedelta import relativedelta from deepdiff import DeepDiff -from discord import Colour +from discord import Colour, Message, Thread from discord.abc import GuildChannel from discord.ext.commands import Cog, Context from discord.utils import escape_markdown from bot.bot import Bot from bot.constants import Categories, Channels, Colours, Emojis, Event, Guild as GuildConstant, Icons, Roles, URLs +from bot.log import get_logger from bot.utils.messages import format_user from bot.utils.time import humanize_delta -log = logging.getLogger(__name__) +log = get_logger(__name__) GUILD_CHANNEL = t.Union[discord.CategoryChannel, discord.TextChannel, discord.VoiceChannel] @@ -99,7 +99,7 @@ class ModLog(Cog, name="ModLog"): """Generate log embed and send to logging channel.""" # Truncate string directly here to avoid removing newlines embed = discord.Embed( - description=text[:2045] + "..." if len(text) > 2048 else text + description=text[:4093] + "..." 
if len(text) > 4096 else text ) if title and icon_url: @@ -378,7 +378,7 @@ class ModLog(Cog, name="ModLog"): await self.send_log_message( Icons.guild_update, Colour.blurple(), "Guild updated", message, - thumbnail=after.icon_url_as(format="png") + thumbnail=after.icon.with_static_format("png") ) @Cog.listener() @@ -394,7 +394,7 @@ class ModLog(Cog, name="ModLog"): await self.send_log_message( Icons.user_ban, Colours.soft_red, "User banned", format_user(member), - thumbnail=member.avatar_url_as(static_format="png"), + thumbnail=member.display_avatar.url, channel_id=Channels.user_log ) @@ -405,7 +405,7 @@ class ModLog(Cog, name="ModLog"): return now = datetime.utcnow() - difference = abs(relativedelta(now, member.created_at)) + difference = abs(relativedelta(now, member.created_at.replace(tzinfo=None))) message = format_user(member) + "\n\n**Account age:** " + humanize_delta(difference) @@ -415,7 +415,7 @@ class ModLog(Cog, name="ModLog"): await self.send_log_message( Icons.sign_in, Colours.soft_green, "User joined", message, - thumbnail=member.avatar_url_as(static_format="png"), + thumbnail=member.display_avatar.url, channel_id=Channels.user_log ) @@ -432,7 +432,7 @@ class ModLog(Cog, name="ModLog"): await self.send_log_message( Icons.sign_out, Colours.soft_red, "User left", format_user(member), - thumbnail=member.avatar_url_as(static_format="png"), + thumbnail=member.display_avatar.url, channel_id=Channels.user_log ) @@ -449,7 +449,7 @@ class ModLog(Cog, name="ModLog"): await self.send_log_message( Icons.user_unban, Colour.blurple(), "User unbanned", format_user(member), - thumbnail=member.avatar_url_as(static_format="png"), + thumbnail=member.display_avatar.url, channel_id=Channels.mod_log ) @@ -515,21 +515,50 @@ class ModLog(Cog, name="ModLog"): colour=Colour.blurple(), title="Member updated", text=message, - thumbnail=after.avatar_url_as(static_format="png"), + thumbnail=after.display_avatar.url, channel_id=Channels.user_log ) + def is_message_blacklisted(self, message: Message) -> bool: + """Return true if the message is in a blacklisted thread or channel.""" + # Ignore bots or DMs + if message.author.bot or not message.guild: + return True + + return self.is_channel_ignored(message.channel.id) + + def is_channel_ignored(self, channel_id: int) -> bool: + """ + Return true if the channel, or parent channel in the case of threads, passed should be ignored by modlog. + + Currently ignored channels are: + 1. Channels not in the guild we care about (constants.Guild.id). + 2. Channels that mods do not have view permissions to + 3. Channels in constants.Guild.modlog_blacklist + """ + channel = self.bot.get_channel(channel_id) + + # Ignore not found channels, DMs, and messages outside of the main guild. + if not channel or not hasattr(channel, "guild") or channel.guild.id != GuildConstant.id: + return True + + # Look at the parent channel of a thread. + if isinstance(channel, Thread): + channel = channel.parent + + # Mod team doesn't have view permission to the channel. + if not channel.permissions_for(channel.guild.get_role(Roles.mod_team)).view_channel: + return True + + return channel.id in GuildConstant.modlog_blacklist + @Cog.listener() async def on_message_delete(self, message: discord.Message) -> None: """Log message delete event to message change log.""" channel = message.channel author = message.author - # Ignore DMs. 
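
A subtle piece of the member-join change earlier in this hunk group: discord.py 2.0 returns timezone-aware UTC datetimes for member.created_at, while datetime.utcnow() is naive, which is why .replace(tzinfo=None) was added before computing the account age. A small illustration with hypothetical values:

    from datetime import datetime, timezone

    from dateutil.relativedelta import relativedelta

    now = datetime.utcnow()                                  # naive
    created_at = datetime(2020, 1, 1, tzinfo=timezone.utc)   # aware, like member.created_at on discord.py 2.0

    # relativedelta(now, created_at) raises TypeError because naive and aware
    # datetimes cannot be compared; dropping the tzinfo makes both naive UTC.
    account_age = abs(relativedelta(now, created_at.replace(tzinfo=None)))
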
- if not message.guild: - return - - if message.guild.id != GuildConstant.id or channel.id in GuildConstant.modlog_blacklist: + if self.is_message_blacklisted(message): return self._cached_deletes.append(message.id) @@ -564,7 +593,7 @@ class ModLog(Cog, name="ModLog"): # Shorten the message content if necessary content = message.clean_content - remaining_chars = 2040 - len(response) + remaining_chars = 4090 - len(response) if len(content) > remaining_chars: botlog_url = await self.upload_log(messages=[message], actor_id=message.author.id) @@ -584,7 +613,7 @@ class ModLog(Cog, name="ModLog"): @Cog.listener() async def on_raw_message_delete(self, event: discord.RawMessageDeleteEvent) -> None: """Log raw message delete event to message change log.""" - if event.guild_id != GuildConstant.id or event.channel_id in GuildConstant.modlog_blacklist: + if self.is_channel_ignored(event.channel_id): return await asyncio.sleep(1) # Wait here in case the normal event was fired @@ -625,12 +654,7 @@ class ModLog(Cog, name="ModLog"): @Cog.listener() async def on_message_edit(self, msg_before: discord.Message, msg_after: discord.Message) -> None: """Log message edit event to message change log.""" - if ( - not msg_before.guild - or msg_before.guild.id != GuildConstant.id - or msg_before.channel.id in GuildConstant.modlog_blacklist - or msg_before.author.bot - ): + if self.is_message_blacklisted(msg_before): return self._cached_edits.append(msg_before.id) @@ -707,12 +731,7 @@ class ModLog(Cog, name="ModLog"): except discord.NotFound: # Was deleted before we got the event return - if ( - not message.guild - or message.guild.id != GuildConstant.id - or message.channel.id in GuildConstant.modlog_blacklist - or message.author.bot - ): + if self.is_message_blacklisted(message): return await asyncio.sleep(1) # Wait here in case the normal event was fired @@ -752,6 +771,64 @@ class ModLog(Cog, name="ModLog"): ) @Cog.listener() + async def on_thread_update(self, before: Thread, after: Thread) -> None: + """Log thread archiving, un-archiving and name edits.""" + if before.name != after.name: + await self.send_log_message( + Icons.hash_blurple, + Colour.blurple(), + "Thread name edited", + ( + f"Thread {after.mention} (`{after.id}`) from {after.parent.mention} (`{after.parent.id}`): " + f"`{before.name}` -> `{after.name}`" + ) + ) + return + + if not before.archived and after.archived: + colour = Colour.red() + action = "archived" + icon = Icons.hash_red + elif before.archived and not after.archived: + colour = Colour.green() + action = "un-archived" + icon = Icons.hash_green + else: + return + + await self.send_log_message( + icon, + colour, + f"Thread {action}", + f"Thread {after.mention} (`{after.id}`) from {after.parent.mention} (`{after.parent.id}`) was {action}" + ) + + @Cog.listener() + async def on_thread_delete(self, thread: Thread) -> None: + """Log thread deletion.""" + await self.send_log_message( + Icons.hash_red, + Colour.red(), + "Thread deleted", + f"Thread {thread.mention} (`{thread.id}`) from {thread.parent.mention} (`{thread.parent.id}`) deleted" + ) + + @Cog.listener() + async def on_thread_join(self, thread: Thread) -> None: + """Log thread creation.""" + # If we are in the thread already we can most probably assume we already logged it? 
+ # We don't really have a better way of doing this since the API doesn't make any difference between the two + if thread.me: + return + + await self.send_log_message( + Icons.hash_green, + Colour.green(), + "Thread created", + f"Thread {thread.mention} (`{thread.id}`) from {thread.parent.mention} (`{thread.parent.id}`) created" + ) + + @Cog.listener() async def on_voice_state_update( self, member: discord.Member, @@ -761,7 +838,8 @@ class ModLog(Cog, name="ModLog"): """Log member voice state changes to the voice log channel.""" if ( member.guild.id != GuildConstant.id - or (before.channel and before.channel.id in GuildConstant.modlog_blacklist) + or (before.channel and self.is_channel_ignored(before.channel.id)) + or (after.channel and self.is_channel_ignored(after.channel.id)) ): return @@ -820,7 +898,7 @@ class ModLog(Cog, name="ModLog"): colour=colour, title="Voice state updated", text=message, - thumbnail=member.avatar_url_as(static_format="png"), + thumbnail=member.display_avatar.url, channel_id=Channels.voice_log ) diff --git a/bot/exts/moderation/modpings.py b/bot/exts/moderation/modpings.py index 1f6b7984a..6cc46ad26 100644 --- a/bot/exts/moderation/modpings.py +++ b/bot/exts/moderation/modpings.py @@ -1,6 +1,5 @@ import asyncio import datetime -import logging from async_rediscache import RedisCache from dateutil.parser import isoparse, parse @@ -10,9 +9,11 @@ from discord.ext.commands import Cog, Context, group, has_any_role from bot.bot import Bot from bot.constants import Colours, Emojis, Guild, Icons, MODERATION_ROLES, Roles from bot.converters import Expiry -from bot.utils.scheduling import Scheduler, create_task +from bot.log import get_logger +from bot.utils import scheduling +from bot.utils.scheduling import Scheduler -log = logging.getLogger(__name__) +log = get_logger(__name__) MAXIMUM_WORK_LIMIT = 16 @@ -38,8 +39,15 @@ class ModPings(Cog): self.guild = None self.moderators_role = None - self.reschedule_task = self.bot.loop.create_task(self.reschedule_roles(), name="mod-pings-reschedule") - self.modpings_schedule_task = create_task(self.reschedule_modpings_schedule(), event_loop=self.bot.loop) + self.modpings_schedule_task = scheduling.create_task( + self.reschedule_modpings_schedule(), + event_loop=self.bot.loop + ) + self.reschedule_task = scheduling.create_task( + self.reschedule_roles(), + name="mod-pings-reschedule", + event_loop=self.bot.loop, + ) async def reschedule_roles(self) -> None: """Reschedule moderators role re-apply times.""" @@ -54,7 +62,7 @@ class ModPings(Cog): log.trace("Applying the moderators role to the mod team where necessary.") for mod in mod_team.members: if mod in pings_on: # Make sure that on-duty mods aren't in the cache. - if mod in pings_off: + if mod.id in pings_off: await self.pings_off_mods.delete(mod.id) continue @@ -116,6 +124,7 @@ class ModPings(Cog): """Reapply the moderator's role to the given moderator.""" log.trace(f"Re-applying role to mod with ID {mod.id}.") await mod.add_roles(self.moderators_role, reason="Pings off period expired.") + await self.pings_off_mods.delete(mod.id) @group(name='modpings', aliases=('modping',), invoke_without_command=True) @has_any_role(*MODERATION_ROLES) @@ -143,7 +152,6 @@ class ModPings(Cog): The duration cannot be longer than 30 days. 
""" - duration: datetime.datetime delta = duration - datetime.datetime.utcnow() if delta > datetime.timedelta(days=30): await ctx.send(":x: Cannot remove the role for longer than 30 days.") diff --git a/bot/exts/moderation/silence.py b/bot/exts/moderation/silence.py index 2a7ca932e..511520252 100644 --- a/bot/exts/moderation/silence.py +++ b/bot/exts/moderation/silence.py @@ -1,47 +1,68 @@ import json -import logging +import typing from contextlib import suppress from datetime import datetime, timedelta, timezone -from operator import attrgetter -from typing import Optional +from typing import Optional, OrderedDict, Union from async_rediscache import RedisCache -from discord import TextChannel +from discord import Guild, PermissionOverwrite, TextChannel, Thread, VoiceChannel from discord.ext import commands, tasks from discord.ext.commands import Context +from discord.utils import MISSING +from bot import constants from bot.bot import Bot -from bot.constants import Channels, Emojis, Guild, MODERATION_ROLES, Roles from bot.converters import HushDurationConverter -from bot.utils.lock import LockedResourceError, lock_arg +from bot.log import get_logger +from bot.utils import scheduling +from bot.utils.lock import LockedResourceError, lock, lock_arg from bot.utils.scheduling import Scheduler -log = logging.getLogger(__name__) +log = get_logger(__name__) LOCK_NAMESPACE = "silence" -MSG_SILENCE_FAIL = f"{Emojis.cross_mark} current channel is already silenced." -MSG_SILENCE_PERMANENT = f"{Emojis.check_mark} silenced current channel indefinitely." -MSG_SILENCE_SUCCESS = f"{Emojis.check_mark} silenced current channel for {{duration}} minute(s)." +MSG_SILENCE_FAIL = f"{constants.Emojis.cross_mark} {{channel}} is already silenced." +MSG_SILENCE_PERMANENT = f"{constants.Emojis.check_mark} silenced {{channel}} indefinitely." +MSG_SILENCE_SUCCESS = f"{constants.Emojis.check_mark} silenced {{{{channel}}}} for {{duration}} minute(s)." -MSG_UNSILENCE_FAIL = f"{Emojis.cross_mark} current channel was not silenced." +MSG_UNSILENCE_FAIL = f"{constants.Emojis.cross_mark} {{channel}} was not silenced." MSG_UNSILENCE_MANUAL = ( - f"{Emojis.cross_mark} current channel was not unsilenced because the current overwrites were " + f"{constants.Emojis.cross_mark} {{channel}} was not unsilenced because the current overwrites were " f"set manually or the cache was prematurely cleared. " f"Please edit the overwrites manually to unsilence." ) -MSG_UNSILENCE_SUCCESS = f"{Emojis.check_mark} unsilenced current channel." +MSG_UNSILENCE_SUCCESS = f"{constants.Emojis.check_mark} unsilenced {{channel}}." 
+ +TextOrVoiceChannel = Union[TextChannel, VoiceChannel] + +VOICE_CHANNELS = { + constants.Channels.code_help_voice_0: constants.Channels.code_help_chat_0, + constants.Channels.code_help_voice_1: constants.Channels.code_help_chat_1, + constants.Channels.general_voice_0: constants.Channels.voice_chat_0, + constants.Channels.general_voice_1: constants.Channels.voice_chat_1, + constants.Channels.staff_voice: constants.Channels.staff_voice_chat, +} class SilenceNotifier(tasks.Loop): """Loop notifier for posting notices to `alert_channel` containing added channels.""" def __init__(self, alert_channel: TextChannel): - super().__init__(self._notifier, seconds=1, minutes=0, hours=0, count=None, reconnect=True, loop=None) + super().__init__( + self._notifier, + seconds=1, + minutes=0, + hours=0, + count=None, + reconnect=True, + loop=None, + time=MISSING + ) self._silenced_channels = {} self._alert_channel = alert_channel - def add_channel(self, channel: TextChannel) -> None: + def add_channel(self, channel: TextOrVoiceChannel) -> None: """Add channel to `_silenced_channels` and start loop if not launched.""" if not self._silenced_channels: self.start() @@ -68,7 +89,15 @@ class SilenceNotifier(tasks.Loop): f"{channel.mention} for {(self._current_loop-start)//60} min" for channel, start in self._silenced_channels.items() ) - await self._alert_channel.send(f"<@&{Roles.moderators}> currently silenced channels: {channels_text}") + await self._alert_channel.send( + f"<@&{constants.Roles.moderators}> currently silenced channels: {channels_text}" + ) + + +async def _select_lock_channel(args: OrderedDict[str, any]) -> TextOrVoiceChannel: + """Passes the channel to be silenced to the resource lock.""" + channel, _ = Silence.parse_silence_args(args["ctx"], args["duration_or_channel"], args["duration"]) + return channel class Silence(commands.Cog): @@ -86,94 +115,208 @@ class Silence(commands.Cog): self.bot = bot self.scheduler = Scheduler(self.__class__.__name__) - self._init_task = self.bot.loop.create_task(self._async_init()) + self._init_task = scheduling.create_task(self._async_init(), event_loop=self.bot.loop) async def _async_init(self) -> None: """Set instance attributes once the guild is available and reschedule unsilences.""" await self.bot.wait_until_guild_available() - guild = self.bot.get_guild(Guild.id) + guild = self.bot.get_guild(constants.Guild.id) + self._everyone_role = guild.default_role - self._mod_alerts_channel = self.bot.get_channel(Channels.mod_alerts) - self.notifier = SilenceNotifier(self.bot.get_channel(Channels.mod_log)) + self._verified_voice_role = guild.get_role(constants.Roles.voice_verified) + + self._mod_alerts_channel = self.bot.get_channel(constants.Channels.mod_alerts) + + self.notifier = SilenceNotifier(self.bot.get_channel(constants.Channels.mod_log)) await self._reschedule() + async def send_message( + self, + message: str, + source_channel: TextChannel, + target_channel: TextOrVoiceChannel, + *, + alert_target: bool = False + ) -> None: + """Helper function to send message confirmation to `source_channel`, and notification to `target_channel`.""" + # Reply to invocation channel + source_reply = message + if source_channel != target_channel: + source_reply = source_reply.format(channel=target_channel.mention) + else: + source_reply = source_reply.format(channel="current channel") + await source_channel.send(source_reply) + + # Reply to target channel + if alert_target: + if isinstance(target_channel, VoiceChannel): + voice_chat = 
self.bot.get_channel(VOICE_CHANNELS.get(target_channel.id)) + if voice_chat and source_channel != voice_chat: + await voice_chat.send(message.format(channel=target_channel.mention)) + + elif source_channel != target_channel: + await target_channel.send(message.format(channel="current channel")) + @commands.command(aliases=("hush",)) - @lock_arg(LOCK_NAMESPACE, "ctx", attrgetter("channel"), raise_error=True) - async def silence(self, ctx: Context, duration: HushDurationConverter = 10) -> None: + @lock(LOCK_NAMESPACE, _select_lock_channel, raise_error=True) + async def silence( + self, + ctx: Context, + duration_or_channel: typing.Union[TextOrVoiceChannel, HushDurationConverter] = None, + duration: HushDurationConverter = 10, + *, + kick: bool = False + ) -> None: """ Silence the current channel for `duration` minutes or `forever`. Duration is capped at 15 minutes, passing forever makes the silence indefinite. Indefinitely silenced channels get added to a notifier which posts notices every 15 minutes from the start. + + Passing a voice channel will attempt to move members out of the channel and back to force sync permissions. + If `kick` is True, members will not be added back to the voice channel, and members will be unable to rejoin. """ await self._init_task + channel, duration = self.parse_silence_args(ctx, duration_or_channel, duration) - channel_info = f"#{ctx.channel} ({ctx.channel.id})" + channel_info = f"#{channel} ({channel.id})" log.debug(f"{ctx.author} is silencing channel {channel_info}.") - if not await self._set_silence_overwrites(ctx.channel): + # Since threads don't have specific overrides, we cannot silence them individually. + # The parent channel has to be muted or the thread should be archived. + if isinstance(channel, Thread): + await ctx.send(":x: Threads cannot be silenced.") + return + + if not await self._set_silence_overwrites(channel, kick=kick): log.info(f"Tried to silence channel {channel_info} but the channel was already silenced.") - await ctx.send(MSG_SILENCE_FAIL) + await self.send_message(MSG_SILENCE_FAIL, ctx.channel, channel, alert_target=False) return - await self._schedule_unsilence(ctx, duration) + if isinstance(channel, VoiceChannel): + if kick: + await self._kick_voice_members(channel) + else: + await self._force_voice_sync(channel) + + await self._schedule_unsilence(ctx, channel, duration) if duration is None: - self.notifier.add_channel(ctx.channel) + self.notifier.add_channel(channel) log.info(f"Silenced {channel_info} indefinitely.") - await ctx.send(MSG_SILENCE_PERMANENT) + await self.send_message(MSG_SILENCE_PERMANENT, ctx.channel, channel, alert_target=True) + else: log.info(f"Silenced {channel_info} for {duration} minute(s).") - await ctx.send(MSG_SILENCE_SUCCESS.format(duration=duration)) - - @commands.command(aliases=("unhush",)) - async def unsilence(self, ctx: Context) -> None: - """ - Unsilence the current channel. - - If the channel was silenced indefinitely, notifications for the channel will stop. 
- """ - await self._init_task - log.debug(f"Unsilencing channel #{ctx.channel} from {ctx.author}'s command.") - await self._unsilence_wrapper(ctx.channel) - - @lock_arg(LOCK_NAMESPACE, "channel", raise_error=True) - async def _unsilence_wrapper(self, channel: TextChannel) -> None: - """Unsilence `channel` and send a success/failure message.""" - if not await self._unsilence(channel): - overwrite = channel.overwrites_for(self._everyone_role) - if overwrite.send_messages is False or overwrite.add_reactions is False: - await channel.send(MSG_UNSILENCE_MANUAL) + formatted_message = MSG_SILENCE_SUCCESS.format(duration=duration) + await self.send_message(formatted_message, ctx.channel, channel, alert_target=True) + + @staticmethod + def parse_silence_args( + ctx: Context, + duration_or_channel: typing.Union[TextOrVoiceChannel, int], + duration: HushDurationConverter + ) -> typing.Tuple[TextOrVoiceChannel, Optional[int]]: + """Helper method to parse the arguments of the silence command.""" + if duration_or_channel: + if isinstance(duration_or_channel, (TextChannel, VoiceChannel)): + channel = duration_or_channel else: - await channel.send(MSG_UNSILENCE_FAIL) + channel = ctx.channel + duration = duration_or_channel else: - await channel.send(MSG_UNSILENCE_SUCCESS) + channel = ctx.channel + + if duration == -1: + duration = None - async def _set_silence_overwrites(self, channel: TextChannel) -> bool: + return channel, duration + + async def _set_silence_overwrites(self, channel: TextOrVoiceChannel, *, kick: bool = False) -> bool: """Set silence permission overwrites for `channel` and return True if successful.""" - overwrite = channel.overwrites_for(self._everyone_role) - prev_overwrites = dict(send_messages=overwrite.send_messages, add_reactions=overwrite.add_reactions) + # Get the original channel overwrites + if isinstance(channel, TextChannel): + role = self._everyone_role + overwrite = channel.overwrites_for(role) + prev_overwrites = dict( + send_messages=overwrite.send_messages, + add_reactions=overwrite.add_reactions, + create_private_threads=overwrite.create_private_threads, + create_public_threads=overwrite.create_public_threads, + send_messages_in_threads=overwrite.send_messages_in_threads + ) + else: + role = self._verified_voice_role + overwrite = channel.overwrites_for(role) + prev_overwrites = dict(speak=overwrite.speak) + if kick: + prev_overwrites.update(connect=overwrite.connect) + + # Stop if channel was already silenced if channel.id in self.scheduler or all(val is False for val in prev_overwrites.values()): return False - overwrite.update(send_messages=False, add_reactions=False) - await channel.set_permissions(self._everyone_role, overwrite=overwrite) + # Set new permissions, store + overwrite.update(**dict.fromkeys(prev_overwrites, False)) + await channel.set_permissions(role, overwrite=overwrite) await self.previous_overwrites.set(channel.id, json.dumps(prev_overwrites)) return True - async def _schedule_unsilence(self, ctx: Context, duration: Optional[int]) -> None: + async def _schedule_unsilence(self, ctx: Context, channel: TextOrVoiceChannel, duration: Optional[int]) -> None: """Schedule `ctx.channel` to be unsilenced if `duration` is not None.""" if duration is None: - await self.unsilence_timestamps.set(ctx.channel.id, -1) + await self.unsilence_timestamps.set(channel.id, -1) else: - self.scheduler.schedule_later(duration * 60, ctx.channel.id, ctx.invoke(self.unsilence)) + self.scheduler.schedule_later(duration * 60, channel.id, ctx.invoke(self.unsilence, 
channel=channel)) unsilence_time = datetime.now(tz=timezone.utc) + timedelta(minutes=duration) - await self.unsilence_timestamps.set(ctx.channel.id, unsilence_time.timestamp()) + await self.unsilence_timestamps.set(channel.id, unsilence_time.timestamp()) - async def _unsilence(self, channel: TextChannel) -> bool: + @commands.command(aliases=("unhush",)) + async def unsilence(self, ctx: Context, *, channel: TextOrVoiceChannel = None) -> None: + """ + Unsilence the given channel if given, else the current one. + + If the channel was silenced indefinitely, notifications for the channel will stop. + """ + await self._init_task + if channel is None: + channel = ctx.channel + log.debug(f"Unsilencing channel #{channel} from {ctx.author}'s command.") + await self._unsilence_wrapper(channel, ctx) + + @lock_arg(LOCK_NAMESPACE, "channel", raise_error=True) + async def _unsilence_wrapper(self, channel: TextOrVoiceChannel, ctx: Optional[Context] = None) -> None: + """ + Unsilence `channel` and send a success/failure message to ctx.channel. + + If ctx is None or not passed, `channel` is used in its place. + If `channel` and ctx.channel are the same, only one message is sent. + """ + msg_channel = channel + if ctx is not None: + msg_channel = ctx.channel + + if not await self._unsilence(channel): + if isinstance(channel, VoiceChannel): + overwrite = channel.overwrites_for(self._verified_voice_role) + has_channel_overwrites = overwrite.speak is False + else: + overwrite = channel.overwrites_for(self._everyone_role) + has_channel_overwrites = overwrite.send_messages is False or overwrite.add_reactions is False + + # Send fail message to muted channel or voice chat channel, and invocation channel + if has_channel_overwrites: + await self.send_message(MSG_UNSILENCE_MANUAL, msg_channel, channel, alert_target=False) + else: + await self.send_message(MSG_UNSILENCE_FAIL, msg_channel, channel, alert_target=False) + + else: + await self.send_message(MSG_UNSILENCE_SUCCESS, msg_channel, channel, alert_target=True) + + async def _unsilence(self, channel: TextOrVoiceChannel) -> bool: """ Unsilence `channel`. @@ -183,19 +326,42 @@ class Silence(commands.Cog): Return `True` if channel permissions were changed, `False` otherwise. 
""" + # Get stored overwrites, and return if channel is unsilenced prev_overwrites = await self.previous_overwrites.get(channel.id) if channel.id not in self.scheduler and prev_overwrites is None: log.info(f"Tried to unsilence channel #{channel} ({channel.id}) but the channel was not silenced.") return False - overwrite = channel.overwrites_for(self._everyone_role) + # Select the role based on channel type, and get current overwrites + if isinstance(channel, TextChannel): + role = self._everyone_role + overwrite = channel.overwrites_for(role) + permissions = "`Send Messages` and `Add Reactions`" + else: + role = self._verified_voice_role + overwrite = channel.overwrites_for(role) + permissions = "`Speak` and `Connect`" + + # Check if old overwrites were not stored if prev_overwrites is None: log.info(f"Missing previous overwrites for #{channel} ({channel.id}); defaulting to None.") - overwrite.update(send_messages=None, add_reactions=None) + overwrite.update( + send_messages=None, + add_reactions=None, + create_private_threads=None, + create_public_threads=None, + send_messages_in_threads=None, + speak=None, + connect=None + ) else: overwrite.update(**json.loads(prev_overwrites)) - await channel.set_permissions(self._everyone_role, overwrite=overwrite) + # Update Permissions + await channel.set_permissions(role, overwrite=overwrite) + if isinstance(channel, VoiceChannel): + await self._force_voice_sync(channel) + log.info(f"Unsilenced channel #{channel} ({channel.id}).") self.scheduler.cancel(channel.id) @@ -203,15 +369,81 @@ class Silence(commands.Cog): await self.previous_overwrites.delete(channel.id) await self.unsilence_timestamps.delete(channel.id) + # Alert Admin team if old overwrites were not available if prev_overwrites is None: await self._mod_alerts_channel.send( - f"<@&{Roles.admins}> Restored overwrites with default values after unsilencing " - f"{channel.mention}. Please check that the `Send Messages` and `Add Reactions` " - f"overwrites for {self._everyone_role.mention} are at their desired values." + f"<@&{constants.Roles.admins}> Restored overwrites with default values after unsilencing " + f"{channel.mention}. Please check that the {permissions} " + f"overwrites for {role.mention} are at their desired values." ) return True + @staticmethod + async def _get_afk_channel(guild: Guild) -> VoiceChannel: + """Get a guild's AFK channel, or create one if it does not exist.""" + afk_channel = guild.afk_channel + + if afk_channel is None: + overwrites = { + guild.default_role: PermissionOverwrite(speak=False, connect=False, view_channel=False) + } + afk_channel = await guild.create_voice_channel("mute-temp", overwrites=overwrites) + log.info(f"Failed to get afk-channel, created #{afk_channel} ({afk_channel.id})") + + return afk_channel + + @staticmethod + async def _kick_voice_members(channel: VoiceChannel) -> None: + """Remove all non-staff members from a voice channel.""" + log.debug(f"Removing all non staff members from #{channel.name} ({channel.id}).") + + for member in channel.members: + # Skip staff + if any(role.id in constants.MODERATION_ROLES for role in member.roles): + continue + + try: + await member.move_to(None, reason="Kicking member from voice channel.") + log.trace(f"Kicked {member.name} from voice channel.") + except Exception as e: + log.debug(f"Failed to move {member.name}. 
Reason: {e}") + continue + + log.debug("Removed all members.") + + async def _force_voice_sync(self, channel: VoiceChannel) -> None: + """ + Move all non-staff members from `channel` to a temporary channel and back to force toggle role mute. + + Permission modification has to happen before this function. + """ + # Obtain temporary channel + delete_channel = channel.guild.afk_channel is None + afk_channel = await self._get_afk_channel(channel.guild) + + try: + # Move all members to temporary channel and back + for member in channel.members: + # Skip staff + if any(role.id in constants.MODERATION_ROLES for role in member.roles): + continue + + try: + await member.move_to(afk_channel, reason="Muting VC member.") + log.trace(f"Moved {member.name} to afk channel.") + + await member.move_to(channel, reason="Muting VC member.") + log.trace(f"Moved {member.name} to original voice channel.") + except Exception as e: + log.debug(f"Failed to move {member.name}. Reason: {e}") + continue + + finally: + # Delete VC channel if it was created. + if delete_channel: + await afk_channel.delete(reason="Deleting temporary mute channel.") + async def _reschedule(self) -> None: """Reschedule unsilencing of active silences and add permanent ones to the notifier.""" for channel_id, timestamp in await self.unsilence_timestamps.items(): @@ -247,7 +479,7 @@ class Silence(commands.Cog): # This cannot be static (must have a __func__ attribute). async def cog_check(self, ctx: Context) -> bool: """Only allow moderators to invoke the commands in this cog.""" - return await commands.has_any_role(*MODERATION_ROLES).predicate(ctx) + return await commands.has_any_role(*constants.MODERATION_ROLES).predicate(ctx) def setup(bot: Bot) -> None: diff --git a/bot/exts/moderation/slowmode.py b/bot/exts/moderation/slowmode.py index d8baff76a..9583597e0 100644 --- a/bot/exts/moderation/slowmode.py +++ b/bot/exts/moderation/slowmode.py @@ -1,4 +1,3 @@ -import logging from typing import Optional from dateutil.relativedelta import relativedelta @@ -8,9 +7,10 @@ from discord.ext.commands import Cog, Context, group, has_any_role from bot.bot import Bot from bot.constants import Channels, Emojis, MODERATION_ROLES from bot.converters import DurationDelta +from bot.log import get_logger from bot.utils import time -log = logging.getLogger(__name__) +log = get_logger(__name__) SLOWMODE_MAX_DELAY = 21600 # seconds diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py index fd856a7f4..99bbd8721 100644 --- a/bot/exts/moderation/stream.py +++ b/bot/exts/moderation/stream.py @@ -1,4 +1,3 @@ -import logging from datetime import timedelta, timezone from operator import itemgetter @@ -9,13 +8,17 @@ from async_rediscache import RedisCache from discord.ext import commands from bot.bot import Bot -from bot.constants import Colours, Emojis, Guild, MODERATION_ROLES, Roles, STAFF_ROLES, VideoPermission +from bot.constants import ( + Colours, Emojis, Guild, MODERATION_ROLES, Roles, STAFF_PARTNERS_COMMUNITY_ROLES, VideoPermission +) from bot.converters import Expiry +from bot.log import get_logger from bot.pagination import LinePaginator -from bot.utils.scheduling import Scheduler -from bot.utils.time import format_infraction_with_duration +from bot.utils import scheduling +from bot.utils.members import get_or_fetch_member +from bot.utils.time import discord_timestamp, format_infraction_with_duration -log = logging.getLogger(__name__) +log = get_logger(__name__) class Stream(commands.Cog): @@ -27,8 +30,8 @@ class Stream(commands.Cog): def 
__init__(self, bot: Bot): self.bot = bot - self.scheduler = Scheduler(self.__class__.__name__) - self.reload_task = self.bot.loop.create_task(self._reload_tasks_from_redis()) + self.scheduler = scheduling.Scheduler(self.__class__.__name__) + self.reload_task = scheduling.create_task(self._reload_tasks_from_redis(), event_loop=self.bot.loop) def cog_unload(self) -> None: """Cancel all scheduled tasks.""" @@ -44,23 +47,17 @@ class Stream(commands.Cog): """Reload outstanding tasks from redis on startup, delete the task if the member has since left the server.""" await self.bot.wait_until_guild_available() items = await self.task_cache.items() + guild = self.bot.get_guild(Guild.id) for key, value in items: - member = self.bot.get_guild(Guild.id).get_member(key) + member = await get_or_fetch_member(guild, key) if not member: - # Member isn't found in the cache - try: - member = await self.bot.get_guild(Guild.id).fetch_member(key) - except discord.errors.NotFound: - log.debug( - f"Member {key} left the guild before we could schedule " - "the revoking of their streaming permissions." - ) - await self.task_cache.delete(key) - continue - except discord.HTTPException: - log.exception(f"Exception while trying to retrieve member {key} from Discord.") - continue + log.debug( + "User with ID %d left the guild before their streaming permissions could be revoked.", + key + ) + await self.task_cache.delete(key) + continue revoke_time = Arrow.utcfromtimestamp(value) log.debug(f"Scheduling {member} ({member.id}) to have streaming permission revoked at {revoke_time}") @@ -134,16 +131,7 @@ class Stream(commands.Cog): await member.add_roles(discord.Object(Roles.video), reason="Temporary streaming access granted") - # Use embed as embed timestamps do timezone conversions. - embed = discord.Embed( - description=f"{Emojis.check_mark} {member.mention} can now stream.", - colour=Colours.soft_green - ) - embed.set_footer(text=f"Streaming permission has been given to {member} until") - embed.timestamp = duration - - # Mention in content as mentions in embeds don't ping - await ctx.send(content=member.mention, embed=embed) + await ctx.send(f"{Emojis.check_mark} {member.mention} can now stream until {discord_timestamp(duration)}.") # Convert here for nicer logging revoke_time = format_infraction_with_duration(str(duration)) @@ -202,17 +190,17 @@ class Stream(commands.Cog): @commands.command(aliases=('lstream',)) @commands.has_any_role(*MODERATION_ROLES) async def liststream(self, ctx: commands.Context) -> None: - """Lists all non-staff users who have permission to stream.""" - non_staff_members_with_stream = [ + """Lists all users who aren't staff, partners or members of the python community and have stream permissions.""" + non_staff_partners_community_members_with_stream = [ member for member in ctx.guild.get_role(Roles.video).members - if not any(role.id in STAFF_ROLES for role in member.roles) + if not any(role.id in STAFF_PARTNERS_COMMUNITY_ROLES for role in member.roles) ] # List of tuples (UtcPosixTimestamp, str) # So that the list can be sorted on the UtcPosixTimestamp before the message is passed to the paginator. 
streamer_info = [] - for member in non_staff_members_with_stream: + for member in non_staff_partners_community_members_with_stream: if revoke_time := await self.task_cache.get(member.id): # Member only has temporary streaming perms revoke_delta = Arrow.utcfromtimestamp(revoke_time).humanize() diff --git a/bot/exts/moderation/verification.py b/bot/exts/moderation/verification.py index bfe9b74b4..ed5571d2a 100644 --- a/bot/exts/moderation/verification.py +++ b/bot/exts/moderation/verification.py @@ -1,4 +1,3 @@ -import logging import typing as t import discord @@ -7,9 +6,10 @@ from discord.ext.commands import Cog, Context, command, has_any_role from bot import constants from bot.bot import Bot from bot.decorators import in_whitelist +from bot.log import get_logger from bot.utils.checks import InWhitelistCheckFailure -log = logging.getLogger(__name__) +log = get_logger(__name__) # Sent via DMs once user joins the guild ON_JOIN_MESSAGE = """ diff --git a/bot/exts/moderation/voice_gate.py b/bot/exts/moderation/voice_gate.py index 94b23a344..8fdc7c76b 100644 --- a/bot/exts/moderation/voice_gate.py +++ b/bot/exts/moderation/voice_gate.py @@ -1,5 +1,4 @@ import asyncio -import logging from contextlib import suppress from datetime import datetime, timedelta @@ -8,15 +7,15 @@ from async_rediscache import RedisCache from discord import Colour, Member, VoiceState from discord.ext.commands import Cog, Context, command - from bot.api import ResponseCodeError from bot.bot import Bot from bot.constants import Channels, Event, MODERATION_ROLES, Roles, VoiceGate as GateConf from bot.decorators import has_no_roles, in_whitelist from bot.exts.moderation.modlog import ModLog +from bot.log import get_logger from bot.utils.checks import InWhitelistCheckFailure -log = logging.getLogger(__name__) +log = get_logger(__name__) # Flag written to the cog's RedisCache as a value when the Member's (key) notification # was already removed ~ this signals both that no further notifications should be sent, @@ -118,7 +117,7 @@ class VoiceGate(Cog): await self.redis_cache.set(member.id, message.id) return True, message.channel - @command(aliases=('voiceverify',)) + @command(aliases=("voiceverify", "voice-verify",)) @has_no_roles(Roles.voice_verified) @in_whitelist(channels=(Channels.voice_gate,), redirect=None) async def voice_verify(self, ctx: Context, *_) -> None: @@ -166,7 +165,10 @@ class VoiceGate(Cog): return checks = { - "joined_at": ctx.author.joined_at > datetime.utcnow() - timedelta(days=GateConf.minimum_days_member), + "joined_at": ( + ctx.author.joined_at.replace(tzinfo=None) > datetime.utcnow() + - timedelta(days=GateConf.minimum_days_member) + ), "total_messages": data["total_messages"] < GateConf.minimum_messages, "voice_banned": data["voice_banned"], "activity_blocks": data["activity_blocks"] < GateConf.minimum_activity_blocks @@ -254,6 +256,10 @@ class VoiceGate(Cog): log.trace("User not in a voice channel. Ignore.") return + if isinstance(after.channel, discord.StageChannel): + log.trace("User joined a stage channel. 
Ignore.") + return + # To avoid race conditions, checking if the user should receive a notification # and sending it if appropriate is delegated to an atomic helper notification_sent, message_channel = await self._ping_newcomer(member) diff --git a/bot/exts/moderation/watchchannels/_watchchannel.py b/bot/exts/moderation/watchchannels/_watchchannel.py index 9f26c34f2..8f97130ca 100644 --- a/bot/exts/moderation/watchchannels/_watchchannel.py +++ b/bot/exts/moderation/watchchannels/_watchchannel.py @@ -1,5 +1,4 @@ import asyncio -import logging import re import textwrap from abc import abstractmethod @@ -17,11 +16,13 @@ from bot.constants import BigBrother as BigBrotherConfig, Guild as GuildConfig, from bot.exts.filters.token_remover import TokenRemover from bot.exts.filters.webhook_remover import WEBHOOK_URL_RE from bot.exts.moderation.modlog import ModLog +from bot.log import CustomLogger, get_logger from bot.pagination import LinePaginator -from bot.utils import CogABCMeta, messages +from bot.utils import CogABCMeta, messages, scheduling +from bot.utils.members import get_or_fetch_member from bot.utils.time import get_time_delta -log = logging.getLogger(__name__) +log = get_logger(__name__) URL_RE = re.compile(r"(https?://[^\s]+)") @@ -46,7 +47,7 @@ class WatchChannel(metaclass=CogABCMeta): webhook_id: int, api_endpoint: str, api_default_params: dict, - logger: logging.Logger, + logger: CustomLogger, *, disable_header: bool = False ) -> None: @@ -69,7 +70,7 @@ class WatchChannel(metaclass=CogABCMeta): self.message_history = MessageHistory() self.disable_header = disable_header - self._start = self.bot.loop.create_task(self.start_watchchannel()) + self._start = scheduling.create_task(self.start_watchchannel(), event_loop=self.bot.loop) @property def modlog(self) -> ModLog: @@ -169,7 +170,7 @@ class WatchChannel(metaclass=CogABCMeta): """Queues up messages sent by watched users.""" if msg.author.id in self.watched_users: if not self.consuming_messages: - self._consume_task = self.bot.loop.create_task(self.consume_messages()) + self._consume_task = scheduling.create_task(self.consume_messages(), event_loop=self.bot.loop) self.log.trace(f"Received message: {msg.content} ({len(msg.attachments)} attachments)") self.message_queue[msg.author.id][msg.channel.id].append(msg) @@ -199,7 +200,10 @@ class WatchChannel(metaclass=CogABCMeta): if self.message_queue: self.log.trace("Channel queue not empty: Continuing consuming queues") - self._consume_task = self.bot.loop.create_task(self.consume_messages(delay_consumption=False)) + self._consume_task = scheduling.create_task( + self.consume_messages(delay_consumption=False), + event_loop=self.bot.loop, + ) else: self.log.trace("Done consuming messages.") @@ -246,7 +250,7 @@ class WatchChannel(metaclass=CogABCMeta): await self.webhook_send( cleaned_content, username=msg.author.display_name, - avatar_url=msg.author.avatar_url + avatar_url=msg.author.display_avatar.url ) if msg.attachments: @@ -260,7 +264,7 @@ class WatchChannel(metaclass=CogABCMeta): await self.webhook_send( embed=e, username=msg.author.display_name, - avatar_url=msg.author.avatar_url + avatar_url=msg.author.display_avatar.url ) except discord.HTTPException as exc: self.log.exception( @@ -278,7 +282,7 @@ class WatchChannel(metaclass=CogABCMeta): user_id = msg.author.id guild = self.bot.get_guild(GuildConfig.id) - actor = guild.get_member(self.watched_users[user_id]['actor']) + actor = await get_or_fetch_member(guild, self.watched_users[user_id]['actor']) actor = actor.display_name if actor 
else self.watched_users[user_id]['actor'] inserted_at = self.watched_users[user_id]['inserted_at'] @@ -295,9 +299,9 @@ class WatchChannel(metaclass=CogABCMeta): footer = f"Added {time_delta} by {actor} | Reason: {reason}" embed = Embed(description=f"{msg.author.mention} {message_jump}") - embed.set_footer(text=textwrap.shorten(footer, width=128, placeholder="...")) + embed.set_footer(text=textwrap.shorten(footer, width=256, placeholder="...")) - await self.webhook_send(embed=embed, username=msg.author.display_name, avatar_url=msg.author.avatar_url) + await self.webhook_send(embed=embed, username=msg.author.display_name, avatar_url=msg.author.display_avatar.url) async def list_watched_users( self, ctx: Context, oldest_first: bool = False, update_cache: bool = True @@ -352,7 +356,7 @@ class WatchChannel(metaclass=CogABCMeta): list_data["info"] = {} for user_id, user_data in watched_iter: - member = ctx.guild.get_member(user_id) + member = await get_or_fetch_member(ctx.guild, user_id) line = f"• `{user_id}`" if member: line += f" ({member.name}#{member.discriminator})" diff --git a/bot/exts/moderation/watchchannels/bigbrother.py b/bot/exts/moderation/watchchannels/bigbrother.py index 3b44056d3..ab37b1b80 100644 --- a/bot/exts/moderation/watchchannels/bigbrother.py +++ b/bot/exts/moderation/watchchannels/bigbrother.py @@ -1,4 +1,3 @@ -import logging import textwrap from collections import ChainMap @@ -6,11 +5,12 @@ from discord.ext.commands import Cog, Context, group, has_any_role from bot.bot import Bot from bot.constants import Channels, MODERATION_ROLES, Webhooks -from bot.converters import FetchedMember +from bot.converters import MemberOrUser from bot.exts.moderation.infraction._utils import post_infraction from bot.exts.moderation.watchchannels._watchchannel import WatchChannel +from bot.log import get_logger -log = logging.getLogger(__name__) +log = get_logger(__name__) class BigBrother(WatchChannel, Cog, name="Big Brother"): @@ -60,7 +60,7 @@ class BigBrother(WatchChannel, Cog, name="Big Brother"): @bigbrother_group.command(name='watch', aliases=('w',), root_aliases=('watch',)) @has_any_role(*MODERATION_ROLES) - async def watch_command(self, ctx: Context, user: FetchedMember, *, reason: str) -> None: + async def watch_command(self, ctx: Context, user: MemberOrUser, *, reason: str) -> None: """ Relay messages sent by the given `user` to the `#big-brother` channel. @@ -71,11 +71,11 @@ class BigBrother(WatchChannel, Cog, name="Big Brother"): @bigbrother_group.command(name='unwatch', aliases=('uw',), root_aliases=('unwatch',)) @has_any_role(*MODERATION_ROLES) - async def unwatch_command(self, ctx: Context, user: FetchedMember, *, reason: str) -> None: + async def unwatch_command(self, ctx: Context, user: MemberOrUser, *, reason: str) -> None: """Stop relaying messages by the given `user`.""" await self.apply_unwatch(ctx, user, reason) - async def apply_watch(self, ctx: Context, user: FetchedMember, reason: str) -> None: + async def apply_watch(self, ctx: Context, user: MemberOrUser, reason: str) -> None: """ Add `user` to watched users and apply a watch infraction with `reason`. 
@@ -87,18 +87,23 @@ class BigBrother(WatchChannel, Cog, name="Big Brother"): return if not await self.fetch_user_cache(): - await ctx.send(f":x: Updating the user cache failed, can't watch user {user}") + await ctx.send(f":x: Updating the user cache failed, can't watch user {user.mention}") return if user.id in self.watched_users: - await ctx.send(f":x: {user} is already being watched.") + await ctx.send(f":x: {user.mention} is already being watched.") + return + + # discord.User instances don't have a roles attribute + if hasattr(user, "roles") and any(role.id in MODERATION_ROLES for role in user.roles): + await ctx.send(f":x: I'm sorry {ctx.author}, I'm afraid I can't do that. I must be kind to my masters.") return response = await post_infraction(ctx, user, 'watch', reason, hidden=True, active=True) if response is not None: self.watched_users[user.id] = response - msg = f":white_check_mark: Messages sent by {user} will now be relayed to Big Brother." + msg = f":white_check_mark: Messages sent by {user.mention} will now be relayed to Big Brother." history = await self.bot.api_client.get( self.api_endpoint, @@ -120,7 +125,7 @@ class BigBrother(WatchChannel, Cog, name="Big Brother"): await ctx.send(msg) - async def apply_unwatch(self, ctx: Context, user: FetchedMember, reason: str, send_message: bool = True) -> None: + async def apply_unwatch(self, ctx: Context, user: MemberOrUser, reason: str, send_message: bool = True) -> None: """ Remove `user` from watched users and mark their infraction as inactive with `reason`. @@ -151,7 +156,7 @@ class BigBrother(WatchChannel, Cog, name="Big Brother"): log.debug(f"Perma-banned user {user} was unwatched.") return log.trace("User is not banned. Sending message to channel") - message = f":white_check_mark: Messages sent by {user} will no longer be relayed." + message = f":white_check_mark: Messages sent by {user.mention} will no longer be relayed." 
else: log.trace("No active watches found for user.") diff --git a/bot/exts/recruitment/talentpool/_cog.py b/bot/exts/recruitment/talentpool/_cog.py index 03326cab2..2fafaec97 100644 --- a/bot/exts/recruitment/talentpool/_cog.py +++ b/bot/exts/recruitment/talentpool/_cog.py @@ -1,69 +1,153 @@ -import logging import textwrap -from collections import ChainMap +from collections import ChainMap, defaultdict from io import StringIO -from typing import Union +from typing import Optional, Union import discord +from async_rediscache import RedisCache from discord import Color, Embed, Member, PartialMessage, RawReactionActionEvent, User -from discord.ext.commands import Cog, Context, group, has_any_role +from discord.ext.commands import BadArgument, Cog, Context, group, has_any_role from bot.api import ResponseCodeError from bot.bot import Bot -from bot.constants import Channels, Emojis, Guild, MODERATION_ROLES, STAFF_ROLES, Webhooks -from bot.converters import FetchedMember -from bot.exts.moderation.watchchannels._watchchannel import WatchChannel +from bot.constants import Channels, Emojis, Guild, MODERATION_ROLES, Roles, STAFF_ROLES +from bot.converters import MemberOrUser, UnambiguousMemberOrUser from bot.exts.recruitment.talentpool._review import Reviewer +from bot.log import get_logger from bot.pagination import LinePaginator -from bot.utils import time +from bot.utils import scheduling, time +from bot.utils.members import get_or_fetch_member +from bot.utils.time import get_time_delta +AUTOREVIEW_ENABLED_KEY = "autoreview_enabled" REASON_MAX_CHARS = 1000 -log = logging.getLogger(__name__) +log = get_logger(__name__) -class TalentPool(WatchChannel, Cog, name="Talentpool"): - """Relays messages of helper candidates to a watch channel to observe them.""" +class TalentPool(Cog, name="Talentpool"): + """Used to nominate potential helper candidates.""" - def __init__(self, bot: Bot) -> None: - super().__init__( - bot, - destination=Channels.talent_pool, - webhook_id=Webhooks.talent_pool, - api_endpoint='bot/nominations', - api_default_params={'active': 'true', 'ordering': '-inserted_at'}, - logger=log, - disable_header=True, - ) + # RedisCache[str, bool] + # Can contain a single key, "autoreview_enabled", with the value a bool indicating if autoreview is enabled. 
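# A minimal usage sketch for this settings cache, mirroring the calls that appear in the
# later hunks of this file (async_rediscache's get() is awaited with a default, so a
# missing key counts as autoreview being enabled):
#
#     enabled = await self.talentpool_settings.get(AUTOREVIEW_ENABLED_KEY, True)
#     await self.talentpool_settings.set(AUTOREVIEW_ENABLED_KEY, False)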
+ talentpool_settings = RedisCache() + def __init__(self, bot: Bot) -> None: + self.bot = bot self.reviewer = Reviewer(self.__class__.__name__, bot, self) - self.bot.loop.create_task(self.reviewer.reschedule_reviews()) + self.cache: Optional[defaultdict[dict]] = None + self.api_default_params = {'active': 'true', 'ordering': '-inserted_at'} + + self.initial_refresh_task = scheduling.create_task(self.refresh_cache(), event_loop=self.bot.loop) + scheduling.create_task(self.schedule_autoreviews(), event_loop=self.bot.loop) + + async def schedule_autoreviews(self) -> None: + """Reschedule reviews for active nominations if autoreview is enabled.""" + if await self.autoreview_enabled(): + # Wait for a populated cache first + await self.initial_refresh_task + await self.reviewer.reschedule_reviews() + else: + log.trace("Not scheduling reviews as autoreview is disabled.") + + async def autoreview_enabled(self) -> bool: + """Return whether automatic posting of nomination reviews is enabled.""" + return await self.talentpool_settings.get(AUTOREVIEW_ENABLED_KEY, True) + + async def refresh_cache(self) -> bool: + """Updates TalentPool users cache.""" + # Wait until logged in to ensure bot api client exists + await self.bot.wait_until_guild_available() + try: + data = await self.bot.api_client.get( + 'bot/nominations', + params=self.api_default_params + ) + except ResponseCodeError as err: + log.exception("Failed to fetch the currently nominated users from the API", exc_info=err) + return False + + self.cache = defaultdict(dict) + + for entry in data: + user_id = entry.pop('user') + self.cache[user_id] = entry + + return True @group(name='talentpool', aliases=('tp', 'talent', 'nomination', 'n'), invoke_without_command=True) - @has_any_role(*MODERATION_ROLES) + @has_any_role(*STAFF_ROLES) async def nomination_group(self, ctx: Context) -> None: """Highlights the activity of helper nominees by relaying their messages to the talent pool channel.""" await ctx.send_help(ctx.command) - @nomination_group.command(name='watched', aliases=('all', 'list'), root_aliases=("nominees",)) + @nomination_group.group(name="autoreview", aliases=("ar",), invoke_without_command=True) + @has_any_role(*MODERATION_ROLES) + async def nomination_autoreview_group(self, ctx: Context) -> None: + """Commands for enabling or disabling autoreview.""" + await ctx.send_help(ctx.command) + + @nomination_autoreview_group.command(name="enable", aliases=("on",)) + @has_any_role(Roles.admins) + async def autoreview_enable(self, ctx: Context) -> None: + """ + Enable automatic posting of reviews. + + This will post reviews up to one day overdue. Older nominations can be + manually reviewed with the `tp post_review <user_id>` command. 
+ """ + if await self.autoreview_enabled(): + await ctx.send(":x: Autoreview is already enabled") + return + + await self.talentpool_settings.set(AUTOREVIEW_ENABLED_KEY, True) + await self.reviewer.reschedule_reviews() + await ctx.send(":white_check_mark: Autoreview enabled") + + @nomination_autoreview_group.command(name="disable", aliases=("off",)) + @has_any_role(Roles.admins) + async def autoreview_disable(self, ctx: Context) -> None: + """Disable automatic posting of reviews.""" + if not await self.autoreview_enabled(): + await ctx.send(":x: Autoreview is already disabled") + return + + await self.talentpool_settings.set(AUTOREVIEW_ENABLED_KEY, False) + self.reviewer.cancel_all() + await ctx.send(":white_check_mark: Autoreview disabled") + + @nomination_autoreview_group.command(name="status") @has_any_role(*MODERATION_ROLES) - async def watched_command( + async def autoreview_status(self, ctx: Context) -> None: + """Show whether automatic posting of reviews is enabled or disabled.""" + if await self.autoreview_enabled(): + await ctx.send("Autoreview is currently enabled") + else: + await ctx.send("Autoreview is currently disabled") + + @nomination_group.command( + name="nominees", + aliases=("nominated", "all", "list", "watched"), + root_aliases=("nominees",) + ) + @has_any_role(*MODERATION_ROLES) + async def list_command( self, ctx: Context, oldest_first: bool = False, update_cache: bool = True ) -> None: """ - Shows the users that are currently being monitored in the talent pool. + Shows the users that are currently in the talent pool. The optional kwarg `oldest_first` can be used to order the list by oldest nomination. The optional kwarg `update_cache` can be used to update the user cache using the API before listing the users. """ - await self.list_watched_users(ctx, oldest_first=oldest_first, update_cache=update_cache) + await self.list_nominated_users(ctx, oldest_first=oldest_first, update_cache=update_cache) - async def list_watched_users( + async def list_nominated_users( self, ctx: Context, oldest_first: bool = False, @@ -80,16 +164,27 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): The optional kwarg `update_cache` specifies whether the cache should be refreshed by polling the API. """ - # TODO Once the watch channel is removed, this can be done in a smarter way, without splitting and overriding - # the list_watched_users function. - watched_data = await self.prepare_watched_users_data(ctx, oldest_first, update_cache) + successful_update = False + if update_cache: + if not (successful_update := await self.refresh_cache()): + await ctx.send(":warning: Unable to update cache. Data may be inaccurate.") - if update_cache and not watched_data["updated"]: - await ctx.send(f":x: Failed to update {self.__class__.__name__} user cache, serving from cache") + nominations = self.cache.items() + if oldest_first: + nominations = reversed(nominations) lines = [] - for user_id, line in watched_data["info"].items(): - if self.watched_users[user_id]['reviewed']: + + for user_id, user_data in nominations: + member = await get_or_fetch_member(ctx.guild, user_id) + line = f"• `{user_id}`" + if member: + line += f" ({member.name}#{member.discriminator})" + inserted_at = user_data['inserted_at'] + line += f", added {get_time_delta(inserted_at)}" + if not member: # Cross off users who left the server. 
+ line = f"~~{line}~~" + if user_data['reviewed']: line += " *(reviewed)*" elif user_id in self.reviewer: line += " *(scheduled)*" @@ -99,7 +194,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): lines = ("There's nothing here yet.",) embed = Embed( - title=watched_data["title"], + title=f"Talent Pool active nominations ({'updated' if update_cache and successful_update else 'cached'})", color=Color.blue() ) await LinePaginator.paginate(lines, ctx, embed, empty=False) @@ -108,26 +203,30 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): @has_any_role(*MODERATION_ROLES) async def oldest_command(self, ctx: Context, update_cache: bool = True) -> None: """ - Shows talent pool monitored users ordered by oldest nomination. + Shows talent pool users ordered by oldest nomination. The optional kwarg `update_cache` can be used to update the user cache using the API before listing the users. """ - await ctx.invoke(self.watched_command, oldest_first=True, update_cache=update_cache) + await ctx.invoke(self.list_command, oldest_first=True, update_cache=update_cache) - @nomination_group.command(name='forcewatch', aliases=('fw', 'forceadd', 'fa'), root_aliases=("forcenominate",)) + @nomination_group.command( + name="forcenominate", + aliases=("fw", "forceadd", "fa", "fn", "forcewatch"), + root_aliases=("forcenominate",) + ) @has_any_role(*MODERATION_ROLES) - async def force_watch_command(self, ctx: Context, user: FetchedMember, *, reason: str = '') -> None: + async def force_nominate_command(self, ctx: Context, user: MemberOrUser, *, reason: str = '') -> None: """ Adds the given `user` to the talent pool, from any channel. A `reason` for adding the user to the talent pool is optional. """ - await self._watch_user(ctx, user, reason) + await self._nominate_user(ctx, user, reason) - @nomination_group.command(name='watch', aliases=('w', 'add', 'a'), root_aliases=("nominate",)) + @nomination_group.command(name='nominate', aliases=("w", "add", "a", "watch"), root_aliases=("nominate",)) @has_any_role(*STAFF_ROLES) - async def watch_command(self, ctx: Context, user: FetchedMember, *, reason: str = '') -> None: + async def nominate_command(self, ctx: Context, user: MemberOrUser, *, reason: str = '') -> None: """ Adds the given `user` to the talent pool. @@ -138,26 +237,26 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): if any(role.id in MODERATION_ROLES for role in ctx.author.roles): await ctx.send( f":x: Nominations should be run in the <#{Channels.nominations}> channel. " - "Use `!tp forcewatch` to override this check." + "Use `!tp forcenominate` to override this check." ) else: await ctx.send(f":x: Nominations must be run in the <#{Channels.nominations}> channel") return - await self._watch_user(ctx, user, reason) + await self._nominate_user(ctx, user, reason) - async def _watch_user(self, ctx: Context, user: FetchedMember, reason: str) -> None: + async def _nominate_user(self, ctx: Context, user: MemberOrUser, reason: str) -> None: """Adds the given user to the talent pool.""" if user.bot: - await ctx.send(f":x: I'm sorry {ctx.author}, I'm afraid I can't do that. I only watch humans.") + await ctx.send(f":x: I'm sorry {ctx.author}, I'm afraid I can't do that. Only humans can be nominated.") return if isinstance(user, Member) and any(role.id in STAFF_ROLES for role in user.roles): await ctx.send(":x: Nominating staff members, eh? 
Here's a cookie :cookie:") return - if not await self.fetch_user_cache(): - await ctx.send(f":x: Failed to update the user cache; can't add {user}") + if not await self.refresh_cache(): + await ctx.send(f":x: Failed to update the cache; can't add {user}") return if len(reason) > REASON_MAX_CHARS: @@ -166,7 +265,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): # Manual request with `raise_for_status` as False because we want the actual response session = self.bot.api_client.session - url = self.bot.api_client._url_for(self.api_endpoint) + url = self.bot.api_client._url_for('bot/nominations') kwargs = { 'json': { 'actor': ctx.author.id, @@ -188,32 +287,21 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): else: resp.raise_for_status() - self.watched_users[user.id] = response_data + self.cache[user.id] = response_data - if user.id not in self.reviewer: + if await self.autoreview_enabled() and user.id not in self.reviewer: self.reviewer.schedule_review(user.id) - history = await self.bot.api_client.get( - self.api_endpoint, - params={ - "user__id": str(user.id), - "active": "false", - "ordering": "-inserted_at" - } - ) - - msg = f"✅ The nomination for {user} has been added to the talent pool" - if history: - msg += f"\n\n({len(history)} previous nominations in total)" + msg = f"✅ The nomination for {user.mention} has been added to the talent pool" await ctx.send(msg) @nomination_group.command(name='history', aliases=('info', 'search')) @has_any_role(*MODERATION_ROLES) - async def history_command(self, ctx: Context, user: FetchedMember) -> None: + async def history_command(self, ctx: Context, user: MemberOrUser) -> None: """Shows the specified user's nomination history.""" result = await self.bot.api_client.get( - self.api_endpoint, + 'bot/nominations', params={ 'user__id': str(user.id), 'ordering': "-active,-inserted_at" @@ -227,7 +315,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): title=f"Nominations for {user.display_name} `({user.id})`", color=Color.blue() ) - lines = [self._nomination_to_string(nomination) for nomination in result] + lines = [await self._nomination_to_string(nomination) for nomination in result] await LinePaginator.paginate( lines, ctx=ctx, @@ -237,42 +325,99 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): max_size=1000 ) - @nomination_group.command(name='unwatch', aliases=('end', ), root_aliases=("unnominate",)) + @nomination_group.command(name="end", aliases=("unwatch", "unnominate"), root_aliases=("unnominate",)) @has_any_role(*MODERATION_ROLES) - async def unwatch_command(self, ctx: Context, user: FetchedMember, *, reason: str) -> None: + async def end_nomination_command(self, ctx: Context, user: MemberOrUser, *, reason: str) -> None: """ Ends the active nomination of the specified user with the given reason. Providing a `reason` is required. 
""" if len(reason) > REASON_MAX_CHARS: - await ctx.send(f":x: Maxiumum allowed characters for the end reason is {REASON_MAX_CHARS}.") + await ctx.send(f":x: Maximum allowed characters for the end reason is {REASON_MAX_CHARS}.") return - if await self.unwatch(user.id, reason): - await ctx.send(f":white_check_mark: Messages sent by {user} will no longer be relayed") + if await self.end_nomination(user.id, reason): + await ctx.send(f":white_check_mark: Successfully un-nominated {user}") else: await ctx.send(":x: The specified user does not have an active nomination") @nomination_group.group(name='edit', aliases=('e',), invoke_without_command=True) - @has_any_role(*MODERATION_ROLES) + @has_any_role(*STAFF_ROLES) async def nomination_edit_group(self, ctx: Context) -> None: """Commands to edit nominations.""" await ctx.send_help(ctx.command) @nomination_edit_group.command(name='reason') - @has_any_role(*MODERATION_ROLES) - async def edit_reason_command(self, ctx: Context, nomination_id: int, actor: FetchedMember, *, reason: str) -> None: - """Edits the reason of a specific nominator in a specific active nomination.""" + @has_any_role(*STAFF_ROLES) + async def edit_reason_command( + self, + ctx: Context, + nominee_or_nomination_id: Union[UnambiguousMemberOrUser, int], + nominator: Optional[UnambiguousMemberOrUser] = None, + *, + reason: str + ) -> None: + """ + Edit the nomination reason of a specific nominator for a given nomination. + + If nominee_or_nomination_id resolves to a member or user, edit the currently active nomination for that person. + Otherwise, if it's an int, look up that nomination ID to edit. + + If no nominator is specified, assume the invoker is editing their own nomination reason. + Otherwise, edit the reason from that specific nominator. + + Raise a permission error if a non-mod staff member invokes this command on a + specific nomination ID, or with an nominator other than themselves. + """ + # If not specified, assume the invoker is editing their own nomination reason. + nominator = nominator or ctx.author + + if not any(role.id in MODERATION_ROLES for role in ctx.author.roles): + if ctx.channel.id != Channels.nominations: + await ctx.send(f":x: Nomination edits must be run in the <#{Channels.nominations}> channel") + return + + if nominator != ctx.author or isinstance(nominee_or_nomination_id, int): + # Invoker has specified another nominator, or a specific nomination id + raise BadArgument( + "Only moderators can edit specific nomination IDs, " + "or the reason of a nominator other than themselves." 
+ ) + + await self._edit_nomination_reason( + ctx, + target=nominee_or_nomination_id, + actor=nominator, + reason=reason + ) + + async def _edit_nomination_reason( + self, + ctx: Context, + *, + target: Union[int, Member, User], + actor: MemberOrUser, + reason: str, + ) -> None: + """Edit a nomination reason in the database after validating the input.""" if len(reason) > REASON_MAX_CHARS: - await ctx.send(f":x: Maxiumum allowed characters for the reason is {REASON_MAX_CHARS}.") + await ctx.send(f":x: Maximum allowed characters for the reason is {REASON_MAX_CHARS}.") return + if isinstance(target, int): + nomination_id = target + else: + if nomination := self.cache.get(target.id): + nomination_id = nomination["id"] + else: + await ctx.send("No active nomination found for that member.") + return try: - nomination = await self.bot.api_client.get(f"{self.api_endpoint}/{nomination_id}") + nomination = await self.bot.api_client.get(f"bot/nominations/{nomination_id}") except ResponseCodeError as e: if e.response.status == 404: - self.log.trace(f"Nomination API 404: Can't find a nomination with id {nomination_id}") + log.trace(f"Nomination API 404: Can't find a nomination with id {nomination_id}") await ctx.send(f":x: Can't find a nomination with id `{nomination_id}`") return else: @@ -283,16 +428,16 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): return if not any(entry["actor"] == actor.id for entry in nomination["entries"]): - await ctx.send(f":x: {actor} doesn't have an entry in this nomination.") + await ctx.send(f":x: {actor.mention} doesn't have an entry in this nomination.") return - self.log.trace(f"Changing reason for nomination with id {nomination_id} of actor {actor} to {repr(reason)}") + log.trace(f"Changing reason for nomination with id {nomination_id} of actor {actor} to {repr(reason)}") await self.bot.api_client.patch( - f"{self.api_endpoint}/{nomination_id}", + f"bot/nominations/{nomination_id}", json={"actor": actor.id, "reason": reason} ) - await self.fetch_user_cache() # Update cache + await self.refresh_cache() # Update cache await ctx.send(":white_check_mark: Successfully updated nomination reason.") @nomination_edit_group.command(name='end_reason') @@ -304,10 +449,10 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): return try: - nomination = await self.bot.api_client.get(f"{self.api_endpoint}/{nomination_id}") + nomination = await self.bot.api_client.get(f"bot/nominations/{nomination_id}") except ResponseCodeError as e: if e.response.status == 404: - self.log.trace(f"Nomination API 404: Can't find a nomination with id {nomination_id}") + log.trace(f"Nomination API 404: Can't find a nomination with id {nomination_id}") await ctx.send(f":x: Can't find a nomination with id `{nomination_id}`") return else: @@ -317,13 +462,13 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): await ctx.send(":x: Can't edit the end reason of an active nomination.") return - self.log.trace(f"Changing end reason for nomination with id {nomination_id} to {repr(reason)}") + log.trace(f"Changing end reason for nomination with id {nomination_id} to {repr(reason)}") await self.bot.api_client.patch( - f"{self.api_endpoint}/{nomination_id}", + f"bot/nominations/{nomination_id}", json={"end_reason": reason} ) - await self.fetch_user_cache() # Update cache. + await self.refresh_cache() # Update cache. 
await ctx.send(":white_check_mark: Updated the end reason of the nomination!") @nomination_group.command(aliases=('mr',)) @@ -356,9 +501,9 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): await ctx.message.add_reaction(Emojis.check_mark) @Cog.listener() - async def on_member_ban(self, guild: Guild, user: Union[User, Member]) -> None: + async def on_member_ban(self, guild: Guild, user: Union[MemberOrUser]) -> None: """Remove `user` from the talent pool after they are banned.""" - await self.unwatch(user.id, "User was banned.") + await self.end_nomination(user.id, "User was banned.") @Cog.listener() async def on_raw_reaction_add(self, payload: RawReactionActionEvent) -> None: @@ -380,10 +525,10 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): log.info(f"Archiving nomination {message.id}") await self.reviewer.archive_vote(message, emoji == Emojis.incident_actioned) - async def unwatch(self, user_id: int, reason: str) -> bool: + async def end_nomination(self, user_id: int, reason: str) -> bool: """End the active nomination of a user with the given reason and return True on success.""" active_nomination = await self.bot.api_client.get( - self.api_endpoint, + 'bot/nominations', params=ChainMap( {"user__id": str(user_id)}, self.api_default_params, @@ -398,22 +543,23 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): nomination = active_nomination[0] await self.bot.api_client.patch( - f"{self.api_endpoint}/{nomination['id']}", + f"bot/nominations/{nomination['id']}", json={'end_reason': reason, 'active': False} ) - self._remove_user(user_id) - self.reviewer.cancel(user_id) + self.cache.pop(user_id) + if await self.autoreview_enabled(): + self.reviewer.cancel(user_id) return True - def _nomination_to_string(self, nomination_object: dict) -> str: + async def _nomination_to_string(self, nomination_object: dict) -> str: """Creates a string representation of a nomination.""" guild = self.bot.get_guild(Guild.id) entries = [] for site_entry in nomination_object["entries"]: actor_id = site_entry["actor"] - actor = guild.get_member(actor_id) + actor = await get_or_fetch_member(guild, actor_id) reason = site_entry["reason"] or "*None*" created = time.format_infraction(site_entry["inserted_at"]) @@ -450,7 +596,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): {entries_string} End date: {end_date} - Unwatch reason: {nomination_object["end_reason"]} + Unnomination reason: {nomination_object["end_reason"]} =============== """ ) diff --git a/bot/exts/recruitment/talentpool/_review.py b/bot/exts/recruitment/talentpool/_review.py index b9ff61986..dcf73c2cb 100644 --- a/bot/exts/recruitment/talentpool/_review.py +++ b/bot/exts/recruitment/talentpool/_review.py @@ -1,6 +1,5 @@ import asyncio import contextlib -import logging import random import re import textwrap @@ -10,32 +9,37 @@ from datetime import datetime, timedelta from typing import List, Optional, Union from dateutil.parser import isoparse -from dateutil.relativedelta import relativedelta from discord import Embed, Emoji, Member, Message, NoMoreItems, PartialMessage, TextChannel from discord.ext.commands import Context from bot.api import ResponseCodeError from bot.bot import Bot -from bot.constants import Channels, Colours, Emojis, Guild, Roles +from bot.constants import Channels, Colours, Emojis, Guild +from bot.log import get_logger +from bot.utils.members import get_or_fetch_member from bot.utils.messages import count_unique_users_reaction, pin_no_system_message from bot.utils.scheduling import Scheduler -from 
bot.utils.time import get_time_delta, humanize_delta, time_since +from bot.utils.time import get_time_delta, time_since if typing.TYPE_CHECKING: from bot.exts.recruitment.talentpool._cog import TalentPool -log = logging.getLogger(__name__) +log = get_logger(__name__) # Maximum amount of days before an automatic review is posted. MAX_DAYS_IN_POOL = 30 # Maximum amount of characters allowed in a message MAX_MESSAGE_SIZE = 2000 +# Maximum amount of characters allowed in an embed +MAX_EMBED_SIZE = 4000 -# Regex finding the user ID of a user mention -MENTION_RE = re.compile(r"<@!?(\d+?)>") -# Regex matching role pings -ROLE_MENTION_RE = re.compile(r"<@&\d+>") +# Regex for finding the first message of a nomination, and extracting the nominee. +# Historic nominations will have 2 role mentions at the start, new ones won't, optionally match for this. +NOMINATION_MESSAGE_REGEX = re.compile( + r"(?:<@&\d+> <@&\d+>\n)*?<@!?(\d+?)> \(.+#\d{4}\) for Helper!\n\n\*\*Nominated by:\*\*", + re.MULTILINE +) class Reviewer: @@ -54,10 +58,8 @@ class Reviewer: """Reschedule all active nominations to be reviewed at the appropriate time.""" log.trace("Rescheduling reviews") await self.bot.wait_until_guild_available() - # TODO Once the watch channel is removed, this can be done in a smarter way, e.g create a sync function. - await self._pool.fetch_user_cache() - for user_id, user_data in self._pool.watched_users.items(): + for user_id, user_data in self._pool.cache.items(): if not user_data["reviewed"]: self.schedule_review(user_id) @@ -65,7 +67,7 @@ class Reviewer: """Schedules a single user for review.""" log.trace(f"Scheduling review of user with ID {user_id}") - user_data = self._pool.watched_users.get(user_id) + user_data = self._pool.cache.get(user_id) inserted_at = isoparse(user_data['inserted_at']).replace(tzinfo=None) review_at = inserted_at + timedelta(days=MAX_DAYS_IN_POOL) @@ -75,7 +77,7 @@ class Reviewer: async def post_review(self, user_id: int, update_database: bool) -> None: """Format the review of a user and post it to the nomination voting channel.""" - review, seen_emoji = await self.make_review(user_id) + review, reviewed_emoji = await self.make_review(user_id) if not review: return @@ -88,36 +90,36 @@ class Reviewer: await pin_no_system_message(messages[0]) last_message = messages[-1] - if seen_emoji: - for reaction in (seen_emoji, "\N{THUMBS UP SIGN}", "\N{THUMBS DOWN SIGN}"): + if reviewed_emoji: + for reaction in (reviewed_emoji, "\N{THUMBS UP SIGN}", "\N{THUMBS DOWN SIGN}"): await last_message.add_reaction(reaction) if update_database: - nomination = self._pool.watched_users.get(user_id) - await self.bot.api_client.patch(f"{self._pool.api_endpoint}/{nomination['id']}", json={"reviewed": True}) + nomination = self._pool.cache.get(user_id) + await self.bot.api_client.patch(f"bot/nominations/{nomination['id']}", json={"reviewed": True}) async def make_review(self, user_id: int) -> typing.Tuple[str, Optional[Emoji]]: - """Format a generic review of a user and return it with the seen emoji.""" + """Format a generic review of a user and return it with the reviewed emoji.""" log.trace(f"Formatting the review of {user_id}") - # Since `watched_users` is a defaultdict, we should take care + # Since `cache` is a defaultdict, we should take care # not to accidentally insert the IDs of users that have no - # active nominated by using the `watched_users.get(user_id)` - # instead of `watched_users[user_id]`. 
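# A short illustration of the defaultdict behaviour the comment above guards against:
# indexing inserts the key as a side effect, while .get() leaves the mapping untouched.
#
#     from collections import defaultdict
#     cache = defaultdict(dict)
#     cache[123]        # silently creates cache[123] == {}
#     cache.get(456)    # returns None; no key is added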
- nomination = self._pool.watched_users.get(user_id) + # active nominated by using the `cache.get(user_id)` + # instead of `cache[user_id]`. + nomination = self._pool.cache.get(user_id) if not nomination: log.trace(f"There doesn't appear to be an active nomination for {user_id}") return "", None guild = self.bot.get_guild(Guild.id) - member = guild.get_member(user_id) + member = await get_or_fetch_member(guild, user_id) if not member: return ( f"I tried to review the user with ID `{user_id}`, but they don't appear to be on the server :pensive:" ), None - opening = f"<@&{Roles.mod_team}> <@&{Roles.admins}>\n{member.mention} ({member}) for Helper!" + opening = f"{member.mention} ({member}) for Helper!" current_nominations = "\n\n".join( f"**<@{entry['actor']}>:** {entry['reason'] or '*no reason given*'}" @@ -127,28 +129,28 @@ class Reviewer: review_body = await self._construct_review_body(member) - seen_emoji = self._random_ducky(guild) + reviewed_emoji = self._random_ducky(guild) vote_request = ( "*Refer to their nomination and infraction histories for further details*.\n" - f"*Please react {seen_emoji} if you've seen this post." - " Then react :+1: for approval, or :-1: for disapproval*." + f"*Please react {reviewed_emoji} once you have reviewed this user," + " and react :+1: for approval, or :-1: for disapproval*." ) review = "\n\n".join((opening, current_nominations, review_body, vote_request)) - return review, seen_emoji + return review, reviewed_emoji async def archive_vote(self, message: PartialMessage, passed: bool) -> None: """Archive this vote to #nomination-archive.""" message = await message.fetch() - # We consider the first message in the nomination to contain the two role pings + # We consider the first message in the nomination to contain the user ping, username#discrim, and fixed text messages = [message] - if not len(ROLE_MENTION_RE.findall(message.content)) >= 2: + if not NOMINATION_MESSAGE_REGEX.search(message.content): with contextlib.suppress(NoMoreItems): async for new_message in message.channel.history(before=message.created_at): messages.append(new_message) - if len(ROLE_MENTION_RE.findall(new_message.content)) >= 2: + if NOMINATION_MESSAGE_REGEX.search(new_message.content): break log.debug(f"Found {len(messages)} messages: {', '.join(str(m.id) for m in messages)}") @@ -160,10 +162,10 @@ class Reviewer: content = "".join(parts) # We assume that the first user mentioned is the user that we are voting on - user_id = int(MENTION_RE.search(content).group(1)) + user_id = int(NOMINATION_MESSAGE_REGEX.search(content).group(1)) # Get reaction counts - seen = await count_unique_users_reaction( + reviewed = await count_unique_users_reaction( messages[0], lambda r: "ducky" in str(r) or str(r) == "\N{EYES}", count_bots=False @@ -188,7 +190,7 @@ class Reviewer: embed_content = ( f"{result} on {timestamp}\n" - f"With {seen} {Emojis.ducky_dave} {upvotes} :+1: {downvotes} :-1:\n\n" + f"With {reviewed} {Emojis.ducky_dave} {upvotes} :+1: {downvotes} :-1:\n\n" f"{stripped_content}" ) @@ -199,7 +201,7 @@ class Reviewer: channel = self.bot.get_channel(Channels.nomination_archive) for number, part in enumerate( - textwrap.wrap(embed_content, width=MAX_MESSAGE_SIZE, replace_whitespace=False, placeholder="") + textwrap.wrap(embed_content, width=MAX_EMBED_SIZE, replace_whitespace=False, placeholder="") ): await channel.send(embed=Embed( title=embed_title if number == 0 else None, @@ -253,9 +255,9 @@ class Reviewer: last_channel = user_activity["top_channel_activity"][-1] channels += f", and 
{last_channel[1]} in {last_channel[0]}" - time_on_server = humanize_delta(relativedelta(datetime.utcnow(), member.joined_at), max_units=2) + joined_at_formatted = time_since(member.joined_at) review = ( - f"{member.name} has been on the server for **{time_on_server}**" + f"{member.name} joined the server **{joined_at_formatted}**" f" and has **{messages} messages**{channels}." ) @@ -329,7 +331,7 @@ class Reviewer: """ log.trace(f"Fetching the nomination history data for {member.id}'s review") history = await self.bot.api_client.get( - self._pool.api_endpoint, + "bot/nominations", params={ "user__id": str(member.id), "active": "false", @@ -345,7 +347,7 @@ class Reviewer: nomination_times = f"{num_entries} times" if num_entries > 1 else "once" rejection_times = f"{len(history)} times" if len(history) > 1 else "once" - end_time = time_since(isoparse(history[0]['ended_at']).replace(tzinfo=None), max_units=2) + end_time = time_since(isoparse(history[0]['ended_at']).replace(tzinfo=None)) review = ( f"They were nominated **{nomination_times}** before" @@ -357,7 +359,7 @@ class Reviewer: @staticmethod def _random_ducky(guild: Guild) -> Union[Emoji, str]: - """Picks a random ducky emoji to be used to mark the vote as seen. If no duckies found returns :eyes:.""" + """Picks a random ducky emoji. If no duckies found returns :eyes:.""" duckies = [emoji for emoji in guild.emojis if emoji.name.startswith("ducky")] if not duckies: return ":eyes:" @@ -387,18 +389,18 @@ class Reviewer: Returns True if the user was successfully marked as reviewed, False otherwise. """ log.trace(f"Updating user {user_id} as reviewed") - await self._pool.fetch_user_cache() - if user_id not in self._pool.watched_users: + await self._pool.refresh_cache() + if user_id not in self._pool.cache: log.trace(f"Can't find a nominated user with id {user_id}") await ctx.send(f":x: Can't find a currently nominated user with id `{user_id}`") return False - nomination = self._pool.watched_users.get(user_id) + nomination = self._pool.cache.get(user_id) if nomination["reviewed"]: await ctx.send(":x: This nomination was already reviewed, but here's a cookie :cookie:") return False - await self.bot.api_client.patch(f"{self._pool.api_endpoint}/{nomination['id']}", json={"reviewed": True}) + await self.bot.api_client.patch(f"bot/nominations/{nomination['id']}", json={"reviewed": True}) if user_id in self._review_scheduler: self._review_scheduler.cancel(user_id) diff --git a/bot/exts/utils/bot.py b/bot/exts/utils/bot.py index a4c828f95..788692777 100644 --- a/bot/exts/utils/bot.py +++ b/bot/exts/utils/bot.py @@ -1,13 +1,14 @@ -import logging +from contextlib import suppress from typing import Optional -from discord import Embed, TextChannel +from discord import Embed, Forbidden, TextChannel, Thread from discord.ext.commands import Cog, Context, command, group, has_any_role from bot.bot import Bot from bot.constants import Guild, MODERATION_ROLES, URLs +from bot.log import get_logger -log = logging.getLogger(__name__) +log = get_logger(__name__) class BotCog(Cog, name="Bot"): @@ -16,6 +17,20 @@ class BotCog(Cog, name="Bot"): def __init__(self, bot: Bot): self.bot = bot + @Cog.listener() + async def on_thread_join(self, thread: Thread) -> None: + """ + Try to join newly created threads. + + Despite the event name being misleading, this is dispatched when new threads are created. 
+ """ + if thread.me: + # We have already joined this thread + return + + with suppress(Forbidden): + await thread.join() + @group(invoke_without_command=True, name="bot", hidden=True) async def botinfo_group(self, ctx: Context) -> None: """Bot informational commands.""" @@ -44,6 +59,8 @@ class BotCog(Cog, name="Bot"): """Repeat the given message in either a specified channel or the current channel.""" if channel is None: await ctx.send(text) + elif not channel.permissions_for(ctx.author).send_messages: + await ctx.send("You don't have permission to speak in that channel.") else: await channel.send(text) diff --git a/bot/exts/utils/clean.py b/bot/exts/utils/clean.py index cb662e852..a2e2d3eed 100644 --- a/bot/exts/utils/clean.py +++ b/bot/exts/utils/clean.py @@ -1,4 +1,3 @@ -import logging import random import re from typing import Iterable, Optional @@ -8,12 +7,11 @@ from discord.ext import commands from discord.ext.commands import Cog, Context, group, has_any_role from bot.bot import Bot -from bot.constants import ( - Channels, CleanMessages, Colours, Event, Icons, MODERATION_ROLES, NEGATIVE_REPLIES -) +from bot.constants import Channels, CleanMessages, Colours, Event, Icons, MODERATION_ROLES, NEGATIVE_REPLIES from bot.exts.moderation.modlog import ModLog +from bot.log import get_logger -log = logging.getLogger(__name__) +log = get_logger(__name__) class Clean(Cog): @@ -107,7 +105,7 @@ class Clean(Cog): elif regex: predicate = predicate_regex # Delete messages that match regex else: - predicate = None # Delete all messages + predicate = lambda *_: True # Delete all messages # Default to using the invoking context's channel if not channels: diff --git a/bot/exts/utils/extensions.py b/bot/exts/utils/extensions.py index 8a1ed98f4..fa5d38917 100644 --- a/bot/exts/utils/extensions.py +++ b/bot/exts/utils/extensions.py @@ -1,5 +1,4 @@ import functools -import logging import typing as t from enum import Enum @@ -10,10 +9,12 @@ from discord.ext.commands import Context, group from bot import exts from bot.bot import Bot from bot.constants import Emojis, MODERATION_ROLES, Roles, URLs +from bot.converters import Extension +from bot.log import get_logger from bot.pagination import LinePaginator -from bot.utils.extensions import EXTENSIONS, unqualify +from bot.utils.extensions import EXTENSIONS -log = logging.getLogger(__name__) +log = get_logger(__name__) UNLOAD_BLACKLIST = {f"{exts.__name__}.utils.extensions", f"{exts.__name__}.moderation.modlog"} @@ -29,51 +30,13 @@ class Action(Enum): RELOAD = functools.partial(Bot.reload_extension) -class Extension(commands.Converter): - """ - Fully qualify the name of an extension and ensure it exists. - - The * and ** values bypass this when used with the reload command. - """ - - async def convert(self, ctx: Context, argument: str) -> str: - """Fully qualify the name of an extension and ensure it exists.""" - # Special values to reload all extensions - if argument == "*" or argument == "**": - return argument - - argument = argument.lower() - - if argument in EXTENSIONS: - return argument - elif (qualified_arg := f"{exts.__name__}.{argument}") in EXTENSIONS: - return qualified_arg - - matches = [] - for ext in EXTENSIONS: - if argument == unqualify(ext): - matches.append(ext) - - if len(matches) > 1: - matches.sort() - names = "\n".join(matches) - raise commands.BadArgument( - f":x: `{argument}` is an ambiguous extension name. 
" - f"Please use one of the following fully-qualified names.```\n{names}```" - ) - elif matches: - return matches[0] - else: - raise commands.BadArgument(f":x: Could not find the extension `{argument}`.") - - class Extensions(commands.Cog): """Extension management commands.""" def __init__(self, bot: Bot): self.bot = bot - @group(name="extensions", aliases=("ext", "exts", "c", "cogs"), invoke_without_command=True) + @group(name="extensions", aliases=("ext", "exts", "c", "cog", "cogs"), invoke_without_command=True) async def extensions_group(self, ctx: Context) -> None: """Load, unload, reload, and list loaded extensions.""" await ctx.send_help(ctx.command) diff --git a/bot/exts/utils/internal.py b/bot/exts/utils/internal.py index 6f2da3131..96664929b 100644 --- a/bot/exts/utils/internal.py +++ b/bot/exts/utils/internal.py @@ -1,6 +1,5 @@ import contextlib import inspect -import logging import pprint import re import textwrap @@ -11,13 +10,14 @@ from io import StringIO from typing import Any, Optional, Tuple import discord -from discord.ext.commands import Cog, Context, group, has_any_role +from discord.ext.commands import Cog, Context, group, has_any_role, is_owner from bot.bot import Bot -from bot.constants import Roles +from bot.constants import DEBUG_MODE, Roles +from bot.log import get_logger from bot.utils import find_nth_occurrence, send_to_paste_service -log = logging.getLogger(__name__) +log = get_logger(__name__) class Internal(Cog): @@ -33,12 +33,14 @@ class Internal(Cog): self.socket_event_total = 0 self.socket_events = Counter() + if DEBUG_MODE: + self.eval.add_check(is_owner().predicate) + @Cog.listener() - async def on_socket_response(self, msg: dict) -> None: + async def on_socket_event_type(self, event_type: str) -> None: """When a websocket event is received, increase our counters.""" - if event_type := msg.get("t"): - self.socket_event_total += 1 - self.socket_events[event_type] += 1 + self.socket_event_total += 1 + self.socket_events[event_type] += 1 def _format(self, inp: str, out: Any) -> Tuple[str, Optional[discord.Embed]]: """Format the eval output into a string & attempt to format it into an Embed.""" diff --git a/bot/exts/utils/jams.py b/bot/exts/utils/jams.py deleted file mode 100644 index 98fbcb303..000000000 --- a/bot/exts/utils/jams.py +++ /dev/null @@ -1,145 +0,0 @@ -import logging -import typing as t - -from discord import CategoryChannel, Guild, Member, PermissionOverwrite, Role -from discord.ext import commands -from more_itertools import unique_everseen - -from bot.bot import Bot -from bot.constants import Roles - -log = logging.getLogger(__name__) - -MAX_CHANNELS = 50 -CATEGORY_NAME = "Code Jam" - - -class CodeJams(commands.Cog): - """Manages the code-jam related parts of our server.""" - - def __init__(self, bot: Bot): - self.bot = bot - - @commands.command() - @commands.has_any_role(Roles.admins) - async def createteam(self, ctx: commands.Context, team_name: str, members: commands.Greedy[Member]) -> None: - """ - Create team channels (voice and text) in the Code Jams category, assign roles, and add overwrites for the team. - - The first user passed will always be the team leader. - """ - # Ignore duplicate members - members = list(unique_everseen(members)) - - # We had a little issue during Code Jam 4 here, the greedy converter did it's job - # and ignored anything which wasn't a valid argument which left us with teams of - # two members or at some times even 1 member. This fixes that by checking that there - # are always 3 members in the members list. 
- if len(members) < 3: - await ctx.send( - ":no_entry_sign: One of your arguments was invalid\n" - f"There must be a minimum of 3 valid members in your team. Found: {len(members)}" - " members" - ) - return - - team_channel = await self.create_channels(ctx.guild, team_name, members) - await self.add_roles(ctx.guild, members) - - await ctx.send( - f":ok_hand: Team created: {team_channel}\n" - f"**Team Leader:** {members[0].mention}\n" - f"**Team Members:** {' '.join(member.mention for member in members[1:])}" - ) - - async def get_category(self, guild: Guild) -> CategoryChannel: - """ - Return a code jam category. - - If all categories are full or none exist, create a new category. - """ - for category in guild.categories: - # Need 2 available spaces: one for the text channel and one for voice. - if category.name == CATEGORY_NAME and MAX_CHANNELS - len(category.channels) >= 2: - return category - - return await self.create_category(guild) - - @staticmethod - async def create_category(guild: Guild) -> CategoryChannel: - """Create a new code jam category and return it.""" - log.info("Creating a new code jam category.") - - category_overwrites = { - guild.default_role: PermissionOverwrite(read_messages=False), - guild.me: PermissionOverwrite(read_messages=True) - } - - return await guild.create_category_channel( - CATEGORY_NAME, - overwrites=category_overwrites, - reason="It's code jam time!" - ) - - @staticmethod - def get_overwrites(members: t.List[Member], guild: Guild) -> t.Dict[t.Union[Member, Role], PermissionOverwrite]: - """Get code jam team channels permission overwrites.""" - # First member is always the team leader - team_channel_overwrites = { - members[0]: PermissionOverwrite( - manage_messages=True, - read_messages=True, - manage_webhooks=True, - connect=True - ), - guild.default_role: PermissionOverwrite(read_messages=False, connect=False), - } - - # Rest of members should just have read_messages - for member in members[1:]: - team_channel_overwrites[member] = PermissionOverwrite( - read_messages=True, - connect=True - ) - - return team_channel_overwrites - - async def create_channels(self, guild: Guild, team_name: str, members: t.List[Member]) -> str: - """Create team text and voice channels. 
Return the mention for the text channel.""" - # Get permission overwrites and category - team_channel_overwrites = self.get_overwrites(members, guild) - code_jam_category = await self.get_category(guild) - - # Create a text channel for the team - team_channel = await guild.create_text_channel( - team_name, - overwrites=team_channel_overwrites, - category=code_jam_category - ) - - # Create a voice channel for the team - team_voice_name = " ".join(team_name.split("-")).title() - - await guild.create_voice_channel( - team_voice_name, - overwrites=team_channel_overwrites, - category=code_jam_category - ) - - return team_channel.mention - - @staticmethod - async def add_roles(guild: Guild, members: t.List[Member]) -> None: - """Assign team leader and jammer roles.""" - # Assign team leader role - await members[0].add_roles(guild.get_role(Roles.team_leaders)) - - # Assign rest of roles - jammer_role = guild.get_role(Roles.jammers) - for member in members: - await member.add_roles(jammer_role) - - -def setup(bot: Bot) -> None: - """Load the CodeJams cog.""" - bot.add_cog(CodeJams(bot)) diff --git a/bot/exts/utils/ping.py b/bot/exts/utils/ping.py index 750ff46d2..43d371d87 100644 --- a/bot/exts/utils/ping.py +++ b/bot/exts/utils/ping.py @@ -1,18 +1,16 @@ -import socket -import urllib.parse from datetime import datetime -import aioping +from aiohttp import client_exceptions from discord import Embed from discord.ext import commands from bot.bot import Bot -from bot.constants import Channels, Emojis, STAFF_ROLES, URLs +from bot.constants import Channels, STAFF_PARTNERS_COMMUNITY_ROLES, URLs from bot.decorators import in_whitelist DESCRIPTIONS = ( "Command processing time", - "Python Discord website latency", + "Python Discord website status", "Discord API latency" ) ROUND_LATENCY = 3 @@ -25,7 +23,7 @@ class Latency(commands.Cog): self.bot = bot @commands.command() - @in_whitelist(channels=(Channels.bot_commands,), roles=STAFF_ROLES) + @in_whitelist(channels=(Channels.bot_commands,), roles=STAFF_PARTNERS_COMMUNITY_ROLES) async def ping(self, ctx: commands.Context) -> None: """ Gets different measures of latency within the bot. @@ -34,30 +32,30 @@ class Latency(commands.Cog): """ # datetime.datetime objects do not have the "milliseconds" attribute. # It must be converted to seconds before converting to milliseconds. - bot_ping = (datetime.utcnow() - ctx.message.created_at).total_seconds() * 1000 + bot_ping = (datetime.utcnow() - ctx.message.created_at.replace(tzinfo=None)).total_seconds() * 1000 if bot_ping <= 0: bot_ping = "Your clock is out of sync, could not calculate ping." else: bot_ping = f"{bot_ping:.{ROUND_LATENCY}f} ms" try: - url = urllib.parse.urlparse(URLs.site_schema + URLs.site).hostname - try: - delay = await aioping.ping(url, family=socket.AddressFamily.AF_INET) * 1000 - site_ping = f"{delay:.{ROUND_LATENCY}f} ms" - except OSError: - # Some machines do not have permission to run ping - site_ping = "Permission denied, could not ping." + async with self.bot.http_session.get(f"{URLs.site_api_schema}{URLs.site_api}/healthcheck") as request: + request.raise_for_status() + site_status = "Healthy" - except TimeoutError: - site_ping = f"{Emojis.cross_mark} Connection timed out." 
+ except client_exceptions.ClientResponseError as e: + """The site returned an unexpected response.""" + site_status = f"The site returned an error in the response: ({e.status}) {e}" + except client_exceptions.ClientConnectionError: + """Something went wrong with the connection.""" + site_status = "Could not establish connection with the site." # Discord Protocol latency return value is in seconds, must be multiplied by 1000 to get milliseconds. discord_ping = f"{self.bot.latency * 1000:.{ROUND_LATENCY}f} ms" embed = Embed(title="Pong!") - for desc, latency in zip(DESCRIPTIONS, [bot_ping, site_ping, discord_ping]): + for desc, latency in zip(DESCRIPTIONS, [bot_ping, site_status, discord_ping]): embed.add_field(name=desc, value=latency, inline=False) await ctx.send(embed=embed) diff --git a/bot/exts/utils/reminders.py b/bot/exts/utils/reminders.py index 6c21920a1..3cb9307a9 100644 --- a/bot/exts/utils/reminders.py +++ b/bot/exts/utils/reminders.py @@ -1,33 +1,34 @@ -import asyncio -import logging import random import textwrap import typing as t -from datetime import datetime, timedelta +from datetime import datetime from operator import itemgetter import discord from dateutil.parser import isoparse -from dateutil.relativedelta import relativedelta from discord.ext.commands import Cog, Context, Greedy, group from bot.bot import Bot -from bot.constants import Guild, Icons, MODERATION_ROLES, POSITIVE_REPLIES, Roles, STAFF_ROLES -from bot.converters import Duration +from bot.constants import Guild, Icons, MODERATION_ROLES, POSITIVE_REPLIES, Roles, STAFF_PARTNERS_COMMUNITY_ROLES +from bot.converters import Duration, UnambiguousUser +from bot.log import get_logger from bot.pagination import LinePaginator +from bot.utils import scheduling from bot.utils.checks import has_any_role_check, has_no_roles_check from bot.utils.lock import lock_arg +from bot.utils.members import get_or_fetch_member from bot.utils.messages import send_denial from bot.utils.scheduling import Scheduler -from bot.utils.time import humanize_delta +from bot.utils.time import TimestampFormats, discord_timestamp -log = logging.getLogger(__name__) +log = get_logger(__name__) LOCK_NAMESPACE = "reminder" WHITELISTED_CHANNELS = Guild.reminder_whitelist MAXIMUM_REMINDERS = 5 Mentionable = t.Union[discord.Member, discord.Role] +ReminderMention = t.Union[UnambiguousUser, discord.Role] class Reminders(Cog): @@ -37,7 +38,7 @@ class Reminders(Cog): self.bot = bot self.scheduler = Scheduler(self.__class__.__name__) - self.bot.loop.create_task(self.reschedule_reminders()) + scheduling.create_task(self.reschedule_reminders(), event_loop=self.bot.loop) def cog_unload(self) -> None: """Cancel scheduled tasks.""" @@ -62,8 +63,7 @@ class Reminders(Cog): # If the reminder is already overdue ... if remind_at < now: - late = relativedelta(now, remind_at) - await self.send_reminder(reminder, late) + await self.send_reminder(reminder, remind_at) else: self.schedule_reminder(reminder) @@ -78,7 +78,7 @@ class Reminders(Cog): f"Reminder {reminder['id']} invalid: " f"User {reminder['author']}={user}, Channel {reminder['channel_id']}={channel}." 
) - asyncio.create_task(self.bot.api_client.delete(f"bot/reminders/{reminder['id']}")) + scheduling.create_task(self.bot.api_client.delete(f"bot/reminders/{reminder['id']}")) return is_valid, user, channel @@ -86,8 +86,7 @@ class Reminders(Cog): async def _send_confirmation( ctx: Context, on_success: str, - reminder_id: t.Union[str, int], - delivery_dt: t.Optional[datetime], + reminder_id: t.Union[str, int] ) -> None: """Send an embed confirming the reminder change was made successfully.""" embed = discord.Embed( @@ -98,11 +97,6 @@ class Reminders(Cog): footer_str = f"ID: {reminder_id}" - if delivery_dt: - # Reminder deletion will have a `None` `delivery_dt` - footer_str += ', Due' - embed.timestamp = delivery_dt - embed.set_footer(text=footer_str) await ctx.send(embed=embed) @@ -118,10 +112,10 @@ class Reminders(Cog): If mentions aren't allowed, also return the type of mention(s) disallowed. """ - if await has_no_roles_check(ctx, *STAFF_ROLES): + if await has_no_roles_check(ctx, *STAFF_PARTNERS_COMMUNITY_ROLES): return False, "members/roles" elif await has_no_roles_check(ctx, *MODERATION_ROLES): - return all(isinstance(mention, discord.Member) for mention in mentions), "roles" + return all(isinstance(mention, (discord.User, discord.Member)) for mention in mentions), "roles" else: return True, "" @@ -140,11 +134,12 @@ class Reminders(Cog): await send_denial(ctx, f"You can't mention other {disallowed_mentions} in your reminder!") return False - def get_mentionables(self, mention_ids: t.List[int]) -> t.Iterator[Mentionable]: + async def get_mentionables(self, mention_ids: t.List[int]) -> t.Iterator[Mentionable]: """Converts Role and Member ids to their corresponding objects if possible.""" guild = self.bot.get_guild(Guild.id) for mention_id in mention_ids: - if (mentionable := (guild.get_member(mention_id) or guild.get_role(mention_id))): + member = await get_or_fetch_member(guild, mention_id) + if mentionable := (member or guild.get_role(mention_id)): yield mentionable def schedule_reminder(self, reminder: dict) -> None: @@ -174,59 +169,90 @@ class Reminders(Cog): self.schedule_reminder(reminder) @lock_arg(LOCK_NAMESPACE, "reminder", itemgetter("id"), raise_error=True) - async def send_reminder(self, reminder: dict, late: relativedelta = None) -> None: + async def send_reminder(self, reminder: dict, expected_time: datetime = None) -> None: """Send the reminder.""" is_valid, user, channel = self.ensure_valid_reminder(reminder) if not is_valid: # No need to cancel the task too; it'll simply be done once this coroutine returns. return - embed = discord.Embed() - embed.colour = discord.Colour.blurple() - embed.set_author( - icon_url=Icons.remind_blurple, - name="It has arrived!" - ) - - embed.description = f"Here's your reminder: `{reminder['content']}`." - - if reminder.get("jump_url"): # keep backward compatibility - embed.description += f"\n[Jump back to when you created the reminder]({reminder['jump_url']})" - - if late: + if expected_time: embed.colour = discord.Colour.red() embed.set_author( icon_url=Icons.remind_red, - name=f"Sorry it arrived {humanize_delta(late, max_units=2)} late!" + name="Sorry, your reminder should have arrived earlier!" + ) + else: + embed.colour = discord.Colour.blurple() + embed.set_author( + icon_url=Icons.remind_blurple, + name="It has arrived!" ) - additional_mentions = ' '.join( - mentionable.mention for mentionable in self.get_mentionables(reminder["mentions"]) - ) + # Let's not use a codeblock to keep emojis and mentions working. Embeds are safe anyway. 
+ embed.description = f"Here's your reminder: {reminder['content']}" - await channel.send(content=f"{user.mention} {additional_mentions}", embed=embed) + # Here the jump URL is in the format of base_url/guild_id/channel_id/message_id + additional_mentions = ' '.join([ + mentionable.mention async for mentionable in self.get_mentionables(reminder["mentions"]) + ]) + + jump_url = reminder.get("jump_url") + embed.description += f"\n[Jump back to when you created the reminder]({jump_url})" + partial_message = channel.get_partial_message(int(jump_url.split("/")[-1])) + try: + await partial_message.reply(content=f"{additional_mentions}", embed=embed) + except discord.HTTPException as e: + log.info( + f"There was an error when trying to reply to a reminder invocation message, {e}, " + "fall back to using jump_url" + ) + await channel.send(content=f"{user.mention} {additional_mentions}", embed=embed) log.debug(f"Deleting reminder #{reminder['id']} (the user has been reminded).") await self.bot.api_client.delete(f"bot/reminders/{reminder['id']}") @group(name="remind", aliases=("reminder", "reminders", "remindme"), invoke_without_command=True) async def remind_group( - self, ctx: Context, mentions: Greedy[Mentionable], expiration: Duration, *, content: str + self, ctx: Context, mentions: Greedy[ReminderMention], expiration: Duration, *, content: str ) -> None: - """Commands for managing your reminders.""" + """ + Commands for managing your reminders. + + The `expiration` duration of `!remind new` supports the following symbols for each unit of time: + - years: `Y`, `y`, `year`, `years` + - months: `m`, `month`, `months` + - weeks: `w`, `W`, `week`, `weeks` + - days: `d`, `D`, `day`, `days` + - hours: `H`, `h`, `hour`, `hours` + - minutes: `M`, `minute`, `minutes` + - seconds: `S`, `s`, `second`, `seconds` + + For example, to set a reminder that expires in 3 days and 1 minute, you can do `!remind new 3d1M Do something`. + """ await self.new_reminder(ctx, mentions=mentions, expiration=expiration, content=content) @remind_group.command(name="new", aliases=("add", "create")) async def new_reminder( - self, ctx: Context, mentions: Greedy[Mentionable], expiration: Duration, *, content: str + self, ctx: Context, mentions: Greedy[ReminderMention], expiration: Duration, *, content: str ) -> None: """ Set yourself a simple reminder. - Expiration is parsed per: http://strftime.org/ + The `expiration` duration supports the following symbols for each unit of time: + - years: `Y`, `y`, `year`, `years` + - months: `m`, `month`, `months` + - weeks: `w`, `W`, `week`, `weeks` + - days: `d`, `D`, `day`, `days` + - hours: `H`, `h`, `hour`, `hours` + - minutes: `M`, `minute`, `minutes` + - seconds: `S`, `s`, `second`, `seconds` + + For example, to set a reminder that expires in 3 days and 1 minute, you can do `!remind new 3d1M Do something`. """ - # If the user is not staff, we need to verify whether or not to make a reminder at all. - if await has_no_roles_check(ctx, *STAFF_ROLES): + # If the user is not staff, partner or part of the python community, + # we need to verify whether or not to make a reminder at all. 
+ if await has_no_roles_check(ctx, *STAFF_PARTNERS_COMMUNITY_ROLES): # If they don't have permission to set a reminder in this channel if ctx.channel.id not in WHITELISTED_CHANNELS: @@ -270,9 +296,7 @@ class Reminders(Cog): } ) - now = datetime.utcnow() - timedelta(seconds=1) - humanized_delta = humanize_delta(relativedelta(expiration, now)) - mention_string = f"Your reminder will arrive in {humanized_delta}" + mention_string = f"Your reminder will arrive on {discord_timestamp(expiration, TimestampFormats.DAY_TIME)}" if mentions: mention_string += f" and will mention {len(mentions)} other(s)" @@ -282,8 +306,7 @@ class Reminders(Cog): await self._send_confirmation( ctx, on_success=mention_string, - reminder_id=reminder["id"], - delivery_dt=expiration, + reminder_id=reminder["id"] ) self.schedule_reminder(reminder) @@ -297,8 +320,6 @@ class Reminders(Cog): params={'author__id': str(ctx.author.id)} ) - now = datetime.utcnow() - # Make a list of tuples so it can be sorted by time. reminders = sorted( ( @@ -313,16 +334,16 @@ class Reminders(Cog): for content, remind_at, id_, mentions in reminders: # Parse and humanize the time, make it pretty :D remind_datetime = isoparse(remind_at).replace(tzinfo=None) - time = humanize_delta(relativedelta(remind_datetime, now)) + time = discord_timestamp(remind_datetime, TimestampFormats.RELATIVE) - mentions = ", ".join( + mentions = ", ".join([ # Both Role and User objects have the `name` attribute - mention.name for mention in self.get_mentionables(mentions) - ) + mention.name async for mention in self.get_mentionables(mentions) + ]) mention_string = f"\n**Mentions:** {mentions}" if mentions else "" text = textwrap.dedent(f""" - **Reminder #{id_}:** *expires in {time}* (ID: {id_}){mention_string} + **Reminder #{id_}:** *expires {time}* (ID: {id_}){mention_string} {content} """).strip() @@ -350,7 +371,20 @@ class Reminders(Cog): @remind_group.group(name="edit", aliases=("change", "modify"), invoke_without_command=True) async def edit_reminder_group(self, ctx: Context) -> None: - """Commands for modifying your current reminders.""" + """ + Commands for modifying your current reminders. + + The `expiration` duration supports the following symbols for each unit of time: + - years: `Y`, `y`, `year`, `years` + - months: `m`, `month`, `months` + - weeks: `w`, `W`, `week`, `weeks` + - days: `d`, `D`, `day`, `days` + - hours: `H`, `h`, `hour`, `hours` + - minutes: `M`, `minute`, `minutes` + - seconds: `S`, `s`, `second`, `seconds` + + For example, to edit a reminder to expire in 3 days and 1 minute, you can do `!remind edit duration 1234 3d1M`. + """ await ctx.send_help(ctx.command) @edit_reminder_group.command(name="duration", aliases=("time",)) @@ -358,7 +392,16 @@ class Reminders(Cog): """ Edit one of your reminder's expiration. - Expiration is parsed per: http://strftime.org/ + The `expiration` duration supports the following symbols for each unit of time: + - years: `Y`, `y`, `year`, `years` + - months: `m`, `month`, `months` + - weeks: `w`, `W`, `week`, `weeks` + - days: `d`, `D`, `day`, `days` + - hours: `H`, `h`, `hour`, `hours` + - minutes: `M`, `minute`, `minutes` + - seconds: `S`, `s`, `second`, `seconds` + + For example, to edit a reminder to expire in 3 days and 1 minute, you can do `!remind edit duration 1234 3d1M`. 
""" await self.edit_reminder(ctx, id_, {'expiration': expiration.isoformat()}) @@ -368,7 +411,7 @@ class Reminders(Cog): await self.edit_reminder(ctx, id_, {"content": content}) @edit_reminder_group.command(name="mentions", aliases=("pings",)) - async def edit_reminder_mentions(self, ctx: Context, id_: int, mentions: Greedy[Mentionable]) -> None: + async def edit_reminder_mentions(self, ctx: Context, id_: int, mentions: Greedy[ReminderMention]) -> None: """Edit one of your reminder's mentions.""" # Remove duplicate mentions mentions = set(mentions) @@ -388,15 +431,11 @@ class Reminders(Cog): return reminder = await self._edit_reminder(id_, payload) - # Parse the reminder expiration back into a datetime - expiration = isoparse(reminder["expiration"]).replace(tzinfo=None) - # Send a confirmation message to the channel await self._send_confirmation( ctx, on_success="That reminder has been edited successfully!", reminder_id=id_, - delivery_dt=expiration, ) await self._reschedule_reminder(reminder) @@ -413,8 +452,7 @@ class Reminders(Cog): await self._send_confirmation( ctx, on_success="That reminder has been deleted successfully!", - reminder_id=id_, - delivery_dt=None, + reminder_id=id_ ) async def _can_modify(self, ctx: Context, reminder_id: t.Union[str, int]) -> bool: diff --git a/bot/exts/utils/snekbox.py b/bot/exts/utils/snekbox.py index b1f1ba6a8..fbfc58d0b 100644 --- a/bot/exts/utils/snekbox.py +++ b/bot/exts/utils/snekbox.py @@ -1,7 +1,6 @@ import asyncio import contextlib import datetime -import logging import re import textwrap from functools import partial @@ -14,10 +13,11 @@ from discord.ext.commands import Cog, Context, command, guild_only from bot.bot import Bot from bot.constants import Categories, Channels, Roles, URLs from bot.decorators import redirect_output -from bot.utils import send_to_paste_service +from bot.log import get_logger +from bot.utils import scheduling, send_to_paste_service from bot.utils.messages import wait_for_deletion -log = logging.getLogger(__name__) +log = get_logger(__name__) ESCAPE_REGEX = re.compile("[`\u202E\u200B]{3,}") FORMATTED_CODE_REGEX = re.compile( @@ -219,7 +219,7 @@ class Snekbox(Cog): response = await ctx.send("Attempt to circumvent filter detected. 
Moderator team has been alerted.") else: response = await ctx.send(msg) - self.bot.loop.create_task(wait_for_deletion(response, (ctx.author.id,))) + scheduling.create_task(wait_for_deletion(response, (ctx.author.id,)), event_loop=self.bot.loop) log.info(f"{ctx.author}'s job had a return code of {results['returncode']}") return response diff --git a/bot/exts/utils/utils.py b/bot/exts/utils/utils.py index 4c39a7c2a..f69bab781 100644 --- a/bot/exts/utils/utils.py +++ b/bot/exts/utils/utils.py @@ -1,5 +1,4 @@ import difflib -import logging import re import unicodedata from typing import Tuple, Union @@ -9,15 +8,15 @@ from discord.ext.commands import BadArgument, Cog, Context, clean_content, comma from discord.utils import snowflake_time from bot.bot import Bot -from bot.constants import Channels, MODERATION_ROLES, Roles, STAFF_ROLES +from bot.constants import Channels, MODERATION_ROLES, Roles, STAFF_PARTNERS_COMMUNITY_ROLES from bot.converters import Snowflake from bot.decorators import in_whitelist +from bot.log import get_logger from bot.pagination import LinePaginator from bot.utils import messages -from bot.utils.checks import has_no_roles_check from bot.utils.time import time_since -log = logging.getLogger(__name__) +log = get_logger(__name__) ZEN_OF_PYTHON = """\ Beautiful is better than ugly. @@ -40,6 +39,7 @@ If the implementation is hard to explain, it's a bad idea. If the implementation is easy to explain, it may be a good idea. Namespaces are one honking great idea -- let's do more of those! """ +LEADS_AND_COMMUNITY = (Roles.project_leads, Roles.domain_leads, Roles.partners, Roles.python_community) class Utils(Cog): @@ -49,20 +49,22 @@ class Utils(Cog): self.bot = bot @command() - @in_whitelist(channels=(Channels.bot_commands,), roles=STAFF_ROLES) + @in_whitelist(channels=(Channels.bot_commands, Channels.discord_py), roles=STAFF_PARTNERS_COMMUNITY_ROLES) async def charinfo(self, ctx: Context, *, characters: str) -> None: """Shows you information on up to 50 unicode characters.""" match = re.match(r"<(a?):(\w+):(\d+)>", characters) if match: - return await messages.send_denial( + await messages.send_denial( ctx, "**Non-Character Detected**\n" "Only unicode characters can be processed, but a custom Discord emoji " "was found. Please remove it and try again." 
) + return if len(characters) > 50: - return await messages.send_denial(ctx, f"Too many characters ({len(characters)}/50)") + await messages.send_denial(ctx, f"Too many characters ({len(characters)}/50)") + return def get_info(char: str) -> Tuple[str, str]: digit = f"{ord(char):x}" @@ -156,12 +158,9 @@ class Utils(Cog): await ctx.send(embed=embed) @command(aliases=("snf", "snfl", "sf")) - @in_whitelist(channels=(Channels.bot_commands,), roles=STAFF_ROLES) + @in_whitelist(channels=(Channels.bot_commands,), roles=STAFF_PARTNERS_COMMUNITY_ROLES) async def snowflake(self, ctx: Context, *snowflakes: Snowflake) -> None: """Get Discord snowflake creation time.""" - if len(snowflakes) > 1 and await has_no_roles_check(ctx, *STAFF_ROLES): - raise BadArgument("Cannot process more than one snowflake in one invocation.") - if not snowflakes: raise BadArgument("At least one snowflake must be provided.") @@ -174,7 +173,7 @@ class Utils(Cog): lines = [] for snowflake in snowflakes: created_at = snowflake_time(snowflake) - lines.append(f"**{snowflake}**\nCreated at {created_at} ({time_since(created_at, max_units=3)}).") + lines.append(f"**{snowflake}**\nCreated at {created_at} ({time_since(created_at)}).") await LinePaginator.paginate( lines, @@ -185,7 +184,7 @@ class Utils(Cog): ) @command(aliases=("poll",)) - @has_any_role(*MODERATION_ROLES, Roles.project_leads, Roles.domain_leads) + @has_any_role(*MODERATION_ROLES, *LEADS_AND_COMMUNITY) async def vote(self, ctx: Context, title: clean_content(fix_channel_mentions=True), *options: str) -> None: """ Build a quick voting poll with matching reactions with the provided options. diff --git a/bot/log.py b/bot/log.py index 4e20c005e..b3cecdcf2 100644 --- a/bot/log.py +++ b/bot/log.py @@ -3,6 +3,7 @@ import os import sys from logging import Logger, handlers from pathlib import Path +from typing import Optional, TYPE_CHECKING, cast import coloredlogs import sentry_sdk @@ -14,11 +15,38 @@ from bot import constants TRACE_LEVEL = 5 +if TYPE_CHECKING: + LoggerClass = Logger +else: + LoggerClass = logging.getLoggerClass() + + +class CustomLogger(LoggerClass): + """Custom implementation of the `Logger` class with an added `trace` method.""" + + def trace(self, msg: str, *args, **kwargs) -> None: + """ + Log 'msg % args' with severity 'TRACE'. + + To pass exception information, use the keyword argument exc_info with + a true value, e.g. 
+ + logger.trace("Houston, we have an %s", "interesting problem", exc_info=1) + """ + if self.isEnabledFor(TRACE_LEVEL): + self.log(TRACE_LEVEL, msg, *args, **kwargs) + + +def get_logger(name: Optional[str] = None) -> CustomLogger: + """Utility to make mypy recognise that logger is of type `CustomLogger`.""" + return cast(CustomLogger, logging.getLogger(name)) + + def setup() -> None: """Set up loggers.""" logging.TRACE = TRACE_LEVEL logging.addLevelName(TRACE_LEVEL, "TRACE") - Logger.trace = _monkeypatch_trace + logging.setLoggerClass(CustomLogger) format_string = "%(asctime)s | %(name)s | %(levelname)s | %(message)s" log_format = logging.Formatter(format_string) @@ -28,7 +56,7 @@ def setup() -> None: file_handler = handlers.RotatingFileHandler(log_file, maxBytes=5242880, backupCount=7, encoding="utf8") file_handler.setFormatter(log_format) - root_log = logging.getLogger() + root_log = get_logger() root_log.addHandler(file_handler) if "COLOREDLOGS_LEVEL_STYLES" not in os.environ: @@ -42,16 +70,16 @@ def setup() -> None: if "COLOREDLOGS_LOG_FORMAT" not in os.environ: coloredlogs.DEFAULT_LOG_FORMAT = format_string - coloredlogs.install(level=logging.TRACE, logger=root_log, stream=sys.stdout) + coloredlogs.install(level=TRACE_LEVEL, logger=root_log, stream=sys.stdout) root_log.setLevel(logging.DEBUG if constants.DEBUG_MODE else logging.INFO) - logging.getLogger("discord").setLevel(logging.WARNING) - logging.getLogger("websockets").setLevel(logging.WARNING) - logging.getLogger("chardet").setLevel(logging.WARNING) - logging.getLogger("async_rediscache").setLevel(logging.WARNING) + get_logger("discord").setLevel(logging.WARNING) + get_logger("websockets").setLevel(logging.WARNING) + get_logger("chardet").setLevel(logging.WARNING) + get_logger("async_rediscache").setLevel(logging.WARNING) # Set back to the default of INFO even if asyncio's debug mode is enabled. - logging.getLogger("asyncio").setLevel(logging.INFO) + get_logger("asyncio").setLevel(logging.INFO) _set_trace_loggers() @@ -73,19 +101,6 @@ def setup_sentry() -> None: ) -def _monkeypatch_trace(self: logging.Logger, msg: str, *args, **kwargs) -> None: - """ - Log 'msg % args' with severity 'TRACE'. - - To pass exception information, use the keyword argument exc_info with - a true value, e.g. - - logger.trace("Houston, we have an %s", "interesting problem", exc_info=1) - """ - if self.isEnabledFor(TRACE_LEVEL): - self._log(TRACE_LEVEL, msg, args, **kwargs) - - def _set_trace_loggers() -> None: """ Set loggers to the trace level according to the value from the BOT_TRACE_LOGGERS env var. 
@@ -101,13 +116,13 @@ def _set_trace_loggers() -> None: level_filter = constants.Bot.trace_loggers if level_filter: if level_filter.startswith("*"): - logging.getLogger().setLevel(logging.TRACE) + get_logger().setLevel(TRACE_LEVEL) elif level_filter.startswith("!"): - logging.getLogger().setLevel(logging.TRACE) + get_logger().setLevel(TRACE_LEVEL) for logger_name in level_filter.strip("!,").split(","): - logging.getLogger(logger_name).setLevel(logging.DEBUG) + get_logger(logger_name).setLevel(logging.DEBUG) else: for logger_name in level_filter.strip(",").split(","): - logging.getLogger(logger_name).setLevel(logging.TRACE) + get_logger(logger_name).setLevel(TRACE_LEVEL) diff --git a/bot/monkey_patches.py b/bot/monkey_patches.py new file mode 100644 index 000000000..e56a19da2 --- /dev/null +++ b/bot/monkey_patches.py @@ -0,0 +1,51 @@ +from datetime import datetime, timedelta + +from discord import Forbidden, http +from discord.ext import commands + +from bot.log import get_logger + +log = get_logger(__name__) + + +class Command(commands.Command): + """ + A `discord.ext.commands.Command` subclass which supports root aliases. + + A `root_aliases` keyword argument is added, which is a sequence of alias names that will act as + top-level commands rather than being aliases of the command's group. It's stored as an attribute + also named `root_aliases`. + """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.root_aliases = kwargs.get("root_aliases", []) + + if not isinstance(self.root_aliases, (list, tuple)): + raise TypeError("Root aliases of a command must be a list or a tuple of strings.") + + +def patch_typing() -> None: + """ + Sometimes discord turns off typing events by throwing 403's. + + Handle those issues by patching the trigger_typing method so it ignores 403's in general. + """ + log.debug("Patching send_typing, which should fix things breaking when discord disables typing events. 
Stay safe!") + + original = http.HTTPClient.send_typing + last_403 = None + + async def honeybadger_type(self, channel_id: int) -> None: # noqa: ANN001 + nonlocal last_403 + if last_403 and (datetime.utcnow() - last_403) < timedelta(minutes=5): + log.warning("Not sending typing event, we got a 403 less than 5 minutes ago.") + return + try: + await original(self, channel_id) + except Forbidden: + last_403 = datetime.utcnow() + log.warning("Got a 403 from typing event!") + pass + + http.HTTPClient.send_typing = honeybadger_type diff --git a/bot/pagination.py b/bot/pagination.py index c5c84afd9..8f4353eb1 100644 --- a/bot/pagination.py +++ b/bot/pagination.py @@ -1,5 +1,4 @@ import asyncio -import logging import typing as t from contextlib import suppress from functools import partial @@ -9,6 +8,7 @@ from discord.abc import User from discord.ext.commands import Context, Paginator from bot import constants +from bot.log import get_logger from bot.utils import messages FIRST_EMOJI = "\u23EE" # [:track_previous:] @@ -19,10 +19,10 @@ DELETE_EMOJI = constants.Emojis.trashcan # [:trashcan:] PAGINATION_EMOJI = (FIRST_EMOJI, LEFT_EMOJI, RIGHT_EMOJI, LAST_EMOJI, DELETE_EMOJI) -log = logging.getLogger(__name__) +log = get_logger(__name__) -class EmptyPaginatorEmbed(Exception): +class EmptyPaginatorEmbedError(Exception): """Raised when attempting to paginate with empty contents.""" pass @@ -49,30 +49,33 @@ class LinePaginator(Paginator): self, prefix: str = '```', suffix: str = '```', - max_size: int = 2000, - scale_to_size: int = 2000, - max_lines: t.Optional[int] = None + max_size: int = 4000, + scale_to_size: int = 4000, + max_lines: t.Optional[int] = None, + linesep: str = "\n" ) -> None: """ This function overrides the Paginator.__init__ from inside discord.ext.commands. It overrides in order to allow us to configure the maximum number of lines per page. """ - self.prefix = prefix - self.suffix = suffix - - # Embeds that exceed 2048 characters will result in an HTTPException - # (Discord API limit), so we've set a limit of 2000 - if max_size > 2000: - raise ValueError(f"max_size must be <= 2,000 characters. ({max_size} > 2000)") - - self.max_size = max_size - len(suffix) + # Embeds that exceed 4096 characters will result in an HTTPException + # (Discord API limit), so we've set a limit of 4000 + if max_size > 4000: + raise ValueError(f"max_size must be <= 4,000 characters. ({max_size} > 4000)") + + super().__init__( + prefix, + suffix, + max_size - len(suffix), + linesep + ) if scale_to_size < max_size: raise ValueError(f"scale_to_size must be >= max_size. ({scale_to_size} < {max_size})") - if scale_to_size > 2000: - raise ValueError(f"scale_to_size must be <= 2,000 characters. ({scale_to_size} > 2000)") + if scale_to_size > 4000: + raise ValueError(f"scale_to_size must be <= 4,000 characters. 
({scale_to_size} > 4000)") self.scale_to_size = scale_to_size - len(suffix) self.max_lines = max_lines @@ -194,7 +197,7 @@ class LinePaginator(Paginator): suffix: str = "", max_lines: t.Optional[int] = None, max_size: int = 500, - scale_to_size: int = 2000, + scale_to_size: int = 4000, empty: bool = True, restrict_to_user: User = None, timeout: int = 300, @@ -230,7 +233,7 @@ class LinePaginator(Paginator): if not lines: if exception_on_empty_embed: log.exception("Pagination asked for empty lines iterable") - raise EmptyPaginatorEmbed("No lines to paginate") + raise EmptyPaginatorEmbedError("No lines to paginate") log.debug("No lines to add to paginator, adding '(nothing to display)' message") lines.append("(nothing to display)") diff --git a/bot/resources/tags/async-await.md b/bot/resources/tags/async-await.md index ff71ace07..01ab28fe3 100644 --- a/bot/resources/tags/async-await.md +++ b/bot/resources/tags/async-await.md @@ -2,27 +2,26 @@ Python provides the ability to run multiple tasks and coroutines simultaneously with the use of the `asyncio` library, which is included in the Python standard library. -This works by running these coroutines in an event loop, where the context of which coroutine is being run is switches periodically to allow all of them to run, giving the appearance of running at the same time. This is different to using threads or processes in that all code is run in the main process and thread, although it is possible to run coroutines in threads. +This works by running these coroutines in an event loop, where the context of the running coroutine switches periodically to allow all other coroutines to run, thus giving the appearance of running at the same time. This is different to using threads or processes in that all code runs in the main process and thread, although it is possible to run coroutines in other threads. To call an async function we can either `await` it, or run it in an event loop which we get from `asyncio`. -To create a coroutine that can be used with asyncio we need to define a function using the async keyword: +To create a coroutine that can be used with asyncio we need to define a function using the `async` keyword: ```py async def main(): await something_awaitable() ``` -Which means we can call `await something_awaitable()` directly from within the function. If this were a non-async function this would have raised an exception like: `SyntaxError: 'await' outside async function` +Which means we can call `await something_awaitable()` directly from within the function. If this were a non-async function, it would raise the exception `SyntaxError: 'await' outside async function` -To run the top level async function from outside of the event loop we can get an event loop from `asyncio`, and then use that loop to run the function: +To run the top level async function from outside the event loop we need to use [`asyncio.run()`](https://docs.python.org/3/library/asyncio-task.html#asyncio.run), like this: ```py -from asyncio import get_event_loop +import asyncio async def main(): await something_awaitable() -loop = get_event_loop() -loop.run_until_complete(main()) +asyncio.run(main()) ``` -Note that in the `run_until_complete()` where we appear to be calling `main()`, this does not execute the code in `main`, rather it returns a `coroutine` object which is then handled and run by the event loop via `run_until_complete()`. +Note that in the `asyncio.run()`, where we appear to be calling `main()`, this does not execute the code in `main`. 
Rather, it creates and returns a new `coroutine` object (i.e. `main() is not main()`) which is then handled and run by the event loop via `asyncio.run()`.

To learn more about asyncio and its use, see the [asyncio documentation](https://docs.python.org/3/library/asyncio.html).
diff --git a/bot/resources/tags/blocking.md b/bot/resources/tags/blocking.md
index 31d91294c..5554d7eba 100644
--- a/bot/resources/tags/blocking.md
+++ b/bot/resources/tags/blocking.md
@@ -1,9 +1,7 @@
**Why do we need asynchronous programming?**
-
Imagine that you're coding a Discord bot and every time somebody uses a command, you need to get some information from a database. But there's a catch: the database servers are acting up today and take a whole 10 seconds to respond. If you do **not** use asynchronous methods, your whole bot will stop running until it gets a response from the database. How do you fix this? Asynchronous programming.

**What is asynchronous programming?**
-
An asynchronous program utilises the `async` and `await` keywords. An asynchronous program pauses what it's doing and does something else whilst it waits for some third-party service to complete whatever it's supposed to do. Any code within an `async` context manager or function marked with the `await` keyword indicates to Python, that whilst this operation is being completed, it can do something else. For example:
```py
@@ -14,13 +12,10 @@ import discord
async def ping(ctx):
    await ctx.send("Pong!")
```
-
**What does the term "blocking" mean?**
-
A blocking operation is wherever you do something without `await`ing it. This tells Python that this step must be completed before it can do anything else. Common examples of blocking operations, as simple as they may seem, include: outputting text, adding two numbers and appending an item onto a list. Most common Python libraries have an asynchronous version available to use in asynchronous contexts.

**`async` libraries**
-
The standard async library - `asyncio`
Asynchronous web requests - `aiohttp`
Talking to PostgreSQL asynchronously - `asyncpg`
diff --git a/bot/resources/tags/botvar.md b/bot/resources/tags/botvar.md
new file mode 100644
index 000000000..3db6ae7ac
--- /dev/null
+++ b/bot/resources/tags/botvar.md
@@ -0,0 +1,23 @@
+Python allows you to set custom attributes to most objects, like your bot! By storing things as attributes of the bot object, you can access them anywhere you access your bot. In the discord.py library, these custom attributes are commonly known as "bot variables" and can be a lifesaver if your bot is divided into many different files. An example of how to use custom attributes on your bot is shown below:
+
+```py
+bot = commands.Bot(command_prefix="!")
+# Set an attribute on our bot
+bot.test = "I am accessible everywhere!"
+
+@bot.command()
+async def get(ctx: commands.Context):
+    """A command to get the current value of `test`."""
+    # Send what the test attribute is currently set to
+    await ctx.send(ctx.bot.test)
+
+@bot.command()
+async def setval(ctx: commands.Context, *, new_text: str):
+    """A command to set a new value of `test`."""
+    # Here we change the attribute to what was specified in new_text
+    bot.test = new_text
+```
+
+This all applies to cogs as well! You can set attributes to `self` as you wish.
+
+*Be sure **not** to overwrite attributes discord.py uses, like `cogs` or `users`.
Name your attributes carefully!* diff --git a/bot/resources/tags/contribute.md b/bot/resources/tags/contribute.md new file mode 100644 index 000000000..070975646 --- /dev/null +++ b/bot/resources/tags/contribute.md @@ -0,0 +1,12 @@ +**Contribute to Python Discord's Open Source Projects** +Looking to contribute to Open Source Projects for the first time? Want to add a feature or fix a bug on the bots on this server? We have on-going projects that people can contribute to, even if you've never contributed to open source before! + +**Projects to Contribute to** +• [Sir Lancebot](https://github.com/python-discord/sir-lancebot) - our fun, beginner-friendly bot +• [Python](https://github.com/python-discord/bot) - our utility & moderation bot +• [Site](https://github.com/python-discord/site) - resources, guides, and more + +**Where to start** +1. Read our [contributing guidelines](https://pythondiscord.com/pages/guides/pydis-guides/contributing/) +2. Chat with us in <#635950537262759947> if you're ready to jump in or have any questions +3. Open an issue or ask to be assigned to an issue to work on diff --git a/bot/resources/tags/docstring.md b/bot/resources/tags/docstring.md new file mode 100644 index 000000000..20043131e --- /dev/null +++ b/bot/resources/tags/docstring.md @@ -0,0 +1,18 @@ +A [`docstring`](https://docs.python.org/3/glossary.html#term-docstring) is a string - always using triple quotes - that's placed at the top of files, classes and functions. A docstring should contain a clear explanation of what it's describing. You can also include descriptions of the subject's parameter(s) and what it returns, as shown below: +```py +def greet(name: str, age: int) -> str: + """ + Return a string that greets the given person, using their name and age. + + :param name: The name of the person to greet. + :param age: The age of the person to greet. + + :return: The greeting. + """ + return f"Hello {name}, you are {age} years old!" +``` +You can get the docstring by using the [`inspect.getdoc`](https://docs.python.org/3/library/inspect.html#inspect.getdoc) function, from the built-in [`inspect`](https://docs.python.org/3/library/inspect.html) module, or by accessing the `.__doc__` attribute. `inspect.getdoc` is often preferred, as it clears indents from the docstring. + +For the last example, you can print it by doing this: `print(inspect.getdoc(greet))`. + +For more details about what a docstring is and its usage, check out this guide by [Real Python](https://realpython.com/documenting-python-code/#docstrings-background), or the [official docstring specification](https://www.python.org/dev/peps/pep-0257/#what-is-a-docstring). diff --git a/bot/resources/tags/dunder-methods.md b/bot/resources/tags/dunder-methods.md new file mode 100644 index 000000000..be2b97b7b --- /dev/null +++ b/bot/resources/tags/dunder-methods.md @@ -0,0 +1,28 @@ +**Dunder methods** + +Double-underscore methods, or "dunder" methods, are special methods defined in a class that are invoked implicitly. Like the name suggests, they are prefixed and suffixed with dunders. You've probably already seen some, such as the `__init__` dunder method, also known as the "constructor" of a class, which is implicitly invoked when you instantiate an instance of a class. + +When you create a new class, there will be default dunder methods inherited from the `object` class. However, we can override them by redefining these methods within the new class. 
For example, the default `__init__` method from `object` doesn't take any arguments, so we almost always override that to fit our needs.
+
+Other common dunder methods to override are `__str__` and `__repr__`. `__repr__` is the developer-friendly string representation of an object - usually the syntax to recreate it - and is implicitly called on arguments passed into the `repr` function. `__str__` is the user-friendly string representation of an object, and is called by the `str` function. Note here that, if not overridden, the default `__str__` invokes `__repr__` as a fallback.
+
+```py
+class Foo:
+    def __init__(self, value):  # constructor
+        self.value = value
+    def __str__(self):
+        return f"This is a Foo object, with a value of {self.value}!"  # string representation
+    def __repr__(self):
+        return f"Foo({self.value!r})"  # way to recreate this object
+
+
+bar = Foo(5)
+
+# print also implicitly calls __str__
+print(bar)  # Output: This is a Foo object, with a value of 5!
+
+# dev-friendly representation
+print(repr(bar))  # Output: Foo(5)
+```
+
+Another example: did you know that when you use the `<left operand> + <right operand>` syntax, you're implicitly calling `<left operand>.__add__(<right operand>)`? The same applies to other operators, and you can look at the [`operator` built-in module documentation](https://docs.python.org/3/library/operator.html) for more information!
diff --git a/bot/resources/tags/for-else.md b/bot/resources/tags/for-else.md
new file mode 100644
index 000000000..e102e4e75
--- /dev/null
+++ b/bot/resources/tags/for-else.md
@@ -0,0 +1,17 @@
+**for-else**
+
+In Python it's possible to attach an `else` clause to a for loop. The code under the `else` block will be run when the iterable is exhausted (there are no more items to iterate over). Code within the else block will **not** run if the loop is broken out of using `break`.
+
+Here's an example of its usage:
+```py
+numbers = [1, 3, 5, 7, 9, 11]
+
+for number in numbers:
+    if number % 2 == 0:
+        print(f"Found an even number: {number}")
+        break
+    print(f"{number} is odd.")
+else:
+    print("All numbers are odd. How odd.")
+```
+Try running this example with an even number in the list, and see how the output changes.
diff --git a/bot/resources/tags/modmail.md b/bot/resources/tags/modmail.md
index 412468174..8ac19c8a7 100644
--- a/bot/resources/tags/modmail.md
+++ b/bot/resources/tags/modmail.md
@@ -6,4 +6,4 @@ It supports attachments, codeblocks, and reactions. As communication happens ove
**To use it, simply send a direct message to the bot.**
-Should there be an urgent and immediate need for a moderator or admin to look at a channel, feel free to ping the <@&831776746206265384> or <@&267628507062992896> role instead.
+Should there be an urgent and immediate need for a moderator to look at a channel, feel free to ping the <@&831776746206265384> role instead.
diff --git a/bot/resources/tags/paste.md b/bot/resources/tags/paste.md
index 2ed51def7..8c3c2985d 100644
--- a/bot/resources/tags/paste.md
+++ b/bot/resources/tags/paste.md
@@ -1,6 +1,6 @@
**Pasting large amounts of code**
If your code is too long to fit in a codeblock in discord, you can paste your code here:
-https://paste.pydis.com/
+https://paste.pythondiscord.com/
After pasting your code, **save** it by clicking the floppy disk icon in the top right, or by typing `ctrl + S`. After doing that, the URL should **change**. Copy the URL and post it here so others can see it.
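As a quick illustration of the operator point made at the end of the `dunder-methods` tag above: the sketch below is editorial and not part of the patch (the `Vector` class and its fields are invented for the example); it shows how defining `__add__` changes what the `+` operator does for your own objects.
```py
class Vector:
    """A tiny 2D vector used only to demonstrate operator dunder methods."""

    def __init__(self, x: float, y: float):
        self.x = x
        self.y = y

    def __add__(self, other: "Vector") -> "Vector":
        # `a + b` is implicitly `a.__add__(b)`
        if not isinstance(other, Vector):
            return NotImplemented  # lets Python try the other operand's __radd__
        return Vector(self.x + other.x, self.y + other.y)

    def __repr__(self) -> str:
        return f"Vector({self.x!r}, {self.y!r})"


print(Vector(1, 2) + Vector(3, 4))  # Output: Vector(4, 6)
```
Returning `NotImplemented` for unsupported operands is what allows Python to fall back to the right-hand operand's `__radd__` instead of raising immediately.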
diff --git a/bot/resources/tags/string-formatting.md b/bot/resources/tags/string-formatting.md new file mode 100644 index 000000000..707d19c90 --- /dev/null +++ b/bot/resources/tags/string-formatting.md @@ -0,0 +1,24 @@ +**String Formatting Mini-Language** +The String Formatting Language in Python is a powerful way to tailor the display of strings and other data structures. This string formatting mini language works for f-strings and `.format()`. + +Take a look at some of these examples! +```py +>>> my_num = 2134234523 +>>> print(f"{my_num:,}") +2,134,234,523 + +>>> my_smaller_num = -30.0532234 +>>> print(f"{my_smaller_num:=09.2f}") +-00030.05 + +>>> my_str = "Center me!" +>>> print(f"{my_str:-^20}") +-----Center me!----- + +>>> repr_str = "Spam \t Ham" +>>> print(f"{repr_str!r}") +'Spam \t Ham' +``` +**Full Specification & Resources** +[String Formatting Mini Language Specification](https://docs.python.org/3/library/string.html#format-specification-mini-language) +[pyformat.info](https://pyformat.info/) diff --git a/bot/resources/tags/traceback.md b/bot/resources/tags/traceback.md index e770fa86d..321737aac 100644 --- a/bot/resources/tags/traceback.md +++ b/bot/resources/tags/traceback.md @@ -1,4 +1,4 @@ -Please provide a full traceback to your exception in order for us to identify your issue. +Please provide the full traceback for your exception in order to help us identify your issue. A full traceback could look like: ```py @@ -6,13 +6,13 @@ Traceback (most recent call last): File "tiny", line 3, in do_something() File "tiny", line 2, in do_something - a = 6 / 0 -ZeroDivisionError: integer division or modulo by zero + a = 6 / b +ZeroDivisionError: division by zero ``` The best way to read your traceback is bottom to top. -• Identify the exception raised (e.g. ZeroDivisionError) -• Make note of the line number, and navigate there in your program. -• Try to understand why the error occurred. +• Identify the exception raised (in this case `ZeroDivisionError`) +• Make note of the line number (in this case `2`), and navigate there in your program. +• Try to understand why the error occurred (in this case because `b` is `0`). -To read more about exceptions and errors, please refer to the [PyDis Wiki](https://pythondiscord.com/pages/asking-good-questions/#examining-tracebacks) or the [official Python tutorial.](https://docs.python.org/3.7/tutorial/errors.html) +To read more about exceptions and errors, please refer to the [PyDis Wiki](https://pythondiscord.com/pages/guides/pydis-guides/asking-good-questions/#examining-tracebacks) or the [official Python tutorial](https://docs.python.org/3.7/tutorial/errors.html). diff --git a/bot/resources/tags/venv.md b/bot/resources/tags/venv.md new file mode 100644 index 000000000..a4fc62151 --- /dev/null +++ b/bot/resources/tags/venv.md @@ -0,0 +1,20 @@ +**Virtual Environments** + +Virtual environments are isolated Python environments, which make it easier to keep your system clean and manage dependencies. By default, when activated, only libraries and scripts installed in the virtual environment are accessible, preventing cross-project dependency conflicts, and allowing easy isolation of requirements. 
+ +To create a new virtual environment, you can use the standard library `venv` module: `python3 -m venv .venv` (replace `python3` with `python` or `py` on Windows) + +Then, to activate the new virtual environment: + +**Windows** (PowerShell): `.venv\Scripts\Activate.ps1` +or (Command Prompt): `.venv\Scripts\activate.bat` +**MacOS / Linux** (Bash): `source .venv/bin/activate` + +Packages can then be installed to the virtual environment using `pip`, as normal. + +For more information, take a read of the [documentation](https://docs.python.org/3/library/venv.html). If you run code through your editor, check its documentation on how to make it use your virtual environment. For example, see the [VSCode](https://code.visualstudio.com/docs/python/environments#_select-and-activate-an-environment) or [PyCharm](https://www.jetbrains.com/help/pycharm/creating-virtual-environment.html) docs. + +Tools such as [poetry](https://python-poetry.org/docs/basic-usage/) and [pipenv](https://pipenv.pypa.io/en/latest/) can manage the creation of virtual environments as well as project dependencies, making packaging and installing your project easier. + +**Note:** When using Windows PowerShell, you may need to change the [execution policy](https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_execution_policies) first. This is only required once: +`Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser` diff --git a/bot/resources/tags/windows-path.md b/bot/resources/tags/windows-path.md index da8edf685..b2b0da029 100644 --- a/bot/resources/tags/windows-path.md +++ b/bot/resources/tags/windows-path.md @@ -1,30 +1,17 @@ **PATH on Windows** -If you have installed Python but you forgot to check the *Add Python to PATH* option during the installation you may still be able to access your installation with ease. +If you have installed Python but forgot to check the `Add Python to PATH` option during the installation, you may still be able to access your installation with ease. -If you did not uncheck the option to install the Python launcher then you will find a `py` command on your system. If you want to be able to open your Python installation by running `python` then your best option is to re-install Python. +If you did not uncheck the option to install the `py launcher`, then you'll instead have a `py` command which can be used in the same way. If you want to be able to access your Python installation via the `python` command, then your best option is to re-install Python (remembering to tick the `Add Python to PATH` checkbox). -Otherwise, you can access your install using the `py` command in Command Prompt. Where you may type something with the `python` command like: -``` -C:\Users\Username> python3 my_application_file.py -``` - -You can achieve the same result using the `py` command like this: -``` -C:\Users\Username> py -3 my_application_file.py -``` - -You can pass any options to the Python interpreter after you specify a version, for example, to install a Python module using `pip` you can run: -``` -C:\Users\Username> py -3 -m pip install numpy -``` +You can pass any options to the Python interpreter, e.g. to install the `[numpy](https://pypi.org/project/numpy/)` module from PyPI you can run `py -3 -m pip install numpy` or `python -m pip install numpy`. 
-You can also access different versions of Python using the version flag, like so: +You can also access different versions of Python using the version flag of the `py` command, like so: ``` C:\Users\Username> py -3.7 ... Python 3.7 starts ... C:\Users\Username> py -3.6 -... Python 3.6 stars ... +... Python 3.6 starts ... C:\Users\Username> py -2 ... Python 2 (any version installed) starts ... ``` diff --git a/bot/resources/tags/xy-problem.md b/bot/resources/tags/xy-problem.md index b77bd27e8..8c508f18c 100644 --- a/bot/resources/tags/xy-problem.md +++ b/bot/resources/tags/xy-problem.md @@ -1,7 +1,7 @@ **xy-problem** -Asking about your attempted solution rather than your actual problem. +The XY problem can be summarised as asking about your attempted solution, rather than your actual problem. Often programmers will get distracted with a potential solution they've come up with, and will try asking for help getting it to work. However, it's possible this solution either wouldn't work as they expect, or there's a much better solution instead. -For more information and examples: http://xyproblem.info/ +For more information and examples, see http://xyproblem.info/ diff --git a/bot/resources/tags/ytdl.md b/bot/resources/tags/ytdl.md index f96b7f853..68a0a0cdb 100644 --- a/bot/resources/tags/ytdl.md +++ b/bot/resources/tags/ytdl.md @@ -1,4 +1,4 @@ -Per [Python Discord's Rule 5](https://pythondiscord.com/pages/rules), we are unable to assist with questions related to youtube-dl, pytube, or other YouTube video downloaders as their usage violates YouTube's Terms of Service. +Per [Python Discord's Rule 5](https://pythondiscord.com/pages/rules), we are unable to assist with questions related to youtube-dl, pytube, or other YouTube video downloaders, as their usage violates YouTube's Terms of Service. 
For reference, this usage is covered by the following clauses in [YouTube's TOS](https://www.youtube.com/static?gl=GB&template=terms), as of 2021-03-17: ``` diff --git a/bot/resources/tags/zip.md b/bot/resources/tags/zip.md index 6b05f0282..6f3157f71 100644 --- a/bot/resources/tags/zip.md +++ b/bot/resources/tags/zip.md @@ -3,7 +3,7 @@ The zip function allows you to iterate through multiple iterables simultaneously ```py letters = 'abc' numbers = [1, 2, 3] -# zip(letters, numbers) --> [('a', 1), ('b', 2), ('c', 3)] +# list(zip(letters, numbers)) --> [('a', 1), ('b', 2), ('c', 3)] for letter, number in zip(letters, numbers): print(letter, number) ``` diff --git a/bot/rules/discord_emojis.py b/bot/rules/discord_emojis.py index 41faf7ee8..d979ac5e7 100644 --- a/bot/rules/discord_emojis.py +++ b/bot/rules/discord_emojis.py @@ -4,7 +4,6 @@ from typing import Dict, Iterable, List, Optional, Tuple from discord import Member, Message from emoji import demojize - DISCORD_EMOJI_RE = re.compile(r"<:\w+:\d+>|:\w+:") CODE_BLOCK_RE = re.compile(r"```.*?```", flags=re.DOTALL) diff --git a/bot/rules/links.py b/bot/rules/links.py index ec75a19c5..c46b783c5 100644 --- a/bot/rules/links.py +++ b/bot/rules/links.py @@ -3,7 +3,6 @@ from typing import Dict, Iterable, List, Optional, Tuple from discord import Member, Message - LINK_RE = re.compile(r"(https?://[^\s]+)") diff --git a/bot/rules/mentions.py b/bot/rules/mentions.py index 79725a4b1..6f5addad1 100644 --- a/bot/rules/mentions.py +++ b/bot/rules/mentions.py @@ -13,7 +13,11 @@ async def apply( if msg.author == last_message.author ) - total_recent_mentions = sum(len(msg.mentions) for msg in relevant_messages) + total_recent_mentions = sum( + not user.bot + for msg in relevant_messages + for user in msg.mentions + ) if total_recent_mentions > config['max']: return ( diff --git a/bot/utils/cache.py b/bot/utils/caching.py index 68ce15607..68ce15607 100644 --- a/bot/utils/cache.py +++ b/bot/utils/caching.py diff --git a/bot/utils/channel.py b/bot/utils/channel.py index 72603c521..b9e234857 100644 --- a/bot/utils/channel.py +++ b/bot/utils/channel.py @@ -1,12 +1,11 @@ -import logging - import discord import bot from bot import constants from bot.constants import Categories +from bot.log import get_logger -log = logging.getLogger(__name__) +log = get_logger(__name__) def is_help_channel(channel: discord.TextChannel) -> bool: @@ -53,7 +52,7 @@ def is_in_category(channel: discord.TextChannel, category_id: int) -> bool: return getattr(channel, "category_id", None) == category_id -async def try_get_channel(channel_id: int) -> discord.abc.GuildChannel: +async def get_or_fetch_channel(channel_id: int) -> discord.abc.GuildChannel: """Attempt to get or fetch a channel and return it.""" log.trace(f"Getting the channel {channel_id}.") diff --git a/bot/utils/checks.py b/bot/utils/checks.py index 3d0c8a50c..e7f2cfbda 100644 --- a/bot/utils/checks.py +++ b/bot/utils/checks.py @@ -1,23 +1,15 @@ import datetime -import logging from typing import Callable, Container, Iterable, Optional, Union from discord.ext.commands import ( - BucketType, - CheckFailure, - Cog, - Command, - CommandOnCooldown, - Context, - Cooldown, - CooldownMapping, - NoPrivateMessage, - has_any_role, + BucketType, CheckFailure, Cog, Command, CommandOnCooldown, Context, Cooldown, CooldownMapping, NoPrivateMessage, + has_any_role ) from bot import constants +from bot.log import get_logger -log = logging.getLogger(__name__) +log = get_logger(__name__) class ContextCheckFailure(CheckFailure): @@ -134,7 +126,7 
@@ def cooldown_with_role_bypass(rate: int, per: float, type: BucketType = BucketTy bypass = set(bypass_roles) # this handles the actual cooldown logic - buckets = CooldownMapping(Cooldown(rate, per, type)) + buckets = CooldownMapping(Cooldown(rate, per), type) # will be called after the command has been parse but before it has been invoked, ensures that # the cooldown won't be updated if the user screws up their input to the command @@ -149,7 +141,7 @@ def cooldown_with_role_bypass(rate: int, per: float, type: BucketType = BucketTy bucket = buckets.get_bucket(ctx.message) retry_after = bucket.update_rate_limit(current) if retry_after: - raise CommandOnCooldown(bucket, retry_after) + raise CommandOnCooldown(bucket, retry_after, type) def wrapper(command: Command) -> Command: # NOTE: this could be changed if a subclass of Command were to be used. I didn't see the need for it diff --git a/bot/utils/function.py b/bot/utils/function.py index 9bc44e753..55115d7d3 100644 --- a/bot/utils/function.py +++ b/bot/utils/function.py @@ -2,11 +2,12 @@ import functools import inspect -import logging import types import typing as t -log = logging.getLogger(__name__) +from bot.log import get_logger + +log = get_logger(__name__) Argument = t.Union[int, str] BoundArgs = t.OrderedDict[str, t.Any] diff --git a/bot/utils/lock.py b/bot/utils/lock.py index ec6f92cd4..c039a4f25 100644 --- a/bot/utils/lock.py +++ b/bot/utils/lock.py @@ -1,6 +1,5 @@ import asyncio import inspect -import logging import types from collections import defaultdict from functools import partial @@ -8,10 +7,11 @@ from typing import Any, Awaitable, Callable, Hashable, Union from weakref import WeakValueDictionary from bot.errors import LockedResourceError +from bot.log import get_logger from bot.utils import function from bot.utils.function import command_wraps -log = logging.getLogger(__name__) +log = get_logger(__name__) __lock_dicts = defaultdict(WeakValueDictionary) _IdCallableReturn = Union[Hashable, Awaitable[Hashable]] diff --git a/bot/utils/members.py b/bot/utils/members.py new file mode 100644 index 000000000..77ddf1696 --- /dev/null +++ b/bot/utils/members.py @@ -0,0 +1,25 @@ +import typing as t + +import discord + +from bot.log import get_logger + +log = get_logger(__name__) + + +async def get_or_fetch_member(guild: discord.Guild, member_id: int) -> t.Optional[discord.Member]: + """ + Attempt to get a member from cache; on failure fetch from the API. + + Return `None` to indicate the member could not be found. + """ + if member := guild.get_member(member_id): + log.trace("%s retrieved from cache.", member) + else: + try: + member = await guild.fetch_member(member_id) + except discord.errors.NotFound: + log.trace("Failed to fetch %d from API.", member_id) + return None + log.trace("%s fetched from API.", member) + return member diff --git a/bot/utils/message_cache.py b/bot/utils/message_cache.py new file mode 100644 index 000000000..f68d280c9 --- /dev/null +++ b/bot/utils/message_cache.py @@ -0,0 +1,197 @@ +import typing as t +from math import ceil + +from discord import Message + + +class MessageCache: + """ + A data structure for caching messages. + + The cache is implemented as a circular buffer to allow constant time append, prepend, pop from either side, + and lookup by index. The cache therefore does not support removal at an arbitrary index (although it can be + implemented to work in linear time relative to the maximum size). 
+ + The object additionally holds a mapping from Discord message IDs to the index in which the corresponding message + is stored, to allow for constant time lookup by message ID. + + The cache has a size limit operating the same as with a collections.deque, and most of its method names mirror those + of a deque. + + The implementation is transparent to the user: to the user the first element is always at index 0, and there are + only as many elements as were inserted (meaning, without any pre-allocated placeholder values). + """ + + def __init__(self, maxlen: int, *, newest_first: bool = False): + if maxlen <= 0: + raise ValueError("maxlen must be positive") + self.maxlen = maxlen + self.newest_first = newest_first + + self._start = 0 + self._end = 0 + + self._messages: list[t.Optional[Message]] = [None] * self.maxlen + self._message_id_mapping = {} + + def append(self, message: Message) -> None: + """Add the received message to the cache, depending on the order of messages defined by `newest_first`.""" + if self.newest_first: + self._appendleft(message) + else: + self._appendright(message) + + def _appendright(self, message: Message) -> None: + """Add the received message to the end of the cache.""" + if self._is_full(): + del self._message_id_mapping[self._messages[self._start].id] + self._start = (self._start + 1) % self.maxlen + + self._messages[self._end] = message + self._message_id_mapping[message.id] = self._end + self._end = (self._end + 1) % self.maxlen + + def _appendleft(self, message: Message) -> None: + """Add the received message to the beginning of the cache.""" + if self._is_full(): + self._end = (self._end - 1) % self.maxlen + del self._message_id_mapping[self._messages[self._end].id] + + self._start = (self._start - 1) % self.maxlen + self._messages[self._start] = message + self._message_id_mapping[message.id] = self._start + + def pop(self) -> Message: + """Remove the last message in the cache and return it.""" + if self._is_empty(): + raise IndexError("pop from an empty cache") + + self._end = (self._end - 1) % self.maxlen + message = self._messages[self._end] + del self._message_id_mapping[message.id] + self._messages[self._end] = None + + return message + + def popleft(self) -> Message: + """Remove the first message in the cache and return it.""" + if self._is_empty(): + raise IndexError("pop from an empty cache") + + message = self._messages[self._start] + del self._message_id_mapping[message.id] + self._messages[self._start] = None + self._start = (self._start + 1) % self.maxlen + + return message + + def clear(self) -> None: + """Remove all messages from the cache.""" + self._messages = [None] * self.maxlen + self._message_id_mapping = {} + + self._start = 0 + self._end = 0 + + def get_message(self, message_id: int) -> t.Optional[Message]: + """Return the message that has the given message ID, if it is cached.""" + index = self._message_id_mapping.get(message_id, None) + return self._messages[index] if index is not None else None + + def update(self, message: Message) -> bool: + """ + Update a cached message with new contents. + + Return True if the given message had a matching ID in the cache.
+ """ + index = self._message_id_mapping.get(message.id, None) + if index is None: + return False + self._messages[index] = message + return True + + def __contains__(self, message_id: int) -> bool: + """Return True if the cache contains a message with the given ID .""" + return message_id in self._message_id_mapping + + def __getitem__(self, item: t.Union[int, slice]) -> t.Union[Message, list[Message]]: + """ + Return the message(s) in the index or slice provided. + + This method makes the circular buffer implementation transparent to the user. + Providing 0 will return the message at the position perceived by the user to be the beginning of the cache, + meaning at `self._start`. + """ + # Keep in mind that for the modulo operator used throughout this function, Python modulo behaves similarly when + # the left operand is negative. E.g -1 % 5 == 4, because the closest number from the bottom that wholly divides + # by 5 is -5. + if isinstance(item, int): + if item >= len(self) or item < -len(self): + raise IndexError("cache index out of range") + return self._messages[(item + self._start) % self.maxlen] + + elif isinstance(item, slice): + length = len(self) + start, stop, step = item.indices(length) + + # This needs to be checked explicitly now, because otherwise self._start >= self._end is a valid state. + if (start >= stop and step >= 0) or (start <= stop and step <= 0): + return [] + + start = (start + self._start) % self.maxlen + stop = (stop + self._start) % self.maxlen + + # Having empty cells is an implementation detail. To the user the cache contains as many elements as they + # inserted, therefore any empty cells should be ignored. There can only be Nones at the tail. + if step > 0: + if ( + (self._start < self._end and not self._start < stop <= self._end) + or (self._start > self._end and self._end < stop <= self._start) + ): + stop = self._end + else: + lower_boundary = (self._start - 1) % self.maxlen + if ( + (self._start < self._end and not self._start - 1 <= stop < self._end) + or (self._start > self._end and self._end < stop < lower_boundary) + ): + stop = lower_boundary + + if (start < stop and step > 0) or (start > stop and step < 0): + return self._messages[start:stop:step] + # step != 1 may require a start offset in the second slicing. 
+ if step > 0: + offset = ceil((self.maxlen - start) / step) * step + start - self.maxlen + return self._messages[start::step] + self._messages[offset:stop:step] + else: + offset = ceil((start + 1) / -step) * -step - start - 1 + return self._messages[start::step] + self._messages[self.maxlen - 1 - offset:stop:step] + + else: + raise TypeError(f"cache indices must be integers or slices, not {type(item)}") + + def __iter__(self) -> t.Iterator[Message]: + if self._is_empty(): + return + + if self._start < self._end: + yield from self._messages[self._start:self._end] + else: + yield from self._messages[self._start:] + yield from self._messages[:self._end] + + def __len__(self): + """Get the number of non-empty cells in the cache.""" + if self._is_empty(): + return 0 + if self._end > self._start: + return self._end - self._start + return self.maxlen - self._start + self._end + + def _is_empty(self) -> bool: + """Return True if the cache has no messages.""" + return self._messages[self._start] is None + + def _is_full(self) -> bool: + """Return True if every cell in the cache already contains a message.""" + return self._messages[self._end] is not None diff --git a/bot/utils/messages.py b/bot/utils/messages.py index d4a921161..e55c07062 100644 --- a/bot/utils/messages.py +++ b/bot/utils/messages.py @@ -1,6 +1,4 @@ import asyncio -import contextlib -import logging import random import re from functools import partial @@ -8,15 +6,14 @@ from io import BytesIO from typing import Callable, List, Optional, Sequence, Union import discord -from discord import Message, MessageType, Reaction, User -from discord.errors import HTTPException from discord.ext.commands import Context import bot from bot.constants import Emojis, MODERATION_ROLES, NEGATIVE_REPLIES +from bot.log import get_logger from bot.utils import scheduling -log = logging.getLogger(__name__) +log = get_logger(__name__) def reaction_check( @@ -54,7 +51,7 @@ def reaction_check( log.trace(f"Removing reaction {reaction} by {user} on {reaction.message.id}: disallowed user.") scheduling.create_task( reaction.message.remove_reaction(reaction.emoji, user), - suppressed_exceptions=(HTTPException,), + suppressed_exceptions=(discord.HTTPException,), name=f"remove_reaction-{reaction}-{reaction.message.id}-{user}" ) return False @@ -69,7 +66,9 @@ async def wait_for_deletion( allow_mods: bool = True ) -> None: """ - Wait for up to `timeout` seconds for a reaction by any of the specified `user_ids` to delete the message. + Wait for any of `user_ids` to react with one of the `deletion_emojis` within `timeout` seconds to delete `message`. + + If `timeout` expires then reactions are cleared to indicate the option to delete has expired. An `attach_emojis` bool may be specified to determine whether to attach the given `deletion_emojis` to the message in the given `context`. 
@@ -95,9 +94,15 @@ async def wait_for_deletion( allow_mods=allow_mods, ) - with contextlib.suppress(asyncio.TimeoutError): - await bot.instance.wait_for('reaction_add', check=check, timeout=timeout) - await message.delete() + try: + try: + await bot.instance.wait_for('reaction_add', check=check, timeout=timeout) + except asyncio.TimeoutError: + await message.clear_reactions() + else: + await message.delete() + except discord.NotFound: + log.trace(f"wait_for_deletion: message {message.id} deleted prematurely.") async def send_attachments( @@ -116,7 +121,7 @@ async def send_attachments( """ webhook_send_kwargs = { 'username': message.author.display_name, - 'avatar_url': message.author.avatar_url, + 'avatar_url': message.author.display_avatar.url, } webhook_send_kwargs.update(kwargs) webhook_send_kwargs['username'] = sub_clyde(webhook_send_kwargs['username']) @@ -146,7 +151,7 @@ async def send_attachments( large.append(attachment) else: log.info(f"{failure_msg} because it's too large.") - except HTTPException as e: + except discord.HTTPException as e: if link_large and e.status == 413: large.append(attachment) else: @@ -167,8 +172,8 @@ async def send_attachments( async def count_unique_users_reaction( message: discord.Message, - reaction_predicate: Callable[[Reaction], bool] = lambda _: True, - user_predicate: Callable[[User], bool] = lambda _: True, + reaction_predicate: Callable[[discord.Reaction], bool] = lambda _: True, + user_predicate: Callable[[discord.User], bool] = lambda _: True, count_bots: bool = True ) -> int: """ @@ -188,7 +193,7 @@ async def count_unique_users_reaction( return len(unique_users) -async def pin_no_system_message(message: Message) -> bool: +async def pin_no_system_message(message: discord.Message) -> bool: """Pin the given message, wait a couple of seconds and try to delete the system message.""" await message.pin() @@ -196,7 +201,7 @@ async def pin_no_system_message(message: Message) -> bool: await asyncio.sleep(2) # Search for the system message in the last 10 messages async for historical_message in message.channel.history(limit=10): - if historical_message.type == MessageType.pins_add: + if historical_message.type == discord.MessageType.pins_add: await historical_message.delete() return True diff --git a/bot/utils/regex.py b/bot/utils/regex.py index a8efe1446..d77f5950b 100644 --- a/bot/utils/regex.py +++ b/bot/utils/regex.py @@ -1,13 +1,14 @@ import re INVITE_RE = re.compile( - r"(?:discord(?:[\.,]|dot)gg|" # Could be discord.gg/ - r"discord(?:[\.,]|dot)com(?:\/|slash)invite|" # or discord.com/invite/ - r"discordapp(?:[\.,]|dot)com(?:\/|slash)invite|" # or discordapp.com/invite/ - r"discord(?:[\.,]|dot)me|" # or discord.me - r"discord(?:[\.,]|dot)li|" # or discord.li - r"discord(?:[\.,]|dot)io" # or discord.io. - r")(?:[\/]|slash)" # / or 'slash' - r"([a-zA-Z0-9\-]+)", # the invite code itself + r"(discord([\.,]|dot)gg|" # Could be discord.gg/ + r"discord([\.,]|dot)com(\/|slash)invite|" # or discord.com/invite/ + r"discordapp([\.,]|dot)com(\/|slash)invite|" # or discordapp.com/invite/ + r"discord([\.,]|dot)me|" # or discord.me + r"discord([\.,]|dot)li|" # or discord.li + r"discord([\.,]|dot)io|" # or discord.io. 
+ r"((?<!\w)([\.,]|dot))gg" # or .gg/ + r")([\/]|slash)" # / or 'slash' + r"(?P<invite>[a-zA-Z0-9\-]+)", # the invite code itself flags=re.IGNORECASE ) diff --git a/bot/utils/scheduling.py b/bot/utils/scheduling.py index bb83b5c0d..7b4c8e2de 100644 --- a/bot/utils/scheduling.py +++ b/bot/utils/scheduling.py @@ -1,11 +1,12 @@ import asyncio import contextlib import inspect -import logging import typing as t from datetime import datetime from functools import partial +from bot.log import get_logger + class Scheduler: """ @@ -27,7 +28,7 @@ class Scheduler: def __init__(self, name: str): self.name = name - self._log = logging.getLogger(f"{__name__}.{name}") + self._log = get_logger(f"{__name__}.{name}") self._scheduled_tasks: t.Dict[t.Hashable, asyncio.Task] = {} def __contains__(self, task_id: t.Hashable) -> bool: @@ -187,5 +188,5 @@ def _log_task_exception(task: asyncio.Task, *, suppressed_exceptions: t.Tuple[t. exception = task.exception() # Log the exception if one exists. if exception and not isinstance(exception, suppressed_exceptions): - log = logging.getLogger(__name__) + log = get_logger(__name__) log.error(f"Error in task {task.get_name()} {id(task)}!", exc_info=exception) diff --git a/bot/utils/services.py b/bot/utils/services.py index db9c93d0f..439c8d500 100644 --- a/bot/utils/services.py +++ b/bot/utils/services.py @@ -1,12 +1,12 @@ -import logging from typing import Optional from aiohttp import ClientConnectorError import bot from bot.constants import URLs +from bot.log import get_logger -log = logging.getLogger(__name__) +log = get_logger(__name__) FAILED_REQUEST_ATTEMPTS = 3 diff --git a/bot/utils/time.py b/bot/utils/time.py index d55a0e532..8cf7d623b 100644 --- a/bot/utils/time.py +++ b/bot/utils/time.py @@ -1,12 +1,13 @@ import datetime import re -from typing import Optional +from enum import Enum +from typing import Optional, Union import dateutil.parser from dateutil.relativedelta import relativedelta RFC1123_FORMAT = "%a, %d %b %Y %H:%M:%S GMT" -INFRACTION_FORMAT = "%Y-%m-%d %H:%M" +DISCORD_TIMESTAMP_REGEX = re.compile(r"<t:(\d+):f>") _DURATION_REGEX = re.compile( r"((?P<years>\d+?) ?(years|year|Y|y) ?)?" @@ -19,6 +20,25 @@ _DURATION_REGEX = re.compile( ) +ValidTimestamp = Union[int, datetime.datetime, datetime.date, datetime.timedelta, relativedelta] + + +class TimestampFormats(Enum): + """ + Represents the different formats possible for Discord timestamps. + + Examples are given in epoch time. + """ + + DATE_TIME = "f" # January 1, 1970 1:00 AM + DAY_TIME = "F" # Thursday, January 1, 1970 1:00 AM + DATE_SHORT = "d" # 01/01/1970 + DATE = "D" # January 1, 1970 + TIME = "t" # 1:00 AM + TIME_SECONDS = "T" # 1:00:00 AM + RELATIVE = "R" # 52 years ago + + def _stringify_time_unit(value: int, unit: str) -> str: """ Returns a string to represent a value and time unit, ensuring that it uses the right plural form of the unit. @@ -40,6 +60,24 @@ def _stringify_time_unit(value: int, unit: str) -> str: return f"{value} {unit}" +def discord_timestamp(timestamp: ValidTimestamp, format: TimestampFormats = TimestampFormats.DATE_TIME) -> str: + """Create and format a Discord flavored markdown timestamp.""" + if format not in TimestampFormats: + raise ValueError(f"Format can only be one of {', '.join(TimestampFormats.args)}, not {format}.") + + # Convert each possible timestamp class to an integer. 
+ if isinstance(timestamp, datetime.datetime): + timestamp = (timestamp.replace(tzinfo=None) - datetime.datetime.utcfromtimestamp(0)).total_seconds() + elif isinstance(timestamp, datetime.date): + timestamp = (timestamp - datetime.date.fromtimestamp(0)).total_seconds() + elif isinstance(timestamp, datetime.timedelta): + timestamp = timestamp.total_seconds() + elif isinstance(timestamp, relativedelta): + timestamp = timestamp.seconds + + return f"<t:{int(timestamp)}:{format.value}>" + + def humanize_delta(delta: relativedelta, precision: str = "seconds", max_units: int = 6) -> str: """ Returns a human-readable version of the relativedelta. @@ -87,7 +125,7 @@ def humanize_delta(delta: relativedelta, precision: str = "seconds", max_units: def get_time_delta(time_string: str) -> str: """Returns the time in human-readable time delta format.""" date_time = dateutil.parser.isoparse(time_string).replace(tzinfo=None) - time_delta = time_since(date_time, precision="minutes", max_units=1) + time_delta = time_since(date_time) return time_delta @@ -123,19 +161,9 @@ def relativedelta_to_timedelta(delta: relativedelta) -> datetime.timedelta: return utcnow + delta - utcnow -def time_since(past_datetime: datetime.datetime, precision: str = "seconds", max_units: int = 6) -> str: - """ - Takes a datetime and returns a human-readable string that describes how long ago that datetime was. - - precision specifies the smallest unit of time to include (e.g. "seconds", "minutes"). - max_units specifies the maximum number of units of time to include (e.g. 1 may include days but not hours). - """ - now = datetime.datetime.utcnow() - delta = abs(relativedelta(now, past_datetime)) - - humanized = humanize_delta(delta, precision, max_units) - - return f"{humanized} ago" +def time_since(past_datetime: datetime.datetime) -> str: + """Takes a datetime and returns a discord timestamp that describes how long ago that datetime was.""" + return discord_timestamp(past_datetime, TimestampFormats.RELATIVE) def parse_rfc1123(stamp: str) -> datetime.datetime: @@ -144,8 +172,8 @@ def parse_rfc1123(stamp: str) -> datetime.datetime: def format_infraction(timestamp: str) -> str: - """Format an infraction timestamp to a more readable ISO 8601 format.""" - return dateutil.parser.isoparse(timestamp).strftime(INFRACTION_FORMAT) + """Format an infraction timestamp to a discord timestamp.""" + return discord_timestamp(dateutil.parser.isoparse(timestamp)) def format_infraction_with_duration( @@ -155,11 +183,7 @@ def format_infraction_with_duration( absolute: bool = True ) -> Optional[str]: """ - Return `date_to` formatted as a readable ISO-8601 with the humanized duration since `date_from`. - - `date_from` must be an ISO-8601 formatted timestamp. The duration is calculated as from - `date_from` until `date_to` with a precision of seconds. If `date_from` is unspecified, the - current time is used. + Return `date_to` formatted as a discord timestamp with the timestamp duration since `date_from`. `max_units` specifies the maximum number of units of time to include in the duration. For example, a value of 1 may include days but not hours. @@ -186,25 +210,22 @@ def format_infraction_with_duration( def until_expiration( - expiry: Optional[str], - now: Optional[datetime.datetime] = None, - max_units: int = 2 + expiry: Optional[str] ) -> Optional[str]: """ - Get the remaining time until infraction's expiration, in a human-readable version of the relativedelta. + Get the remaining time until infraction's expiration, in a discord timestamp. 
Returns a human-readable version of the remaining duration between datetime.utcnow() and an expiry. - Unlike `humanize_delta`, this function will force the `precision` to be `seconds` by not passing it. - `max_units` specifies the maximum number of units of time to include (e.g. 1 may include days but not hours). - By default, max_units is 2. + Similar to time_since, except that this function doesn't error on a null input + and return null if the expiry is in the paste """ if not expiry: return None - now = now or datetime.datetime.utcnow() + now = datetime.datetime.utcnow() since = dateutil.parser.isoparse(expiry).replace(tzinfo=None, microsecond=0) if since < now: return None - return humanize_delta(relativedelta(since, now), max_units=max_units) + return discord_timestamp(since, TimestampFormats.RELATIVE) diff --git a/bot/utils/webhooks.py b/bot/utils/webhooks.py index 66f82ec66..9c916b63a 100644 --- a/bot/utils/webhooks.py +++ b/bot/utils/webhooks.py @@ -1,12 +1,12 @@ -import logging from typing import Optional import discord from discord import Embed +from bot.log import get_logger from bot.utils.messages import sub_clyde -log = logging.getLogger(__name__) +log = get_logger(__name__) async def send_webhook( diff --git a/config-default.yml b/config-default.yml index 48fd7c47e..b61d9c99c 100644 --- a/config-default.yml +++ b/config-default.yml @@ -1,3 +1,6 @@ +debug: !ENV ["BOT_DEBUG", "true"] + + bot: prefix: "!" sentry_dsn: !ENV "BOT_SENTRY_DSN" @@ -141,7 +144,10 @@ guild: logs: &LOGS 468520609152892958 moderators: &MODS_CATEGORY 749736277464842262 modmail: &MODMAIL 714494672835444826 + appeals: &APPEALS 890331800025563216 + appeals2: &APPEALS2 895417395261341766 voice: 356013253765234688 + summer_code_jam: 861692638540857384 channels: # Public announcement and news channels @@ -153,9 +159,10 @@ guild: reddit: &REDDIT_CHANNEL 458224812528238616 # Development - dev_contrib: &DEV_CONTRIB 635950537262759947 - dev_core: &DEV_CORE 411200599653351425 - dev_log: &DEV_LOG 622895325144940554 + dev_contrib: &DEV_CONTRIB 635950537262759947 + dev_core: &DEV_CORE 411200599653351425 + dev_voting: &DEV_CORE_VOTING 839162966519447552 + dev_log: &DEV_LOG 622895325144940554 # Discussion meta: 429409067623251969 @@ -188,6 +195,7 @@ guild: bot_commands: &BOT_CMD 267659945086812160 esoteric: 470884583684964352 voice_gate: 764802555427029012 + code_jam_planning: 490217981872177157 # Staff admins: &ADMINS 365960823622991872 @@ -198,6 +206,7 @@ guild: incidents: 714214212200562749 incidents_archive: 720668923636351037 mod_alerts: 473092532147060736 + mods: &MODS 305126844661760000 nominations: 822920136150745168 nomination_voting: 822853512709931008 organisation: &ORGANISATION 551789653284356126 @@ -211,34 +220,37 @@ guild: # Voice Channels admins_voice: &ADMINS_VOICE 500734494840717332 - code_help_voice_1: 751592231726481530 - code_help_voice_2: 764232549840846858 - general_voice: 751591688538947646 + code_help_voice_0: 751592231726481530 + code_help_voice_1: 764232549840846858 + general_voice_0: 751591688538947646 + general_voice_1: 799641437645701151 staff_voice: &STAFF_VOICE 412375055910043655 # Voice Chat - code_help_chat_1: 755154969761677312 - code_help_chat_2: 766330079135268884 + code_help_chat_0: 755154969761677312 + code_help_chat_1: 766330079135268884 staff_voice_chat: 541638762007101470 - voice_chat: 412357430186344448 + voice_chat_0: 412357430186344448 + voice_chat_1: 799647045886541885 # Watch big_brother_logs: &BB_LOGS 468507907357409333 - talent_pool: &TALENT_POOL 534321732593647616 
moderation_categories: - *MODS_CATEGORY - *MODMAIL - *LOGS + - *APPEALS + - *APPEALS2 moderation_channels: - *ADMINS - *ADMIN_SPAM + - *MODS - # Modlog cog ignores events which occur in these channels + # Modlog cog explicitly ignores events which occur in these channels. + # This is on top of implicitly ignoring events in channels that the mod team cannot view. modlog_blacklist: - - *ADMINS - - *ADMINS_VOICE - *ATTACH_LOG - *MESSAGE_LOG - *MOD_LOG @@ -254,7 +266,7 @@ guild: contributors: 295488872404484098 help_cooldown: 699189276025421825 muted: &MUTED_ROLE 277914926603829249 - partners: 323426753857191936 + partners: &PY_PARTNER_ROLE 323426753857191936 python_community: &PY_COMMUNITY_ROLE 458226413825294336 sprinters: &SPRINTERS 758422482289426471 voice_verified: 764802720779337729 @@ -262,8 +274,10 @@ guild: # Staff admins: &ADMINS_ROLE 267628507062992896 core_developers: 587606783669829632 + code_jam_event_team: 787816728474288181 devops: 409416496733880320 domain_leads: 807415650778742785 + events_lead: 778361735739998228 helpers: &HELPERS_ROLE 267630620367257601 moderators: &MODS_ROLE 831776746206265384 mod_team: &MOD_TEAM_ROLE 267629731250176001 @@ -272,7 +286,6 @@ guild: # Code Jam jammers: 737249140966162473 - team_leaders: 737250302834638889 # Streaming video: 764245844798079016 @@ -295,7 +308,6 @@ guild: duck_pond: 637821475327311927 incidents_archive: 720671599790915702 python_news: &PYNEWS_WEBHOOK 704381182279942324 - talent_pool: 569145364800602132 filter: @@ -326,7 +338,6 @@ filter: - *MESSAGE_LOG - *MOD_LOG - *STAFF_LOUNGE - - *TALENT_POOL role_whitelist: - *ADMINS_ROLE @@ -335,6 +346,7 @@ filter: - *OWNERS_ROLE - *PY_COMMUNITY_ROLE - *SPRINTERS + - *PY_PARTNER_ROLE keys: @@ -347,14 +359,14 @@ urls: connect_max_retries: 3 connect_cooldown: 5 site: &DOMAIN "pythondiscord.com" - site_api: &API "pydis-api.default.svc.cluster.local" + site_api: &API "site.default.svc.cluster.local/api" site_api_schema: "http://" site_paste: &PASTE !JOIN ["paste.", *DOMAIN] site_schema: &SCHEMA "https://" - site_staff: &STAFF !JOIN ["staff.", *DOMAIN] + site_staff: &STAFF !JOIN [*SCHEMA, *DOMAIN, "/staff"] paste_service: !JOIN [*SCHEMA, *PASTE, "/{key}"] - site_logs_view: !JOIN [*SCHEMA, *STAFF, "/bot/logs"] + site_logs_view: !JOIN [*STAFF, "/bot/logs"] # Snekbox snekbox_eval_api: "http://snekbox.default.svc.cluster.local/eval" @@ -369,6 +381,8 @@ urls: anti_spam: + cache_size: 100 + # Clean messages that violate a rule. 
clean_offending: true ping_everyone: true @@ -396,7 +410,7 @@ anti_spam: chars: interval: 5 - max: 3_000 + max: 4_200 discord_emojis: interval: 10 @@ -424,14 +438,13 @@ anti_spam: max: 3 - metabase: - username: !ENV "METABASE_USERNAME" - password: !ENV "METABASE_PASSWORD" - url: "http://metabase.default.svc.cluster.local/api" + username: !ENV "METABASE_USERNAME" + password: !ENV "METABASE_PASSWORD" + base_url: "http://metabase.default.svc.cluster.local" + public_url: "https://metabase.pythondiscord.com" # 14 days, see https://www.metabase.com/docs/latest/operations-guide/environment-variables.html#max_session_age - max_session_age: 20160 - + max_session_age: 20160 big_brother: diff --git a/docker-compose.yml b/docker-compose.yml index 1761d8940..b3ca6baa4 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -18,11 +18,16 @@ services: postgres: << : *logging << : *restart_policy - image: postgres:12-alpine + image: postgres:13-alpine environment: POSTGRES_DB: pysite POSTGRES_PASSWORD: pysite POSTGRES_USER: pysite + healthcheck: + test: ["CMD-SHELL", "pg_isready -U pysite"] + interval: 2s + timeout: 1s + retries: 5 redis: << : *logging @@ -31,6 +36,21 @@ services: ports: - "127.0.0.1:6379:6379" + metricity: + << : *logging + restart: on-failure # USE_METRICITY=false will stop the container, so this ensures it only restarts on error + depends_on: + postgres: + condition: service_healthy + image: ghcr.io/python-discord/metricity:latest + env_file: + - .env + environment: + DATABASE_URI: postgres://pysite:pysite@postgres/metricity + USE_METRICITY: ${USE_METRICITY-false} + volumes: + - .:/tmp/bot:ro + snekbox: << : *logging << : *restart_policy @@ -56,7 +76,7 @@ services: - "127.0.0.1:8000:8000" tty: true depends_on: - - postgres + - metricity environment: DATABASE_URL: postgres://pysite:pysite@postgres:5432/pysite METRICITY_DB_URL: postgres://pysite:pysite@postgres:5432/metricity diff --git a/poetry.lock b/poetry.lock index ba8b7af4b..16c599bd1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -44,18 +44,6 @@ yarl = ">=1.0,<2.0" speedups = ["aiodns", "brotlipy", "cchardet"] [[package]] -name = "aioping" -version = "0.3.1" -description = "Asyncio ping implementation" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -aiodns = "*" -async-timeout = "*" - -[[package]] name = "aioredis" version = "1.3.1" description = "asyncio (PEP 3156) Redis support" @@ -83,14 +71,6 @@ yarl = "*" develop = ["aiomisc (>=11.0,<12.0)", "async-generator", "coverage (!=4.3)", "coveralls", "pylava", "pytest", "pytest-cov", "tox (>=2.4)"] [[package]] -name = "appdirs" -version = "1.4.4" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" -optional = false -python-versions = "*" - -[[package]] name = "arrow" version = "1.0.3" description = "Better dates & times for Python" @@ -125,6 +105,14 @@ optional = false python-versions = ">=3.5.3" [[package]] +name = "atomicwrites" +version = "1.4.0" +description = "Atomic file writes." 
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] name = "attrs" version = "21.2.0" description = "Classes Without Boilerplate" @@ -139,15 +127,27 @@ tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)" tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] [[package]] +name = "backports.entry-points-selectable" +version = "1.1.0" +description = "Compatibility shim providing selectable entry points for older implementations" +category = "dev" +optional = false +python-versions = ">=2.7" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=4.6)", "pytest-flake8", "pytest-cov", "pytest-black (>=0.3.7)", "pytest-mypy", "pytest-checkdocs (>=2.4)", "pytest-enabler (>=1.0.1)"] + +[[package]] name = "beautifulsoup4" -version = "4.9.3" +version = "4.10.0" description = "Screen-scraping library" category = "main" optional = false -python-versions = "*" +python-versions = ">3.0.0" [package.dependencies] -soupsieve = {version = ">1.2", markers = "python_version >= \"3.0\""} +soupsieve = ">1.2" [package.extras] html5lib = ["html5lib"] @@ -155,7 +155,7 @@ lxml = ["lxml"] [[package]] name = "certifi" -version = "2020.12.5" +version = "2021.10.8" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false @@ -163,7 +163,7 @@ python-versions = "*" [[package]] name = "cffi" -version = "1.14.5" +version = "1.15.0" description = "Foreign Function Interface for Python calling C code." category = "main" optional = false @@ -174,7 +174,7 @@ pycparser = "*" [[package]] name = "cfgv" -version = "3.2.0" +version = "3.3.1" description = "Validate configuration and produce human readable error messages." category = "dev" optional = false @@ -189,6 +189,17 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] +name = "charset-normalizer" +version = "2.0.7" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "dev" +optional = false +python-versions = ">=3.5.0" + +[package.extras] +unicode_backport = ["unicodedata2"] + +[[package]] name = "colorama" version = "0.4.4" description = "Cross-platform colored terminal text." 
@@ -253,22 +264,26 @@ murmur = ["mmh3"] [[package]] name = "discord.py" -version = "1.6.0" +version = "2.0.0a0" description = "A Python wrapper for the Discord API" category = "main" optional = false -python-versions = ">=3.5.3" +python-versions = ">=3.8.0" [package.dependencies] aiohttp = ">=3.6.0,<3.8.0" [package.extras] -docs = ["sphinx (==3.0.3)", "sphinxcontrib-trio (==1.1.2)", "sphinxcontrib-websupport"] +docs = ["sphinx (==4.0.2)", "sphinxcontrib-trio (==1.1.2)", "sphinxcontrib-websupport"] +speed = ["orjson (>=3.5.4)"] voice = ["PyNaCl (>=1.3.0,<1.5)"] +[package.source] +type = "url" +url = "https://github.com/Rapptz/discord.py/archive/45d498c1b76deaf3b394d17ccf56112fa691d160.zip" [[package]] name = "distlib" -version = "0.3.1" +version = "0.3.3" description = "Distribution utilities" category = "dev" optional = false @@ -294,14 +309,26 @@ python-versions = "*" dev = ["pytest", "coverage", "coveralls"] [[package]] +name = "execnet" +version = "1.9.0" +description = "execnet: rapid multi-Python deployment" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.extras] +testing = ["pre-commit"] + +[[package]] name = "fakeredis" -version = "1.5.0" +version = "1.6.1" description = "Fake implementation of redis API for testing purposes." category = "main" optional = false python-versions = ">=3.5" [package.dependencies] +packaging = "*" redis = "<3.6.0" six = ">=1.12" sortedcontainers = "*" @@ -312,7 +339,7 @@ lua = ["lupa"] [[package]] name = "feedparser" -version = "6.0.2" +version = "6.0.8" description = "Universal feed parser, handles RSS 0.9x, RSS 1.0, RSS 2.0, CDF, Atom 0.3, and Atom 1.0 feeds" category = "main" optional = false @@ -323,11 +350,15 @@ sgmllib3k = "*" [[package]] name = "filelock" -version = "3.0.12" +version = "3.3.1" description = "A platform independent file lock." category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.6" + +[package.extras] +docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"] +testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"] [[package]] name = "flake8" @@ -344,14 +375,14 @@ pyflakes = ">=2.3.0,<2.4.0" [[package]] name = "flake8-annotations" -version = "2.6.2" +version = "2.7.0" description = "Flake8 Type Annotation Checks" category = "dev" optional = false -python-versions = ">=3.6.1,<4.0.0" +python-versions = ">=3.6.2,<4.0.0" [package.dependencies] -flake8 = ">=3.7,<4.0" +flake8 = ">=3.7,<5.0" [[package]] name = "flake8-bugbear" @@ -381,15 +412,20 @@ flake8 = ">=3" pydocstyle = ">=2.1" [[package]] -name = "flake8-import-order" -version = "0.18.1" -description = "Flake8 and pylama plugin that checks the ordering of import statements." +name = "flake8-isort" +version = "4.1.1" +description = "flake8 plugin that integrates isort ." category = "dev" optional = false python-versions = "*" [package.dependencies] -pycodestyle = "*" +flake8 = ">=3.2.1,<5" +isort = ">=4.3.5,<6" +testfixtures = ">=6.8.0,<7" + +[package.extras] +test = ["pytest-cov"] [[package]] name = "flake8-polyfill" @@ -415,14 +451,14 @@ flake8 = "*" [[package]] name = "flake8-tidy-imports" -version = "4.3.0" +version = "4.5.0" description = "A flake8 plugin that helps you write tidier imports." 
category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] -flake8 = ">=3.0,<3.2.0 || >3.2.0,<4" +flake8 = ">=3.8.0,<5" [[package]] name = "flake8-todo" @@ -436,17 +472,6 @@ python-versions = "*" pycodestyle = ">=2.0.0,<3.0.0" [[package]] -name = "fuzzywuzzy" -version = "0.18.0" -description = "Fuzzy string matching in python" -category = "main" -optional = false -python-versions = "*" - -[package.extras] -speedup = ["python-levenshtein (>=0.12)"] - -[[package]] name = "hiredis" version = "2.0.0" description = "Python wrapper for hiredis" @@ -456,18 +481,18 @@ python-versions = ">=3.6" [[package]] name = "humanfriendly" -version = "9.1" +version = "10.0" description = "Human friendly output for text interfaces using Python" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.dependencies] -pyreadline = {version = "*", markers = "sys_platform == \"win32\""} +pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""} [[package]] name = "identify" -version = "2.2.4" +version = "2.3.0" description = "File identification library for Python" category = "dev" optional = false @@ -478,11 +503,33 @@ license = ["editdistance-s"] [[package]] name = "idna" -version = "3.1" +version = "3.3" description = "Internationalized Domain Names in Applications (IDNA)" category = "main" optional = false -python-versions = ">=3.4" +python-versions = ">=3.5" + +[[package]] +name = "iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "isort" +version = "5.9.3" +description = "A Python utility / library to sort Python imports." +category = "dev" +optional = false +python-versions = ">=3.6.1,<4.0" + +[package.extras] +pipfile_deprecated_finder = ["pipreqs", "requirementslib"] +requirements_deprecated_finder = ["pipreqs", "pip-api"] +colors = ["colorama (>=0.4.3,<0.5.0)"] +plugins = ["setuptools"] [[package]] name = "lxml" @@ -520,7 +567,7 @@ python-versions = "*" [[package]] name = "more-itertools" -version = "8.7.0" +version = "8.10.0" description = "More routines for operating on iterables, beyond itertools" category = "main" optional = false @@ -536,7 +583,7 @@ python-versions = ">=3.5" [[package]] name = "multidict" -version = "5.1.0" +version = "5.2.0" description = "multidict implementation" category = "main" optional = false @@ -559,6 +606,17 @@ optional = false python-versions = ">=3.5" [[package]] +name = "packaging" +version = "21.0" +description = "Core utilities for Python packages" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pyparsing = ">=2.0.2" + +[[package]] name = "pamqp" version = "2.3.0" description = "RabbitMQ Focused AMQP low-level library" @@ -571,18 +629,57 @@ codegen = ["lxml"] [[package]] name = "pep8-naming" -version = "0.11.1" +version = "0.12.1" description = "Check PEP-8 naming conventions, plugin for flake8" category = "dev" optional = false python-versions = "*" [package.dependencies] +flake8 = ">=3.9.1" flake8-polyfill = ">=1.0.2,<2" [[package]] +name = "pip-licenses" +version = "3.5.3" +description = "Dump the software license list of Python packages installed with pip." 
+category = "dev" +optional = false +python-versions = "~=3.6" + +[package.dependencies] +PTable = "*" + +[package.extras] +test = ["docutils", "pytest-cov", "pytest-pycodestyle", "pytest-runner"] + +[[package]] +name = "platformdirs" +version = "2.4.0" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] +test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] + +[[package]] +name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] name = "pre-commit" -version = "2.12.1" +version = "2.15.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." category = "dev" optional = false @@ -608,8 +705,24 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" test = ["ipaddress", "mock", "unittest2", "enum34", "pywin32", "wmi"] [[package]] +name = "ptable" +version = "0.9.2" +description = "A simple Python library for easily displaying tabular data in a visually appealing ASCII table format" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "py" +version = "1.10.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] name = "pycares" -version = "3.2.3" +version = "4.0.0" description = "Python interface for c-ares" category = "main" optional = false @@ -639,7 +752,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pydocstyle" -version = "6.0.0" +version = "6.1.1" description = "Python docstring style checker" category = "dev" optional = false @@ -648,6 +761,9 @@ python-versions = ">=3.6" [package.dependencies] snowballstemmer = "*" +[package.extras] +toml = ["toml"] + [[package]] name = "pyflakes" version = "2.3.1" @@ -657,16 +773,91 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] -name = "pyreadline" -version = "2.1" -description = "A python implmementation of GNU readline." +name = "pyparsing" +version = "2.4.7" +description = "Python parsing module" +category = "main" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "pyreadline3" +version = "3.3" +description = "A python implementation of GNU readline." category = "main" optional = false python-versions = "*" [[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "2.12.1" +description = "Pytest plugin for measuring coverage." 
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +coverage = ">=5.2.1" +pytest = ">=4.6" +toml = "*" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"] + +[[package]] +name = "pytest-forked" +version = "1.3.0" +description = "run tests in isolated forked subprocesses" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +py = "*" +pytest = ">=3.10" + +[[package]] +name = "pytest-xdist" +version = "2.3.0" +description = "pytest xdist plugin for distributed testing and loop-on-failing modes" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +execnet = ">=1.1" +psutil = {version = ">=3.0", optional = true, markers = "extra == \"psutil\""} +pytest = ">=6.0.0" +pytest-forked = "*" + +[package.extras] +psutil = ["psutil (>=3.0)"] +testing = ["filelock"] + +[[package]] name = "python-dateutil" -version = "2.8.1" +version = "2.8.2" description = "Extensions to the standard Python datetime module" category = "main" optional = false @@ -710,6 +901,17 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [[package]] +name = "rapidfuzz" +version = "1.7.1" +description = "rapid fuzzy string matching" +category = "main" +optional = false +python-versions = ">=2.7" + +[package.extras] +full = ["numpy"] + +[[package]] name = "redis" version = "3.5.3" description = "Python client for Redis key-value store" @@ -730,19 +932,25 @@ python-versions = "*" [[package]] name = "requests" -version = "2.15.1" +version = "2.26.0" description = "Python HTTP for Humans." category = "dev" optional = false -python-versions = "*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} +idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} +urllib3 = ">=1.21.1,<1.27" [package.extras] -security = ["cryptography (>=1.3.4)", "idna (>=2.0.0)", "pyOpenSSL (>=0.14)"] socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] [[package]] name = "sentry-sdk" -version = "0.20.3" +version = "1.4.3" description = "Python client for Sentry (https://sentry.io)" category = "main" optional = false @@ -761,6 +969,7 @@ chalice = ["chalice (>=1.16.0)"] django = ["django (>=1.8)"] falcon = ["falcon (>=1.4)"] flask = ["flask (>=0.11)", "blinker (>=1.1)"] +httpx = ["httpx (>=0.16.0)"] pure_eval = ["pure-eval", "executing", "asttokens"] pyspark = ["pyspark (>=2.4.4)"] rq = ["rq (>=0.6)"] @@ -794,7 +1003,7 @@ python-versions = "*" [[package]] name = "sortedcontainers" -version = "2.3.0" +version = "2.4.0" description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" category = "main" optional = false @@ -830,6 +1039,19 @@ psutil = ">=5.7.2,<6.0.0" toml = ">=0.10.0,<0.11.0" [[package]] +name = "testfixtures" +version = "6.18.3" +description = "A collection of helpers and mock objects for unit tests and doc tests." 
+category = "dev" +optional = false +python-versions = "*" + +[package.extras] +build = ["setuptools-git", "wheel", "twine"] +docs = ["sphinx", "zope.component", "sybil", "twisted", "mock", "django (<2)", "django"] +test = ["pytest (>=3.6)", "pytest-cov", "pytest-django", "zope.component", "sybil", "twisted", "mock", "django (<2)", "django"] + +[[package]] name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" @@ -839,7 +1061,7 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "typing-extensions" -version = "3.10.0.0" +version = "3.10.0.2" description = "Backported and Experimental Type Hints for Python 3.5+" category = "main" optional = false @@ -847,38 +1069,39 @@ python-versions = "*" [[package]] name = "urllib3" -version = "1.26.4" +version = "1.26.7" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" [package.extras] +brotli = ["brotlipy (>=0.6.0)"] secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] -brotli = ["brotlipy (>=0.6.0)"] [[package]] name = "virtualenv" -version = "20.4.6" +version = "20.8.1" description = "Virtual Python Environment builder" category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [package.dependencies] -appdirs = ">=1.4.3,<2" +"backports.entry-points-selectable" = ">=1.0.4" distlib = ">=0.3.1,<1" filelock = ">=3.0.0,<4" +platformdirs = ">=2,<3" six = ">=1.9.0,<2" [package.extras] docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=19.9.0rc1)"] -testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)", "xonsh (>=0.9.16)"] +testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"] [[package]] name = "yarl" -version = "1.6.3" +version = "1.7.0" description = "Yet another URL library" category = "main" optional = false @@ -891,7 +1114,7 @@ multidict = ">=4.0" [metadata] lock-version = "1.1" python-versions = "3.9.*" -content-hash = "ece3b915901a62911ff7ff4a616b3972e815c0e1c7097c8994163af13cadde0e" +content-hash = "e37923739c35ef349d57e324579acfe304cc7e6fc20ddc54205fc89f171ae94f" [metadata.files] aio-pika = [ @@ -941,10 +1164,6 @@ aiohttp = [ {file = "aiohttp-3.7.4.post0-cp39-cp39-win_amd64.whl", hash = "sha256:02f46fc0e3c5ac58b80d4d56eb0a7c7d97fcef69ace9326289fb9f1955e65cfe"}, {file = "aiohttp-3.7.4.post0.tar.gz", hash = "sha256:493d3299ebe5f5a7c66b9819eacdcfbbaaf1a8e84911ddffcdc48888497afecf"}, ] -aioping = [ - {file = "aioping-0.3.1-py3-none-any.whl", hash = "sha256:8900ef2f5a589ba0c12aaa9c2d586f5371820d468d21b374ddb47ef5fc8f297c"}, - {file = "aioping-0.3.1.tar.gz", hash = "sha256:f983d86acab3a04c322731ce88d42c55d04d2842565fc8532fe10c838abfd275"}, -] aioredis = [ {file = "aioredis-1.3.1-py3-none-any.whl", hash = "sha256:b61808d7e97b7cd5a92ed574937a079c9387fdadd22bfbfa7ad2fd319ecc26e3"}, {file = "aioredis-1.3.1.tar.gz", hash = 
"sha256:15f8af30b044c771aee6787e5ec24694c048184c7b9e54c3b60c750a4b93273a"}, @@ -953,10 +1172,6 @@ aiormq = [ {file = "aiormq-3.3.1-py3-none-any.whl", hash = "sha256:e584dac13a242589aaf42470fd3006cb0dc5aed6506cbd20357c7ec8bbe4a89e"}, {file = "aiormq-3.3.1.tar.gz", hash = "sha256:8218dd9f7198d6e7935855468326bbacf0089f926c70baa8dd92944cb2496573"}, ] -appdirs = [ - {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, - {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, -] arrow = [ {file = "arrow-1.0.3-py3-none-any.whl", hash = "sha256:3515630f11a15c61dcb4cdd245883270dd334c83f3e639824e65a4b79cc48543"}, {file = "arrow-1.0.3.tar.gz", hash = "sha256:399c9c8ae732270e1aa58ead835a79a40d7be8aa109c579898eb41029b5a231d"}, @@ -969,66 +1184,90 @@ async-timeout = [ {file = "async-timeout-3.0.1.tar.gz", hash = "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f"}, {file = "async_timeout-3.0.1-py3-none-any.whl", hash = "sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3"}, ] +atomicwrites = [ + {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, + {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, +] attrs = [ {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, ] +"backports.entry-points-selectable" = [ + {file = "backports.entry_points_selectable-1.1.0-py2.py3-none-any.whl", hash = "sha256:a6d9a871cde5e15b4c4a53e3d43ba890cc6861ec1332c9c2428c92f977192acc"}, + {file = "backports.entry_points_selectable-1.1.0.tar.gz", hash = "sha256:988468260ec1c196dab6ae1149260e2f5472c9110334e5d51adcb77867361f6a"}, +] beautifulsoup4 = [ - {file = "beautifulsoup4-4.9.3-py2-none-any.whl", hash = "sha256:4c98143716ef1cb40bf7f39a8e3eec8f8b009509e74904ba3a7b315431577e35"}, - {file = "beautifulsoup4-4.9.3-py3-none-any.whl", hash = "sha256:fff47e031e34ec82bf17e00da8f592fe7de69aeea38be00523c04623c04fb666"}, - {file = "beautifulsoup4-4.9.3.tar.gz", hash = "sha256:84729e322ad1d5b4d25f805bfa05b902dd96450f43842c4e99067d5e1369eb25"}, + {file = "beautifulsoup4-4.10.0-py3-none-any.whl", hash = "sha256:9a315ce70049920ea4572a4055bc4bd700c940521d36fc858205ad4fcde149bf"}, + {file = "beautifulsoup4-4.10.0.tar.gz", hash = "sha256:c23ad23c521d818955a4151a67d81580319d4bf548d3d49f4223ae041ff98891"}, ] certifi = [ - {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, - {file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"}, + {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, + {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, ] cffi = [ - {file = "cffi-1.14.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:bb89f306e5da99f4d922728ddcd6f7fcebb3241fc40edebcb7284d7514741991"}, - {file = "cffi-1.14.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:34eff4b97f3d982fb93e2831e6750127d1355a923ebaeeb565407b3d2f8d41a1"}, - {file = 
"cffi-1.14.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:99cd03ae7988a93dd00bcd9d0b75e1f6c426063d6f03d2f90b89e29b25b82dfa"}, - {file = "cffi-1.14.5-cp27-cp27m-win32.whl", hash = "sha256:65fa59693c62cf06e45ddbb822165394a288edce9e276647f0046e1ec26920f3"}, - {file = "cffi-1.14.5-cp27-cp27m-win_amd64.whl", hash = "sha256:51182f8927c5af975fece87b1b369f722c570fe169f9880764b1ee3bca8347b5"}, - {file = "cffi-1.14.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:43e0b9d9e2c9e5d152946b9c5fe062c151614b262fda2e7b201204de0b99e482"}, - {file = "cffi-1.14.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:cbde590d4faaa07c72bf979734738f328d239913ba3e043b1e98fe9a39f8b2b6"}, - {file = "cffi-1.14.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:5de7970188bb46b7bf9858eb6890aad302577a5f6f75091fd7cdd3ef13ef3045"}, - {file = "cffi-1.14.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:a465da611f6fa124963b91bf432d960a555563efe4ed1cc403ba5077b15370aa"}, - {file = "cffi-1.14.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:d42b11d692e11b6634f7613ad8df5d6d5f8875f5d48939520d351007b3c13406"}, - {file = "cffi-1.14.5-cp35-cp35m-win32.whl", hash = "sha256:72d8d3ef52c208ee1c7b2e341f7d71c6fd3157138abf1a95166e6165dd5d4369"}, - {file = "cffi-1.14.5-cp35-cp35m-win_amd64.whl", hash = "sha256:29314480e958fd8aab22e4a58b355b629c59bf5f2ac2492b61e3dc06d8c7a315"}, - {file = "cffi-1.14.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3d3dd4c9e559eb172ecf00a2a7517e97d1e96de2a5e610bd9b68cea3925b4892"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:48e1c69bbacfc3d932221851b39d49e81567a4d4aac3b21258d9c24578280058"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:69e395c24fc60aad6bb4fa7e583698ea6cc684648e1ffb7fe85e3c1ca131a7d5"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:9e93e79c2551ff263400e1e4be085a1210e12073a31c2011dbbda14bda0c6132"}, - {file = "cffi-1.14.5-cp36-cp36m-win32.whl", hash = "sha256:58e3f59d583d413809d60779492342801d6e82fefb89c86a38e040c16883be53"}, - {file = "cffi-1.14.5-cp36-cp36m-win_amd64.whl", hash = "sha256:005a36f41773e148deac64b08f233873a4d0c18b053d37da83f6af4d9087b813"}, - {file = "cffi-1.14.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2894f2df484ff56d717bead0a5c2abb6b9d2bf26d6960c4604d5c48bbc30ee73"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0857f0ae312d855239a55c81ef453ee8fd24136eaba8e87a2eceba644c0d4c06"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:cd2868886d547469123fadc46eac7ea5253ea7fcb139f12e1dfc2bbd406427d1"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:35f27e6eb43380fa080dccf676dece30bef72e4a67617ffda586641cd4508d49"}, - {file = "cffi-1.14.5-cp37-cp37m-win32.whl", hash = "sha256:9ff227395193126d82e60319a673a037d5de84633f11279e336f9c0f189ecc62"}, - {file = "cffi-1.14.5-cp37-cp37m-win_amd64.whl", hash = "sha256:9cf8022fb8d07a97c178b02327b284521c7708d7c71a9c9c355c178ac4bbd3d4"}, - {file = "cffi-1.14.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8b198cec6c72df5289c05b05b8b0969819783f9418e0409865dac47288d2a053"}, - {file = "cffi-1.14.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:ad17025d226ee5beec591b52800c11680fca3df50b8b29fe51d882576e039ee0"}, - {file = "cffi-1.14.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6c97d7350133666fbb5cf4abdc1178c812cb205dc6f41d174a7b0f18fb93337e"}, - {file = "cffi-1.14.5-cp38-cp38-manylinux2014_aarch64.whl", hash = 
"sha256:8ae6299f6c68de06f136f1f9e69458eae58f1dacf10af5c17353eae03aa0d827"}, - {file = "cffi-1.14.5-cp38-cp38-win32.whl", hash = "sha256:b85eb46a81787c50650f2392b9b4ef23e1f126313b9e0e9013b35c15e4288e2e"}, - {file = "cffi-1.14.5-cp38-cp38-win_amd64.whl", hash = "sha256:1f436816fc868b098b0d63b8920de7d208c90a67212546d02f84fe78a9c26396"}, - {file = "cffi-1.14.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1071534bbbf8cbb31b498d5d9db0f274f2f7a865adca4ae429e147ba40f73dea"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:9de2e279153a443c656f2defd67769e6d1e4163952b3c622dcea5b08a6405322"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:6e4714cc64f474e4d6e37cfff31a814b509a35cb17de4fb1999907575684479c"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:158d0d15119b4b7ff6b926536763dc0714313aa59e320ddf787502c70c4d4bee"}, - {file = "cffi-1.14.5-cp39-cp39-win32.whl", hash = "sha256:afb29c1ba2e5a3736f1c301d9d0abe3ec8b86957d04ddfa9d7a6a42b9367e396"}, - {file = "cffi-1.14.5-cp39-cp39-win_amd64.whl", hash = "sha256:f2d45f97ab6bb54753eab54fffe75aaf3de4ff2341c9daee1987ee1837636f1d"}, - {file = "cffi-1.14.5.tar.gz", hash = "sha256:fd78e5fee591709f32ef6edb9a015b4aa1a5022598e36227500c8f4e02328d9c"}, + {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"}, + {file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"}, + {file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"}, + {file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"}, + {file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"}, + {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"}, + {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"}, + {file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"}, + {file = "cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"}, + {file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"}, + {file = "cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"}, + {file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"}, + {file = "cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"}, + {file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"}, + {file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"}, + {file = "cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"}, + {file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"}, + {file = "cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"}, + {file = "cffi-1.15.0-cp38-cp38-win32.whl", hash = 
"sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"}, + {file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"}, + {file = "cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"}, + {file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"}, + {file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"}, + {file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"}, + {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"}, ] cfgv = [ - {file = "cfgv-3.2.0-py2.py3-none-any.whl", hash = "sha256:32e43d604bbe7896fe7c248a9c2276447dbef840feb28fe20494f62af110211d"}, - {file = "cfgv-3.2.0.tar.gz", hash = "sha256:cf22deb93d4bcf92f345a5c3cd39d3d41d6340adc60c78bbbd6588c384fda6a1"}, + {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, + {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, ] chardet = [ {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, ] +charset-normalizer = [ + {file = "charset-normalizer-2.0.7.tar.gz", hash = "sha256:e019de665e2bcf9c2b64e2e5aa025fa991da8720daa3c1138cadd2fd1856aed0"}, + {file = "charset_normalizer-2.0.7-py3-none-any.whl", hash = "sha256:f7af805c321bfa1ce6714c51f254e0d5bb5e5834039bc17db7ebe3a4cec9492b"}, +] colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, @@ -1099,13 +1338,10 @@ deepdiff = [ {file = "deepdiff-4.3.2-py3-none-any.whl", hash = "sha256:59fc1e3e7a28dd0147b0f2b00e3e27181f0f0ef4286b251d5f214a5bcd9a9bc4"}, {file = "deepdiff-4.3.2.tar.gz", hash = "sha256:91360be1d9d93b1d9c13ae9c5048fa83d9cff17a88eb30afaa0d7ff2d0fee17d"}, ] -"discord.py" = [ - {file = "discord.py-1.6.0-py3-none-any.whl", hash = "sha256:3df148daf6fbcc7ab5b11042368a3cd5f7b730b62f09fb5d3cbceff59bcfbb12"}, - {file = "discord.py-1.6.0.tar.gz", hash = "sha256:ba8be99ff1b8c616f7b6dcb700460d0222b29d4c11048e74366954c465fdd05f"}, -] 
+"discord.py" = [] distlib = [ - {file = "distlib-0.3.1-py2.py3-none-any.whl", hash = "sha256:8c09de2c67b3e7deef7184574fc060ab8a793e7adbb183d942c389c8b13c52fb"}, - {file = "distlib-0.3.1.zip", hash = "sha256:edf6116872c863e1aa9d5bb7cb5e05a022c519a4594dc703843343a9ddd9bff1"}, + {file = "distlib-0.3.3-py2.py3-none-any.whl", hash = "sha256:c8b54e8454e5bf6237cc84c20e8264c3e991e824ef27e8f1e81049867d861e31"}, + {file = "distlib-0.3.3.zip", hash = "sha256:d982d0751ff6eaaab5e2ec8e691d949ee80eddf01a62eaa96ddb11531fe16b05"}, ] docopt = [ {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, @@ -1113,25 +1349,29 @@ docopt = [ emoji = [ {file = "emoji-0.6.0.tar.gz", hash = "sha256:e42da4f8d648f8ef10691bc246f682a1ec6b18373abfd9be10ec0b398823bd11"}, ] +execnet = [ + {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"}, + {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"}, +] fakeredis = [ - {file = "fakeredis-1.5.0-py3-none-any.whl", hash = "sha256:e0416e4941cecd3089b0d901e60c8dc3c944f6384f5e29e2261c0d3c5fa99669"}, - {file = "fakeredis-1.5.0.tar.gz", hash = "sha256:1ac0cef767c37f51718874a33afb5413e69d132988cb6a80c6e6dbeddf8c7623"}, + {file = "fakeredis-1.6.1-py3-none-any.whl", hash = "sha256:5eb1516f1fe1813e9da8f6c482178fc067af09f53de587ae03887ef5d9d13024"}, + {file = "fakeredis-1.6.1.tar.gz", hash = "sha256:0d06a9384fb79da9f2164ce96e34eb9d4e2ea46215070805ea6fd3c174590b47"}, ] feedparser = [ - {file = "feedparser-6.0.2-py3-none-any.whl", hash = "sha256:f596c4b34fb3e2dc7e6ac3a8191603841e8d5d267210064e94d4238737452ddd"}, - {file = "feedparser-6.0.2.tar.gz", hash = "sha256:1b00a105425f492f3954fd346e5b524ca9cef3a4bbf95b8809470e9857aa1074"}, + {file = "feedparser-6.0.8-py3-none-any.whl", hash = "sha256:1b7f57841d9cf85074deb316ed2c795091a238adb79846bc46dccdaf80f9c59a"}, + {file = "feedparser-6.0.8.tar.gz", hash = "sha256:5ce0410a05ab248c8c7cfca3a0ea2203968ee9ff4486067379af4827a59f9661"}, ] filelock = [ - {file = "filelock-3.0.12-py3-none-any.whl", hash = "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836"}, - {file = "filelock-3.0.12.tar.gz", hash = "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59"}, + {file = "filelock-3.3.1-py3-none-any.whl", hash = "sha256:2b5eb3589e7fdda14599e7eb1a50e09b4cc14f34ed98b8ba56d33bfaafcbef2f"}, + {file = "filelock-3.3.1.tar.gz", hash = "sha256:34a9f35f95c441e7b38209775d6e0337f9a3759f3565f6c5798f19618527c76f"}, ] flake8 = [ {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, ] flake8-annotations = [ - {file = "flake8-annotations-2.6.2.tar.gz", hash = "sha256:0d6cd2e770b5095f09689c9d84cc054c51b929c41a68969ea1beb4b825cac515"}, - {file = "flake8_annotations-2.6.2-py3-none-any.whl", hash = "sha256:d10c4638231f8a50c0a597c4efce42bd7b7d85df4f620a0ddaca526138936a4f"}, + {file = "flake8-annotations-2.7.0.tar.gz", hash = "sha256:52e53c05b0c06cac1c2dec192ea2c36e85081238add3bd99421d56f574b9479b"}, + {file = "flake8_annotations-2.7.0-py3-none-any.whl", hash = "sha256:3edfbbfb58e404868834fe6ec3eaf49c139f64f0701259f707d043185545151e"}, ] flake8-bugbear = [ {file = "flake8-bugbear-20.11.1.tar.gz", hash = 
"sha256:528020129fea2dea33a466b9d64ab650aa3e5f9ffc788b70ea4bc6cf18283538"}, @@ -1141,9 +1381,9 @@ flake8-docstrings = [ {file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"}, {file = "flake8_docstrings-1.6.0-py2.py3-none-any.whl", hash = "sha256:99cac583d6c7e32dd28bbfbef120a7c0d1b6dde4adb5a9fd441c4227a6534bde"}, ] -flake8-import-order = [ - {file = "flake8-import-order-0.18.1.tar.gz", hash = "sha256:a28dc39545ea4606c1ac3c24e9d05c849c6e5444a50fb7e9cdd430fc94de6e92"}, - {file = "flake8_import_order-0.18.1-py2.py3-none-any.whl", hash = "sha256:90a80e46886259b9c396b578d75c749801a41ee969a235e163cfe1be7afd2543"}, +flake8-isort = [ + {file = "flake8-isort-4.1.1.tar.gz", hash = "sha256:d814304ab70e6e58859bc5c3e221e2e6e71c958e7005239202fee19c24f82717"}, + {file = "flake8_isort-4.1.1-py3-none-any.whl", hash = "sha256:c4e8b6dcb7be9b71a02e6e5d4196cefcef0f3447be51e82730fb336fff164949"}, ] flake8-polyfill = [ {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"}, @@ -1154,16 +1394,12 @@ flake8-string-format = [ {file = "flake8_string_format-0.3.0-py2.py3-none-any.whl", hash = "sha256:812ff431f10576a74c89be4e85b8e075a705be39bc40c4b4278b5b13e2afa9af"}, ] flake8-tidy-imports = [ - {file = "flake8-tidy-imports-4.3.0.tar.gz", hash = "sha256:e66d46f58ed108f36da920e7781a728dc2d8e4f9269e7e764274105700c0a90c"}, - {file = "flake8_tidy_imports-4.3.0-py3-none-any.whl", hash = "sha256:d6e64cb565ca9474d13d5cb3f838b8deafb5fed15906998d4a674daf55bd6d89"}, + {file = "flake8-tidy-imports-4.5.0.tar.gz", hash = "sha256:ac637961d0f319012d099e49619f8c928e3221f74e00fe6eb89513bc64c40adb"}, + {file = "flake8_tidy_imports-4.5.0-py3-none-any.whl", hash = "sha256:87eed94ae6a2fda6a5918d109746feadf1311e0eb8274ab7a7920f6db00a41c9"}, ] flake8-todo = [ {file = "flake8-todo-0.7.tar.gz", hash = "sha256:6e4c5491ff838c06fe5a771b0e95ee15fc005ca57196011011280fc834a85915"}, ] -fuzzywuzzy = [ - {file = "fuzzywuzzy-0.18.0-py2.py3-none-any.whl", hash = "sha256:928244b28db720d1e0ee7587acf660ea49d7e4c632569cad4f1cd7e68a5f0993"}, - {file = "fuzzywuzzy-0.18.0.tar.gz", hash = "sha256:45016e92264780e58972dca1b3d939ac864b78437422beecebb3095f8efd00e8"}, -] hiredis = [ {file = "hiredis-2.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b4c8b0bc5841e578d5fb32a16e0c305359b987b850a06964bd5a62739d688048"}, {file = "hiredis-2.0.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0adea425b764a08270820531ec2218d0508f8ae15a448568109ffcae050fee26"}, @@ -1208,16 +1444,24 @@ hiredis = [ {file = "hiredis-2.0.0.tar.gz", hash = "sha256:81d6d8e39695f2c37954d1011c0480ef7cf444d4e3ae24bc5e89ee5de360139a"}, ] humanfriendly = [ - {file = "humanfriendly-9.1-py2.py3-none-any.whl", hash = "sha256:d5c731705114b9ad673754f3317d9fa4c23212f36b29bdc4272a892eafc9bc72"}, - {file = "humanfriendly-9.1.tar.gz", hash = "sha256:066562956639ab21ff2676d1fda0b5987e985c534fc76700a19bd54bcb81121d"}, + {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"}, + {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, ] identify = [ - {file = "identify-2.2.4-py2.py3-none-any.whl", hash = "sha256:ad9f3fa0c2316618dc4d840f627d474ab6de106392a4f00221820200f490f5a8"}, - {file = "identify-2.2.4.tar.gz", hash = "sha256:9bcc312d4e2fa96c7abebcdfb1119563b511b5e3985ac52f60d9116277865b2e"}, + {file = 
"identify-2.3.0-py2.py3-none-any.whl", hash = "sha256:d1e82c83d063571bb88087676f81261a4eae913c492dafde184067c584bc7c05"}, + {file = "identify-2.3.0.tar.gz", hash = "sha256:fd08c97f23ceee72784081f1ce5125c8f53a02d3f2716dde79a6ab8f1039fea5"}, ] idna = [ - {file = "idna-3.1-py3-none-any.whl", hash = "sha256:5205d03e7bcbb919cc9c19885f9920d622ca52448306f2377daede5cf3faac16"}, - {file = "idna-3.1.tar.gz", hash = "sha256:c5b02147e01ea9920e6b0a3f1f7bb833612d507592c837a6c49552768f4054e1"}, + {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, + {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, +] +iniconfig = [ + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, +] +isort = [ + {file = "isort-5.9.3-py3-none-any.whl", hash = "sha256:e17d6e2b81095c9db0a03a8025a957f334d6ea30b26f9ec70805411e5c7c81f2"}, + {file = "isort-5.9.3.tar.gz", hash = "sha256:9c2ea1e62d871267b78307fe511c0838ba0da28698c5732d54e2790bf3ba9899"}, ] lxml = [ {file = "lxml-4.6.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:df7c53783a46febb0e70f6b05df2ba104610f2fb0d27023409734a3ecbb78fb2"}, @@ -1276,51 +1520,86 @@ mccabe = [ {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, ] more-itertools = [ - {file = "more-itertools-8.7.0.tar.gz", hash = "sha256:c5d6da9ca3ff65220c3bfd2a8db06d698f05d4d2b9be57e1deb2be5a45019713"}, - {file = "more_itertools-8.7.0-py3-none-any.whl", hash = "sha256:5652a9ac72209ed7df8d9c15daf4e1aa0e3d2ccd3c87f8265a0673cd9cbc9ced"}, + {file = "more-itertools-8.10.0.tar.gz", hash = "sha256:1debcabeb1df793814859d64a81ad7cb10504c24349368ccf214c664c474f41f"}, + {file = "more_itertools-8.10.0-py3-none-any.whl", hash = "sha256:56ddac45541718ba332db05f464bebfb0768110111affd27f66e0051f276fa43"}, ] mslex = [ {file = "mslex-0.3.0-py2.py3-none-any.whl", hash = "sha256:380cb14abf8fabf40e56df5c8b21a6d533dc5cbdcfe42406bbf08dda8f42e42a"}, {file = "mslex-0.3.0.tar.gz", hash = "sha256:4a1ac3f25025cad78ad2fe499dd16d42759f7a3801645399cce5c404415daa97"}, ] multidict = [ - {file = "multidict-5.1.0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f"}, - {file = "multidict-5.1.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:9dd6e9b1a913d096ac95d0399bd737e00f2af1e1594a787e00f7975778c8b2bf"}, - {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281"}, - {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:1ab820665e67373de5802acae069a6a05567ae234ddb129f31d290fc3d1aa56d"}, - {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:9436dc58c123f07b230383083855593550c4d301d2532045a17ccf6eca505f6d"}, - {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:830f57206cc96ed0ccf68304141fec9481a096c4d2e2831f311bde1c404401da"}, - {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:2e68965192c4ea61fff1b81c14ff712fc7dc15d2bd120602e4a3494ea6584224"}, - {file = "multidict-5.1.0-cp36-cp36m-win32.whl", hash = "sha256:2f1a132f1c88724674271d636e6b7351477c27722f2ed789f719f9e3545a3d26"}, - {file = 
"multidict-5.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:3a4f32116f8f72ecf2a29dabfb27b23ab7cdc0ba807e8459e59a93a9be9506f6"}, - {file = "multidict-5.1.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:46c73e09ad374a6d876c599f2328161bcd95e280f84d2060cf57991dec5cfe76"}, - {file = "multidict-5.1.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a"}, - {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:4b186eb7d6ae7c06eb4392411189469e6a820da81447f46c0072a41c748ab73f"}, - {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:3a041b76d13706b7fff23b9fc83117c7b8fe8d5fe9e6be45eee72b9baa75f348"}, - {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:051012ccee979b2b06be928a6150d237aec75dd6bf2d1eeeb190baf2b05abc93"}, - {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:6a4d5ce640e37b0efcc8441caeea8f43a06addace2335bd11151bc02d2ee31f9"}, - {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:5cf3443199b83ed9e955f511b5b241fd3ae004e3cb81c58ec10f4fe47c7dce37"}, - {file = "multidict-5.1.0-cp37-cp37m-win32.whl", hash = "sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5"}, - {file = "multidict-5.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:05c20b68e512166fddba59a918773ba002fdd77800cad9f55b59790030bab632"}, - {file = "multidict-5.1.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:54fd1e83a184e19c598d5e70ba508196fd0bbdd676ce159feb412a4a6664f952"}, - {file = "multidict-5.1.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:0e3c84e6c67eba89c2dbcee08504ba8644ab4284863452450520dad8f1e89b79"}, - {file = "multidict-5.1.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:dc862056f76443a0db4509116c5cd480fe1b6a2d45512a653f9a855cc0517456"}, - {file = "multidict-5.1.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:0e929169f9c090dae0646a011c8b058e5e5fb391466016b39d21745b48817fd7"}, - {file = "multidict-5.1.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:d81eddcb12d608cc08081fa88d046c78afb1bf8107e6feab5d43503fea74a635"}, - {file = "multidict-5.1.0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:585fd452dd7782130d112f7ddf3473ffdd521414674c33876187e101b588738a"}, - {file = "multidict-5.1.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:37e5438e1c78931df5d3c0c78ae049092877e5e9c02dd1ff5abb9cf27a5914ea"}, - {file = "multidict-5.1.0-cp38-cp38-win32.whl", hash = "sha256:07b42215124aedecc6083f1ce6b7e5ec5b50047afa701f3442054373a6deb656"}, - {file = "multidict-5.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:929006d3c2d923788ba153ad0de8ed2e5ed39fdbe8e7be21e2f22ed06c6783d3"}, - {file = "multidict-5.1.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:b797515be8743b771aa868f83563f789bbd4b236659ba52243b735d80b29ed93"}, - {file = "multidict-5.1.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:d5c65bdf4484872c4af3150aeebe101ba560dcfb34488d9a8ff8dbcd21079647"}, - {file = "multidict-5.1.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b47a43177a5e65b771b80db71e7be76c0ba23cc8aa73eeeb089ed5219cdbe27d"}, - {file = "multidict-5.1.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:806068d4f86cb06af37cd65821554f98240a19ce646d3cd24e1c33587f313eb8"}, - {file = "multidict-5.1.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:46dd362c2f045095c920162e9307de5ffd0a1bfbba0a6e990b344366f55a30c1"}, - {file = "multidict-5.1.0-cp39-cp39-manylinux2014_s390x.whl", hash = 
"sha256:ace010325c787c378afd7f7c1ac66b26313b3344628652eacd149bdd23c68841"}, - {file = "multidict-5.1.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda"}, - {file = "multidict-5.1.0-cp39-cp39-win32.whl", hash = "sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80"}, - {file = "multidict-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:7df80d07818b385f3129180369079bd6934cf70469f99daaebfac89dca288359"}, - {file = "multidict-5.1.0.tar.gz", hash = "sha256:25b4e5f22d3a37ddf3effc0710ba692cfc792c2b9edfb9c05aefe823256e84d5"}, + {file = "multidict-5.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3822c5894c72e3b35aae9909bef66ec83e44522faf767c0ad39e0e2de11d3b55"}, + {file = "multidict-5.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:28e6d883acd8674887d7edc896b91751dc2d8e87fbdca8359591a13872799e4e"}, + {file = "multidict-5.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b61f85101ef08cbbc37846ac0e43f027f7844f3fade9b7f6dd087178caedeee7"}, + {file = "multidict-5.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9b668c065968c5979fe6b6fa6760bb6ab9aeb94b75b73c0a9c1acf6393ac3bf"}, + {file = "multidict-5.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:517d75522b7b18a3385726b54a081afd425d4f41144a5399e5abd97ccafdf36b"}, + {file = "multidict-5.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1b4ac3ba7a97b35a5ccf34f41b5a8642a01d1e55454b699e5e8e7a99b5a3acf5"}, + {file = "multidict-5.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:df23c83398715b26ab09574217ca21e14694917a0c857e356fd39e1c64f8283f"}, + {file = "multidict-5.2.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e58a9b5cc96e014ddf93c2227cbdeca94b56a7eb77300205d6e4001805391747"}, + {file = "multidict-5.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f76440e480c3b2ca7f843ff8a48dc82446b86ed4930552d736c0bac507498a52"}, + {file = "multidict-5.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cfde464ca4af42a629648c0b0d79b8f295cf5b695412451716531d6916461628"}, + {file = "multidict-5.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0fed465af2e0eb6357ba95795d003ac0bdb546305cc2366b1fc8f0ad67cc3fda"}, + {file = "multidict-5.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:b70913cbf2e14275013be98a06ef4b412329fe7b4f83d64eb70dce8269ed1e1a"}, + {file = "multidict-5.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a5635bcf1b75f0f6ef3c8a1ad07b500104a971e38d3683167b9454cb6465ac86"}, + {file = "multidict-5.2.0-cp310-cp310-win32.whl", hash = "sha256:77f0fb7200cc7dedda7a60912f2059086e29ff67cefbc58d2506638c1a9132d7"}, + {file = "multidict-5.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:9416cf11bcd73c861267e88aea71e9fcc35302b3943e45e1dbb4317f91a4b34f"}, + {file = "multidict-5.2.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fd77c8f3cba815aa69cb97ee2b2ef385c7c12ada9c734b0f3b32e26bb88bbf1d"}, + {file = "multidict-5.2.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98ec9aea6223adf46999f22e2c0ab6cf33f5914be604a404f658386a8f1fba37"}, + {file = "multidict-5.2.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e5283c0a00f48e8cafcecadebfa0ed1dac8b39e295c7248c44c665c16dc1138b"}, + {file = 
"multidict-5.2.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5f79c19c6420962eb17c7e48878a03053b7ccd7b69f389d5831c0a4a7f1ac0a1"}, + {file = "multidict-5.2.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e4a67f1080123de76e4e97a18d10350df6a7182e243312426d508712e99988d4"}, + {file = "multidict-5.2.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:94b117e27efd8e08b4046c57461d5a114d26b40824995a2eb58372b94f9fca02"}, + {file = "multidict-5.2.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2e77282fd1d677c313ffcaddfec236bf23f273c4fba7cdf198108f5940ae10f5"}, + {file = "multidict-5.2.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:116347c63ba049c1ea56e157fa8aa6edaf5e92925c9b64f3da7769bdfa012858"}, + {file = "multidict-5.2.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:dc3a866cf6c13d59a01878cd806f219340f3e82eed514485e094321f24900677"}, + {file = "multidict-5.2.0-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ac42181292099d91217a82e3fa3ce0e0ddf3a74fd891b7c2b347a7f5aa0edded"}, + {file = "multidict-5.2.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:f0bb0973f42ffcb5e3537548e0767079420aefd94ba990b61cf7bb8d47f4916d"}, + {file = "multidict-5.2.0-cp36-cp36m-win32.whl", hash = "sha256:ea21d4d5104b4f840b91d9dc8cbc832aba9612121eaba503e54eaab1ad140eb9"}, + {file = "multidict-5.2.0-cp36-cp36m-win_amd64.whl", hash = "sha256:e6453f3cbeb78440747096f239d282cc57a2997a16b5197c9bc839099e1633d0"}, + {file = "multidict-5.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d3def943bfd5f1c47d51fd324df1e806d8da1f8e105cc7f1c76a1daf0f7e17b0"}, + {file = "multidict-5.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35591729668a303a02b06e8dba0eb8140c4a1bfd4c4b3209a436a02a5ac1de11"}, + {file = "multidict-5.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8cacda0b679ebc25624d5de66c705bc53dcc7c6f02a7fb0f3ca5e227d80422"}, + {file = "multidict-5.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:baf1856fab8212bf35230c019cde7c641887e3fc08cadd39d32a421a30151ea3"}, + {file = "multidict-5.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a43616aec0f0d53c411582c451f5d3e1123a68cc7b3475d6f7d97a626f8ff90d"}, + {file = "multidict-5.2.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:25cbd39a9029b409167aa0a20d8a17f502d43f2efebfe9e3ac019fe6796c59ac"}, + {file = "multidict-5.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a2cbcfbea6dc776782a444db819c8b78afe4db597211298dd8b2222f73e9cd0"}, + {file = "multidict-5.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3d2d7d1fff8e09d99354c04c3fd5b560fb04639fd45926b34e27cfdec678a704"}, + {file = "multidict-5.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a37e9a68349f6abe24130846e2f1d2e38f7ddab30b81b754e5a1fde32f782b23"}, + {file = "multidict-5.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:637c1896497ff19e1ee27c1c2c2ddaa9f2d134bbb5e0c52254361ea20486418d"}, + {file = "multidict-5.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9815765f9dcda04921ba467957be543423e5ec6a1136135d84f2ae092c50d87b"}, + {file = "multidict-5.2.0-cp37-cp37m-win32.whl", hash = "sha256:8b911d74acdc1fe2941e59b4f1a278a330e9c34c6c8ca1ee21264c51ec9b67ef"}, + {file = 
"multidict-5.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:380b868f55f63d048a25931a1632818f90e4be71d2081c2338fcf656d299949a"}, + {file = "multidict-5.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e7d81ce5744757d2f05fc41896e3b2ae0458464b14b5a2c1e87a6a9d69aefaa8"}, + {file = "multidict-5.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d1d55cdf706ddc62822d394d1df53573d32a7a07d4f099470d3cb9323b721b6"}, + {file = "multidict-5.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4771d0d0ac9d9fe9e24e33bed482a13dfc1256d008d101485fe460359476065"}, + {file = "multidict-5.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da7d57ea65744d249427793c042094c4016789eb2562576fb831870f9c878d9e"}, + {file = "multidict-5.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdd68778f96216596218b4e8882944d24a634d984ee1a5a049b300377878fa7c"}, + {file = "multidict-5.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ecc99bce8ee42dcad15848c7885197d26841cb24fa2ee6e89d23b8993c871c64"}, + {file = "multidict-5.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:067150fad08e6f2dd91a650c7a49ba65085303fcc3decbd64a57dc13a2733031"}, + {file = "multidict-5.2.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:78c106b2b506b4d895ddc801ff509f941119394b89c9115580014127414e6c2d"}, + {file = "multidict-5.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e6c4fa1ec16e01e292315ba76eb1d012c025b99d22896bd14a66628b245e3e01"}, + {file = "multidict-5.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b227345e4186809d31f22087d0265655114af7cda442ecaf72246275865bebe4"}, + {file = "multidict-5.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:06560fbdcf22c9387100979e65b26fba0816c162b888cb65b845d3def7a54c9b"}, + {file = "multidict-5.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7878b61c867fb2df7a95e44b316f88d5a3742390c99dfba6c557a21b30180cac"}, + {file = "multidict-5.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:246145bff76cc4b19310f0ad28bd0769b940c2a49fc601b86bfd150cbd72bb22"}, + {file = "multidict-5.2.0-cp38-cp38-win32.whl", hash = "sha256:c30ac9f562106cd9e8071c23949a067b10211917fdcb75b4718cf5775356a940"}, + {file = "multidict-5.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:f19001e790013ed580abfde2a4465388950728861b52f0da73e8e8a9418533c0"}, + {file = "multidict-5.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c1ff762e2ee126e6f1258650ac641e2b8e1f3d927a925aafcfde943b77a36d24"}, + {file = "multidict-5.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bd6c9c50bf2ad3f0448edaa1a3b55b2e6866ef8feca5d8dbec10ec7c94371d21"}, + {file = "multidict-5.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc66d4016f6e50ed36fb39cd287a3878ffcebfa90008535c62e0e90a7ab713ae"}, + {file = "multidict-5.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9acb76d5f3dd9421874923da2ed1e76041cb51b9337fd7f507edde1d86535d6"}, + {file = "multidict-5.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dfc924a7e946dd3c6360e50e8f750d51e3ef5395c95dc054bc9eab0f70df4f9c"}, + {file = "multidict-5.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32fdba7333eb2351fee2596b756d730d62b5827d5e1ab2f84e6cbb287cc67fe0"}, + {file = "multidict-5.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:b9aad49466b8d828b96b9e3630006234879c8d3e2b0a9d99219b3121bc5cdb17"}, + {file = "multidict-5.2.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:93de39267c4c676c9ebb2057e98a8138bade0d806aad4d864322eee0803140a0"}, + {file = "multidict-5.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f9bef5cff994ca3026fcc90680e326d1a19df9841c5e3d224076407cc21471a1"}, + {file = "multidict-5.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:5f841c4f14331fd1e36cbf3336ed7be2cb2a8f110ce40ea253e5573387db7621"}, + {file = "multidict-5.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:38ba256ee9b310da6a1a0f013ef4e422fca30a685bcbec86a969bd520504e341"}, + {file = "multidict-5.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3bc3b1621b979621cee9f7b09f024ec76ec03cc365e638126a056317470bde1b"}, + {file = "multidict-5.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6ee908c070020d682e9b42c8f621e8bb10c767d04416e2ebe44e37d0f44d9ad5"}, + {file = "multidict-5.2.0-cp39-cp39-win32.whl", hash = "sha256:1c7976cd1c157fa7ba5456ae5d31ccdf1479680dc9b8d8aa28afabc370df42b8"}, + {file = "multidict-5.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:c9631c642e08b9fff1c6255487e62971d8b8e821808ddd013d8ac058087591ac"}, + {file = "multidict-5.2.0.tar.gz", hash = "sha256:0dd1c93edb444b33ba2274b66f63def8a327d607c6c790772f448a53b6ea59ce"}, ] nodeenv = [ {file = "nodeenv-1.6.0-py2.py3-none-any.whl", hash = "sha256:621e6b7076565ddcacd2db0294c0381e01fd28945ab36bcf00f41c5daf63bef7"}, @@ -1329,17 +1608,33 @@ nodeenv = [ ordered-set = [ {file = "ordered-set-4.0.2.tar.gz", hash = "sha256:ba93b2df055bca202116ec44b9bead3df33ea63a7d5827ff8e16738b97f33a95"}, ] +packaging = [ + {file = "packaging-21.0-py3-none-any.whl", hash = "sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14"}, + {file = "packaging-21.0.tar.gz", hash = "sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7"}, +] pamqp = [ {file = "pamqp-2.3.0-py2.py3-none-any.whl", hash = "sha256:2f81b5c186f668a67f165193925b6bfd83db4363a6222f599517f29ecee60b02"}, {file = "pamqp-2.3.0.tar.gz", hash = "sha256:5cd0f5a85e89f20d5f8e19285a1507788031cfca4a9ea6f067e3cf18f5e294e8"}, ] pep8-naming = [ - {file = "pep8-naming-0.11.1.tar.gz", hash = "sha256:a1dd47dd243adfe8a83616e27cf03164960b507530f155db94e10b36a6cd6724"}, - {file = "pep8_naming-0.11.1-py2.py3-none-any.whl", hash = "sha256:f43bfe3eea7e0d73e8b5d07d6407ab47f2476ccaeff6937c84275cd30b016738"}, + {file = "pep8-naming-0.12.1.tar.gz", hash = "sha256:bb2455947757d162aa4cad55dba4ce029005cd1692f2899a21d51d8630ca7841"}, + {file = "pep8_naming-0.12.1-py2.py3-none-any.whl", hash = "sha256:4a8daeaeb33cfcde779309fc0c9c0a68a3bbe2ad8a8308b763c5068f86eb9f37"}, +] +pip-licenses = [ + {file = "pip-licenses-3.5.3.tar.gz", hash = "sha256:f44860e00957b791c6c6005a3328f2d5eaeee96ddb8e7d87d4b0aa25b02252e4"}, + {file = "pip_licenses-3.5.3-py3-none-any.whl", hash = "sha256:59c148d6a03784bf945d232c0dc0e9de4272a3675acaa0361ad7712398ca86ba"}, +] +platformdirs = [ + {file = "platformdirs-2.4.0-py3-none-any.whl", hash = "sha256:8868bbe3c3c80d42f20156f22e7131d2fb321f5bc86a2a345375c6481a67021d"}, + {file = "platformdirs-2.4.0.tar.gz", hash = "sha256:367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2"}, +] +pluggy = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = 
"sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] pre-commit = [ - {file = "pre_commit-2.12.1-py2.py3-none-any.whl", hash = "sha256:70c5ec1f30406250b706eda35e868b87e3e4ba099af8787e3e8b4b01e84f4712"}, - {file = "pre_commit-2.12.1.tar.gz", hash = "sha256:900d3c7e1bf4cf0374bb2893c24c23304952181405b4d88c9c40b72bda1bb8a9"}, + {file = "pre_commit-2.15.0-py2.py3-none-any.whl", hash = "sha256:a4ed01000afcb484d9eb8d504272e642c4c4099bbad3a6b27e519bd6a3e928a6"}, + {file = "pre_commit-2.15.0.tar.gz", hash = "sha256:3c25add78dbdfb6a28a651780d5c311ac40dd17f160eb3954a0c59da40a505a7"}, ] psutil = [ {file = "psutil-5.8.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0066a82f7b1b37d334e68697faba68e5ad5e858279fd6351c8ca6024e8d6ba64"}, @@ -1371,40 +1666,47 @@ psutil = [ {file = "psutil-5.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:f4634b033faf0d968bb9220dd1c793b897ab7f1189956e1aa9eae752527127d3"}, {file = "psutil-5.8.0.tar.gz", hash = "sha256:0c9ccb99ab76025f2f0bbecf341d4656e9c1351db8cc8a03ccd62e318ab4b5c6"}, ] +ptable = [ + {file = "PTable-0.9.2.tar.gz", hash = "sha256:aa7fc151cb40f2dabcd2275ba6f7fd0ff8577a86be3365cd3fb297cbe09cc292"}, +] +py = [ + {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, + {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, +] pycares = [ - {file = "pycares-3.2.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ebff743643e54aa70dce0b7098094edefd371641cf79d9c944e9f4a25e9242b0"}, - {file = "pycares-3.2.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:55272411b46787936e8db475b9b6e9b81a8d8cdc253fa8779a45ef979f554fab"}, - {file = "pycares-3.2.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:f33ed0e403f98e746f721aeacde917f1bdc7558cb714d713c264848bddff660f"}, - {file = "pycares-3.2.3-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:72807e0c80b705e21c3a39347c12edf43aa4f80373bb37777facf810169372ed"}, - {file = "pycares-3.2.3-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:a51df0a8b3eaf225e0dae3a737fd6ce6f3cb2a3bc947e884582fdda9a159d55f"}, - {file = "pycares-3.2.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:663b5c7bd0f66436adac7257ee22ccfe185c3e7830b9bada3d19b79870e1d134"}, - {file = "pycares-3.2.3-cp36-cp36m-win32.whl", hash = "sha256:c2b1e19262ce91c3288b1905b0d41f7ad0fff4b258ce37b517aa2c8d22eb82f1"}, - {file = "pycares-3.2.3-cp36-cp36m-win_amd64.whl", hash = "sha256:e16399654a6c81cfaee2745857c119c20357b5d93de2f169f506b048b5e75d1d"}, - {file = "pycares-3.2.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88e5131570d7323b29866aa5ac245a9a5788d64677111daa1bde5817acdf012f"}, - {file = "pycares-3.2.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:1552ffd823dc595fa8744c996926097a594f4f518d7c147657234b22cf17649d"}, - {file = "pycares-3.2.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f9e28b917373818817aca746238fcd621ec7e4ae9cbc8615f1a045e234eec298"}, - {file = "pycares-3.2.3-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:206d5a652990f10a1f1f3f62bc23d7fe46d99c2dc4b8b8a5101e5a472986cd02"}, - {file = "pycares-3.2.3-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:b8c9670225cdeeeb2b85ea92a807484622ca59f8f578ec73e8ec292515f35a91"}, - {file = "pycares-3.2.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:6329160885fc318f80692d4d0a83a8854f9144e7a80c4f25245d0c26f11a4b84"}, - {file = "pycares-3.2.3-cp37-cp37m-win32.whl", hash = 
"sha256:cd0f7fb40e1169f00b26a12793136bf5c711f155e647cd045a0ce6c98a527b57"}, - {file = "pycares-3.2.3-cp37-cp37m-win_amd64.whl", hash = "sha256:a5d419215543d154587590d9d4485e985387ca10c7d3e1a2e5689dd6c0f20e5f"}, - {file = "pycares-3.2.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54f1c0642935515f27549f09486e72b6b2b1d51ad27a90ce17b760e9ce5e86d"}, - {file = "pycares-3.2.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:6ce80eed538dd6106cd7e6136ceb3af10178d1254f07096a827c12e82e5e45c8"}, - {file = "pycares-3.2.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:ed972a04067e91f552da84945d38b94c3984c898f699faa8bb066e9f3a114c32"}, - {file = "pycares-3.2.3-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:99a62b101cfb36ab6ebf19cb1ad60db2f9b080dc52db4ca985fe90924f60c758"}, - {file = "pycares-3.2.3-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:2246adcbc948dd31925c9bff5cc41c06fc640f7d982e6b41b6d09e4f201e5c11"}, - {file = "pycares-3.2.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:7fd15d3f32be5548f38f95f4762ca73eef9fd623b101218a35d433ee0d4e3b58"}, - {file = "pycares-3.2.3-cp38-cp38-win32.whl", hash = "sha256:4bb0c708d8713741af7c4649d2f11e47c5f4e43131831243aeb18cff512c5469"}, - {file = "pycares-3.2.3-cp38-cp38-win_amd64.whl", hash = "sha256:a53d921956d1e985e510ca0ffa84fbd7ecc6ac7d735d8355cba4395765efcd31"}, - {file = "pycares-3.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0312d25fa9d7c242f66115c4b3ae6ed8aedb457513ba33acef31fa265fc602b4"}, - {file = "pycares-3.2.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:9960de8254525d9c3b485141809910c39d5eb1bb8119b1453702aacf72234934"}, - {file = "pycares-3.2.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:929f708a7bb4b2548cbbfc2094b2f90c4d8712056cdc0204788b570ab69c8838"}, - {file = "pycares-3.2.3-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4dd1237f01037cf5b90dd599c7fa79d9d8fb2ab2f401e19213d24228b2d17838"}, - {file = "pycares-3.2.3-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:5eea61a74097976502ce377bb75c4fed381d4986bc7fb85e70b691165133d3da"}, - {file = "pycares-3.2.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:1c72c0fda4b08924fe04680475350e09b8d210365d950a6dcdde8c449b8d5b98"}, - {file = "pycares-3.2.3-cp39-cp39-win32.whl", hash = "sha256:b1555d51ce29510ffd20f9e0339994dff8c5d1cb093c8e81d5d98f474e345aa7"}, - {file = "pycares-3.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:43c15138f620ed28e61e51b884490eb8387e5954668f919313753f88dd8134fd"}, - {file = "pycares-3.2.3.tar.gz", hash = "sha256:da1899fde778f9b8736712283eccbf7b654248779b349d139cd28eb30b0fa8cd"}, + {file = "pycares-4.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:db5a533111a3cfd481e7e4fb2bf8bef69f4fa100339803e0504dd5aecafb96a5"}, + {file = "pycares-4.0.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:fdff88393c25016f417770d82678423fc7a56995abb2df3d2a1e55725db6977d"}, + {file = "pycares-4.0.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0aa97f900a7ffb259be77d640006585e2a907b0cd4edeee0e85cf16605995d5a"}, + {file = "pycares-4.0.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:a34b0e3e693dceb60b8a1169668d606c75cb100ceba0a2df53c234a0eb067fbc"}, + {file = "pycares-4.0.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7661d6bbd51a337e7373cb356efa8be9b4655fda484e068f9455e939aec8d54e"}, + {file = "pycares-4.0.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:57315b8eb8fdbc56b3ad4932bc4b17132bb7c7fd2bd590f7fb84b6b522098aa9"}, + {file = "pycares-4.0.0-cp36-cp36m-win32.whl", hash = 
"sha256:dca9dc58845a9d083f302732a3130c68ded845ad5d463865d464e53c75a3dd45"}, + {file = "pycares-4.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:c95c964d5dd307e104b44b193095c67bb6b10c9eda1ffe7d44ab7a9e84c476d9"}, + {file = "pycares-4.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:26e67e4f81c80a5955dcf6193f3d9bee3c491fc0056299b383b84d792252fba4"}, + {file = "pycares-4.0.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:cd3011ffd5e1ad55880f7256791dbab9c43ebeda260474a968f19cd0319e1aef"}, + {file = "pycares-4.0.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1b959dd5921d207d759d421eece1b60416df33a7f862465739d5f2c363c2f523"}, + {file = "pycares-4.0.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:6f258c1b74c048a9501a25f732f11b401564005e5e3c18f1ca6cad0c3dc0fb19"}, + {file = "pycares-4.0.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:b17ef48729786e62b574c6431f675f4cb02b27691b49e7428a605a50cd59c072"}, + {file = "pycares-4.0.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:82b3259cb590ddd107a6d2dc52da2a2e9a986bf242e893d58c786af2f8191047"}, + {file = "pycares-4.0.0-cp37-cp37m-win32.whl", hash = "sha256:4876fc790ae32832ae270c4a010a1a77e12ddf8d8e6ad70ad0b0a9d506c985f7"}, + {file = "pycares-4.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f60c04c5561b1ddf85ca4e626943cc09d7fb684e1adb22abb632095415a40fd7"}, + {file = "pycares-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:615406013cdcd1b445e5d1a551d276c6200b3abe77e534f8a7f7e1551208d14f"}, + {file = "pycares-4.0.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:6580aef5d1b29a88c3d72fe73c691eacfd454f86e74d3fdd18f4bad8e8def98b"}, + {file = "pycares-4.0.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8ebb3ba0485f66cae8eed7ce3e9ed6f2c0bfd5e7319d5d0fbbb511064f17e1d4"}, + {file = "pycares-4.0.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:c5362b7690ca481440f6b98395ac6df06aa50518ccb183c560464d1e5e2ab5d4"}, + {file = "pycares-4.0.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:eb60be66accc9a9ea1018b591a1f5800cba83491d07e9acc8c56bc6e6607ab54"}, + {file = "pycares-4.0.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:44896d6e191a6b5a914dbe3aa7c748481bf6ad19a9df33c1e76f8f2dc33fc8f0"}, + {file = "pycares-4.0.0-cp38-cp38-win32.whl", hash = "sha256:09b28fc7bc2cc05f7f69bf1636ddf46086e0a1837b62961e2092fcb40477320d"}, + {file = "pycares-4.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:d4a5081e232c1d181883dcac4675807f3a6cf33911c4173fbea00c0523687ed4"}, + {file = "pycares-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:103353577a6266a53e71bfee4cf83825f1401fefa60f0fb8bdec35f13be6a5f2"}, + {file = "pycares-4.0.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ad6caf580ee69806fc6534be93ddbb6e99bf94296d79ab351c37b2992b17abfd"}, + {file = "pycares-4.0.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:3d5e50c95849f6905d2a9dbf02ed03f82580173e3c5604a39e2ad054185631f1"}, + {file = "pycares-4.0.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:53bc4f181b19576499b02cea4b45391e8dcbe30abd4cd01492f66bfc15615a13"}, + {file = "pycares-4.0.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:d52f9c725d2a826d5ffa37681eb07ffb996bfe21788590ef257664a3898fc0b5"}, + {file = "pycares-4.0.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:3c7fb8d34ee11971c39acfaf98d0fac66725385ccef3bfe1b174c92b210e1aa4"}, + {file = "pycares-4.0.0-cp39-cp39-win32.whl", hash = "sha256:e9773e07684a55f54657df05237267611a77b294ec3bacb5f851c4ffca38a465"}, + {file = "pycares-4.0.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:38e54037f36c149146ff15f17a4a963fbdd0f9871d4a21cd94ff9f368140f57e"}, + {file = "pycares-4.0.0.tar.gz", hash = "sha256:d0154fc5753b088758fbec9bc137e1b24bb84fc0c6a09725c8bac25a342311cd"}, ] pycodestyle = [ {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, @@ -1415,21 +1717,40 @@ pycparser = [ {file = "pycparser-2.20.tar.gz", hash = "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"}, ] pydocstyle = [ - {file = "pydocstyle-6.0.0-py3-none-any.whl", hash = "sha256:d4449cf16d7e6709f63192146706933c7a334af7c0f083904799ccb851c50f6d"}, - {file = "pydocstyle-6.0.0.tar.gz", hash = "sha256:164befb520d851dbcf0e029681b91f4f599c62c5cd8933fd54b1bfbd50e89e1f"}, + {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, + {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"}, ] pyflakes = [ {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, ] -pyreadline = [ - {file = "pyreadline-2.1.win-amd64.exe", hash = "sha256:9ce5fa65b8992dfa373bddc5b6e0864ead8f291c94fbfec05fbd5c836162e67b"}, - {file = "pyreadline-2.1.win32.exe", hash = "sha256:65540c21bfe14405a3a77e4c085ecfce88724743a4ead47c66b84defcf82c32e"}, - {file = "pyreadline-2.1.zip", hash = "sha256:4530592fc2e85b25b1a9f79664433da09237c1a270e4d78ea5aa3a2c7229e2d1"}, +pyparsing = [ + {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, + {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, +] +pyreadline3 = [ + {file = "pyreadline3-3.3-py3-none-any.whl", hash = "sha256:0003fd0079d152ecbd8111202c5a7dfa6a5569ffd65b235e45f3c2ecbee337b4"}, + {file = "pyreadline3-3.3.tar.gz", hash = "sha256:ff3b5a1ac0010d0967869f723e687d42cabc7dccf33b14934c92aa5168d260b3"}, +] +pytest = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] +pytest-cov = [ + {file = "pytest-cov-2.12.1.tar.gz", hash = "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7"}, + {file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"}, +] +pytest-forked = [ + {file = "pytest-forked-1.3.0.tar.gz", hash = "sha256:6aa9ac7e00ad1a539c41bec6d21011332de671e938c7637378ec9710204e37ca"}, + {file = "pytest_forked-1.3.0-py2.py3-none-any.whl", hash = "sha256:dc4147784048e70ef5d437951728825a131b81714b398d5d52f17c7c144d8815"}, +] +pytest-xdist = [ + {file = "pytest-xdist-2.3.0.tar.gz", hash = "sha256:e8ecde2f85d88fbcadb7d28cb33da0fa29bca5cf7d5967fa89fc0e97e5299ea5"}, + {file = "pytest_xdist-2.3.0-py3-none-any.whl", hash = "sha256:ed3d7da961070fce2a01818b51f6888327fb88df4379edeb6b9d990e789d9c8d"}, ] python-dateutil = [ - {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, - {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = 
"sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] python-dotenv = [ {file = "python-dotenv-0.17.1.tar.gz", hash = "sha256:b1ae5e9643d5ed987fc57cc2583021e38db531946518130777734f9589b3141f"}, @@ -1446,22 +1767,83 @@ pyyaml = [ {file = "PyYAML-5.4.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185"}, {file = "PyYAML-5.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253"}, {file = "PyYAML-5.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc"}, + {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347"}, + {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541"}, {file = "PyYAML-5.4.1-cp36-cp36m-win32.whl", hash = "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5"}, {file = "PyYAML-5.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df"}, {file = "PyYAML-5.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018"}, {file = "PyYAML-5.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63"}, + {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa"}, + {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0"}, {file = "PyYAML-5.4.1-cp37-cp37m-win32.whl", hash = "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b"}, {file = "PyYAML-5.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf"}, {file = "PyYAML-5.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46"}, {file = "PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb"}, + {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247"}, + {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc"}, {file = "PyYAML-5.4.1-cp38-cp38-win32.whl", hash = "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc"}, {file = "PyYAML-5.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696"}, {file = "PyYAML-5.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77"}, {file = "PyYAML-5.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183"}, + {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = 
"sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122"}, + {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6"}, {file = "PyYAML-5.4.1-cp39-cp39-win32.whl", hash = "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10"}, {file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"}, {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, ] +rapidfuzz = [ + {file = "rapidfuzz-1.7.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1ca9888e867aed2bb8d51571270e5f8393d718bb189fe1a7c0b047b8fd72bad3"}, + {file = "rapidfuzz-1.7.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:f336cd32a2a72eb9d7694618c9065ef3a2af330ab7e54bc0ec69d3b2eb08080e"}, + {file = "rapidfuzz-1.7.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:76124767ac3d3213a1aad989f80b156b225defef8addc825a5b631d3164c3213"}, + {file = "rapidfuzz-1.7.1-cp27-cp27m-win32.whl", hash = "sha256:c1090deb95e5369fff47c223c0ed3472644efc56817e288ebeaaa34822a1235c"}, + {file = "rapidfuzz-1.7.1-cp27-cp27m-win_amd64.whl", hash = "sha256:83f94c89e8f16679e0def3c7afa6c9ba477d837fd01250d6a1e3fea12267ce24"}, + {file = "rapidfuzz-1.7.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:cdd5962bd009b1457e280b5619d312cd6305b5b8afeff6c27869f98fee839c36"}, + {file = "rapidfuzz-1.7.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:2940960e212b66f00fc58f9b4a13e6f80221141dcbaee9c51f97e0a1f30ff1ab"}, + {file = "rapidfuzz-1.7.1-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:5ed4304a91043d27b92fe9af5eb87d1586548da6d03cbda5bbc98b00fee227cb"}, + {file = "rapidfuzz-1.7.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:be18495bd84bf2bd3e888270a3cd4dea868ff4b9b8ec6e540f0e195cda554140"}, + {file = "rapidfuzz-1.7.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:d5779e6f548b6f3edfbdfbeeda4158286684dcb2bae3515ce68c510ea48e1b4d"}, + {file = "rapidfuzz-1.7.1-cp35-cp35m-win32.whl", hash = "sha256:80d780c4f6da08eb6801489df54fdbdc5ef2b882bd73f9585ef6e0cf09f1690d"}, + {file = "rapidfuzz-1.7.1-cp35-cp35m-win_amd64.whl", hash = "sha256:3b205c63b8606c2b8595ba8403a8c3ebd39de9f7f44631a2f651f3efe106ae9a"}, + {file = "rapidfuzz-1.7.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8f96588a8a7d021debb4c60d82b15a80995daa99159bbeddd8a37f68f75ee06c"}, + {file = "rapidfuzz-1.7.1-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b8139116a937691dde17f27aafe774647808339305f4683b3a6d9bae6518aa2a"}, + {file = "rapidfuzz-1.7.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba574801c8410cc1f2d690ef65f898f6a660bba22ec8213e0f34dd0f0590bc71"}, + {file = "rapidfuzz-1.7.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d5194e3cb638af0cc7c02daa61cef07e332fd3f790ec113006302131be9afa6"}, + {file = "rapidfuzz-1.7.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd9d8eaae888b966422cbcba954390a63b4933d8c513ea0056fd6e42d421d08"}, + {file = "rapidfuzz-1.7.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3725c61b9cf57b6b7a765b92046e7d9e5ccce845835b523954b410a70dc32692"}, + {file = "rapidfuzz-1.7.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e417961e5ca450d6c7448accc5a7e4e9ab0dd3c63729f76215d5e672785920fc"}, + {file = 
"rapidfuzz-1.7.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:26d756284c8c6274b5d558e759415bfb4016fcdf168159b34702c346875d8cc0"}, + {file = "rapidfuzz-1.7.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4887766f0dcc5df43fe4315df4b3c642829e06dc60d5bcb5e682fb76657e8ed1"}, + {file = "rapidfuzz-1.7.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec0a29671d59998b97998b757ab1c636dd3b7721eda41746ae897abe709681a9"}, + {file = "rapidfuzz-1.7.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dff55750fecd8c0f07bc199e48427c86873be2d0e6a3a80df98972847287f5d3"}, + {file = "rapidfuzz-1.7.1-cp37-cp37m-win32.whl", hash = "sha256:e113f741bb18b0ddd14d714d80ce9c6d5322724f3023b920708e82491e7aef28"}, + {file = "rapidfuzz-1.7.1-cp37-cp37m-win_amd64.whl", hash = "sha256:ef20654be0aed240ee44c98ce02639c37422adc3e144d28c4b6d3da043d9fd20"}, + {file = "rapidfuzz-1.7.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9e27eb57745a4d2a390b056f6f490b712c2f54250c5d2c794dd76062065a8aef"}, + {file = "rapidfuzz-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:de2b0ebb67ee0b78973141dba91f574a325a3425664dbdbad37fd7aca7b28cab"}, + {file = "rapidfuzz-1.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88c65d91dcd3c0595112d16555536c60ac5bcab1a43e517e155a242a39525057"}, + {file = "rapidfuzz-1.7.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:afd525a9b593cc1099f0210e116bcb4d9fc5585728d7bd929e6a4133dacd2d59"}, + {file = "rapidfuzz-1.7.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e6d77f104a8d67c01ae4248ced6f0d4ef05e63931afdf49c20decf962318877f"}, + {file = "rapidfuzz-1.7.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7db9d6ad0ab80e9e0f66f157b8e31b1d04ce5fa767b936ca1c212b98092572b1"}, + {file = "rapidfuzz-1.7.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0195c57f4beea0e7691594f59faf62a4be3c818c1955a8b9b712f37adc479d2d"}, + {file = "rapidfuzz-1.7.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9ffca8c8b74d12cd36c051e9befa7c4eb2d34624ce71f22dbfc659af15bf4a1e"}, + {file = "rapidfuzz-1.7.1-cp38-cp38-win32.whl", hash = "sha256:234cb75aa1e21cabad6a8c0718f84e2bfafdd4756b5232d5739545f97e343e59"}, + {file = "rapidfuzz-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:058977e93ab736071fcd8828fc6289ec026e9ca4a19f2a0967f9260e63910da8"}, + {file = "rapidfuzz-1.7.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9d02bb0724326826b1884cc9b9d9fd97ac352c18213f45e465a39ef069a33115"}, + {file = "rapidfuzz-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:212d6fa5b824aaa49a921c81d7cdc1d079b3545a30563ae14dc88e17918e76bf"}, + {file = "rapidfuzz-1.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a0cd8117deba10e2a1d6dccb6ff44a4c737adda3048dc45860c5f53cf64db14f"}, + {file = "rapidfuzz-1.7.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:61faa47b6b5d5a0cbe9fa6369df44d3f9435c4cccdb4d38d9de437f18b69dc4d"}, + {file = "rapidfuzz-1.7.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1daa756be52a7ee60d553ba667cda3a188ee811c92a9c21df43a4cdadb1eb8ca"}, + {file = "rapidfuzz-1.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c98ac10782dadf507e922963c8b8456a79151b4f10dbb08cfc86c1572db366dc"}, + {file = "rapidfuzz-1.7.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:358d80061ca107df6c3e1f67fa7af0f94a62827cb9c44ac09a16e78b38f7c3d5"}, + {file = "rapidfuzz-1.7.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a5f90fc31d54fcd74a97d175892555786a8214a3cff43077463915b8a45a191d"}, + {file = "rapidfuzz-1.7.1-cp39-cp39-win32.whl", hash = "sha256:55dffdcdccea6f077a4f09164039411f01f621633be5883c58ceaf94f007a688"}, + {file = "rapidfuzz-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:d712a7f680d2074b587650f81865ca838c04fcc6b77c9d2d742de0853aaa24ce"}, + {file = "rapidfuzz-1.7.1-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:729d73a8db5a2b444a19d4aa2be009b2e628d207d7c754f6d280e3c6a59b94cb"}, + {file = "rapidfuzz-1.7.1-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:a1cabbc645395b6175cad79164d9ec621866a004b476e44cac534020b9f6bddb"}, + {file = "rapidfuzz-1.7.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ae697294f456f7f76e5bd30db5a65e8b855e7e09f9a65e144efa1e2c5009553c"}, + {file = "rapidfuzz-1.7.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e8ae51c1cf1f034f15216fec2e1eef658c8b3a9cbdcc1a053cc7133ede9d616d"}, + {file = "rapidfuzz-1.7.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:dccc072f2a0eeb98d46a79427ef793836ebc5184b1fe544b34607be10705ddc3"}, + {file = "rapidfuzz-1.7.1.tar.gz", hash = "sha256:99495c679174b2a02641f7dc2364a208135cacca77fc4825a86efbfe1e23b0ff"}, +] redis = [ {file = "redis-3.5.3-py2.py3-none-any.whl", hash = "sha256:432b788c4530cfe16d8d943a09d40ca6c16149727e4afe8c2c9d5580c59d9f24"}, {file = "redis-3.5.3.tar.gz", hash = "sha256:0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2"}, @@ -1510,12 +1892,12 @@ regex = [ {file = "regex-2021.4.4.tar.gz", hash = "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb"}, ] requests = [ - {file = "requests-2.15.1-py2.py3-none-any.whl", hash = "sha256:ff753b2196cd18b1bbeddc9dcd5c864056599f7a7d9a4fb5677e723efa2b7fb9"}, - {file = "requests-2.15.1.tar.gz", hash = "sha256:e5659b9315a0610505e050bb7190bf6fa2ccee1ac295f2b760ef9d8a03ebbb2e"}, + {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, + {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"}, ] sentry-sdk = [ - {file = "sentry-sdk-0.20.3.tar.gz", hash = "sha256:4ae8d1ced6c67f1c8ea51d82a16721c166c489b76876c9f2c202b8a50334b237"}, - {file = "sentry_sdk-0.20.3-py2.py3-none-any.whl", hash = "sha256:e75c8c58932bda8cd293ea8e4b242527129e1caaec91433d21b8b2f20fee030b"}, + {file = "sentry-sdk-1.4.3.tar.gz", hash = "sha256:b9844751e40710e84a457c5bc29b21c383ccb2b63d76eeaad72f7f1c808c8828"}, + {file = "sentry_sdk-1.4.3-py2.py3-none-any.whl", hash = "sha256:c091cc7115ff25fe3a0e410dbecd7a996f81a3f6137d2272daef32d6c3cfa6dc"}, ] sgmllib3k = [ {file = "sgmllib3k-1.0.0.tar.gz", hash = "sha256:7868fb1c8bfa764c1ac563d3cf369c381d1325d36124933a726f29fcdaa812e9"}, @@ -1529,8 +1911,8 @@ snowballstemmer = [ {file = "snowballstemmer-2.1.0.tar.gz", hash = "sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914"}, ] sortedcontainers = [ - {file = "sortedcontainers-2.3.0-py2.py3-none-any.whl", hash = "sha256:37257a32add0a3ee490bb170b599e93095eed89a55da91fa9f48753ea12fd73f"}, - {file = "sortedcontainers-2.3.0.tar.gz", hash = "sha256:59cc937650cf60d677c16775597c89a960658a09cf7c1a668f86e1e4464b10a1"}, + {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = 
"sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, + {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, ] soupsieve = [ {file = "soupsieve-2.2.1-py3-none-any.whl", hash = "sha256:c2c1c2d44f158cdbddab7824a9af8c4f83c76b1e23e049479aa432feb6c4c23b"}, @@ -1544,59 +1926,98 @@ taskipy = [ {file = "taskipy-1.7.0-py3-none-any.whl", hash = "sha256:9e284c10898e9dee01a3e72220b94b192b1daa0f560271503a6df1da53d03844"}, {file = "taskipy-1.7.0.tar.gz", hash = "sha256:960e480b1004971e76454ecd1a0484e640744a30073a1069894a311467f85ed8"}, ] +testfixtures = [ + {file = "testfixtures-6.18.3-py2.py3-none-any.whl", hash = "sha256:6ddb7f56a123e1a9339f130a200359092bd0a6455e31838d6c477e8729bb7763"}, + {file = "testfixtures-6.18.3.tar.gz", hash = "sha256:2600100ae96ffd082334b378e355550fef8b4a529a6fa4c34f47130905c7426d"}, +] toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] typing-extensions = [ - {file = "typing_extensions-3.10.0.0-py2-none-any.whl", hash = "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497"}, - {file = "typing_extensions-3.10.0.0-py3-none-any.whl", hash = "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84"}, - {file = "typing_extensions-3.10.0.0.tar.gz", hash = "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342"}, + {file = "typing_extensions-3.10.0.2-py2-none-any.whl", hash = "sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7"}, + {file = "typing_extensions-3.10.0.2-py3-none-any.whl", hash = "sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34"}, + {file = "typing_extensions-3.10.0.2.tar.gz", hash = "sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e"}, ] urllib3 = [ - {file = "urllib3-1.26.4-py2.py3-none-any.whl", hash = "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df"}, - {file = "urllib3-1.26.4.tar.gz", hash = "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937"}, + {file = "urllib3-1.26.7-py2.py3-none-any.whl", hash = "sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"}, + {file = "urllib3-1.26.7.tar.gz", hash = "sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece"}, ] virtualenv = [ - {file = "virtualenv-20.4.6-py2.py3-none-any.whl", hash = "sha256:307a555cf21e1550885c82120eccaf5acedf42978fd362d32ba8410f9593f543"}, - {file = "virtualenv-20.4.6.tar.gz", hash = "sha256:72cf267afc04bf9c86ec932329b7e94db6a0331ae9847576daaa7ca3c86b29a4"}, + {file = "virtualenv-20.8.1-py2.py3-none-any.whl", hash = "sha256:10062e34c204b5e4ec5f62e6ef2473f8ba76513a9a617e873f1f8fb4a519d300"}, + {file = "virtualenv-20.8.1.tar.gz", hash = "sha256:bcc17f0b3a29670dd777d6f0755a4c04f28815395bca279cdcb213b97199a6b8"}, ] yarl = [ - {file = "yarl-1.6.3-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:0355a701b3998dcd832d0dc47cc5dedf3874f966ac7f870e0f3a6788d802d434"}, - {file = "yarl-1.6.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:bafb450deef6861815ed579c7a6113a879a6ef58aed4c3a4be54400ae8871478"}, - {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:547f7665ad50fa8563150ed079f8e805e63dd85def6674c97efd78eed6c224a6"}, - {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_i686.whl", hash = 
"sha256:63f90b20ca654b3ecc7a8d62c03ffa46999595f0167d6450fa8383bab252987e"}, - {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:97b5bdc450d63c3ba30a127d018b866ea94e65655efaf889ebeabc20f7d12406"}, - {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:d8d07d102f17b68966e2de0e07bfd6e139c7c02ef06d3a0f8d2f0f055e13bb76"}, - {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:15263c3b0b47968c1d90daa89f21fcc889bb4b1aac5555580d74565de6836366"}, - {file = "yarl-1.6.3-cp36-cp36m-win32.whl", hash = "sha256:b5dfc9a40c198334f4f3f55880ecf910adebdcb2a0b9a9c23c9345faa9185721"}, - {file = "yarl-1.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:b2e9a456c121e26d13c29251f8267541bd75e6a1ccf9e859179701c36a078643"}, - {file = "yarl-1.6.3-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:ce3beb46a72d9f2190f9e1027886bfc513702d748047b548b05dab7dfb584d2e"}, - {file = "yarl-1.6.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2ce4c621d21326a4a5500c25031e102af589edb50c09b321049e388b3934eec3"}, - {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:d26608cf178efb8faa5ff0f2d2e77c208f471c5a3709e577a7b3fd0445703ac8"}, - {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:4c5bcfc3ed226bf6419f7a33982fb4b8ec2e45785a0561eb99274ebbf09fdd6a"}, - {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:4736eaee5626db8d9cda9eb5282028cc834e2aeb194e0d8b50217d707e98bb5c"}, - {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:68dc568889b1c13f1e4745c96b931cc94fdd0defe92a72c2b8ce01091b22e35f"}, - {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:7356644cbed76119d0b6bd32ffba704d30d747e0c217109d7979a7bc36c4d970"}, - {file = "yarl-1.6.3-cp37-cp37m-win32.whl", hash = "sha256:00d7ad91b6583602eb9c1d085a2cf281ada267e9a197e8b7cae487dadbfa293e"}, - {file = "yarl-1.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:69ee97c71fee1f63d04c945f56d5d726483c4762845400a6795a3b75d56b6c50"}, - {file = "yarl-1.6.3-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:e46fba844f4895b36f4c398c5af062a9808d1f26b2999c58909517384d5deda2"}, - {file = "yarl-1.6.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:31ede6e8c4329fb81c86706ba8f6bf661a924b53ba191b27aa5fcee5714d18ec"}, - {file = "yarl-1.6.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71"}, - {file = "yarl-1.6.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:72a660bdd24497e3e84f5519e57a9ee9220b6f3ac4d45056961bf22838ce20cc"}, - {file = "yarl-1.6.3-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:324ba3d3c6fee56e2e0b0d09bf5c73824b9f08234339d2b788af65e60040c959"}, - {file = "yarl-1.6.3-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:e6b5460dc5ad42ad2b36cca524491dfcaffbfd9c8df50508bddc354e787b8dc2"}, - {file = "yarl-1.6.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:6d6283d8e0631b617edf0fd726353cb76630b83a089a40933043894e7f6721e2"}, - {file = "yarl-1.6.3-cp38-cp38-win32.whl", hash = "sha256:9ede61b0854e267fd565e7527e2f2eb3ef8858b301319be0604177690e1a3896"}, - {file = "yarl-1.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a"}, - {file = "yarl-1.6.3-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:329412812ecfc94a57cd37c9d547579510a9e83c516bc069470db5f75684629e"}, - {file = "yarl-1.6.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:c49ff66d479d38ab863c50f7bb27dee97c6627c5fe60697de15529da9c3de724"}, - {file = 
"yarl-1.6.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f040bcc6725c821a4c0665f3aa96a4d0805a7aaf2caf266d256b8ed71b9f041c"}, - {file = "yarl-1.6.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:d5c32c82990e4ac4d8150fd7652b972216b204de4e83a122546dce571c1bdf25"}, - {file = "yarl-1.6.3-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:d597767fcd2c3dc49d6eea360c458b65643d1e4dbed91361cf5e36e53c1f8c96"}, - {file = "yarl-1.6.3-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:8aa3decd5e0e852dc68335abf5478a518b41bf2ab2f330fe44916399efedfae0"}, - {file = "yarl-1.6.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:73494d5b71099ae8cb8754f1df131c11d433b387efab7b51849e7e1e851f07a4"}, - {file = "yarl-1.6.3-cp39-cp39-win32.whl", hash = "sha256:5b883e458058f8d6099e4420f0cc2567989032b5f34b271c0827de9f1079a424"}, - {file = "yarl-1.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:4953fb0b4fdb7e08b2f3b3be80a00d28c5c8a2056bb066169de00e6501b986b6"}, - {file = "yarl-1.6.3.tar.gz", hash = "sha256:8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10"}, + {file = "yarl-1.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e35d8230e4b08d86ea65c32450533b906a8267a87b873f2954adeaecede85169"}, + {file = "yarl-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eb4b3f277880c314e47720b4b6bb2c85114ab3c04c5442c9bc7006b3787904d8"}, + {file = "yarl-1.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7015dcedb91d90a138eebdc7e432aec8966e0147ab2a55f2df27b1904fa7291"}, + {file = "yarl-1.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb3e478175e15e00d659fb0354a6a8db71a7811a2a5052aed98048bc972e5d2b"}, + {file = "yarl-1.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b8c409aa3a7966647e7c1c524846b362a6bcbbe120bf8a176431f940d2b9a2e"}, + {file = "yarl-1.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b22ea41c7e98170474a01e3eded1377d46b2dfaef45888a0005c683eaaa49285"}, + {file = "yarl-1.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a7dfc46add4cfe5578013dbc4127893edc69fe19132d2836ff2f6e49edc5ecd6"}, + {file = "yarl-1.7.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:82ff6f85f67500a4f74885d81659cd270eb24dfe692fe44e622b8a2fd57e7279"}, + {file = "yarl-1.7.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f3cd2158b2ed0fb25c6811adfdcc47224efe075f2d68a750071dacc03a7a66e4"}, + {file = "yarl-1.7.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:59c0f13f9592820c51280d1cf811294d753e4a18baf90f0139d1dc93d4b6fc5f"}, + {file = "yarl-1.7.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7f7655ad83d1a8afa48435a449bf2f3009293da1604f5dd95b5ddcf5f673bd69"}, + {file = "yarl-1.7.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aa9f0d9b62d15182341b3e9816582f46182cab91c1a57b2d308b9a3c4e2c4f78"}, + {file = "yarl-1.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fdd1b90c225a653b1bd1c0cae8edf1957892b9a09c8bf7ee6321eeb8208eac0f"}, + {file = "yarl-1.7.0-cp310-cp310-win32.whl", hash = "sha256:7c8d0bb76eabc5299db203e952ec55f8f4c53f08e0df4285aac8c92bd9e12675"}, + {file = "yarl-1.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:622a36fa779efb4ff9eff5fe52730ff17521431379851a31e040958fc251670c"}, + {file = "yarl-1.7.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3d461b7a8e139b9e4b41f62eb417ffa0b98d1c46d4caf14c845e6a3b349c0bb1"}, + {file = 
"yarl-1.7.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81cfacdd1e40bc931b5519499342efa388d24d262c30a3d31187bfa04f4a7001"}, + {file = "yarl-1.7.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:821b978f2152be7695d4331ef0621d207aedf9bbd591ba23a63412a3efc29a01"}, + {file = "yarl-1.7.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b64bd24c8c9a487f4a12260dc26732bf41028816dbf0c458f17864fbebdb3131"}, + {file = "yarl-1.7.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:98c9ddb92b60a83c21be42c776d3d9d5ec632a762a094c41bda37b7dfbd2cd83"}, + {file = "yarl-1.7.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a532d75ca74431c053a88a802e161fb3d651b8bf5821a3440bc3616e38754583"}, + {file = "yarl-1.7.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:053e09817eafb892e94e172d05406c1b3a22a93bc68f6eff5198363a3d764459"}, + {file = "yarl-1.7.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:98c51f02d542945d306c8e934aa2c1e66ba5e9c1c86b5bf37f3a51c8a747067e"}, + {file = "yarl-1.7.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:15ec41a5a5fdb7bace6d7b16701f9440007a82734f69127c0fbf6d87e10f4a1e"}, + {file = "yarl-1.7.0-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:a7f08819dba1e1255d6991ed37448a1bf4b1352c004bcd899b9da0c47958513d"}, + {file = "yarl-1.7.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8e3ffab21db0542ffd1887f3b9575ddd58961f2cf61429cb6458afc00c4581e0"}, + {file = "yarl-1.7.0-cp36-cp36m-win32.whl", hash = "sha256:50127634f519b2956005891507e3aa4ac345f66a7ea7bbc2d7dcba7401f41898"}, + {file = "yarl-1.7.0-cp36-cp36m-win_amd64.whl", hash = "sha256:36ec44f15193f6d5288d42ebb8e751b967ebdfb72d6830983838d45ab18edb4f"}, + {file = "yarl-1.7.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ec1b5a25a25c880c976d0bb3d107def085bb08dbb3db7f4442e0a2b980359d24"}, + {file = "yarl-1.7.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b36f5a63c891f813c6f04ef19675b382efc190fd5ce7e10ab19386d2548bca06"}, + {file = "yarl-1.7.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38173b8c3a29945e7ecade9a3f6ff39581eee8201338ee6a2c8882db5df3e806"}, + {file = "yarl-1.7.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ba402f32184f0b405fb281b93bd0d8ab7e3257735b57b62a6ed2e94cdf4fe50"}, + {file = "yarl-1.7.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:be52bc5208d767cdd8308a9e93059b3b36d1e048fecbea0e0346d0d24a76adc0"}, + {file = "yarl-1.7.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:08c2044a956f4ef30405f2f433ce77f1f57c2c773bf81ae43201917831044d5a"}, + {file = "yarl-1.7.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:484d61c047c45670ef5967653a1d0783e232c54bf9dd786a7737036828fa8d54"}, + {file = "yarl-1.7.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b7de92a4af85cfcaf4081f8aa6165b1d63ee5de150af3ee85f954145f93105a7"}, + {file = "yarl-1.7.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:376e41775aab79c5575534924a386c8e0f1a5d91db69fc6133fd27a489bcaf10"}, + {file = "yarl-1.7.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:8a8b10d0e7bac154f959b709fcea593cda527b234119311eb950096653816a86"}, + {file = "yarl-1.7.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:f46cd4c43e6175030e2a56def8f1d83b64e6706eeb2bb9ab0ef4756f65eab23f"}, + {file = "yarl-1.7.0-cp37-cp37m-win32.whl", hash = "sha256:b28cfb46140efe1a6092b8c5c4994a1fe70dc83c38fbcea4992401e0c6fb9cce"}, + {file = "yarl-1.7.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9624154ec9c02a776802da1086eed7f5034bd1971977f5146233869c2ac80297"}, + {file = "yarl-1.7.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:69945d13e1bbf81784a9bc48824feb9cd66491e6a503d4e83f6cd7c7cc861361"}, + {file = "yarl-1.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:46a742ed9e363bd01be64160ce7520e92e11989bd4cb224403cfd31c101cc83d"}, + {file = "yarl-1.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb4ff1ac7cb4500f43581b3f4cbd627d702143aa6be1fdc1fa3ebffaf4dc1be5"}, + {file = "yarl-1.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ad51e17cd65ea3debb0e10f0120cf8dd987c741fe423ed2285087368090b33d"}, + {file = "yarl-1.7.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7e37786ea89a5d3ffbbf318ea9790926f8dfda83858544f128553c347ad143c6"}, + {file = "yarl-1.7.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c63c1e208f800daad71715786bfeb1cecdc595d87e2e9b1cd234fd6e597fd71d"}, + {file = "yarl-1.7.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:91cbe24300c11835ef186436363352b3257db7af165e0a767f4f17aa25761388"}, + {file = "yarl-1.7.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e510dbec7c59d32eaa61ffa48173d5e3d7170a67f4a03e8f5e2e9e3971aca622"}, + {file = "yarl-1.7.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3def6e681cc02397e5d8141ee97b41d02932b2bcf0fb34532ad62855eab7c60e"}, + {file = "yarl-1.7.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:263c81b94e6431942b27f6f671fa62f430a0a5c14bb255f2ab69eeb9b2b66ff7"}, + {file = "yarl-1.7.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:e78c91faefe88d601ddd16e3882918dbde20577a2438e2320f8239c8b7507b8f"}, + {file = "yarl-1.7.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:22b2430c49713bfb2f0a0dd4a8d7aab218b28476ba86fd1c78ad8899462cbcf2"}, + {file = "yarl-1.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2e7ad9db939082f5d0b9269cfd92c025cb8f2fbbb1f1b9dc5a393c639db5bd92"}, + {file = "yarl-1.7.0-cp38-cp38-win32.whl", hash = "sha256:3a31e4a8dcb1beaf167b7e7af61b88cb961b220db8d3ba1c839723630e57eef7"}, + {file = "yarl-1.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:d579957439933d752358c6a300c93110f84aae67b63dd0c19dde6ecbf4056f6b"}, + {file = "yarl-1.7.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:87721b549505a546eb003252185103b5ec8147de6d3ad3714d148a5a67b6fe53"}, + {file = "yarl-1.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1fa866fa24d9f4108f9e58ea8a2135655419885cdb443e36b39a346e1181532"}, + {file = "yarl-1.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1d3b8449dfedfe94eaff2b77954258b09b24949f6818dfa444b05dbb05ae1b7e"}, + {file = "yarl-1.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db2372e350794ce8b9f810feb094c606b7e0e4aa6807141ac4fadfe5ddd75bb0"}, + {file = "yarl-1.7.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a06d9d0b9a97fa99b84fee71d9dd11e69e21ac8a27229089f07b5e5e50e8d63c"}, + {file = "yarl-1.7.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a3455c2456d6307bcfa80bc1157b8603f7d93573291f5bdc7144489ca0df4628"}, + {file = 
"yarl-1.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d30d67e3486aea61bb2cbf7cf81385364c2e4f7ce7469a76ed72af76a5cdfe6b"}, + {file = "yarl-1.7.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c18a4b286e8d780c3a40c31d7b79836aa93b720f71d5743f20c08b7e049ca073"}, + {file = "yarl-1.7.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d54c925396e7891666cabc0199366ca55b27d003393465acef63fd29b8b7aa92"}, + {file = "yarl-1.7.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:64773840952de17851a1c7346ad7f71688c77e74248d1f0bc230e96680f84028"}, + {file = "yarl-1.7.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:acbf1756d9dc7cd0ae943d883be72e84e04396f6c2ff93a6ddeca929d562039f"}, + {file = "yarl-1.7.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:2e48f27936aa838939c798f466c851ba4ae79e347e8dfce43b009c64b930df12"}, + {file = "yarl-1.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1beef4734ca1ad40a9d8c6b20a76ab46e3a2ed09f38561f01e4aa2ea82cafcef"}, + {file = "yarl-1.7.0-cp39-cp39-win32.whl", hash = "sha256:8ee78c9a5f3c642219d4607680a4693b59239c27a3aa608b64ef79ddc9698039"}, + {file = "yarl-1.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:d750503682605088a14d29a4701548c15c510da4f13c8b17409c4097d5b04c52"}, + {file = "yarl-1.7.0.tar.gz", hash = "sha256:8e7ebaf62e19c2feb097ffb7c94deb0f0c9fab52590784c8cd679d30ab009162"}, ] diff --git a/pyproject.toml b/pyproject.toml index 320bf88cc..e227ffaa6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,10 +7,10 @@ license = "MIT" [tool.poetry.dependencies] python = "3.9.*" +"discord.py" = {url = "https://github.com/Rapptz/discord.py/archive/45d498c1b76deaf3b394d17ccf56112fa691d160.zip"} aio-pika = "~=6.1" aiodns = "~=2.0" aiohttp = "~=3.7" -aioping = "~=0.3.1" aioredis = "~=1.3.1" arrow = "~=1.0.3" async-rediscache = { version = "~=0.1.2", extras = ["fakeredis"] } @@ -18,10 +18,9 @@ beautifulsoup4 = "~=4.9" colorama = { version = "~=0.4.3", markers = "sys_platform == 'win32'" } coloredlogs = "~=14.0" deepdiff = "~=4.0" -"discord.py" = "~=1.6.0" emoji = "~=0.6" feedparser = "~=6.0.2" -fuzzywuzzy = "~=0.17" +rapidfuzz = "~=1.4" lxml = "~=4.4" markdownify = "==0.6.1" more_itertools = "~=8.2" @@ -29,7 +28,7 @@ python-dateutil = "~=2.8" python-frontmatter = "~=1.0.0" pyyaml = "~=5.1" regex = "==2021.4.4" -sentry-sdk = "~=0.19" +sentry-sdk = "~=1.3" statsd = "~=3.3" [tool.poetry.dev-dependencies] @@ -39,14 +38,18 @@ flake8 = "~=3.8" flake8-annotations = "~=2.0" flake8-bugbear = "~=20.1" flake8-docstrings = "~=1.4" -flake8-import-order = "~=0.18" flake8-string-format = "~=0.2" flake8-tidy-imports = "~=4.0" flake8-todo = "~=0.7" +flake8-isort = "~=4.0" pep8-naming = "~=0.9" pre-commit = "~=2.1" taskipy = "~=1.7.0" +pip-licenses = "~=3.5.2" python-dotenv = "~=0.17.1" +pytest = "~=6.2.4" +pytest-cov = "~=2.12.1" +pytest-xdist = { version = "~=2.3.0", extras = ["psutil"] } [build-system] requires = ["poetry-core>=1.0.0"] @@ -58,6 +61,22 @@ lint = "pre-commit run --all-files" precommit = "pre-commit install" build = "docker build -t ghcr.io/python-discord/bot:latest -f Dockerfile ." push = "docker push ghcr.io/python-discord/bot:latest" -test = "coverage run -m unittest" +test-nocov = "pytest -n auto" +test = "pytest -n auto --cov-report= --cov --ff" +retest = "pytest -n auto --cov-report= --cov --lf" html = "coverage html" report = "coverage report" +isort = "isort ." 
+ +[tool.coverage.run] +branch = true +source_pkgs = ["bot"] +source = ["tests"] + +[tool.isort] +multi_line_output = 6 +order_by_type = false +case_sensitive = true +combine_as_imports = true +line_length = 120 +atomic = true diff --git a/tests/README.md b/tests/README.md index 1a17c09bd..b7fddfaa2 100644 --- a/tests/README.md +++ b/tests/README.md @@ -4,6 +4,14 @@ Our bot is one of the most important tools we have for running our community. As _**Note:** This is a practical guide to getting started with writing tests for our bot, not a general introduction to writing unit tests in Python. If you're looking for a more general introduction, you can take a look at the [Additional resources](#additional-resources) section at the bottom of this page._ +### Table of contents: +- [Tools](#tools) +- [Running tests](#running-tests) +- [Writing tests](#writing-tests) +- [Mocking](#mocking) +- [Some considerations](#some-considerations) +- [Additional resources](#additional-resources) + ## Tools We are using the following modules and packages for our unit tests: @@ -11,15 +19,43 @@ We are using the following modules and packages for our unit tests: - [unittest](https://docs.python.org/3/library/unittest.html) (standard library) - [unittest.mock](https://docs.python.org/3/library/unittest.mock.html) (standard library) - [coverage.py](https://coverage.readthedocs.io/en/stable/) +- [pytest-cov](https://pytest-cov.readthedocs.io/en/latest/index.html) + +We also use the following package as a test runner: +- [pytest](https://docs.pytest.org/en/6.2.x/) -To ensure the results you obtain on your personal machine are comparable to those generated in the CI, please make sure to run your tests with the virtual environment defined by our [Poetry Project](/pyproject.toml). To run your tests with `poetry`, we've provided two "scripts" shortcuts: +To ensure the results you obtain on your personal machine are comparable to those generated in the CI, please make sure to run your tests with the virtual environment defined by our [Poetry Project](/pyproject.toml). To run your tests with `poetry`, we've provided the following "script" shortcuts: -- `poetry run task test` will run `unittest` with `coverage.py` +- `poetry run task test-nocov` will run `pytest`. +- `poetry run task test` will run `pytest` with `pytest-cov`. - `poetry run task test path/to/test.py` will run a specific test. - `poetry run task report` will generate a coverage report of the tests you've run with `poetry run task test`. If you append the `-m` flag to this command, the report will include the lines and branches not covered by tests in addition to the test coverage report. If you want a coverage report, make sure to run the tests with `poetry run task test` *first*. +## Running tests +There are multiple ways to run the tests; which one you use will depend on your goal and stage in development. + +When actively developing, you'll most likely be working on one portion of the codebase, and as a result won't need to run the entire test suite. +To run just one file and save time, you can use the following command: +```shell +poetry run task test-nocov <path/to/file.py> +``` + +For example: +```shell +poetry run task test-nocov tests/bot/exts/test_cogs.py +``` +will run the test suite in the `test_cogs` file. + +If you'd like to collect coverage as well, you can append `--cov` to the command above. 
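For instance (an illustrative sketch, reusing the file path from the example above and assuming the extra flag is passed through to pytest):

```shell
# Run a single test file and also collect coverage for it (illustrative)
poetry run task test-nocov tests/bot/exts/test_cogs.py --cov
```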
+ + +If you're done and are preparing to commit and push your code, it's a good idea to run the entire test suite as a sanity check: +```shell +poetry run task test +``` + ## Writing tests Since consistency is an important consideration for collaborative projects, we have written some guidelines on writing tests for the bot. In addition to these guidelines, it's a good idea to look at the existing code base for examples (e.g., [`test_converters.py`](/tests/bot/test_converters.py)). diff --git a/tests/__init__.py b/tests/__init__.py index 2228110ad..c2b9d12dc 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,5 +1,6 @@ import logging +from bot.log import get_logger -log = logging.getLogger() +log = get_logger() log.setLevel(logging.CRITICAL) diff --git a/tests/base.py b/tests/base.py index d99b9ac31..5e304ea9d 100644 --- a/tests/base.py +++ b/tests/base.py @@ -6,6 +6,7 @@ from typing import Dict import discord from discord.ext import commands +from bot.log import get_logger from tests import helpers @@ -42,7 +43,7 @@ class LoggingTestsMixin: manager when we're testing under the assumption that no log records will be emitted. """ if not isinstance(logger, logging.Logger): - logger = logging.getLogger(logger) + logger = get_logger(logger) if level: level = logging._nameToLevel.get(level, level) @@ -102,4 +103,4 @@ class CommandTestCase(unittest.IsolatedAsyncioTestCase): with self.assertRaises(commands.MissingPermissions) as cm: await cmd.can_run(ctx) - self.assertCountEqual(permissions.keys(), cm.exception.missing_perms) + self.assertCountEqual(permissions.keys(), cm.exception.missing_permissions) diff --git a/tests/bot/exts/backend/sync/test_base.py b/tests/bot/exts/backend/sync/test_base.py index 3ad9db9c3..9dc46005b 100644 --- a/tests/bot/exts/backend/sync/test_base.py +++ b/tests/bot/exts/backend/sync/test_base.py @@ -1,7 +1,6 @@ import unittest from unittest import mock - from bot.api import ResponseCodeError from bot.exts.backend.sync._syncers import Syncer from tests import helpers diff --git a/tests/bot/exts/backend/sync/test_cog.py b/tests/bot/exts/backend/sync/test_cog.py index 22a07313e..fdd0ab74a 100644 --- a/tests/bot/exts/backend/sync/test_cog.py +++ b/tests/bot/exts/backend/sync/test_cog.py @@ -60,13 +60,13 @@ class SyncCogTestCase(unittest.IsolatedAsyncioTestCase): class SyncCogTests(SyncCogTestCase): """Tests for the Sync cog.""" + @mock.patch("bot.utils.scheduling.create_task") @mock.patch.object(Sync, "sync_guild", new_callable=mock.MagicMock) - def test_sync_cog_init(self, sync_guild): + def test_sync_cog_init(self, sync_guild, create_task): """Should instantiate syncers and run a sync for the guild.""" # Reset because a Sync cog was already instantiated in setUp. 
self.RoleSyncer.reset_mock() self.UserSyncer.reset_mock() - self.bot.loop.create_task = mock.MagicMock() mock_sync_guild_coro = mock.MagicMock() sync_guild.return_value = mock_sync_guild_coro @@ -74,7 +74,8 @@ class SyncCogTests(SyncCogTestCase): Sync(self.bot) sync_guild.assert_called_once_with() - self.bot.loop.create_task.assert_called_once_with(mock_sync_guild_coro) + create_task.assert_called_once() + self.assertEqual(create_task.call_args.args[0], mock_sync_guild_coro) async def test_sync_cog_sync_guild(self): """Roles and users should be synced only if a guild is successfully retrieved.""" diff --git a/tests/bot/exts/backend/sync/test_users.py b/tests/bot/exts/backend/sync/test_users.py index 27932be95..2fc97af2d 100644 --- a/tests/bot/exts/backend/sync/test_users.py +++ b/tests/bot/exts/backend/sync/test_users.py @@ -1,6 +1,8 @@ import unittest from unittest import mock +from discord.errors import NotFound + from bot.exts.backend.sync._syncers import UserSyncer, _Diff from tests import helpers @@ -10,7 +12,7 @@ def fake_user(**kwargs): kwargs.setdefault("id", 43) kwargs.setdefault("name", "bob the test man") kwargs.setdefault("discriminator", 1337) - kwargs.setdefault("roles", [666]) + kwargs.setdefault("roles", [helpers.MockRole(id=666)]) kwargs.setdefault("in_guild", True) return kwargs @@ -134,6 +136,7 @@ class UserSyncerDiffTests(unittest.IsolatedAsyncioTestCase): self.get_mock_member(fake_user()), None ] + guild.fetch_member.side_effect = NotFound(mock.Mock(status=404), "Not found") actual_diff = await UserSyncer._get_diff(guild) expected_diff = ([], [{"id": 63, "in_guild": False}], None) @@ -158,6 +161,7 @@ class UserSyncerDiffTests(unittest.IsolatedAsyncioTestCase): self.get_mock_member(updated_user), None ] + guild.fetch_member.side_effect = NotFound(mock.Mock(status=404), "Not found") actual_diff = await UserSyncer._get_diff(guild) expected_diff = ([new_user], [{"id": 55, "name": "updated"}, {"id": 63, "in_guild": False}], None) @@ -177,6 +181,7 @@ class UserSyncerDiffTests(unittest.IsolatedAsyncioTestCase): self.get_mock_member(fake_user()), None ] + guild.fetch_member.side_effect = NotFound(mock.Mock(status=404), "Not found") actual_diff = await UserSyncer._get_diff(guild) expected_diff = ([], [], None) diff --git a/tests/bot/exts/backend/test_error_handler.py b/tests/bot/exts/backend/test_error_handler.py index bd4fb5942..462f718e6 100644 --- a/tests/bot/exts/backend/test_error_handler.py +++ b/tests/bot/exts/backend/test_error_handler.py @@ -4,12 +4,12 @@ from unittest.mock import AsyncMock, MagicMock, call, patch from discord.ext.commands import errors from bot.api import ResponseCodeError -from bot.errors import InvalidInfractedUser, LockedResourceError +from bot.errors import InvalidInfractedUserError, LockedResourceError from bot.exts.backend.error_handler import ErrorHandler, setup from bot.exts.info.tags import Tags from bot.exts.moderation.silence import Silence from bot.utils.checks import InWhitelistCheckFailure -from tests.helpers import MockBot, MockContext, MockGuild, MockRole +from tests.helpers import MockBot, MockContext, MockGuild, MockRole, MockTextChannel class ErrorHandlerTests(unittest.IsolatedAsyncioTestCase): @@ -107,7 +107,7 @@ class ErrorHandlerTests(unittest.IsolatedAsyncioTestCase): """Should send error with `ctx.send` when error is `CommandOnCooldown`.""" self.ctx.reset_mock() cog = ErrorHandler(self.bot) - error = errors.CommandOnCooldown(10, 9) + error = errors.CommandOnCooldown(10, 9, type=None) self.assertIsNone(await 
cog.on_command_error(self.ctx, error)) self.ctx.send.assert_awaited_once_with(error) @@ -130,7 +130,7 @@ class ErrorHandlerTests(unittest.IsolatedAsyncioTestCase): "expect_mock_call": "send" }, { - "args": (self.ctx, errors.CommandInvokeError(InvalidInfractedUser(self.ctx.author))), + "args": (self.ctx, errors.CommandInvokeError(InvalidInfractedUserError(self.ctx.author))), "expect_mock_call": "send" } ) @@ -226,8 +226,8 @@ class TrySilenceTests(unittest.IsolatedAsyncioTestCase): self.bot.get_command.return_value.can_run = AsyncMock(side_effect=errors.CommandError()) self.assertFalse(await self.cog.try_silence(self.ctx)) - async def test_try_silence_silencing(self): - """Should run silence command with correct arguments.""" + async def test_try_silence_silence_duration(self): + """Should run silence command with correct duration argument.""" self.bot.get_command.return_value.can_run = AsyncMock(return_value=True) test_cases = ("shh", "shhh", "shhhhhh", "shhhhhhhhhhhhhhhhhhh") @@ -238,21 +238,85 @@ class TrySilenceTests(unittest.IsolatedAsyncioTestCase): self.assertTrue(await self.cog.try_silence(self.ctx)) self.ctx.invoke.assert_awaited_once_with( self.bot.get_command.return_value, - duration=min(case.count("h")*2, 15) + duration_or_channel=None, + duration=min(case.count("h")*2, 15), + kick=False ) + async def test_try_silence_silence_arguments(self): + """Should run silence with the correct channel, duration, and kick arguments.""" + self.bot.get_command.return_value.can_run = AsyncMock(return_value=True) + + test_cases = ( + (MockTextChannel(), None), # None represents the case when no argument is passed + (MockTextChannel(), False), + (MockTextChannel(), True) + ) + + for channel, kick in test_cases: + with self.subTest(kick=kick, channel=channel): + self.ctx.reset_mock() + self.ctx.invoked_with = "shh" + + self.ctx.message.content = f"!shh {channel.name} {kick if kick is not None else ''}" + self.ctx.guild.text_channels = [channel] + + self.assertTrue(await self.cog.try_silence(self.ctx)) + self.ctx.invoke.assert_awaited_once_with( + self.bot.get_command.return_value, + duration_or_channel=channel, + duration=4, + kick=(kick if kick is not None else False) + ) + + async def test_try_silence_silence_message(self): + """If the words after the command could not be converted to a channel, None should be passed as channel.""" + self.bot.get_command.return_value.can_run = AsyncMock(return_value=True) + self.ctx.invoked_with = "shh" + self.ctx.message.content = "!shh not_a_channel true" + + self.assertTrue(await self.cog.try_silence(self.ctx)) + self.ctx.invoke.assert_awaited_once_with( + self.bot.get_command.return_value, + duration_or_channel=None, + duration=4, + kick=False + ) + async def test_try_silence_unsilence(self): - """Should call unsilence command.""" + """Should call unsilence command with correct duration and channel arguments.""" self.silence.silence.can_run = AsyncMock(return_value=True) - test_cases = ("unshh", "unshhhhh", "unshhhhhhhhh") + test_cases = ( + ("unshh", None), + ("unshhhhh", None), + ("unshhhhhhhhh", None), + ("unshh", MockTextChannel()) + ) - for case in test_cases: - with self.subTest(message=case): + for invoke, channel in test_cases: + with self.subTest(message=invoke, channel=channel): self.bot.get_command.side_effect = (self.silence.silence, self.silence.unsilence) self.ctx.reset_mock() - self.ctx.invoked_with = case + + self.ctx.invoked_with = invoke + self.ctx.message.content = f"!{invoke}" + if channel is not None: + self.ctx.message.content += f" 
{channel.name}" + self.ctx.guild.text_channels = [channel] + self.assertTrue(await self.cog.try_silence(self.ctx)) - self.ctx.invoke.assert_awaited_once_with(self.silence.unsilence) + self.ctx.invoke.assert_awaited_once_with(self.silence.unsilence, channel=channel) + + async def test_try_silence_unsilence_message(self): + """If the words after the command could not be converted to a channel, None should be passed as channel.""" + self.silence.silence.can_run = AsyncMock(return_value=True) + self.bot.get_command.side_effect = (self.silence.silence, self.silence.unsilence) + + self.ctx.invoked_with = "unshh" + self.ctx.message.content = "!unshh not_a_channel" + + self.assertTrue(await self.cog.try_silence(self.ctx)) + self.ctx.invoke.assert_awaited_once_with(self.silence.unsilence, channel=None) async def test_try_silence_no_match(self): """Should return `False` when message don't match.""" diff --git a/tests/bot/exts/events/__init__.py b/tests/bot/exts/events/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/tests/bot/exts/events/__init__.py diff --git a/tests/bot/exts/events/test_code_jams.py b/tests/bot/exts/events/test_code_jams.py new file mode 100644 index 000000000..0856546af --- /dev/null +++ b/tests/bot/exts/events/test_code_jams.py @@ -0,0 +1,170 @@ +import unittest +from unittest.mock import AsyncMock, MagicMock, create_autospec, patch + +from discord import CategoryChannel +from discord.ext.commands import BadArgument + +from bot.constants import Roles +from bot.exts.events import code_jams +from bot.exts.events.code_jams import _channels, _cog +from tests.helpers import ( + MockAttachment, MockBot, MockCategoryChannel, MockContext, MockGuild, MockMember, MockRole, MockTextChannel, + autospec +) + +TEST_CSV = b"""\ +Team Name,Team Member Discord ID,Team Leader +Annoyed Alligators,12345,Y +Annoyed Alligators,54321,N +Oscillating Otters,12358,Y +Oscillating Otters,74832,N +Oscillating Otters,19903,N +Annoyed Alligators,11111,N +""" + + +def get_mock_category(channel_count: int, name: str) -> CategoryChannel: + """Return a mocked code jam category.""" + category = create_autospec(CategoryChannel, spec_set=True, instance=True) + category.name = name + category.channels = [MockTextChannel() for _ in range(channel_count)] + + return category + + +class JamCodejamCreateTests(unittest.IsolatedAsyncioTestCase): + """Tests for `codejam create` command.""" + + def setUp(self): + self.bot = MockBot() + self.admin_role = MockRole(name="Admins", id=Roles.admins) + self.command_user = MockMember([self.admin_role]) + self.guild = MockGuild([self.admin_role]) + self.ctx = MockContext(bot=self.bot, author=self.command_user, guild=self.guild) + self.cog = _cog.CodeJams(self.bot) + + async def test_message_without_attachments(self): + """If no link or attachments are provided, commands.BadArgument should be raised.""" + self.ctx.message.attachments = [] + + with self.assertRaises(BadArgument): + await self.cog.create(self.cog, self.ctx, None) + + @patch.object(_channels, "create_team_channel") + @patch.object(_channels, "create_team_leader_channel") + async def test_result_sending(self, create_leader_channel, create_team_channel): + """Should call `ctx.send` when everything goes right.""" + self.ctx.message.attachments = [MockAttachment()] + self.ctx.message.attachments[0].read = AsyncMock() + self.ctx.message.attachments[0].read.return_value = TEST_CSV + + team_leaders = MockRole() + + self.guild.get_member.return_value = MockMember() + + self.ctx.guild.create_role = 
AsyncMock() + self.ctx.guild.create_role.return_value = team_leaders + self.cog.add_roles = AsyncMock() + + await self.cog.create(self.cog, self.ctx, None) + + create_team_channel.assert_awaited() + create_leader_channel.assert_awaited_once_with( + self.ctx.guild, team_leaders + ) + self.ctx.send.assert_awaited_once() + + async def test_link_returning_non_200_status(self): + """When the URL passed returns a non 200 status, it should send a message informing them.""" + self.bot.http_session.get.return_value = mock = MagicMock() + mock.status = 404 + await self.cog.create(self.cog, self.ctx, "https://not-a-real-link.com") + + self.ctx.send.assert_awaited_once() + + @patch.object(_channels, "_send_status_update") + async def test_category_doesnt_exist(self, update): + """Should create a new code jam category.""" + subtests = ( + [], + [get_mock_category(_channels.MAX_CHANNELS, _channels.CATEGORY_NAME)], + [get_mock_category(_channels.MAX_CHANNELS - 2, "other")], + ) + + for categories in subtests: + update.reset_mock() + self.guild.reset_mock() + self.guild.categories = categories + + with self.subTest(categories=categories): + actual_category = await _channels._get_category(self.guild) + + update.assert_called_once() + self.guild.create_category_channel.assert_awaited_once() + category_overwrites = self.guild.create_category_channel.call_args[1]["overwrites"] + + self.assertFalse(category_overwrites[self.guild.default_role].read_messages) + self.assertTrue(category_overwrites[self.guild.me].read_messages) + self.assertEqual(self.guild.create_category_channel.return_value, actual_category) + + async def test_category_channel_exist(self): + """Should not try to create category channel.""" + expected_category = get_mock_category(_channels.MAX_CHANNELS - 2, _channels.CATEGORY_NAME) + self.guild.categories = [ + get_mock_category(_channels.MAX_CHANNELS - 2, "other"), + expected_category, + get_mock_category(0, _channels.CATEGORY_NAME), + ] + + actual_category = await _channels._get_category(self.guild) + self.assertEqual(expected_category, actual_category) + + async def test_channel_overwrites(self): + """Should have correct permission overwrites for users and roles.""" + leader = (MockMember(), True) + members = [leader] + [(MockMember(), False) for _ in range(4)] + overwrites = _channels._get_overwrites(members, self.guild) + + for member, _ in members: + self.assertTrue(overwrites[member].read_messages) + + @patch.object(_channels, "_get_overwrites") + @patch.object(_channels, "_get_category") + @autospec(_channels, "_add_team_leader_roles", pass_mocks=False) + async def test_team_channels_creation(self, get_category, get_overwrites): + """Should create a text channel for a team.""" + team_leaders = MockRole() + members = [(MockMember(), True)] + [(MockMember(), False) for _ in range(5)] + category = MockCategoryChannel() + category.create_text_channel = AsyncMock() + + get_category.return_value = category + await _channels.create_team_channel(self.guild, "my-team", members, team_leaders) + + category.create_text_channel.assert_awaited_once_with( + "my-team", + overwrites=get_overwrites.return_value + ) + + async def test_jam_roles_adding(self): + """Should add team leader role to leader and jam role to every team member.""" + leader_role = MockRole(name="Team Leader") + + leader = MockMember() + members = [(leader, True)] + [(MockMember(), False) for _ in range(4)] + await _channels._add_team_leader_roles(members, leader_role) + + leader.add_roles.assert_awaited_once_with(leader_role) + for 
member, is_leader in members: + if not is_leader: + member.add_roles.assert_not_awaited() + + +class CodeJamSetup(unittest.TestCase): + """Test for `setup` function of `CodeJam` cog.""" + + def test_setup(self): + """Should call `bot.add_cog`.""" + bot = MockBot() + code_jams.setup(bot) + bot.add_cog.assert_called_once() diff --git a/tests/bot/exts/filters/test_token_remover.py b/tests/bot/exts/filters/test_token_remover.py index 51feae9cb..4db27269a 100644 --- a/tests/bot/exts/filters/test_token_remover.py +++ b/tests/bot/exts/filters/test_token_remover.py @@ -26,7 +26,7 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): self.msg.guild.get_member.return_value.bot = False self.msg.guild.get_member.return_value.__str__.return_value = "Woody" self.msg.author.__str__ = MagicMock(return_value=self.msg.author.name) - self.msg.author.avatar_url_as.return_value = "picture-lemon.png" + self.msg.author.display_avatar.url = "picture-lemon.png" def test_extract_user_id_valid(self): """Should consider user IDs valid if they decode into an integer ID.""" @@ -295,20 +295,21 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): ) @autospec("bot.exts.filters.token_remover", "UNKNOWN_USER_LOG_MESSAGE") - def test_format_userid_log_message_unknown(self, unknown_user_log_message): + async def test_format_userid_log_message_unknown(self, unknown_user_log_message,): """Should correctly format the user ID portion when the actual user it belongs to is unknown.""" token = Token("NDcyMjY1OTQzMDYyNDEzMzMy", "XsySD_", "s45jqDV_Iisn-symw0yDRrk_jf4") unknown_user_log_message.format.return_value = " Partner" msg = MockMessage(id=555, content="hello world") msg.guild.get_member.return_value = None + msg.guild.fetch_member.side_effect = NotFound(mock.Mock(status=404), "Not found") - return_value = TokenRemover.format_userid_log_message(msg, token) + return_value = await TokenRemover.format_userid_log_message(msg, token) self.assertEqual(return_value, (unknown_user_log_message.format.return_value, False)) unknown_user_log_message.format.assert_called_once_with(user_id=472265943062413332) @autospec("bot.exts.filters.token_remover", "KNOWN_USER_LOG_MESSAGE") - def test_format_userid_log_message_bot(self, known_user_log_message): + async def test_format_userid_log_message_bot(self, known_user_log_message): """Should correctly format the user ID portion when the ID belongs to a known bot.""" token = Token("NDcyMjY1OTQzMDYyNDEzMzMy", "XsySD_", "s45jqDV_Iisn-symw0yDRrk_jf4") known_user_log_message.format.return_value = " Partner" @@ -316,7 +317,7 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): msg.guild.get_member.return_value.__str__.return_value = "Sam" msg.guild.get_member.return_value.bot = True - return_value = TokenRemover.format_userid_log_message(msg, token) + return_value = await TokenRemover.format_userid_log_message(msg, token) self.assertEqual(return_value, (known_user_log_message.format.return_value, True)) @@ -327,12 +328,12 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): ) @autospec("bot.exts.filters.token_remover", "KNOWN_USER_LOG_MESSAGE") - def test_format_log_message_user_token_user(self, user_token_message): + async def test_format_log_message_user_token_user(self, user_token_message): """Should correctly format the user ID portion when the ID belongs to a known user.""" token = Token("NDY3MjIzMjMwNjUwNzc3NjQx", "XsySD_", "s45jqDV_Iisn-symw0yDRrk_jf4") user_token_message.format.return_value = "Partner" - return_value = 
TokenRemover.format_userid_log_message(self.msg, token) + return_value = await TokenRemover.format_userid_log_message(self.msg, token) self.assertEqual(return_value, (user_token_message.format.return_value, True)) user_token_message.format.assert_called_once_with( @@ -375,7 +376,7 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): colour=Colour(constants.Colours.soft_red), title="Token removed!", text=log_msg + "\n" + userid_log_message, - thumbnail=self.msg.author.avatar_url_as.return_value, + thumbnail=self.msg.author.display_avatar.url, channel_id=constants.Channels.mod_alerts, ping_everyone=True, ) diff --git a/tests/bot/exts/info/test_help.py b/tests/bot/exts/info/test_help.py new file mode 100644 index 000000000..604c69671 --- /dev/null +++ b/tests/bot/exts/info/test_help.py @@ -0,0 +1,23 @@ +import unittest + +import rapidfuzz + +from bot.exts.info import help +from tests.helpers import MockBot, MockContext, autospec + + +class HelpCogTests(unittest.IsolatedAsyncioTestCase): + def setUp(self) -> None: + """Attach an instance of the cog to the class for tests.""" + self.bot = MockBot() + self.cog = help.Help(self.bot) + self.ctx = MockContext(bot=self.bot) + self.bot.help_command.context = self.ctx + + @autospec(help.CustomHelpCommand, "get_all_help_choices", return_value={"help"}, pass_mocks=False) + async def test_help_fuzzy_matching(self): + """Test fuzzy matching of commands when called from help.""" + result = await self.bot.help_command.command_not_found("holp") + + match = {"help": rapidfuzz.fuzz.ratio("help", "holp")} + self.assertEqual(match, result.possible_matches) diff --git a/tests/bot/exts/info/test_information.py b/tests/bot/exts/info/test_information.py index 770660fe3..4b50c3fd9 100644 --- a/tests/bot/exts/info/test_information.py +++ b/tests/bot/exts/info/test_information.py @@ -84,7 +84,7 @@ class InformationCogTests(unittest.IsolatedAsyncioTestCase): self.assertEqual(dummy_embed.fields[0].value, str(dummy_role.id)) self.assertEqual(dummy_embed.fields[1].value, f"#{dummy_role.colour.value:0>6x}") - self.assertEqual(dummy_embed.fields[2].value, "0.63 0.48 218") + self.assertEqual(dummy_embed.fields[2].value, "0.65 0.64 242") self.assertEqual(dummy_embed.fields[3].value, "1") self.assertEqual(dummy_embed.fields[4].value, "10") self.assertEqual(dummy_embed.fields[5].value, "0") @@ -262,7 +262,6 @@ class UserInfractionHelperMethodTests(unittest.IsolatedAsyncioTestCase): await self._method_subtests(self.cog.user_nomination_counts, test_values, header) [email protected]("bot.exts.info.information.time_since", new=unittest.mock.MagicMock(return_value="1 year ago")) @unittest.mock.patch("bot.exts.info.information.constants.MODERATION_CHANNELS", new=[50]) class UserEmbedTests(unittest.IsolatedAsyncioTestCase): """Tests for the creation of the `!user` embed.""" @@ -347,7 +346,7 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): self.assertEqual( textwrap.dedent(f""" - Created: {"1 year ago"} + Created: {"<t:1:R>"} Profile: {user.mention} ID: {user.id} """).strip(), @@ -356,7 +355,7 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): self.assertEqual( textwrap.dedent(f""" - Joined: {"1 year ago"} + Joined: {"<t:1:R>"} Verified: {"True"} Roles: &Moderators """).strip(), @@ -379,7 +378,7 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): self.assertEqual( textwrap.dedent(f""" - Created: {"1 year ago"} + Created: {"<t:1:R>"} Profile: {user.mention} ID: {user.id} """).strip(), @@ -388,7 +387,7 @@ class 
UserEmbedTests(unittest.IsolatedAsyncioTestCase): self.assertEqual( textwrap.dedent(f""" - Joined: {"1 year ago"} + Joined: {"<t:1:R>"} Roles: &Moderators """).strip(), embed.fields[1].value @@ -436,10 +435,9 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase): ctx = helpers.MockContext() user = helpers.MockMember(id=217, colour=0) - user.avatar_url_as.return_value = "avatar url" + user.display_avatar.url = "avatar url" embed = await self.cog.create_user_embed(ctx, user) - user.avatar_url_as.assert_called_once_with(static_format="png") self.assertEqual(embed.thumbnail.url, "avatar url") @@ -508,7 +506,7 @@ class UserCommandTests(unittest.IsolatedAsyncioTestCase): @unittest.mock.patch("bot.exts.info.information.Information.create_user_embed") async def test_staff_members_can_bypass_channel_restriction(self, create_embed, constants): """Staff members should be able to bypass the bot-commands channel restriction.""" - constants.STAFF_ROLES = [self.moderator_role.id] + constants.STAFF_PARTNERS_COMMUNITY_ROLES = [self.moderator_role.id] ctx = helpers.MockContext(author=self.moderator, channel=helpers.MockTextChannel(id=200)) await self.cog.user_info(self.cog, ctx) @@ -520,7 +518,7 @@ class UserCommandTests(unittest.IsolatedAsyncioTestCase): async def test_moderators_can_target_another_member(self, create_embed, constants): """A moderator should be able to use `!user` targeting another user.""" constants.MODERATION_ROLES = [self.moderator_role.id] - constants.STAFF_ROLES = [self.moderator_role.id] + constants.STAFF_PARTNERS_COMMUNITY_ROLES = [self.moderator_role.id] ctx = helpers.MockContext(author=self.moderator, channel=helpers.MockTextChannel(id=50)) await self.cog.user_info(self.cog, ctx, self.target) diff --git a/tests/bot/exts/moderation/infraction/test_infractions.py b/tests/bot/exts/moderation/infraction/test_infractions.py index b9d527770..4d01e18a5 100644 --- a/tests/bot/exts/moderation/infraction/test_infractions.py +++ b/tests/bot/exts/moderation/infraction/test_infractions.py @@ -3,6 +3,8 @@ import textwrap import unittest from unittest.mock import ANY, AsyncMock, MagicMock, Mock, patch +from discord.errors import NotFound + from bot.constants import Event from bot.exts.moderation.infraction import _utils from bot.exts.moderation.infraction.infractions import Infractions @@ -13,12 +15,13 @@ class TruncationTests(unittest.IsolatedAsyncioTestCase): """Tests for ban and kick command reason truncation.""" def setUp(self): + self.me = MockMember(id=7890, roles=[MockRole(id=7890, position=5)]) self.bot = MockBot() self.cog = Infractions(self.bot) - self.user = MockMember(id=1234, top_role=MockRole(id=3577, position=10)) - self.target = MockMember(id=1265, top_role=MockRole(id=9876, position=0)) + self.user = MockMember(id=1234, roles=[MockRole(id=3577, position=10)]) + self.target = MockMember(id=1265, roles=[MockRole(id=9876, position=1)]) self.guild = MockGuild(id=4567) - self.ctx = MockContext(bot=self.bot, author=self.user, guild=self.guild) + self.ctx = MockContext(me=self.me, bot=self.bot, author=self.user, guild=self.guild) @patch("bot.exts.moderation.infraction._utils.get_active_infraction") @patch("bot.exts.moderation.infraction._utils.post_infraction") @@ -64,8 +67,8 @@ class VoiceBanTests(unittest.IsolatedAsyncioTestCase): def setUp(self): self.bot = MockBot() - self.mod = MockMember(top_role=10) - self.user = MockMember(top_role=1, roles=[MockRole(id=123456)]) + self.mod = MockMember(roles=[MockRole(id=7890123, position=10)]) + self.user = 
MockMember(roles=[MockRole(id=123456, position=1)]) self.guild = MockGuild() self.ctx = MockContext(bot=self.bot, author=self.mod) self.cog = Infractions(self.bot) @@ -195,7 +198,8 @@ class VoiceBanTests(unittest.IsolatedAsyncioTestCase): async def test_voice_unban_user_not_found(self): """Should include info to return dict when user was not found from guild.""" self.guild.get_member.return_value = None - result = await self.cog.pardon_voice_ban(self.user.id, self.guild, "foobar") + self.guild.fetch_member.side_effect = NotFound(Mock(status=404), "Not found") + result = await self.cog.pardon_voice_ban(self.user.id, self.guild) self.assertEqual(result, {"Info": "User was not found in the guild."}) @patch("bot.exts.moderation.infraction.infractions._utils.notify_pardon") @@ -206,7 +210,7 @@ class VoiceBanTests(unittest.IsolatedAsyncioTestCase): notify_pardon_mock.return_value = True format_user_mock.return_value = "my-user" - result = await self.cog.pardon_voice_ban(self.user.id, self.guild, "foobar") + result = await self.cog.pardon_voice_ban(self.user.id, self.guild) self.assertEqual(result, { "Member": "my-user", "DM": "Sent" @@ -221,7 +225,7 @@ class VoiceBanTests(unittest.IsolatedAsyncioTestCase): notify_pardon_mock.return_value = False format_user_mock.return_value = "my-user" - result = await self.cog.pardon_voice_ban(self.user.id, self.guild, "foobar") + result = await self.cog.pardon_voice_ban(self.user.id, self.guild) self.assertEqual(result, { "Member": "my-user", "DM": "**Failed**" diff --git a/tests/bot/exts/moderation/infraction/test_utils.py b/tests/bot/exts/moderation/infraction/test_utils.py index 50a717bb5..72eebb254 100644 --- a/tests/bot/exts/moderation/infraction/test_utils.py +++ b/tests/bot/exts/moderation/infraction/test_utils.py @@ -94,8 +94,8 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): test_case = namedtuple("test_case", ["get_return_value", "expected_output", "infraction_nr", "send_msg"]) test_cases = [ test_case([], None, None, True), - test_case([{"id": 123987}], {"id": 123987}, "123987", False), - test_case([{"id": 123987}], {"id": 123987}, "123987", True) + test_case([{"id": 123987, "type": "ban"}], {"id": 123987, "type": "ban"}, "123987", False), + test_case([{"id": 123987, "type": "ban"}], {"id": 123987, "type": "ban"}, "123987", True) ] for case in test_cases: @@ -137,16 +137,16 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): title=utils.INFRACTION_TITLE, description=utils.INFRACTION_DESCRIPTION_TEMPLATE.format( type="Ban", - expires="2020-02-26 09:20 (23 hours and 59 minutes) UTC", + expires="2020-02-26 09:20 (23 hours and 59 minutes)", reason="No reason provided." - ), + ) + utils.INFRACTION_APPEAL_SERVER_FOOTER, colour=Colours.soft_red, url=utils.RULES_URL ).set_author( name=utils.INFRACTION_AUTHOR_NAME, url=utils.RULES_URL, icon_url=Icons.token_removed - ).set_footer(text=utils.INFRACTION_APPEAL_MODMAIL_FOOTER), + ), "send_result": True }, { @@ -157,14 +157,14 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): type="Warning", expires="N/A", reason="Test reason." 
- ), + ) + utils.INFRACTION_APPEAL_MODMAIL_FOOTER, colour=Colours.soft_red, url=utils.RULES_URL ).set_author( name=utils.INFRACTION_AUTHOR_NAME, url=utils.RULES_URL, icon_url=Icons.token_removed - ).set_footer(text=utils.INFRACTION_APPEAL_MODMAIL_FOOTER), + ), "send_result": False }, # Note that this test case asserts that the DM that *would* get sent to the user is formatted @@ -177,14 +177,14 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): type="Note", expires="N/A", reason="No reason provided." - ), + ) + utils.INFRACTION_APPEAL_MODMAIL_FOOTER, colour=Colours.soft_red, url=utils.RULES_URL ).set_author( name=utils.INFRACTION_AUTHOR_NAME, url=utils.RULES_URL, icon_url=Icons.defcon_denied - ).set_footer(text=utils.INFRACTION_APPEAL_MODMAIL_FOOTER), + ), "send_result": False }, { @@ -193,16 +193,16 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): title=utils.INFRACTION_TITLE, description=utils.INFRACTION_DESCRIPTION_TEMPLATE.format( type="Mute", - expires="2020-02-26 09:20 (23 hours and 59 minutes) UTC", + expires="2020-02-26 09:20 (23 hours and 59 minutes)", reason="Test" - ), + ) + utils.INFRACTION_APPEAL_MODMAIL_FOOTER, colour=Colours.soft_red, url=utils.RULES_URL ).set_author( name=utils.INFRACTION_AUTHOR_NAME, url=utils.RULES_URL, icon_url=Icons.defcon_denied - ).set_footer(text=utils.INFRACTION_APPEAL_MODMAIL_FOOTER), + ), "send_result": False }, { @@ -213,14 +213,14 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): type="Mute", expires="N/A", reason="foo bar" * 4000 - )[:2045] + "...", + )[:4093-utils.LONGEST_EXTRAS] + "..." + utils.INFRACTION_APPEAL_MODMAIL_FOOTER, colour=Colours.soft_red, url=utils.RULES_URL ).set_author( name=utils.INFRACTION_AUTHOR_NAME, url=utils.RULES_URL, icon_url=Icons.defcon_denied - ).set_footer(text=utils.INFRACTION_APPEAL_MODMAIL_FOOTER), + ), "send_result": True } ] diff --git a/tests/bot/exts/moderation/test_incidents.py b/tests/bot/exts/moderation/test_incidents.py index cbf7f7bcf..ccc842050 100644 --- a/tests/bot/exts/moderation/test_incidents.py +++ b/tests/bot/exts/moderation/test_incidents.py @@ -3,7 +3,7 @@ import enum import logging import typing as t import unittest -from unittest.mock import AsyncMock, MagicMock, call, patch +from unittest.mock import AsyncMock, MagicMock, Mock, call, patch import aiohttp import discord @@ -11,15 +11,8 @@ import discord from bot.constants import Colours from bot.exts.moderation import incidents from tests.helpers import ( - MockAsyncWebhook, - MockAttachment, - MockBot, - MockMember, - MockMessage, - MockReaction, - MockRole, - MockTextChannel, - MockUser, + MockAsyncWebhook, MockAttachment, MockBot, MockMember, MockMessage, MockReaction, MockRole, MockTextChannel, + MockUser ) @@ -379,7 +372,7 @@ class TestArchive(TestIncidents): # Define our own `incident` to be archived incident = MockMessage( content="this is an incident", - author=MockUser(name="author_name", avatar_url="author_avatar"), + author=MockUser(name="author_name", display_avatar=Mock(url="author_avatar")), id=123, ) built_embed = MagicMock(discord.Embed, id=123) # We patch `make_embed` to return this diff --git a/tests/bot/exts/moderation/test_modlog.py b/tests/bot/exts/moderation/test_modlog.py index f8f142484..79e04837d 100644 --- a/tests/bot/exts/moderation/test_modlog.py +++ b/tests/bot/exts/moderation/test_modlog.py @@ -25,5 +25,5 @@ class ModLogTests(unittest.IsolatedAsyncioTestCase): ) embed = self.channel.send.call_args[1]["embed"] self.assertEqual( - embed.description, ("foo bar" * 
3000)[:2045] + "..." + embed.description, ("foo bar" * 3000)[:4093] + "..." ) diff --git a/tests/bot/exts/moderation/test_silence.py b/tests/bot/exts/moderation/test_silence.py index fa5fc9e81..92ce3418a 100644 --- a/tests/bot/exts/moderation/test_silence.py +++ b/tests/bot/exts/moderation/test_silence.py @@ -1,15 +1,19 @@ import asyncio +import itertools import unittest from datetime import datetime, timezone +from typing import List, Tuple from unittest import mock -from unittest.mock import Mock +from unittest.mock import AsyncMock, Mock from async_rediscache import RedisSession from discord import PermissionOverwrite -from bot.constants import Channels, Guild, Roles +from bot.constants import Channels, Guild, MODERATION_ROLES, Roles from bot.exts.moderation import silence -from tests.helpers import MockBot, MockContext, MockTextChannel, autospec +from tests.helpers import ( + MockBot, MockContext, MockGuild, MockMember, MockRole, MockTextChannel, MockVoiceChannel, autospec +) redis_session = None redis_loop = asyncio.get_event_loop() @@ -149,7 +153,7 @@ class SilenceCogTests(unittest.IsolatedAsyncioTestCase): self.assertTrue(self.cog._init_task.cancelled()) @autospec("discord.ext.commands", "has_any_role") - @mock.patch.object(silence, "MODERATION_ROLES", new=(1, 2, 3)) + @mock.patch.object(silence.constants, "MODERATION_ROLES", new=(1, 2, 3)) async def test_cog_check(self, role_check): """Role check was called with `MODERATION_ROLES`""" ctx = MockContext() @@ -159,6 +163,170 @@ class SilenceCogTests(unittest.IsolatedAsyncioTestCase): role_check.assert_called_once_with(*(1, 2, 3)) role_check.return_value.predicate.assert_awaited_once_with(ctx) + async def test_force_voice_sync(self): + """Tests the _force_voice_sync helper function.""" + await self.cog._async_init() + + # Create a regular member, and one member for each of the moderation roles + moderation_members = [MockMember(roles=[MockRole(id=role)]) for role in MODERATION_ROLES] + members = [MockMember(), *moderation_members] + + channel = MockVoiceChannel(members=members) + + await self.cog._force_voice_sync(channel) + for member in members: + if member in moderation_members: + member.move_to.assert_not_called() + else: + self.assertEqual(member.move_to.call_count, 2) + calls = member.move_to.call_args_list + + # Tests that the member was moved to the afk channel, and back. 
+ self.assertEqual((channel.guild.afk_channel,), calls[0].args) + self.assertEqual((channel,), calls[1].args) + + async def test_force_voice_sync_no_channel(self): + """Test to ensure _force_voice_sync can create its own voice channel if one is not available.""" + await self.cog._async_init() + + channel = MockVoiceChannel(guild=MockGuild(afk_channel=None)) + new_channel = MockVoiceChannel(delete=AsyncMock()) + channel.guild.create_voice_channel.return_value = new_channel + + await self.cog._force_voice_sync(channel) + + # Check channel creation + overwrites = { + channel.guild.default_role: PermissionOverwrite(speak=False, connect=False, view_channel=False) + } + channel.guild.create_voice_channel.assert_awaited_once_with("mute-temp", overwrites=overwrites) + + # Check bot deleted channel + new_channel.delete.assert_awaited_once() + + async def test_voice_kick(self): + """Test to ensure kick function can remove all members from a voice channel.""" + await self.cog._async_init() + + # Create a regular member, and one member for each of the moderation roles + moderation_members = [MockMember(roles=[MockRole(id=role)]) for role in MODERATION_ROLES] + members = [MockMember(), *moderation_members] + + channel = MockVoiceChannel(members=members) + await self.cog._kick_voice_members(channel) + + for member in members: + if member in moderation_members: + member.move_to.assert_not_called() + else: + self.assertEqual((None,), member.move_to.call_args_list[0].args) + + @staticmethod + def create_erroneous_members() -> Tuple[MockMember, List[MockMember]]: + """ + Helper method to generate a list of members, one of which errors out on its move_to call. + + Returns the erroneous member, + as well as a list of regular and erroneous members combined, in that order.
+ """ + erroneous_member = MockMember(move_to=AsyncMock(side_effect=Exception())) + members = [MockMember(), erroneous_member] + + return erroneous_member, members + + async def test_kick_move_to_error(self): + """Test to ensure move_to gets called on all members during kick, even if some fail.""" + await self.cog._async_init() + _, members = self.create_erroneous_members() + + await self.cog._kick_voice_members(MockVoiceChannel(members=members)) + for member in members: + member.move_to.assert_awaited_once() + + async def test_sync_move_to_error(self): + """Test to ensure move_to gets called on all members during sync, even if some fail.""" + await self.cog._async_init() + failing_member, members = self.create_erroneous_members() + + await self.cog._force_voice_sync(MockVoiceChannel(members=members)) + for member in members: + self.assertEqual(member.move_to.call_count, 1 if member == failing_member else 2) + + +class SilenceArgumentParserTests(unittest.IsolatedAsyncioTestCase): + """Tests for the silence argument parser utility function.""" + + def setUp(self): + self.bot = MockBot() + self.cog = silence.Silence(self.bot) + self.cog._init_task = asyncio.Future() + self.cog._init_task.set_result(None) + + @autospec(silence.Silence, "send_message", pass_mocks=False) + @autospec(silence.Silence, "_set_silence_overwrites", return_value=False, pass_mocks=False) + @autospec(silence.Silence, "parse_silence_args") + async def test_command(self, parser_mock): + """Test that the command passes in the correct arguments for different calls.""" + test_cases = ( + (), + (15, ), + (MockTextChannel(),), + (MockTextChannel(), 15), + ) + + ctx = MockContext() + parser_mock.return_value = (ctx.channel, 10) + + for case in test_cases: + with self.subTest("Test command converters", args=case): + await self.cog.silence.callback(self.cog, ctx, *case) + + try: + first_arg = case[0] + except IndexError: + # Default value when the first argument is not passed + first_arg = None + + try: + second_arg = case[1] + except IndexError: + # Default value when the second argument is not passed + second_arg = 10 + + parser_mock.assert_called_with(ctx, first_arg, second_arg) + + async def test_no_arguments(self): + """Test the parser when no arguments are passed to the command.""" + ctx = MockContext() + channel, duration = self.cog.parse_silence_args(ctx, None, 10) + + self.assertEqual(ctx.channel, channel) + self.assertEqual(10, duration) + + async def test_channel_only(self): + """Test the parser when just the channel argument is passed.""" + expected_channel = MockTextChannel() + actual_channel, duration = self.cog.parse_silence_args(MockContext(), expected_channel, 10) + + self.assertEqual(expected_channel, actual_channel) + self.assertEqual(10, duration) + + async def test_duration_only(self): + """Test the parser when just the duration argument is passed.""" + ctx = MockContext() + channel, duration = self.cog.parse_silence_args(ctx, 15, 10) + + self.assertEqual(ctx.channel, channel) + self.assertEqual(15, duration) + + async def test_all_args(self): + """Test the parser when both channel and duration are passed.""" + expected_channel = MockTextChannel() + actual_channel, duration = self.cog.parse_silence_args(MockContext(), expected_channel, 15) + + self.assertEqual(expected_channel, actual_channel) + self.assertEqual(15, duration) + @autospec(silence.Silence, "previous_overwrites", "unsilence_timestamps", pass_mocks=False) class RescheduleTests(unittest.IsolatedAsyncioTestCase): @@ -235,6 +403,16 @@ class 
RescheduleTests(unittest.IsolatedAsyncioTestCase): self.cog.notifier.add_channel.assert_not_called() +def voice_sync_helper(function): + """Helper wrapper to test the sync and kick functions for voice channels.""" + @autospec(silence.Silence, "_force_voice_sync", "_kick_voice_members", "_set_silence_overwrites") + async def inner(self, sync, kick, overwrites): + overwrites.return_value = True + await function(self, MockContext(), sync, kick) + + return inner + + @autospec(silence.Silence, "previous_overwrites", "unsilence_timestamps", pass_mocks=False) class SilenceTests(unittest.IsolatedAsyncioTestCase): """Tests for the silence command and its related helper methods.""" @@ -242,7 +420,7 @@ class SilenceTests(unittest.IsolatedAsyncioTestCase): @autospec(silence.Silence, "_reschedule", pass_mocks=False) @autospec(silence, "Scheduler", "SilenceNotifier", pass_mocks=False) def setUp(self) -> None: - self.bot = MockBot() + self.bot = MockBot(get_channel=lambda _: MockTextChannel()) self.cog = silence.Silence(self.bot) self.cog._init_task = asyncio.Future() self.cog._init_task.set_result(None) @@ -252,62 +430,188 @@ class SilenceTests(unittest.IsolatedAsyncioTestCase): asyncio.run(self.cog._async_init()) # Populate instance attributes. - self.channel = MockTextChannel() - self.overwrite = PermissionOverwrite(stream=True, send_messages=True, add_reactions=False) - self.channel.overwrites_for.return_value = self.overwrite + self.text_channel = MockTextChannel() + self.text_overwrite = PermissionOverwrite( + send_messages=True, + add_reactions=False, + create_private_threads=True, + create_public_threads=False, + send_messages_in_threads=True + ) + self.text_channel.overwrites_for.return_value = self.text_overwrite + + self.voice_channel = MockVoiceChannel() + self.voice_overwrite = PermissionOverwrite(connect=True, speak=True) + self.voice_channel.overwrites_for.return_value = self.voice_overwrite async def test_sent_correct_message(self): - """Appropriate failure/success message was sent by the command.""" + """Appropriate failure/success message was sent by the command to the correct channel.""" + # The following test tuples are made up of: + # duration, expected message, and the success of the _set_silence_overwrites function test_cases = ( (0.0001, silence.MSG_SILENCE_SUCCESS.format(duration=0.0001), True,), (None, silence.MSG_SILENCE_PERMANENT, True,), (5, silence.MSG_SILENCE_FAIL, False,), ) - for duration, message, was_silenced in test_cases: - ctx = MockContext() + + targets = (MockTextChannel(), MockVoiceChannel(), None) + + for (duration, message, was_silenced), target in itertools.product(test_cases, targets): with mock.patch.object(self.cog, "_set_silence_overwrites", return_value=was_silenced): - with self.subTest(was_silenced=was_silenced, message=message, duration=duration): - await self.cog.silence.callback(self.cog, ctx, duration) - ctx.send.assert_called_once_with(message) + with self.subTest(was_silenced=was_silenced, target=target, message=message): + with mock.patch.object(self.cog, "send_message") as send_message: + ctx = MockContext() + await self.cog.silence.callback(self.cog, ctx, target, duration) + send_message.assert_called_once_with( + message, + ctx.channel, + target or ctx.channel, + alert_target=was_silenced + ) + + @voice_sync_helper + async def test_sync_called(self, ctx, sync, kick): + """Tests if silence command calls sync on a voice channel.""" + channel = MockVoiceChannel() + await self.cog.silence.callback(self.cog, ctx, channel, 10, kick=False) + + 
sync.assert_awaited_once_with(self.cog, channel) + kick.assert_not_called() + + @voice_sync_helper + async def test_kick_called(self, ctx, sync, kick): + """Tests if silence command calls kick on a voice channel.""" + channel = MockVoiceChannel() + await self.cog.silence.callback(self.cog, ctx, channel, 10, kick=True) + + kick.assert_awaited_once_with(channel) + sync.assert_not_called() + + @voice_sync_helper + async def test_sync_not_called(self, ctx, sync, kick): + """Tests that silence command does not call sync on a text channel.""" + channel = MockTextChannel() + await self.cog.silence.callback(self.cog, ctx, channel, 10, kick=False) + + sync.assert_not_called() + kick.assert_not_called() + + @voice_sync_helper + async def test_kick_not_called(self, ctx, sync, kick): + """Tests that silence command does not call kick on a text channel.""" + channel = MockTextChannel() + await self.cog.silence.callback(self.cog, ctx, channel, 10, kick=True) + + sync.assert_not_called() + kick.assert_not_called() async def test_skipped_already_silenced(self): """Permissions were not set and `False` was returned for an already silenced channel.""" subtests = ( - (False, PermissionOverwrite(send_messages=False, add_reactions=False)), - (True, PermissionOverwrite(send_messages=True, add_reactions=True)), - (True, PermissionOverwrite(send_messages=False, add_reactions=False)), + ( + False, + MockTextChannel(), + PermissionOverwrite( + send_messages=False, + add_reactions=False, + create_private_threads=False, + create_public_threads=False, + send_messages_in_threads=False + ) + ), + ( + True, + MockTextChannel(), + PermissionOverwrite( + send_messages=True, + add_reactions=True, + create_private_threads=True, + create_public_threads=True, + send_messages_in_threads=True + ) + ), + ( + True, + MockTextChannel(), + PermissionOverwrite( + send_messages=False, + add_reactions=False, + create_private_threads=False, + create_public_threads=False, + send_messages_in_threads=False + ) + ), + (False, MockVoiceChannel(), PermissionOverwrite(connect=False, speak=False)), + (True, MockVoiceChannel(), PermissionOverwrite(connect=True, speak=True)), + (True, MockVoiceChannel(), PermissionOverwrite(connect=False, speak=False)), ) - for contains, overwrite in subtests: - with self.subTest(contains=contains, overwrite=overwrite): + for contains, channel, overwrite in subtests: + with self.subTest(contains=contains, is_text=isinstance(channel, MockTextChannel), overwrite=overwrite): self.cog.scheduler.__contains__.return_value = contains - channel = MockTextChannel() channel.overwrites_for.return_value = overwrite self.assertFalse(await self.cog._set_silence_overwrites(channel)) channel.set_permissions.assert_not_called() - async def test_silenced_channel(self): + async def test_silenced_text_channel(self): """Channel had `send_message` and `add_reactions` permissions revoked for verified role.""" - self.assertTrue(await self.cog._set_silence_overwrites(self.channel)) - self.assertFalse(self.overwrite.send_messages) - self.assertFalse(self.overwrite.add_reactions) - self.channel.set_permissions.assert_awaited_once_with( + self.assertTrue(await self.cog._set_silence_overwrites(self.text_channel)) + self.assertFalse(self.text_overwrite.send_messages) + self.assertFalse(self.text_overwrite.add_reactions) + self.text_channel.set_permissions.assert_awaited_once_with( self.cog._everyone_role, - overwrite=self.overwrite + overwrite=self.text_overwrite ) - async def test_preserved_other_overwrites(self): - """Channel's other 
unrelated overwrites were not changed.""" - prev_overwrite_dict = dict(self.overwrite) - await self.cog._set_silence_overwrites(self.channel) - new_overwrite_dict = dict(self.overwrite) + async def test_silenced_voice_channel_speak(self): + """Channel had `speak` permissions revoked for verified role.""" + self.assertTrue(await self.cog._set_silence_overwrites(self.voice_channel)) + self.assertFalse(self.voice_overwrite.speak) + self.voice_channel.set_permissions.assert_awaited_once_with( + self.cog._verified_voice_role, + overwrite=self.voice_overwrite + ) - # Remove 'send_messages' & 'add_reactions' keys because they were changed by the method. - del prev_overwrite_dict['send_messages'] - del prev_overwrite_dict['add_reactions'] - del new_overwrite_dict['send_messages'] - del new_overwrite_dict['add_reactions'] + async def test_silenced_voice_channel_full(self): + """Channel had `speak` and `connect` permissions revoked for verified role.""" + self.assertTrue(await self.cog._set_silence_overwrites(self.voice_channel, kick=True)) + self.assertFalse(self.voice_overwrite.speak or self.voice_overwrite.connect) + self.voice_channel.set_permissions.assert_awaited_once_with( + self.cog._verified_voice_role, + overwrite=self.voice_overwrite + ) + + async def test_preserved_other_overwrites_text(self): + """Channel's other unrelated overwrites were not changed for a text channel mute.""" + prev_overwrite_dict = dict(self.text_overwrite) + await self.cog._set_silence_overwrites(self.text_channel) + new_overwrite_dict = dict(self.text_overwrite) + + # Remove related permission keys because they were changed by the method. + for perm_name in ( + "send_messages", + "add_reactions", + "create_private_threads", + "create_public_threads", + "send_messages_in_threads" + ): + del prev_overwrite_dict[perm_name] + del new_overwrite_dict[perm_name] + + self.assertDictEqual(prev_overwrite_dict, new_overwrite_dict) + + async def test_preserved_other_overwrites_voice(self): + """Channel's other unrelated overwrites were not changed for a voice channel mute.""" + prev_overwrite_dict = dict(self.voice_overwrite) + await self.cog._set_silence_overwrites(self.voice_channel) + new_overwrite_dict = dict(self.voice_overwrite) + + # Remove 'connect' & 'speak' keys because they were changed by the method. 
+ del prev_overwrite_dict['connect'] + del prev_overwrite_dict['speak'] + del new_overwrite_dict['connect'] + del new_overwrite_dict['speak'] self.assertDictEqual(prev_overwrite_dict, new_overwrite_dict) @@ -320,7 +624,7 @@ class SilenceTests(unittest.IsolatedAsyncioTestCase): async def test_indefinite_added_to_notifier(self): """Channel was added to notifier if a duration was not set for the silence.""" with mock.patch.object(self.cog, "_set_silence_overwrites", return_value=True): - await self.cog.silence.callback(self.cog, MockContext(), None) + await self.cog.silence.callback(self.cog, MockContext(), None, None) self.cog.notifier.add_channel.assert_called_once() async def test_silenced_not_added_to_notifier(self): @@ -331,9 +635,12 @@ class SilenceTests(unittest.IsolatedAsyncioTestCase): async def test_cached_previous_overwrites(self): """Channel's previous overwrites were cached.""" - overwrite_json = '{"send_messages": true, "add_reactions": false}' - await self.cog._set_silence_overwrites(self.channel) - self.cog.previous_overwrites.set.assert_called_once_with(self.channel.id, overwrite_json) + overwrite_json = ( + '{"send_messages": true, "add_reactions": false, "create_private_threads": true, ' + '"create_public_threads": false, "send_messages_in_threads": true}' + ) + await self.cog._set_silence_overwrites(self.text_channel) + self.cog.previous_overwrites.set.assert_awaited_once_with(self.text_channel.id, overwrite_json) @autospec(silence, "datetime") async def test_cached_unsilence_time(self, datetime_mock): @@ -343,7 +650,7 @@ class SilenceTests(unittest.IsolatedAsyncioTestCase): timestamp = now_timestamp + duration * 60 datetime_mock.now.return_value = datetime.fromtimestamp(now_timestamp, tz=timezone.utc) - ctx = MockContext(channel=self.channel) + ctx = MockContext(channel=self.text_channel) await self.cog.silence.callback(self.cog, ctx, duration) self.cog.unsilence_timestamps.set.assert_awaited_once_with(ctx.channel.id, timestamp) @@ -351,26 +658,33 @@ class SilenceTests(unittest.IsolatedAsyncioTestCase): async def test_cached_indefinite_time(self): """A value of -1 was cached for a permanent silence.""" - ctx = MockContext(channel=self.channel) - await self.cog.silence.callback(self.cog, ctx, None) + ctx = MockContext(channel=self.text_channel) + await self.cog.silence.callback(self.cog, ctx, None, None) self.cog.unsilence_timestamps.set.assert_awaited_once_with(ctx.channel.id, -1) async def test_scheduled_task(self): """An unsilence task was scheduled.""" - ctx = MockContext(channel=self.channel, invoke=mock.MagicMock()) + ctx = MockContext(channel=self.text_channel, invoke=mock.MagicMock()) await self.cog.silence.callback(self.cog, ctx, 5) args = (300, ctx.channel.id, ctx.invoke.return_value) self.cog.scheduler.schedule_later.assert_called_once_with(*args) - ctx.invoke.assert_called_once_with(self.cog.unsilence) + ctx.invoke.assert_called_once_with(self.cog.unsilence, channel=ctx.channel) async def test_permanent_not_scheduled(self): """A task was not scheduled for a permanent silence.""" - ctx = MockContext(channel=self.channel) - await self.cog.silence.callback(self.cog, ctx, None) + ctx = MockContext(channel=self.text_channel) + await self.cog.silence.callback(self.cog, ctx, None, None) self.cog.scheduler.schedule_later.assert_not_called() + async def test_indefinite_silence(self): + """Test silencing a channel forever.""" + with mock.patch.object(self.cog, "_schedule_unsilence") as unsilence: + ctx = MockContext(channel=self.text_channel) + await 
self.cog.silence.callback(self.cog, ctx, -1) + unsilence.assert_awaited_once_with(ctx, ctx.channel, None) + @autospec(silence.Silence, "unsilence_timestamps", pass_mocks=False) class UnsilenceTests(unittest.IsolatedAsyncioTestCase): @@ -391,9 +705,13 @@ class UnsilenceTests(unittest.IsolatedAsyncioTestCase): self.cog.scheduler.__contains__.return_value = True overwrites_cache.get.return_value = '{"send_messages": true, "add_reactions": false}' - self.channel = MockTextChannel() - self.overwrite = PermissionOverwrite(stream=True, send_messages=False, add_reactions=False) - self.channel.overwrites_for.return_value = self.overwrite + self.text_channel = MockTextChannel() + self.text_overwrite = PermissionOverwrite(send_messages=False, add_reactions=False) + self.text_channel.overwrites_for.return_value = self.text_overwrite + + self.voice_channel = MockVoiceChannel() + self.voice_overwrite = PermissionOverwrite(connect=True, speak=True) + self.voice_channel.overwrites_for.return_value = self.voice_overwrite async def test_sent_correct_message(self): """Appropriate failure/success message was sent by the command.""" @@ -401,88 +719,128 @@ class UnsilenceTests(unittest.IsolatedAsyncioTestCase): test_cases = ( (True, silence.MSG_UNSILENCE_SUCCESS, unsilenced_overwrite), (False, silence.MSG_UNSILENCE_FAIL, unsilenced_overwrite), - (False, silence.MSG_UNSILENCE_MANUAL, self.overwrite), + (False, silence.MSG_UNSILENCE_MANUAL, self.text_overwrite), (False, silence.MSG_UNSILENCE_MANUAL, PermissionOverwrite(send_messages=False)), (False, silence.MSG_UNSILENCE_MANUAL, PermissionOverwrite(add_reactions=False)), ) - for was_unsilenced, message, overwrite in test_cases: + + targets = (None, MockTextChannel()) + + for (was_unsilenced, message, overwrite), target in itertools.product(test_cases, targets): ctx = MockContext() - with self.subTest(was_unsilenced=was_unsilenced, message=message, overwrite=overwrite): - with mock.patch.object(self.cog, "_unsilence", return_value=was_unsilenced): - ctx.channel.overwrites_for.return_value = overwrite - await self.cog.unsilence.callback(self.cog, ctx) - ctx.channel.send.assert_called_once_with(message) + ctx.channel.overwrites_for.return_value = overwrite + if target: + target.overwrites_for.return_value = overwrite + + with mock.patch.object(self.cog, "_unsilence", return_value=was_unsilenced): + with mock.patch.object(self.cog, "send_message") as send_message: + with self.subTest(was_unsilenced=was_unsilenced, overwrite=overwrite, target=target): + await self.cog.unsilence.callback(self.cog, ctx, channel=target) + + call_args = (message, ctx.channel, target or ctx.channel) + send_message.assert_awaited_once_with(*call_args, alert_target=was_unsilenced) async def test_skipped_already_unsilenced(self): """Permissions were not set and `False` was returned for an already unsilenced channel.""" self.cog.scheduler.__contains__.return_value = False self.cog.previous_overwrites.get.return_value = None - channel = MockTextChannel() - self.assertFalse(await self.cog._unsilence(channel)) - channel.set_permissions.assert_not_called() + for channel in (MockVoiceChannel(), MockTextChannel()): + with self.subTest(channel=channel): + self.assertFalse(await self.cog._unsilence(channel)) + channel.set_permissions.assert_not_called() - async def test_restored_overwrites(self): - """Channel's `send_message` and `add_reactions` overwrites were restored.""" - await self.cog._unsilence(self.channel) - self.channel.set_permissions.assert_awaited_once_with( + async def 
test_restored_overwrites_text(self): + """Text channel's `send_message` and `add_reactions` overwrites were restored.""" + await self.cog._unsilence(self.text_channel) + self.text_channel.set_permissions.assert_awaited_once_with( self.cog._everyone_role, - overwrite=self.overwrite, + overwrite=self.text_overwrite, ) # Recall that these values are determined by the fixture. - self.assertTrue(self.overwrite.send_messages) - self.assertFalse(self.overwrite.add_reactions) + self.assertTrue(self.text_overwrite.send_messages) + self.assertFalse(self.text_overwrite.add_reactions) + + async def test_restored_overwrites_voice(self): + """Voice channel's `connect` and `speak` overwrites were restored.""" + await self.cog._unsilence(self.voice_channel) + self.voice_channel.set_permissions.assert_awaited_once_with( + self.cog._verified_voice_role, + overwrite=self.voice_overwrite, + ) - async def test_cache_miss_used_default_overwrites(self): - """Both overwrites were set to None due previous values not being found in the cache.""" + # Recall that these values are determined by the fixture. + self.assertTrue(self.voice_overwrite.connect) + self.assertTrue(self.voice_overwrite.speak) + + async def test_cache_miss_used_default_overwrites_text(self): + """Text overwrites were set to None due to previous values not being found in the cache.""" self.cog.previous_overwrites.get.return_value = None - await self.cog._unsilence(self.channel) - self.channel.set_permissions.assert_awaited_once_with( + await self.cog._unsilence(self.text_channel) + self.text_channel.set_permissions.assert_awaited_once_with( self.cog._everyone_role, - overwrite=self.overwrite, + overwrite=self.text_overwrite, ) - self.assertIsNone(self.overwrite.send_messages) - self.assertIsNone(self.overwrite.add_reactions) + self.assertIsNone(self.text_overwrite.send_messages) + self.assertIsNone(self.text_overwrite.add_reactions) - async def test_cache_miss_sent_mod_alert(self): - """A message was sent to the mod alerts channel.""" + async def test_cache_miss_used_default_overwrites_voice(self): + """Voice overwrites were set to None due to previous values not being found in the cache.""" self.cog.previous_overwrites.get.return_value = None - await self.cog._unsilence(self.channel) + await self.cog._unsilence(self.voice_channel) + self.voice_channel.set_permissions.assert_awaited_once_with( + self.cog._verified_voice_role, + overwrite=self.voice_overwrite, + ) + + self.assertIsNone(self.voice_overwrite.connect) + self.assertIsNone(self.voice_overwrite.speak) + + async def test_cache_miss_sent_mod_alert_text(self): + """A message was sent to the mod alerts channel when unsilencing a text channel with no cached overwrites.""" + self.cog.previous_overwrites.get.return_value = None + await self.cog._unsilence(self.text_channel) + self.cog._mod_alerts_channel.send.assert_awaited_once() + + async def test_cache_miss_sent_mod_alert_voice(self): + """A message was sent to the mod alerts channel when unsilencing a voice channel with no cached overwrites.""" + self.cog.previous_overwrites.get.return_value = None + await self.cog._unsilence(MockVoiceChannel()) self.cog._mod_alerts_channel.send.assert_awaited_once() async def test_removed_notifier(self): """Channel was removed from `notifier`.""" - await self.cog._unsilence(self.channel) - self.cog.notifier.remove_channel.assert_called_once_with(self.channel) + await self.cog._unsilence(self.text_channel) + self.cog.notifier.remove_channel.assert_called_once_with(self.text_channel) async def test_deleted_cached_overwrite(self): """Channel was deleted from the overwrites
cache.""" - await self.cog._unsilence(self.channel) - self.cog.previous_overwrites.delete.assert_awaited_once_with(self.channel.id) + await self.cog._unsilence(self.text_channel) + self.cog.previous_overwrites.delete.assert_awaited_once_with(self.text_channel.id) async def test_deleted_cached_time(self): """Channel was deleted from the timestamp cache.""" - await self.cog._unsilence(self.channel) - self.cog.unsilence_timestamps.delete.assert_awaited_once_with(self.channel.id) + await self.cog._unsilence(self.text_channel) + self.cog.unsilence_timestamps.delete.assert_awaited_once_with(self.text_channel.id) async def test_cancelled_task(self): """The scheduled unsilence task should be cancelled.""" - await self.cog._unsilence(self.channel) - self.cog.scheduler.cancel.assert_called_once_with(self.channel.id) + await self.cog._unsilence(self.text_channel) + self.cog.scheduler.cancel.assert_called_once_with(self.text_channel.id) - async def test_preserved_other_overwrites(self): - """Channel's other unrelated overwrites were not changed, including cache misses.""" + async def test_preserved_other_overwrites_text(self): + """Text channel's other unrelated overwrites were not changed, including cache misses.""" for overwrite_json in ('{"send_messages": true, "add_reactions": null}', None): with self.subTest(overwrite_json=overwrite_json): self.cog.previous_overwrites.get.return_value = overwrite_json - prev_overwrite_dict = dict(self.overwrite) - await self.cog._unsilence(self.channel) - new_overwrite_dict = dict(self.overwrite) + prev_overwrite_dict = dict(self.text_overwrite) + await self.cog._unsilence(self.text_channel) + new_overwrite_dict = dict(self.text_overwrite) # Remove these keys because they were modified by the unsilence. del prev_overwrite_dict['send_messages'] @@ -491,3 +849,114 @@ class UnsilenceTests(unittest.IsolatedAsyncioTestCase): del new_overwrite_dict['add_reactions'] self.assertDictEqual(prev_overwrite_dict, new_overwrite_dict) + + async def test_preserved_other_overwrites_voice(self): + """Voice channel's other unrelated overwrites were not changed, including cache misses.""" + for overwrite_json in ('{"connect": true, "speak": true}', None): + with self.subTest(overwrite_json=overwrite_json): + self.cog.previous_overwrites.get.return_value = overwrite_json + + prev_overwrite_dict = dict(self.voice_overwrite) + await self.cog._unsilence(self.voice_channel) + new_overwrite_dict = dict(self.voice_overwrite) + + # Remove these keys because they were modified by the unsilence. 
+ del prev_overwrite_dict['connect'] + del prev_overwrite_dict['speak'] + del new_overwrite_dict['connect'] + del new_overwrite_dict['speak'] + + self.assertDictEqual(prev_overwrite_dict, new_overwrite_dict) + + async def test_unsilence_role(self): + """Tests unsilence_wrapper applies permission to the correct role.""" + test_cases = ( + (MockTextChannel(), self.cog.bot.get_guild(Guild.id).default_role), + (MockVoiceChannel(), self.cog.bot.get_guild(Guild.id).get_role(Roles.voice_verified)) + ) + + for channel, role in test_cases: + with self.subTest(channel=channel, role=role): + await self.cog._unsilence_wrapper(channel, MockContext()) + channel.overwrites_for.assert_called_with(role) + + +class SendMessageTests(unittest.IsolatedAsyncioTestCase): + """Unittests for the send message helper function.""" + + def setUp(self) -> None: + self.bot = MockBot() + self.cog = silence.Silence(self.bot) + + self.text_channels = [MockTextChannel() for _ in range(2)] + self.bot.get_channel.return_value = self.text_channels[1] + + self.voice_channel = MockVoiceChannel() + + async def test_send_to_channel(self): + """Tests a basic case for the send function.""" + message = "Test basic message." + await self.cog.send_message(message, *self.text_channels, alert_target=False) + + self.text_channels[0].send.assert_awaited_once_with(message) + self.text_channels[1].send.assert_not_called() + + async def test_send_to_multiple_channels(self): + """Tests sending messages to two channels.""" + message = "Test basic message." + await self.cog.send_message(message, *self.text_channels, alert_target=True) + + self.text_channels[0].send.assert_awaited_once_with(message) + self.text_channels[1].send.assert_awaited_once_with(message) + + async def test_duration_replacement(self): + """Tests that the channel name was set correctly for one target channel.""" + message = "Current. The following should be replaced: {channel}." + await self.cog.send_message(message, *self.text_channels, alert_target=False) + + updated_message = message.format(channel=self.text_channels[0].mention) + self.text_channels[0].send.assert_awaited_once_with(updated_message) + self.text_channels[1].send.assert_not_called() + + async def test_name_replacement_multiple_channels(self): + """Tests that the channel name was set correctly for two channels.""" + message = "Current. The following should be replaced: {channel}." + await self.cog.send_message(message, *self.text_channels, alert_target=True) + + self.text_channels[0].send.assert_awaited_once_with(message.format(channel=self.text_channels[0].mention)) + self.text_channels[1].send.assert_awaited_once_with(message.format(channel="current channel")) + + async def test_silence_voice(self): + """Tests that the correct message was sent when a voice channel is muted without alerting.""" + message = "This should show up just here." + await self.cog.send_message(message, self.text_channels[0], self.voice_channel, alert_target=False) + self.text_channels[0].send.assert_awaited_once_with(message) + self.text_channels[1].send.assert_not_called() + + async def test_silence_voice_alert(self): + """Tests that the correct message was sent when a voice channel is muted with alerts.""" + with unittest.mock.patch.object(silence, "VOICE_CHANNELS") as mock_voice_channels: + mock_voice_channels.get.return_value = self.text_channels[1].id + + message = "This should show up as {channel}." 
+ await self.cog.send_message(message, self.text_channels[0], self.voice_channel, alert_target=True) + + updated_message = message.format(channel=self.voice_channel.mention) + self.text_channels[0].send.assert_awaited_once_with(updated_message) + self.text_channels[1].send.assert_awaited_once_with(updated_message) + + mock_voice_channels.get.assert_called_once_with(self.voice_channel.id) + + async def test_silence_voice_sibling_channel(self): + """Tests silencing a voice channel from the related text channel.""" + with unittest.mock.patch.object(silence, "VOICE_CHANNELS") as mock_voice_channels: + mock_voice_channels.get.return_value = self.text_channels[1].id + + message = "This should show up as {channel}." + await self.cog.send_message(message, self.text_channels[1], self.voice_channel, alert_target=True) + + updated_message = message.format(channel=self.voice_channel.mention) + self.text_channels[1].send.assert_awaited_once_with(updated_message) + + mock_voice_channels.get.assert_called_once_with(self.voice_channel.id) + self.bot.get_channel.assert_called_once_with(self.text_channels[1].id) diff --git a/tests/bot/exts/utils/test_jams.py b/tests/bot/exts/utils/test_jams.py deleted file mode 100644 index 85d6a1173..000000000 --- a/tests/bot/exts/utils/test_jams.py +++ /dev/null @@ -1,171 +0,0 @@ -import unittest -from unittest.mock import AsyncMock, MagicMock, create_autospec - -from discord import CategoryChannel - -from bot.constants import Roles -from bot.exts.utils import jams -from tests.helpers import MockBot, MockContext, MockGuild, MockMember, MockRole, MockTextChannel - - -def get_mock_category(channel_count: int, name: str) -> CategoryChannel: - """Return a mocked code jam category.""" - category = create_autospec(CategoryChannel, spec_set=True, instance=True) - category.name = name - category.channels = [MockTextChannel() for _ in range(channel_count)] - - return category - - -class JamCreateTeamTests(unittest.IsolatedAsyncioTestCase): - """Tests for `createteam` command.""" - - def setUp(self): - self.bot = MockBot() - self.admin_role = MockRole(name="Admins", id=Roles.admins) - self.command_user = MockMember([self.admin_role]) - self.guild = MockGuild([self.admin_role]) - self.ctx = MockContext(bot=self.bot, author=self.command_user, guild=self.guild) - self.cog = jams.CodeJams(self.bot) - - async def test_too_small_amount_of_team_members_passed(self): - """Should `ctx.send` and exit early when too small amount of members.""" - for case in (1, 2): - with self.subTest(amount_of_members=case): - self.cog.create_channels = AsyncMock() - self.cog.add_roles = AsyncMock() - - self.ctx.reset_mock() - members = (MockMember() for _ in range(case)) - await self.cog.createteam(self.cog, self.ctx, "foo", members) - - self.ctx.send.assert_awaited_once() - self.cog.create_channels.assert_not_awaited() - self.cog.add_roles.assert_not_awaited() - - async def test_duplicate_members_provided(self): - """Should `ctx.send` and exit early because duplicate members provided and total there is only 1 member.""" - self.cog.create_channels = AsyncMock() - self.cog.add_roles = AsyncMock() - - member = MockMember() - await self.cog.createteam(self.cog, self.ctx, "foo", (member for _ in range(5))) - - self.ctx.send.assert_awaited_once() - self.cog.create_channels.assert_not_awaited() - self.cog.add_roles.assert_not_awaited() - - async def test_result_sending(self): - """Should call `ctx.send` when everything goes right.""" - self.cog.create_channels = AsyncMock() - self.cog.add_roles = AsyncMock() - - 
members = [MockMember() for _ in range(5)] - await self.cog.createteam(self.cog, self.ctx, "foo", members) - - self.cog.create_channels.assert_awaited_once() - self.cog.add_roles.assert_awaited_once() - self.ctx.send.assert_awaited_once() - - async def test_category_doesnt_exist(self): - """Should create a new code jam category.""" - subtests = ( - [], - [get_mock_category(jams.MAX_CHANNELS - 1, jams.CATEGORY_NAME)], - [get_mock_category(jams.MAX_CHANNELS - 2, "other")], - ) - - for categories in subtests: - self.guild.reset_mock() - self.guild.categories = categories - - with self.subTest(categories=categories): - actual_category = await self.cog.get_category(self.guild) - - self.guild.create_category_channel.assert_awaited_once() - category_overwrites = self.guild.create_category_channel.call_args[1]["overwrites"] - - self.assertFalse(category_overwrites[self.guild.default_role].read_messages) - self.assertTrue(category_overwrites[self.guild.me].read_messages) - self.assertEqual(self.guild.create_category_channel.return_value, actual_category) - - async def test_category_channel_exist(self): - """Should not try to create category channel.""" - expected_category = get_mock_category(jams.MAX_CHANNELS - 2, jams.CATEGORY_NAME) - self.guild.categories = [ - get_mock_category(jams.MAX_CHANNELS - 2, "other"), - expected_category, - get_mock_category(0, jams.CATEGORY_NAME), - ] - - actual_category = await self.cog.get_category(self.guild) - self.assertEqual(expected_category, actual_category) - - async def test_channel_overwrites(self): - """Should have correct permission overwrites for users and roles.""" - leader = MockMember() - members = [leader] + [MockMember() for _ in range(4)] - overwrites = self.cog.get_overwrites(members, self.guild) - - # Leader permission overwrites - self.assertTrue(overwrites[leader].manage_messages) - self.assertTrue(overwrites[leader].read_messages) - self.assertTrue(overwrites[leader].manage_webhooks) - self.assertTrue(overwrites[leader].connect) - - # Other members permission overwrites - for member in members[1:]: - self.assertTrue(overwrites[member].read_messages) - self.assertTrue(overwrites[member].connect) - - # Everyone role overwrite - self.assertFalse(overwrites[self.guild.default_role].read_messages) - self.assertFalse(overwrites[self.guild.default_role].connect) - - async def test_team_channels_creation(self): - """Should create new voice and text channel for team.""" - members = [MockMember() for _ in range(5)] - - self.cog.get_overwrites = MagicMock() - self.cog.get_category = AsyncMock() - self.ctx.guild.create_text_channel.return_value = MockTextChannel(mention="foobar-channel") - actual = await self.cog.create_channels(self.guild, "my-team", members) - - self.assertEqual("foobar-channel", actual) - self.cog.get_overwrites.assert_called_once_with(members, self.guild) - self.cog.get_category.assert_awaited_once_with(self.guild) - - self.guild.create_text_channel.assert_awaited_once_with( - "my-team", - overwrites=self.cog.get_overwrites.return_value, - category=self.cog.get_category.return_value - ) - self.guild.create_voice_channel.assert_awaited_once_with( - "My Team", - overwrites=self.cog.get_overwrites.return_value, - category=self.cog.get_category.return_value - ) - - async def test_jam_roles_adding(self): - """Should add team leader role to leader and jam role to every team member.""" - leader_role = MockRole(name="Team Leader") - jam_role = MockRole(name="Jammer") - self.guild.get_role.side_effect = [leader_role, jam_role] - - leader = 
MockMember() - members = [leader] + [MockMember() for _ in range(4)] - await self.cog.add_roles(self.guild, members) - - leader.add_roles.assert_any_await(leader_role) - for member in members: - member.add_roles.assert_any_await(jam_role) - - -class CodeJamSetup(unittest.TestCase): - """Test for `setup` function of `CodeJam` cog.""" - - def test_setup(self): - """Should call `bot.add_cog`.""" - bot = MockBot() - jams.setup(bot) - bot.add_cog.assert_called_once() diff --git a/tests/bot/rules/test_mentions.py b/tests/bot/rules/test_mentions.py index 6444532f2..f8805ac48 100644 --- a/tests/bot/rules/test_mentions.py +++ b/tests/bot/rules/test_mentions.py @@ -2,12 +2,14 @@ from typing import Iterable from bot.rules import mentions from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage +from tests.helpers import MockMember, MockMessage -def make_msg(author: str, total_mentions: int) -> MockMessage: +def make_msg(author: str, total_user_mentions: int, total_bot_mentions: int = 0) -> MockMessage: """Makes a message with `total_mentions` mentions.""" - return MockMessage(author=author, mentions=list(range(total_mentions))) + user_mentions = [MockMember() for _ in range(total_user_mentions)] + bot_mentions = [MockMember(bot=True) for _ in range(total_bot_mentions)] + return MockMessage(author=author, mentions=user_mentions+bot_mentions) class TestMentions(RuleTest): @@ -48,11 +50,27 @@ class TestMentions(RuleTest): [make_msg("bob", 2), make_msg("alice", 3), make_msg("bob", 2)], ("bob",), 4, - ) + ), + DisallowedCase( + [make_msg("bob", 3, 1)], + ("bob",), + 3, + ), ) await self.run_disallowed(cases) + async def test_ignore_bot_mentions(self): + """Messages with an allowed amount of mentions, also containing bot mentions.""" + cases = ( + [make_msg("bob", 0, 3)], + [make_msg("bob", 2, 1)], + [make_msg("bob", 1, 2), make_msg("bob", 1, 2)], + [make_msg("bob", 1, 5), make_msg("alice", 2, 5)] + ) + + await self.run_allowed(cases) + def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]: last_message = case.recent_messages[0] return tuple( diff --git a/tests/bot/test_converters.py b/tests/bot/test_converters.py index 4af84dde5..ef6c8e19e 100644 --- a/tests/bot/test_converters.py +++ b/tests/bot/test_converters.py @@ -6,14 +6,7 @@ from unittest.mock import MagicMock, patch from dateutil.relativedelta import relativedelta from discord.ext.commands import BadArgument -from bot.converters import ( - Duration, - HushDurationConverter, - ISODateTime, - PackageName, - TagContentConverter, - TagNameConverter, -) +from bot.converters import Duration, HushDurationConverter, ISODateTime, PackageName, TagNameConverter class ConverterTests(unittest.IsolatedAsyncioTestCase): @@ -26,43 +19,6 @@ class ConverterTests(unittest.IsolatedAsyncioTestCase): cls.fixed_utc_now = datetime.datetime.fromisoformat('2019-01-01T00:00:00') - async def test_tag_content_converter_for_valid(self): - """TagContentConverter should return correct values for valid input.""" - test_values = ( - ('hello', 'hello'), - (' h ello ', 'h ello'), - ) - - for content, expected_conversion in test_values: - with self.subTest(content=content, expected_conversion=expected_conversion): - conversion = await TagContentConverter.convert(self.context, content) - self.assertEqual(conversion, expected_conversion) - - async def test_tag_content_converter_for_invalid(self): - """TagContentConverter should raise the proper exception for invalid input.""" - test_values = ( - ('', "Tag contents should not be 
empty, or filled with whitespace."), - (' ', "Tag contents should not be empty, or filled with whitespace."), - ) - - for value, exception_message in test_values: - with self.subTest(tag_content=value, exception_message=exception_message): - with self.assertRaisesRegex(BadArgument, re.escape(exception_message)): - await TagContentConverter.convert(self.context, value) - - async def test_tag_name_converter_for_valid(self): - """TagNameConverter should return the correct values for valid tag names.""" - test_values = ( - ('tracebacks', 'tracebacks'), - ('Tracebacks', 'tracebacks'), - (' Tracebacks ', 'tracebacks'), - ) - - for name, expected_conversion in test_values: - with self.subTest(name=name, expected_conversion=expected_conversion): - conversion = await TagNameConverter.convert(self.context, name) - self.assertEqual(conversion, expected_conversion) - async def test_tag_name_converter_for_invalid(self): """TagNameConverter should raise the correct exception for invalid tag names.""" test_values = ( @@ -291,7 +247,7 @@ class ConverterTests(unittest.IsolatedAsyncioTestCase): ("10", 10), ("5m", 5), ("5M", 5), - ("forever", None), + ("forever", -1), ) converter = HushDurationConverter() for minutes_string, expected_minutes in test_values: diff --git a/tests/bot/utils/test_checks.py b/tests/bot/utils/test_checks.py index 883465e0b..4ae11d5d3 100644 --- a/tests/bot/utils/test_checks.py +++ b/tests/bot/utils/test_checks.py @@ -32,6 +32,7 @@ class ChecksTests(unittest.IsolatedAsyncioTestCase): async def test_has_no_roles_check_without_guild(self): """`has_no_roles_check` should return `False` when `Context.guild` is None.""" self.ctx.channel = MagicMock(DMChannel) + self.ctx.guild = None self.assertFalse(await checks.has_no_roles_check(self.ctx)) async def test_has_no_roles_check_returns_false_with_unwanted_role(self): diff --git a/tests/bot/utils/test_message_cache.py b/tests/bot/utils/test_message_cache.py new file mode 100644 index 000000000..04bfd28d1 --- /dev/null +++ b/tests/bot/utils/test_message_cache.py @@ -0,0 +1,214 @@ +import unittest + +from bot.utils.message_cache import MessageCache +from tests.helpers import MockMessage + + +# noinspection SpellCheckingInspection +class TestMessageCache(unittest.TestCase): + """Tests for the MessageCache class in the `bot.utils.caching` module.""" + + def test_first_append_sets_the_first_value(self): + """Test if the first append adds the message to the first cell.""" + cache = MessageCache(maxlen=10) + message = MockMessage() + + cache.append(message) + + self.assertEqual(cache[0], message) + + def test_append_adds_in_the_right_order(self): + """Test if two appends are added in the same order if newest_first is False, or in reverse order otherwise.""" + messages = [MockMessage(), MockMessage()] + + cache = MessageCache(maxlen=10, newest_first=False) + for msg in messages: + cache.append(msg) + self.assertListEqual(messages, list(cache)) + + cache = MessageCache(maxlen=10, newest_first=True) + for msg in messages: + cache.append(msg) + self.assertListEqual(messages[::-1], list(cache)) + + def test_appending_over_maxlen_removes_oldest(self): + """Test if three appends to a 2-cell cache leave the two newest messages.""" + cache = MessageCache(maxlen=2) + messages = [MockMessage() for _ in range(3)] + + for msg in messages: + cache.append(msg) + + self.assertListEqual(messages[1:], list(cache)) + + def test_appending_over_maxlen_with_newest_first_removes_oldest(self): + """Test if three appends to a 2-cell cache leave the two newest messages if 
newest_first is True.""" + cache = MessageCache(maxlen=2, newest_first=True) + messages = [MockMessage() for _ in range(3)] + + for msg in messages: + cache.append(msg) + + self.assertListEqual(messages[:0:-1], list(cache)) + + def test_pop_removes_from_the_end(self): + """Test if a pop removes the right-most message.""" + cache = MessageCache(maxlen=3) + messages = [MockMessage() for _ in range(3)] + + for msg in messages: + cache.append(msg) + msg = cache.pop() + + self.assertEqual(msg, messages[-1]) + self.assertListEqual(messages[:-1], list(cache)) + + def test_popleft_removes_from_the_beginning(self): + """Test if a popleft removes the left-most message.""" + cache = MessageCache(maxlen=3) + messages = [MockMessage() for _ in range(3)] + + for msg in messages: + cache.append(msg) + msg = cache.popleft() + + self.assertEqual(msg, messages[0]) + self.assertListEqual(messages[1:], list(cache)) + + def test_clear(self): + """Test if a clear makes the cache empty.""" + cache = MessageCache(maxlen=5) + messages = [MockMessage() for _ in range(3)] + + for msg in messages: + cache.append(msg) + cache.clear() + + self.assertListEqual(list(cache), []) + self.assertEqual(len(cache), 0) + + def test_get_message_returns_the_message(self): + """Test if get_message returns the cached message.""" + cache = MessageCache(maxlen=5) + message = MockMessage(id=1234) + + cache.append(message) + + self.assertEqual(cache.get_message(1234), message) + + def test_get_message_returns_none(self): + """Test if get_message returns None for an ID of a non-cached message.""" + cache = MessageCache(maxlen=5) + message = MockMessage(id=1234) + + cache.append(message) + + self.assertIsNone(cache.get_message(4321)) + + def test_update_replaces_old_element(self): + """Test if an update replaced the old message with the same ID.""" + cache = MessageCache(maxlen=5) + message = MockMessage(id=1234) + + cache.append(message) + message = MockMessage(id=1234) + cache.update(message) + + self.assertIs(cache.get_message(1234), message) + self.assertEqual(len(cache), 1) + + def test_contains_returns_true_for_cached_message(self): + """Test if contains returns True for an ID of a cached message.""" + cache = MessageCache(maxlen=5) + message = MockMessage(id=1234) + + cache.append(message) + + self.assertIn(1234, cache) + + def test_contains_returns_false_for_non_cached_message(self): + """Test if contains returns False for an ID of a non-cached message.""" + cache = MessageCache(maxlen=5) + message = MockMessage(id=1234) + + cache.append(message) + + self.assertNotIn(4321, cache) + + def test_indexing(self): + """Test if the cache returns the correct messages by index.""" + cache = MessageCache(maxlen=5) + messages = [MockMessage() for _ in range(5)] + + for msg in messages: + cache.append(msg) + + for current_loop in range(-5, 5): + with self.subTest(current_loop=current_loop): + self.assertEqual(cache[current_loop], messages[current_loop]) + + def test_bad_index_raises_index_error(self): + """Test if the cache raises IndexError for invalid indices.""" + cache = MessageCache(maxlen=5) + messages = [MockMessage() for _ in range(3)] + test_cases = (-10, -4, 3, 4, 5) + + for msg in messages: + cache.append(msg) + + for current_loop in test_cases: + with self.subTest(current_loop=current_loop): + with self.assertRaises(IndexError): + cache[current_loop] + + def test_slicing_with_unfilled_cache(self): + """Test if slicing returns the correct messages if the cache is not yet fully filled.""" + sizes = (5, 10, 55, 101) + + slices = ( + 
slice(None), slice(2, None), slice(None, 2), slice(None, None, 2), slice(None, None, 3), slice(-1, 2), + slice(-1, 3000), slice(-3, -1), slice(-10, 3), slice(-10, 4, 2), slice(None, None, -1), slice(None, 3, -2), + slice(None, None, -3), slice(-1, -10, -2), slice(-3, -7, -1) + ) + + for size in sizes: + cache = MessageCache(maxlen=size) + messages = [MockMessage() for _ in range(size // 3 * 2)] + + for msg in messages: + cache.append(msg) + + for slice_ in slices: + with self.subTest(current_loop=(size, slice_)): + self.assertListEqual(cache[slice_], messages[slice_]) + + def test_slicing_with_overfilled_cache(self): + """Test if slicing returns the correct messages if the cache was appended with more messages it can contain.""" + sizes = (5, 10, 55, 101) + + slices = ( + slice(None), slice(2, None), slice(None, 2), slice(None, None, 2), slice(None, None, 3), slice(-1, 2), + slice(-1, 3000), slice(-3, -1), slice(-10, 3), slice(-10, 4, 2), slice(None, None, -1), slice(None, 3, -2), + slice(None, None, -3), slice(-1, -10, -2), slice(-3, -7, -1) + ) + + for size in sizes: + cache = MessageCache(maxlen=size) + messages = [MockMessage() for _ in range(size * 3 // 2)] + + for msg in messages: + cache.append(msg) + messages = messages[size // 2:] + + for slice_ in slices: + with self.subTest(current_loop=(size, slice_)): + self.assertListEqual(cache[slice_], messages[slice_]) + + def test_length(self): + """Test if len returns the correct number of items in the cache.""" + cache = MessageCache(maxlen=5) + + for current_loop in range(10): + with self.subTest(current_loop=current_loop): + self.assertEqual(len(cache), min(current_loop, 5)) + cache.append(MockMessage()) diff --git a/tests/bot/utils/test_time.py b/tests/bot/utils/test_time.py index 115ddfb0d..8edffd1c9 100644 --- a/tests/bot/utils/test_time.py +++ b/tests/bot/utils/test_time.py @@ -52,7 +52,7 @@ class TimeTests(unittest.TestCase): def test_format_infraction(self): """Testing format_infraction.""" - self.assertEqual(time.format_infraction('2019-12-12T00:01:00Z'), '2019-12-12 00:01') + self.assertEqual(time.format_infraction('2019-12-12T00:01:00Z'), '<t:1576108860:f>') def test_format_infraction_with_duration_none_expiry(self): """format_infraction_with_duration should work for None expiry.""" @@ -72,10 +72,10 @@ class TimeTests(unittest.TestCase): def test_format_infraction_with_duration_custom_units(self): """format_infraction_with_duration should work for custom max_units.""" test_cases = ( - ('2019-12-12T00:01:00Z', datetime(2019, 12, 11, 12, 5, 5), 6, - '2019-12-12 00:01 (11 hours, 55 minutes and 55 seconds)'), - ('2019-11-23T20:09:00Z', datetime(2019, 4, 25, 20, 15), 20, - '2019-11-23 20:09 (6 months, 28 days, 23 hours and 54 minutes)') + ('3000-12-12T00:01:00Z', datetime(3000, 12, 11, 12, 5, 5), 6, + '<t:32533488060:f> (11 hours, 55 minutes and 55 seconds)'), + ('3000-11-23T20:09:00Z', datetime(3000, 4, 25, 20, 15), 20, + '<t:32531918940:f> (6 months, 28 days, 23 hours and 54 minutes)') ) for expiry, date_from, max_units, expected in test_cases: @@ -85,16 +85,16 @@ class TimeTests(unittest.TestCase): def test_format_infraction_with_duration_normal_usage(self): """format_infraction_with_duration should work for normal usage, across various durations.""" test_cases = ( - ('2019-12-12T00:01:00Z', datetime(2019, 12, 11, 12, 0, 5), 2, '2019-12-12 00:01 (12 hours and 55 seconds)'), - ('2019-12-12T00:01:00Z', datetime(2019, 12, 11, 12, 0, 5), 1, '2019-12-12 00:01 (12 hours)'), - ('2019-12-12T00:00:00Z', datetime(2019, 12, 11, 23, 59), 
2, '2019-12-12 00:00 (1 minute)'), - ('2019-11-23T20:09:00Z', datetime(2019, 11, 15, 20, 15), 2, '2019-11-23 20:09 (7 days and 23 hours)'), - ('2019-11-23T20:09:00Z', datetime(2019, 4, 25, 20, 15), 2, '2019-11-23 20:09 (6 months and 28 days)'), - ('2019-11-23T20:58:00Z', datetime(2019, 11, 23, 20, 53), 2, '2019-11-23 20:58 (5 minutes)'), - ('2019-11-24T00:00:00Z', datetime(2019, 11, 23, 23, 59, 0), 2, '2019-11-24 00:00 (1 minute)'), - ('2019-11-23T23:59:00Z', datetime(2017, 7, 21, 23, 0), 2, '2019-11-23 23:59 (2 years and 4 months)'), + ('2019-12-12T00:01:00Z', datetime(2019, 12, 11, 12, 0, 5), 2, '<t:1576108860:f> (12 hours and 55 seconds)'), + ('2019-12-12T00:01:00Z', datetime(2019, 12, 11, 12, 0, 5), 1, '<t:1576108860:f> (12 hours)'), + ('2019-12-12T00:00:00Z', datetime(2019, 12, 11, 23, 59), 2, '<t:1576108800:f> (1 minute)'), + ('2019-11-23T20:09:00Z', datetime(2019, 11, 15, 20, 15), 2, '<t:1574539740:f> (7 days and 23 hours)'), + ('2019-11-23T20:09:00Z', datetime(2019, 4, 25, 20, 15), 2, '<t:1574539740:f> (6 months and 28 days)'), + ('2019-11-23T20:58:00Z', datetime(2019, 11, 23, 20, 53), 2, '<t:1574542680:f> (5 minutes)'), + ('2019-11-24T00:00:00Z', datetime(2019, 11, 23, 23, 59, 0), 2, '<t:1574553600:f> (1 minute)'), + ('2019-11-23T23:59:00Z', datetime(2017, 7, 21, 23, 0), 2, '<t:1574553540:f> (2 years and 4 months)'), ('2019-11-23T23:59:00Z', datetime(2019, 11, 23, 23, 49, 5), 2, - '2019-11-23 23:59 (9 minutes and 55 seconds)'), + '<t:1574553540:f> (9 minutes and 55 seconds)'), (None, datetime(2019, 11, 23, 23, 49, 5), 2, None), ) @@ -104,45 +104,30 @@ class TimeTests(unittest.TestCase): def test_until_expiration_with_duration_none_expiry(self): """until_expiration should work for None expiry.""" - test_cases = ( - (None, None, None, None), - - # To make sure that now and max_units are not touched - (None, 'Why hello there!', None, None), - (None, None, float('inf'), None), - (None, 'Why hello there!', float('inf'), None), - ) - - for expiry, now, max_units, expected in test_cases: - with self.subTest(expiry=expiry, now=now, max_units=max_units, expected=expected): - self.assertEqual(time.until_expiration(expiry, now, max_units), expected) + self.assertEqual(time.until_expiration(None), None) def test_until_expiration_with_duration_custom_units(self): """until_expiration should work for custom max_units.""" test_cases = ( - ('2019-12-12T00:01:00Z', datetime(2019, 12, 11, 12, 5, 5), 6, '11 hours, 55 minutes and 55 seconds'), - ('2019-11-23T20:09:00Z', datetime(2019, 4, 25, 20, 15), 20, '6 months, 28 days, 23 hours and 54 minutes') + ('3000-12-12T00:01:00Z', '<t:32533488060:R>'), + ('3000-11-23T20:09:00Z', '<t:32531918940:R>') ) - for expiry, now, max_units, expected in test_cases: - with self.subTest(expiry=expiry, now=now, max_units=max_units, expected=expected): - self.assertEqual(time.until_expiration(expiry, now, max_units), expected) + for expiry, expected in test_cases: + with self.subTest(expiry=expiry, expected=expected): + self.assertEqual(time.until_expiration(expiry,), expected) def test_until_expiration_normal_usage(self): """until_expiration should work for normal usage, across various durations.""" test_cases = ( - ('2019-12-12T00:01:00Z', datetime(2019, 12, 11, 12, 0, 5), 2, '12 hours and 55 seconds'), - ('2019-12-12T00:01:00Z', datetime(2019, 12, 11, 12, 0, 5), 1, '12 hours'), - ('2019-12-12T00:00:00Z', datetime(2019, 12, 11, 23, 59), 2, '1 minute'), - ('2019-11-23T20:09:00Z', datetime(2019, 11, 15, 20, 15), 2, '7 days and 23 hours'), - ('2019-11-23T20:09:00Z', 
datetime(2019, 4, 25, 20, 15), 2, '6 months and 28 days'), - ('2019-11-23T20:58:00Z', datetime(2019, 11, 23, 20, 53), 2, '5 minutes'), - ('2019-11-24T00:00:00Z', datetime(2019, 11, 23, 23, 59, 0), 2, '1 minute'), - ('2019-11-23T23:59:00Z', datetime(2017, 7, 21, 23, 0), 2, '2 years and 4 months'), - ('2019-11-23T23:59:00Z', datetime(2019, 11, 23, 23, 49, 5), 2, '9 minutes and 55 seconds'), - (None, datetime(2019, 11, 23, 23, 49, 5), 2, None), + ('3000-12-12T00:01:00Z', '<t:32533488060:R>'), + ('3000-12-12T00:01:00Z', '<t:32533488060:R>'), + ('3000-12-12T00:00:00Z', '<t:32533488000:R>'), + ('3000-11-23T20:09:00Z', '<t:32531918940:R>'), + ('3000-11-23T20:09:00Z', '<t:32531918940:R>'), + (None, None), ) - for expiry, now, max_units, expected in test_cases: - with self.subTest(expiry=expiry, now=now, max_units=max_units, expected=expected): - self.assertEqual(time.until_expiration(expiry, now, max_units), expected) + for expiry, expected in test_cases: + with self.subTest(expiry=expiry, expected=expected): + self.assertEqual(time.until_expiration(expiry), expected) diff --git a/tests/helpers.py b/tests/helpers.py index e3dc5fe5b..9d4988d23 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -16,7 +16,6 @@ from bot.async_stats import AsyncStatsClient from bot.bot import Bot from tests._autospec import autospec # noqa: F401 other modules import it via this module - for logger in logging.Logger.manager.loggerDict.values(): # Set all loggers to CRITICAL by default to prevent screen clutter during testing @@ -40,7 +39,7 @@ class HashableMixin(discord.mixins.EqualityComparable): class ColourMixin: - """A mixin for Mocks that provides the aliasing of color->colour like discord.py does.""" + """A mixin for Mocks that provides the aliasing of (accent_)color->(accent_)colour like discord.py does.""" @property def color(self) -> discord.Colour: @@ -50,6 +49,14 @@ class ColourMixin: def color(self, color: discord.Colour) -> None: self.colour = color + @property + def accent_color(self) -> discord.Colour: + return self.accent_colour + + @accent_color.setter + def accent_color(self, color: discord.Colour) -> None: + self.accent_colour = color + class CustomMockMixin: """ @@ -236,13 +243,20 @@ class MockMember(CustomMockMixin, unittest.mock.Mock, ColourMixin, HashableMixin self.roles = [MockRole(name="@everyone", position=1, id=0)] if roles: self.roles.extend(roles) + self.top_role = max(self.roles) if 'mention' not in kwargs: self.mention = f"@{self.name}" # Create a User instance to get a realistic Mock of `discord.User` -user_instance = discord.User(data=unittest.mock.MagicMock(), state=unittest.mock.MagicMock()) +_user_data_mock = collections.defaultdict(unittest.mock.MagicMock, { + "accent_color": 0 +}) +user_instance = discord.User( + data=unittest.mock.MagicMock(get=unittest.mock.Mock(side_effect=_user_data_mock.get)), + state=unittest.mock.MagicMock() +) class MockUser(CustomMockMixin, unittest.mock.Mock, ColourMixin, HashableMixin): @@ -279,7 +293,10 @@ def _get_mock_loop() -> unittest.mock.Mock: # Since calling `create_task` on our MockBot does not actually schedule the coroutine object # as a task in the asyncio loop, this `side_effect` calls `close()` on the coroutine object # to prevent "has not been awaited"-warnings. 
- loop.create_task.side_effect = lambda coroutine: coroutine.close() + def mock_create_task(coroutine, **kwargs): + coroutine.close() + return unittest.mock.Mock() + loop.create_task.side_effect = mock_create_task return loop @@ -320,7 +337,10 @@ channel_data = { } state = unittest.mock.MagicMock() guild = unittest.mock.MagicMock() -channel_instance = discord.TextChannel(state=state, guild=guild, data=channel_data) +text_channel_instance = discord.TextChannel(state=state, guild=guild, data=channel_data) + +channel_data["type"] = "VoiceChannel" +voice_channel_instance = discord.VoiceChannel(state=state, guild=guild, data=channel_data) class MockTextChannel(CustomMockMixin, unittest.mock.Mock, HashableMixin): @@ -330,7 +350,24 @@ class MockTextChannel(CustomMockMixin, unittest.mock.Mock, HashableMixin): Instances of this class will follow the specifications of `discord.TextChannel` instances. For more information, see the `MockGuild` docstring. """ - spec_set = channel_instance + spec_set = text_channel_instance + + def __init__(self, **kwargs) -> None: + default_kwargs = {'id': next(self.discord_id), 'name': 'channel', 'guild': MockGuild()} + super().__init__(**collections.ChainMap(kwargs, default_kwargs)) + + if 'mention' not in kwargs: + self.mention = f"#{self.name}" + + +class MockVoiceChannel(CustomMockMixin, unittest.mock.Mock, HashableMixin): + """ + A MagicMock subclass to mock VoiceChannel objects. + + Instances of this class will follow the specifications of `discord.VoiceChannel` instances. For + more information, see the `MockGuild` docstring. + """ + spec_set = voice_channel_instance def __init__(self, **kwargs) -> None: default_kwargs = {'id': next(self.discord_id), 'name': 'channel', 'guild': MockGuild()} @@ -361,6 +398,27 @@ class MockDMChannel(CustomMockMixin, unittest.mock.Mock, HashableMixin): super().__init__(**collections.ChainMap(kwargs, default_kwargs)) +# Create CategoryChannel instance to get a realistic MagicMock of `discord.CategoryChannel` +category_channel_data = { + 'id': 1, + 'type': discord.ChannelType.category, + 'name': 'category', + 'position': 1, +} + +state = unittest.mock.MagicMock() +guild = unittest.mock.MagicMock() +category_channel_instance = discord.CategoryChannel( + state=state, guild=guild, data=category_channel_data +) + + +class MockCategoryChannel(CustomMockMixin, unittest.mock.Mock, HashableMixin): + def __init__(self, **kwargs) -> None: + default_kwargs = {'id': next(self.discord_id)} + super().__init__(**collections.ChainMap(default_kwargs, kwargs)) + + # Create a Message instance to get a realistic MagicMock of `discord.Message` message_data = { 'id': 1, @@ -384,7 +442,12 @@ message_instance = discord.Message(state=state, channel=channel, data=message_da # Create a Context instance to get a realistic MagicMock of `discord.ext.commands.Context` -context_instance = Context(message=unittest.mock.MagicMock(), prefix=unittest.mock.MagicMock()) +context_instance = Context( + message=unittest.mock.MagicMock(), + prefix="$", + bot=MockBot(), + view=None +) context_instance.invoked_from_error_handler = None @@ -399,10 +462,12 @@ class MockContext(CustomMockMixin, unittest.mock.MagicMock): def __init__(self, **kwargs) -> None: super().__init__(**kwargs) + self.me = kwargs.get('me', MockMember()) self.bot = kwargs.get('bot', MockBot()) self.guild = kwargs.get('guild', MockGuild()) self.author = kwargs.get('author', MockMember()) self.channel = kwargs.get('channel', MockTextChannel()) + self.message = kwargs.get('message', MockMessage()) 
self.invoked_from_error_handler = kwargs.get('invoked_from_error_handler', False) @@ -491,7 +556,7 @@ class MockReaction(CustomMockMixin, unittest.mock.MagicMock): self.__str__.return_value = str(self.emoji) -webhook_instance = discord.Webhook(data=unittest.mock.MagicMock(), adapter=unittest.mock.MagicMock()) +webhook_instance = discord.Webhook(data=unittest.mock.MagicMock(), session=unittest.mock.MagicMock()) class MockAsyncWebhook(CustomMockMixin, unittest.mock.MagicMock): diff --git a/tests/test_base.py b/tests/test_base.py index a7db4bf3e..365805a71 100644 --- a/tests/test_base.py +++ b/tests/test_base.py @@ -1,8 +1,7 @@ import logging -import unittest import unittest.mock - +from bot.log import get_logger from tests.base import LoggingTestsMixin, _CaptureLogHandler @@ -15,7 +14,7 @@ class LoggingTestCaseTests(unittest.TestCase): @classmethod def setUpClass(cls): - cls.log = logging.getLogger(__name__) + cls.log = get_logger(__name__) def test_assert_not_logs_does_not_raise_with_no_logs(self): """Test if LoggingTestCase.assertNotLogs does not raise when no logs were emitted.""" @@ -56,15 +55,15 @@ class LoggingTestCaseTests(unittest.TestCase): def test_logging_test_case_works_with_logger_instance(self): """Test if the LoggingTestCase captures logging for provided logger.""" - log = logging.getLogger("new_logger") + log = get_logger("new_logger") with self.assertRaises(AssertionError): with LoggingTestCase.assertNotLogs(self, logger=log): log.info("Hello, this should raise an AssertionError") def test_logging_test_case_respects_alternative_logger(self): """Test if LoggingTestCase only checks the provided logger.""" - log_one = logging.getLogger("log one") - log_two = logging.getLogger("log two") + log_one = get_logger("log one") + log_two = get_logger("log two") with LoggingTestCase.assertNotLogs(self, logger=log_one): log_two.info("Hello, this should not raise an AssertionError") @@ -5,7 +5,7 @@ import-order-style=pycharm application_import_names=bot,tests exclude=.cache,.venv,.git,constants.py ignore= - B311,W503,E226,S311,T000 + B311,W503,E226,S311,T000,E731 # Missing Docstrings D100,D104,D105,D107, # Docstring Whitespace |
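Reader's note on the mentions-rule tests above: the updated test_mentions.py cases now build mention lists from MockMember objects and mark some of them with bot=True, and the new test_ignore_bot_mentions cases pass only when bot mentions do not count toward the limit. The diff does not include bot/rules/mentions.py itself, so the snippet below is only an illustrative sketch of that counting behaviour; count_relevant_mentions is a made-up helper name, not the bot's actual API.

def count_relevant_mentions(messages) -> int:
    """Count mentions of human users across recent messages, ignoring bot accounts."""
    return sum(
        1
        for message in messages
        for user in message.mentions
        if not user.bot
    )

# With the make_msg helper used in the tests above, a message built as
# make_msg("bob", 2, 3) would contribute 2 to this total, not 5.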
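Reader's note on the new tests/bot/utils/test_message_cache.py: the MessageCache implementation those tests target (bot/utils/message_cache.py) is not part of this diff. As a reading aid only, here is a minimal sketch of a cache that would satisfy them: fixed capacity, optional newest-first ordering, list-style indexing and slicing, and lookup by message ID. The class name MessageCacheSketch and the list-plus-dict layout are assumptions, not the bot's actual implementation.

class MessageCacheSketch:
    """Fixed-size message cache, indexable like a list and queryable by message ID."""

    def __init__(self, maxlen: int, newest_first: bool = False):
        self._maxlen = maxlen
        self._newest_first = newest_first
        self._messages = []   # ordered storage; index 0 is the oldest unless newest_first
        self._by_id = {}      # message ID -> message, for get_message() and `in`

    def append(self, message) -> None:
        if self._newest_first:
            self._messages.insert(0, message)
            if len(self._messages) > self._maxlen:
                evicted = self._messages.pop()      # oldest sits at the end
                self._by_id.pop(evicted.id, None)
        else:
            self._messages.append(message)
            if len(self._messages) > self._maxlen:
                evicted = self._messages.pop(0)     # oldest sits at the front
                self._by_id.pop(evicted.id, None)
        self._by_id[message.id] = message

    def pop(self):
        message = self._messages.pop()              # remove the right-most message
        self._by_id.pop(message.id, None)
        return message

    def popleft(self):
        message = self._messages.pop(0)             # remove the left-most message
        self._by_id.pop(message.id, None)
        return message

    def clear(self) -> None:
        self._messages.clear()
        self._by_id.clear()

    def get_message(self, message_id):
        return self._by_id.get(message_id)          # None when the ID is not cached

    def update(self, message) -> None:
        # Replace the cached message that carries the same ID, if any.
        for i, cached in enumerate(self._messages):
            if cached.id == message.id:
                self._messages[i] = message
                self._by_id[message.id] = message
                return

    def __contains__(self, message_id) -> bool:
        return message_id in self._by_id

    def __getitem__(self, item):
        return self._messages[item]                 # ints and slices behave like a list

    def __len__(self) -> int:
        return len(self._messages)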
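Reader's note on the rewritten tests/bot/utils/test_time.py expectations: pre-rendered strings such as '2019-12-12 00:01' are replaced by Discord timestamp markdown, '<t:unix_epoch:f>' for absolute times and '<t:unix_epoch:R>' for relative ones, which Discord clients render in the reader's local timezone. The real conversion lives in bot/utils/time.py and is not shown in this diff; the helper below is only a hedged illustration of the mapping, and discord_timestamp is a hypothetical name. The expected epoch values are taken from the test cases above.

from datetime import datetime, timezone

def discord_timestamp(iso_string: str, style: str = "f") -> str:
    """Render an ISO-8601 expiry as Discord timestamp markdown: <t:unix_epoch:style>."""
    # fromisoformat() on Python < 3.11 rejects a trailing 'Z', so normalise it first.
    dt = datetime.fromisoformat(iso_string.replace("Z", "+00:00"))
    return f"<t:{int(dt.astimezone(timezone.utc).timestamp())}:{style}>"

print(discord_timestamp("2019-12-12T00:01:00Z"))        # <t:1576108860:f>
print(discord_timestamp("2019-11-24T00:00:00Z", "R"))   # <t:1574553600:R>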