author    wookie184 <[email protected]>    2022-08-19 15:18:25 +0100
committer wookie184 <[email protected]>    2022-08-19 15:18:25 +0100
commit    b923b52ed199d58abddb2cf2511025017a5758b6 (patch)
tree      87c10b3cf54fcabbab07c73243635cf690b0700a
parent    Merge branch 'python-discord:main' into patreon (diff)
parent    Merge pull request #2260 from Dorukyum/channel.guild-nullable (diff)
Merge branch 'main' into patreon
-rw-r--r--  .github/CODEOWNERS | 2
-rw-r--r--  .github/workflows/lint-test.yml | 2
-rw-r--r--  Dockerfile | 2
-rw-r--r--  bot/__init__.py | 1
-rw-r--r--  bot/__main__.py | 73
-rw-r--r--  bot/api.py | 102
-rw-r--r--  bot/async_stats.py | 41
-rw-r--r--  bot/bot.py | 302
-rw-r--r--  bot/constants.py | 2
-rw-r--r--  bot/converters.py | 61
-rw-r--r--  bot/decorators.py | 46
-rw-r--r--  bot/errors.py | 2
-rw-r--r--  bot/exts/backend/branding/__init__.py | 4
-rw-r--r--  bot/exts/backend/branding/_cog.py | 155
-rw-r--r--  bot/exts/backend/branding/_repository.py | 11
-rw-r--r--  bot/exts/backend/config_verifier.py | 10
-rw-r--r--  bot/exts/backend/error_handler.py | 44
-rw-r--r--  bot/exts/backend/logging.py | 10
-rw-r--r--  bot/exts/backend/sync/__init__.py | 4
-rw-r--r--  bot/exts/backend/sync/_cog.py | 30
-rw-r--r--  bot/exts/backend/sync/_syncers.py | 25
-rw-r--r--  bot/exts/events/code_jams/__init__.py | 4
-rw-r--r--  bot/exts/events/code_jams/_channels.py | 42
-rw-r--r--  bot/exts/events/code_jams/_cog.py | 37
-rw-r--r--  bot/exts/filters/antimalware.py | 8
-rw-r--r--  bot/exts/filters/antispam.py | 26
-rw-r--r--  bot/exts/filters/filter_lists.py | 36
-rw-r--r--  bot/exts/filters/filtering.py | 79
-rw-r--r--  bot/exts/filters/security.py | 6
-rw-r--r--  bot/exts/filters/token_remover.py | 15
-rw-r--r--  bot/exts/filters/webhook_remover.py | 8
-rw-r--r--  bot/exts/fun/duck_pond.py | 46
-rw-r--r--  bot/exts/fun/off_topic_names.py | 249
-rw-r--r--  bot/exts/help_channels/__init__.py | 4
-rw-r--r--  bot/exts/help_channels/_caches.py | 16
-rw-r--r--  bot/exts/help_channels/_channel.py | 18
-rw-r--r--  bot/exts/help_channels/_cog.py | 108
-rw-r--r--  bot/exts/help_channels/_message.py | 84
-rw-r--r--  bot/exts/help_channels/_name.py | 6
-rw-r--r--  bot/exts/info/code_snippets.py | 12
-rw-r--r--  bot/exts/info/codeblock/__init__.py | 4
-rw-r--r--  bot/exts/info/codeblock/_cog.py | 25
-rw-r--r--  bot/exts/info/doc/__init__.py | 4
-rw-r--r--  bot/exts/info/doc/_batch_parser.py | 6
-rw-r--r--  bot/exts/info/doc/_cog.py | 45
-rw-r--r--  bot/exts/info/doc/_html.py | 18
-rw-r--r--  bot/exts/info/doc/_parsing.py | 8
-rw-r--r--  bot/exts/info/doc/_redis_cache.py | 105
-rw-r--r--  bot/exts/info/help.py | 19
-rw-r--r--  bot/exts/info/information.py | 14
-rw-r--r--  bot/exts/info/pep.py | 20
-rw-r--r--  bot/exts/info/pypi.py | 10
-rw-r--r--  bot/exts/info/python_news.py | 27
-rw-r--r--  bot/exts/info/resources.py | 70
-rw-r--r--  bot/exts/info/source.py | 8
-rw-r--r--  bot/exts/info/stats.py | 12
-rw-r--r--  bot/exts/info/subscribe.py | 48
-rw-r--r--  bot/exts/info/tags.py | 34
-rw-r--r--  bot/exts/moderation/clean.py | 116
-rw-r--r--  bot/exts/moderation/defcon.py | 40
-rw-r--r--  bot/exts/moderation/dm_relay.py | 26
-rw-r--r--  bot/exts/moderation/incidents.py | 131
-rw-r--r--  bot/exts/moderation/infraction/_scheduler.py | 39
-rw-r--r--  bot/exts/moderation/infraction/_utils.py | 16
-rw-r--r--  bot/exts/moderation/infraction/infractions.py | 46
-rw-r--r--  bot/exts/moderation/infraction/management.py | 54
-rw-r--r--  bot/exts/moderation/infraction/superstarify.py | 12
-rw-r--r--  bot/exts/moderation/metabase.py | 57
-rw-r--r--  bot/exts/moderation/modlog.py | 136
-rw-r--r--  bot/exts/moderation/modpings.py | 65
-rw-r--r--  bot/exts/moderation/silence.py | 35
-rw-r--r--  bot/exts/moderation/slowmode.py | 27
-rw-r--r--  bot/exts/moderation/stream.py | 43
-rw-r--r--  bot/exts/moderation/verification.py | 20
-rw-r--r--  bot/exts/moderation/voice_gate.py | 61
-rw-r--r--  bot/exts/moderation/watchchannels/_watchchannel.py | 23
-rw-r--r--  bot/exts/moderation/watchchannels/bigbrother.py | 10
-rw-r--r--  bot/exts/recruitment/talentpool/__init__.py | 4
-rw-r--r--  bot/exts/recruitment/talentpool/_cog.py | 76
-rw-r--r--  bot/exts/recruitment/talentpool/_review.py | 25
-rw-r--r--  bot/exts/utils/bot.py | 15
-rw-r--r--  bot/exts/utils/extensions.py | 68
-rw-r--r--  bot/exts/utils/internal.py | 33
-rw-r--r--  bot/exts/utils/ping.py | 8
-rw-r--r--  bot/exts/utils/reminders.py | 47
-rw-r--r--  bot/exts/utils/snekbox.py | 461
-rw-r--r--  bot/exts/utils/thread_bumper.py | 89
-rw-r--r--  bot/exts/utils/utils.py | 14
-rw-r--r--  bot/log.py | 2
-rw-r--r--  bot/pagination.py | 18
-rw-r--r--  bot/resources/media/print-return.gif | bin 0 -> 119946 bytes
-rw-r--r--  bot/resources/tags/dashmpip.md | 12
-rw-r--r--  bot/resources/tags/dictcomps.md | 2
-rw-r--r--  bot/resources/tags/docstring.md | 2
-rw-r--r--  bot/resources/tags/enumerate.md | 2
-rw-r--r--  bot/resources/tags/f-strings.md | 3
-rw-r--r--  bot/resources/tags/indent.md | 6
-rw-r--r--  bot/resources/tags/intents.md | 2
-rw-r--r--  bot/resources/tags/or-gotcha.md | 1
-rw-r--r--  bot/resources/tags/paste.md | 2
-rw-r--r--  bot/resources/tags/pathlib.md | 2
-rw-r--r--  bot/resources/tags/pep8.md | 2
-rw-r--r--  bot/resources/tags/positional-keyword.md | 6
-rw-r--r--  bot/resources/tags/print-return.md | 9
-rw-r--r--  bot/resources/tags/quotes.md | 4
-rw-r--r--  bot/resources/tags/regex.md | 4
-rw-r--r--  bot/resources/tags/resources.md | 6
-rw-r--r--  bot/resources/tags/sql-fstring.md | 2
-rw-r--r--  bot/resources/tags/star-imports.md | 2
-rw-r--r--  bot/resources/tags/type-hint.md | 19
-rw-r--r--  bot/resources/tags/with.md | 2
-rw-r--r--  bot/rules/attachments.py | 2
-rw-r--r--  bot/rules/burst.py | 2
-rw-r--r--  bot/rules/burst_shared.py | 2
-rw-r--r--  bot/rules/chars.py | 2
-rw-r--r--  bot/rules/discord_emojis.py | 2
-rw-r--r--  bot/rules/duplicates.py | 2
-rw-r--r--  bot/rules/links.py | 2
-rw-r--r--  bot/rules/mentions.py | 56
-rw-r--r--  bot/rules/newlines.py | 2
-rw-r--r--  bot/rules/role_mentions.py | 2
-rw-r--r--  bot/utils/__init__.py | 12
-rw-r--r--  bot/utils/channel.py | 16
-rw-r--r--  bot/utils/checks.py | 4
-rw-r--r--  bot/utils/extensions.py | 34
-rw-r--r--  bot/utils/function.py | 6
-rw-r--r--  bot/utils/helpers.py | 2
-rw-r--r--  bot/utils/members.py | 18
-rw-r--r--  bot/utils/message_cache.py | 2
-rw-r--r--  bot/utils/messages.py | 50
-rw-r--r--  bot/utils/scheduling.py | 194
-rw-r--r--  bot/utils/services.py | 34
-rw-r--r--  bot/utils/webhooks.py | 10
-rw-r--r--  config-default.yml | 2
-rw-r--r--  docker-compose.yml | 14
-rw-r--r--  poetry.lock | 1543
-rw-r--r--  pyproject.toml | 89
-rw-r--r--  tests/README.md | 12
-rw-r--r--  tests/base.py | 32
-rw-r--r--  tests/bot/exts/backend/sync/test_base.py | 3
-rw-r--r--  tests/bot/exts/backend/sync/test_cog.py | 36
-rw-r--r--  tests/bot/exts/backend/sync/test_roles.py | 6
-rw-r--r--  tests/bot/exts/backend/sync/test_users.py | 2
-rw-r--r--  tests/bot/exts/backend/test_error_handler.py | 116
-rw-r--r--  tests/bot/exts/events/test_code_jams.py | 12
-rw-r--r--  tests/bot/exts/filters/test_antimalware.py | 10
-rw-r--r--  tests/bot/exts/filters/test_filtering.py | 2
-rw-r--r--  tests/bot/exts/filters/test_security.py | 13
-rw-r--r--  tests/bot/exts/filters/test_token_remover.py | 10
-rw-r--r--  tests/bot/exts/info/test_help.py | 1
-rw-r--r--  tests/bot/exts/info/test_information.py | 22
-rw-r--r--  tests/bot/exts/moderation/infraction/test_infractions.py | 2
-rw-r--r--  tests/bot/exts/moderation/infraction/test_utils.py | 4
-rw-r--r--  tests/bot/exts/moderation/test_incidents.py | 91
-rw-r--r--  tests/bot/exts/moderation/test_modlog.py | 4
-rw-r--r--  tests/bot/exts/moderation/test_silence.py | 139
-rw-r--r--  tests/bot/exts/test_cogs.py | 4
-rw-r--r--  tests/bot/exts/utils/test_snekbox.py | 221
-rw-r--r--  tests/bot/rules/test_mentions.py | 58
-rw-r--r--  tests/bot/test_api.py | 66
-rw-r--r--  tests/bot/test_converters.py | 2
-rw-r--r--  tests/bot/utils/test_checks.py | 2
-rw-r--r--  tests/bot/utils/test_services.py | 27
-rw-r--r--  tests/helpers.py | 162
-rw-r--r--  tests/test_helpers.py | 30
-rw-r--r--  tox.ini | 4
166 files changed, 3282 insertions(+), 4032 deletions(-)
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index ea69f7677..0bc2bb793 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -18,10 +18,8 @@ bot/exts/recruitment/** @wookie184
bot/rules/** @mbaruh
# Utils
-bot/utils/extensions.py @MarkKoz
bot/utils/function.py @MarkKoz
bot/utils/lock.py @MarkKoz
-bot/utils/scheduling.py @MarkKoz
# Tests
tests/_autospec.py @MarkKoz
diff --git a/.github/workflows/lint-test.yml b/.github/workflows/lint-test.yml
index 57cc544d9..2b3dd5b4f 100644
--- a/.github/workflows/lint-test.yml
+++ b/.github/workflows/lint-test.yml
@@ -61,7 +61,7 @@ jobs:
id: python
uses: actions/setup-python@v2
with:
- python-version: '3.9'
+ python-version: '3.10'
# This step caches our Python dependencies. To make sure we
# only restore a cache when the dependencies, the python version,
diff --git a/Dockerfile b/Dockerfile
index 30bf8a361..5bb400658 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM --platform=linux/amd64 python:3.9-slim
+FROM --platform=linux/amd64 python:3.10-slim
# Set pip to have no saved cache
ENV PIP_NO_CACHE_DIR=false \
diff --git a/bot/__init__.py b/bot/__init__.py
index f087792e9..c652897be 100644
--- a/bot/__init__.py
+++ b/bot/__init__.py
@@ -15,7 +15,6 @@ log.setup()
if os.name == "nt":
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
-# Apply all monkey patches from bot core.
apply_monkey_patches()
instance: "Bot" = None # Global Bot instance.
diff --git a/bot/__main__.py b/bot/__main__.py
index 0d3fce180..02af2e9ef 100644
--- a/bot/__main__.py
+++ b/bot/__main__.py
@@ -1,16 +1,81 @@
+import asyncio
+
import aiohttp
+import discord
+from async_rediscache import RedisSession
+from botcore import StartupError
+from botcore.site_api import APIClient
+from discord.ext import commands
+from redis import RedisError
import bot
from bot import constants
-from bot.bot import Bot, StartupError
+from bot.bot import Bot
from bot.log import get_logger, setup_sentry
setup_sentry()
+LOCALHOST = "127.0.0.1"
+
+
+async def _create_redis_session() -> RedisSession:
+ """Create and connect to a redis session."""
+ redis_session = RedisSession(
+ host=constants.Redis.host,
+ port=constants.Redis.port,
+ password=constants.Redis.password,
+ max_connections=20,
+ use_fakeredis=constants.Redis.use_fakeredis,
+ global_namespace="bot",
+ decode_responses=True,
+ )
+ try:
+ return await redis_session.connect()
+ except RedisError as e:
+ raise StartupError(e)
+
+
+async def main() -> None:
+ """Entry async method for starting the bot."""
+ statsd_url = constants.Stats.statsd_host
+ if constants.DEBUG_MODE:
+ # Since statsd is UDP, there are no errors for sending to a down port.
+ # For this reason, setting the statsd host to 127.0.0.1 for development
+ # will effectively disable stats.
+ statsd_url = LOCALHOST
+
+ allowed_roles = list({discord.Object(id_) for id_ in constants.MODERATION_ROLES})
+ intents = discord.Intents.all()
+ intents.presences = False
+ intents.dm_typing = False
+ intents.dm_reactions = False
+ intents.invites = False
+ intents.webhooks = False
+ intents.integrations = False
+
+ async with aiohttp.ClientSession() as session:
+ bot.instance = Bot(
+ guild_id=constants.Guild.id,
+ http_session=session,
+ redis_session=await _create_redis_session(),
+ statsd_url=statsd_url,
+ command_prefix=commands.when_mentioned_or(constants.Bot.prefix),
+ activity=discord.Game(name=f"Commands: {constants.Bot.prefix}help"),
+ case_insensitive=True,
+ max_messages=10_000,
+ allowed_mentions=discord.AllowedMentions(everyone=False, roles=allowed_roles),
+ intents=intents,
+ allowed_roles=list({discord.Object(id_) for id_ in constants.MODERATION_ROLES}),
+ api_client=APIClient(
+ site_api_url=f"{constants.URLs.site_api_schema}{constants.URLs.site_api}",
+ site_api_token=constants.Keys.site_api,
+ ),
+ )
+ async with bot.instance as _bot:
+ await _bot.start(constants.Bot.token)
+
try:
- bot.instance = Bot.create()
- bot.instance.load_extensions()
- bot.instance.run(constants.Bot.token)
+ asyncio.run(main())
except StartupError as e:
message = "Unknown Startup Error Occurred."
if isinstance(e.exception, (aiohttp.ClientConnectorError, aiohttp.ServerDisconnectedError)):
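
The rewritten entry point above builds everything inside an async main() and hands it to asyncio.run(), replacing the old Bot.create()/bot.instance.run() flow. A minimal sketch of the same startup pattern, using placeholder values (TOKEN, a bare commands.Bot subclass) rather than the project's constants and botcore helpers, looks like this:

    import asyncio

    import aiohttp
    import discord
    from discord.ext import commands

    TOKEN = "..."  # placeholder; the real entry point reads constants.Bot.token


    class SketchBot(commands.Bot):
        """Stand-in for bot.bot.Bot in this sketch."""


    async def main() -> None:
        intents = discord.Intents.all()
        intents.presences = False  # drop intents the bot does not need

        # Shared aiohttp session owned by the entry point and handed to the bot.
        async with aiohttp.ClientSession() as session:
            sketch = SketchBot(
                command_prefix=commands.when_mentioned_or("!"),
                intents=intents,
            )
            sketch.http_session = session
            async with sketch:
                await sketch.start(TOKEN)


    if __name__ == "__main__":
        asyncio.run(main())

Running start() inside `async with` ensures the client's own resources are closed alongside the shared session when the bot shuts down.
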
diff --git a/bot/api.py b/bot/api.py
deleted file mode 100644
index 856f7c865..000000000
--- a/bot/api.py
+++ /dev/null
@@ -1,102 +0,0 @@
-import asyncio
-from typing import Optional
-from urllib.parse import quote as quote_url
-
-import aiohttp
-
-from bot.log import get_logger
-
-from .constants import Keys, URLs
-
-log = get_logger(__name__)
-
-
-class ResponseCodeError(ValueError):
- """Raised when a non-OK HTTP response is received."""
-
- def __init__(
- self,
- response: aiohttp.ClientResponse,
- response_json: Optional[dict] = None,
- response_text: str = ""
- ):
- self.status = response.status
- self.response_json = response_json or {}
- self.response_text = response_text
- self.response = response
-
- def __str__(self):
- response = self.response_json if self.response_json else self.response_text
- return f"Status: {self.status} Response: {response}"
-
-
-class APIClient:
- """Django Site API wrapper."""
-
- # These are class attributes so they can be seen when being mocked for tests.
- # See commit 22a55534ef13990815a6f69d361e2a12693075d5 for details.
- session: Optional[aiohttp.ClientSession] = None
- loop: asyncio.AbstractEventLoop = None
-
- def __init__(self, **session_kwargs):
- auth_headers = {
- 'Authorization': f"Token {Keys.site_api}"
- }
-
- if 'headers' in session_kwargs:
- session_kwargs['headers'].update(auth_headers)
- else:
- session_kwargs['headers'] = auth_headers
-
- # aiohttp will complain if APIClient gets instantiated outside a coroutine. Thankfully, we
- # don't and shouldn't need to do that, so we can avoid scheduling a task to create it.
- self.session = aiohttp.ClientSession(**session_kwargs)
-
- @staticmethod
- def _url_for(endpoint: str) -> str:
- return f"{URLs.site_api_schema}{URLs.site_api}/{quote_url(endpoint)}"
-
- async def close(self) -> None:
- """Close the aiohttp session."""
- await self.session.close()
-
- async def maybe_raise_for_status(self, response: aiohttp.ClientResponse, should_raise: bool) -> None:
- """Raise ResponseCodeError for non-OK response if an exception should be raised."""
- if should_raise and response.status >= 400:
- try:
- response_json = await response.json()
- raise ResponseCodeError(response=response, response_json=response_json)
- except aiohttp.ContentTypeError:
- response_text = await response.text()
- raise ResponseCodeError(response=response, response_text=response_text)
-
- async def request(self, method: str, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> dict:
- """Send an HTTP request to the site API and return the JSON response."""
- async with self.session.request(method.upper(), self._url_for(endpoint), **kwargs) as resp:
- await self.maybe_raise_for_status(resp, raise_for_status)
- return await resp.json()
-
- async def get(self, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> dict:
- """Site API GET."""
- return await self.request("GET", endpoint, raise_for_status=raise_for_status, **kwargs)
-
- async def patch(self, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> dict:
- """Site API PATCH."""
- return await self.request("PATCH", endpoint, raise_for_status=raise_for_status, **kwargs)
-
- async def post(self, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> dict:
- """Site API POST."""
- return await self.request("POST", endpoint, raise_for_status=raise_for_status, **kwargs)
-
- async def put(self, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> dict:
- """Site API PUT."""
- return await self.request("PUT", endpoint, raise_for_status=raise_for_status, **kwargs)
-
- async def delete(self, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> Optional[dict]:
- """Site API DELETE."""
- async with self.session.delete(self._url_for(endpoint), **kwargs) as resp:
- if resp.status == 204:
- return None
-
- await self.maybe_raise_for_status(resp, raise_for_status)
- return await resp.json()
diff --git a/bot/async_stats.py b/bot/async_stats.py
deleted file mode 100644
index 2af832e5b..000000000
--- a/bot/async_stats.py
+++ /dev/null
@@ -1,41 +0,0 @@
-import asyncio
-import socket
-
-from statsd.client.base import StatsClientBase
-
-from bot.utils import scheduling
-
-
-class AsyncStatsClient(StatsClientBase):
- """An async transport method for statsd communication."""
-
- def __init__(
- self,
- loop: asyncio.AbstractEventLoop,
- host: str = 'localhost',
- port: int = 8125,
- prefix: str = None
- ):
- """Create a new client."""
- family, _, _, _, addr = socket.getaddrinfo(
- host, port, socket.AF_INET, socket.SOCK_DGRAM)[0]
- self._addr = addr
- self._prefix = prefix
- self._loop = loop
- self._transport = None
-
- async def create_socket(self) -> None:
- """Use the loop.create_datagram_endpoint method to create a socket."""
- self._transport, _ = await self._loop.create_datagram_endpoint(
- asyncio.DatagramProtocol,
- family=socket.AF_INET,
- remote_addr=self._addr
- )
-
- def _send(self, data: str) -> None:
- """Start an async task to send data to statsd."""
- scheduling.create_task(self._async_send(data), event_loop=self._loop)
-
- async def _async_send(self, data: str) -> None:
- """Send data to the statsd server using the async transport."""
- self._transport.sendto(data.encode('ascii'), self._addr)
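
The deleted AsyncStatsClient (its replacement now lives in botcore) pushed statsd metrics over UDP through an asyncio datagram endpoint, which is why a send never blocks or errors when the collector is down. A self-contained sketch of that transport pattern, with a made-up metric name, is:

    import asyncio
    import socket


    async def send_statsd_metric(host: str, port: int, data: str) -> None:
        """Fire a single statsd-style datagram and return without waiting for a reply."""
        loop = asyncio.get_running_loop()
        transport, _ = await loop.create_datagram_endpoint(
            asyncio.DatagramProtocol,
            family=socket.AF_INET,
            remote_addr=(host, port),
        )
        try:
            # UDP is fire-and-forget: nothing fails if no collector is listening,
            # which is why pointing the host at 127.0.0.1 effectively disables stats.
            transport.sendto(data.encode("ascii"))
        finally:
            transport.close()


    # Hypothetical counter increment in statsd wire format.
    asyncio.run(send_statsd_metric("127.0.0.1", 8125, "bot.example_counter:1|c"))
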
diff --git a/bot/bot.py b/bot/bot.py
index 2769b7dda..aff07cd32 100644
--- a/bot/bot.py
+++ b/bot/bot.py
@@ -1,22 +1,15 @@
import asyncio
-import socket
-import warnings
from collections import defaultdict
-from contextlib import suppress
-from typing import Dict, List, Optional
import aiohttp
-import disnake
-from async_rediscache import RedisSession
-from disnake.ext import commands
+from botcore import BotBase
+from botcore.utils import scheduling
from sentry_sdk import push_scope
-from bot import api, constants
-from bot.async_stats import AsyncStatsClient
+from bot import constants, exts
from bot.log import get_logger
log = get_logger('bot')
-LOCALHOST = "127.0.0.1"
class StartupError(Exception):
@@ -27,68 +20,15 @@ class StartupError(Exception):
self.exception = base
-class Bot(commands.Bot):
- """A subclass of `disnake.ext.commands.Bot` with an aiohttp session and an API client."""
+class Bot(BotBase):
+ """A subclass of `botcore.BotBase` that implements bot-specific functions."""
- def __init__(self, *args, redis_session: RedisSession, **kwargs):
- if "connector" in kwargs:
- warnings.warn(
- "If login() is called (or the bot is started), the connector will be overwritten "
- "with an internal one"
- )
+ def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- self.http_session: Optional[aiohttp.ClientSession] = None
- self.redis_session = redis_session
- self.api_client: Optional[api.APIClient] = None
self.filter_list_cache = defaultdict(dict)
- self._connector = None
- self._resolver = None
- self._statsd_timerhandle: asyncio.TimerHandle = None
- self._guild_available = asyncio.Event()
-
- statsd_url = constants.Stats.statsd_host
-
- if constants.DEBUG_MODE:
- # Since statsd is UDP, there are no errors for sending to a down port.
- # For this reason, setting the statsd host to 127.0.0.1 for development
- # will effectively disable stats.
- statsd_url = LOCALHOST
-
- self.stats = AsyncStatsClient(self.loop, LOCALHOST)
- self._connect_statsd(statsd_url)
-
- def _connect_statsd(self, statsd_url: str, retry_after: int = 2, attempt: int = 1) -> None:
- """Callback used to retry a connection to statsd if it should fail."""
- if attempt >= 8:
- log.error("Reached 8 attempts trying to reconnect AsyncStatsClient. Aborting")
- return
-
- try:
- self.stats = AsyncStatsClient(self.loop, statsd_url, 8125, prefix="bot")
- except socket.gaierror:
- log.warning(f"Statsd client failed to connect (Attempt(s): {attempt})")
- # Use a fallback strategy for retrying, up to 8 times.
- self._statsd_timerhandle = self.loop.call_later(
- retry_after,
- self._connect_statsd,
- statsd_url,
- retry_after * 2,
- attempt + 1
- )
-
- # All tasks that need to block closing until finished
- self.closing_tasks: List[asyncio.Task] = []
-
- async def cache_filter_list_data(self) -> None:
- """Cache all the data in the FilterList on the site."""
- full_cache = await self.api_client.get('bot/filter-lists')
-
- for item in full_cache:
- self.insert_item_into_filter_list_cache(item)
-
async def ping_services(self) -> None:
"""A helper to make sure all the services the bot relies on are available on startup."""
# Connect Site/API
@@ -105,112 +45,7 @@ class Bot(commands.Bot):
raise
await asyncio.sleep(constants.URLs.connect_cooldown)
- @classmethod
- def create(cls) -> "Bot":
- """Create and return an instance of a Bot."""
- loop = asyncio.get_event_loop()
- allowed_roles = list({disnake.Object(id_) for id_ in constants.MODERATION_ROLES})
-
- intents = disnake.Intents.all()
- intents.presences = False
- intents.dm_typing = False
- intents.dm_reactions = False
- intents.invites = False
- intents.webhooks = False
- intents.integrations = False
-
- return cls(
- redis_session=_create_redis_session(loop),
- loop=loop,
- command_prefix=commands.when_mentioned_or(constants.Bot.prefix),
- activity=disnake.Game(name=f"Commands: {constants.Bot.prefix}help"),
- case_insensitive=True,
- max_messages=10_000,
- allowed_mentions=disnake.AllowedMentions(everyone=False, roles=allowed_roles),
- intents=intents,
- )
-
- def load_extensions(self) -> None:
- """Load all enabled extensions."""
- # Must be done here to avoid a circular import.
- from bot.utils.extensions import EXTENSIONS
-
- extensions = set(EXTENSIONS) # Create a mutable copy.
- if not constants.HelpChannels.enable:
- extensions.remove("bot.exts.help_channels")
-
- for extension in extensions:
- self.load_extension(extension)
-
- def add_cog(self, cog: commands.Cog) -> None:
- """Adds a "cog" to the bot and logs the operation."""
- super().add_cog(cog)
- log.info(f"Cog loaded: {cog.qualified_name}")
-
- def add_command(self, command: commands.Command) -> None:
- """Add `command` as normal and then add its root aliases to the bot."""
- super().add_command(command)
- self._add_root_aliases(command)
-
- def remove_command(self, name: str) -> Optional[commands.Command]:
- """
- Remove a command/alias as normal and then remove its root aliases from the bot.
-
- Individual root aliases cannot be removed by this function.
- To remove them, either remove the entire command or manually edit `bot.all_commands`.
- """
- command = super().remove_command(name)
- if command is None:
- # Even if it's a root alias, there's no way to get the Bot instance to remove the alias.
- return
-
- self._remove_root_aliases(command)
- return command
-
- def clear(self) -> None:
- """Not implemented! Re-instantiate the bot instead of attempting to re-use a closed one."""
- raise NotImplementedError("Re-using a Bot object after closing it is not supported.")
-
- async def close(self) -> None:
- """Close the Discord connection and the aiohttp session, connector, statsd client, and resolver."""
- # Done before super().close() to allow tasks finish before the HTTP session closes.
- for ext in list(self.extensions):
- with suppress(Exception):
- self.unload_extension(ext)
-
- for cog in list(self.cogs):
- with suppress(Exception):
- self.remove_cog(cog)
-
- # Wait until all tasks that have to be completed before bot is closing is done
- log.trace("Waiting for tasks before closing.")
- await asyncio.gather(*self.closing_tasks)
-
- # Now actually do full close of bot
- await super().close()
-
- if self.api_client:
- await self.api_client.close()
-
- if self.http_session:
- await self.http_session.close()
-
- if self._connector:
- await self._connector.close()
-
- if self._resolver:
- await self._resolver.close()
-
- if self.stats._transport:
- self.stats._transport.close()
-
- if self.redis_session:
- await self.redis_session.close()
-
- if self._statsd_timerhandle:
- self._statsd_timerhandle.cancel()
-
- def insert_item_into_filter_list_cache(self, item: Dict[str, str]) -> None:
+ def insert_item_into_filter_list_cache(self, item: dict[str, str]) -> None:
"""Add an item to the bots filter_list_cache."""
type_ = item["type"]
allowed = item["allowed"]
@@ -223,81 +58,23 @@ class Bot(commands.Bot):
"updated_at": item["updated_at"],
}
- async def login(self, *args, **kwargs) -> None:
- """Re-create the connector and set up sessions before logging into Discord."""
- # Use asyncio for DNS resolution instead of threads so threads aren't spammed.
- self._resolver = aiohttp.AsyncResolver()
-
- # Use AF_INET as its socket family to prevent HTTPS related problems both locally
- # and in production.
- self._connector = aiohttp.TCPConnector(
- resolver=self._resolver,
- family=socket.AF_INET,
- )
-
- # Client.login() will call HTTPClient.static_login() which will create a session using
- # this connector attribute.
- self.http.connector = self._connector
-
- self.http_session = aiohttp.ClientSession(connector=self._connector)
- self.api_client = api.APIClient(connector=self._connector)
+ async def cache_filter_list_data(self) -> None:
+ """Cache all the data in the FilterList on the site."""
+ full_cache = await self.api_client.get('bot/filter-lists')
- if self.redis_session.closed:
- # If the RedisSession was somehow closed, we try to reconnect it
- # here. Normally, this shouldn't happen.
- await self.redis_session.connect()
+ for item in full_cache:
+ self.insert_item_into_filter_list_cache(item)
- try:
- await self.ping_services()
- except Exception as e:
- raise StartupError(e)
+ async def setup_hook(self) -> None:
+ """Default async initialisation method for discord.py."""
+ await super().setup_hook()
# Build the FilterList cache
await self.cache_filter_list_data()
- await self.stats.create_socket()
- await super().login(*args, **kwargs)
-
- async def on_guild_available(self, guild: disnake.Guild) -> None:
- """
- Set the internal guild available event when constants.Guild.id becomes available.
-
- If the cache appears to still be empty (no members, no channels, or no roles), the event
- will not be set.
- """
- if guild.id != constants.Guild.id:
- return
-
- if not guild.roles or not guild.members or not guild.channels:
- msg = "Guild available event was dispatched but the cache appears to still be empty!"
- log.warning(msg)
-
- try:
- webhook = await self.fetch_webhook(constants.Webhooks.dev_log)
- except disnake.HTTPException as e:
- log.error(f"Failed to fetch webhook to send empty cache warning: status {e.status}")
- else:
- await webhook.send(f"<@&{constants.Roles.admin}> {msg}")
-
- return
-
- self._guild_available.set()
-
- async def on_guild_unavailable(self, guild: disnake.Guild) -> None:
- """Clear the internal guild available event when constants.Guild.id becomes unavailable."""
- if guild.id != constants.Guild.id:
- return
-
- self._guild_available.clear()
-
- async def wait_until_guild_available(self) -> None:
- """
- Wait until the constants.Guild.id guild is available (and the cache is ready).
-
- The on_ready event is inadequate because it only waits 2 seconds for a GUILD_CREATE
- gateway event before giving up and thus not populating the cache for unavailable guilds.
- """
- await self._guild_available.wait()
+ # This is not awaited to avoid a deadlock with any cogs that have
+ # wait_until_guild_available in their cog_load method.
+ scheduling.create_task(self.load_extensions(exts))
async def on_error(self, event: str, *args, **kwargs) -> None:
"""Log errors raised in event listeners rather than printing them to stderr."""
@@ -309,46 +86,3 @@ class Bot(commands.Bot):
scope.set_extra("kwargs", kwargs)
log.exception(f"Unhandled exception in {event}.")
-
- def _add_root_aliases(self, command: commands.Command) -> None:
- """Recursively add root aliases for `command` and any of its subcommands."""
- if isinstance(command, commands.Group):
- for subcommand in command.commands:
- self._add_root_aliases(subcommand)
-
- for alias in getattr(command, "root_aliases", ()):
- if alias in self.all_commands:
- raise commands.CommandRegistrationError(alias, alias_conflict=True)
-
- self.all_commands[alias] = command
-
- def _remove_root_aliases(self, command: commands.Command) -> None:
- """Recursively remove root aliases for `command` and any of its subcommands."""
- if isinstance(command, commands.Group):
- for subcommand in command.commands:
- self._remove_root_aliases(subcommand)
-
- for alias in getattr(command, "root_aliases", ()):
- self.all_commands.pop(alias, None)
-
-
-def _create_redis_session(loop: asyncio.AbstractEventLoop) -> RedisSession:
- """
- Create and connect to a redis session.
-
- Ensure the connection is established before returning to prevent race conditions.
- `loop` is the event loop on which to connect. The Bot should use this same event loop.
- """
- redis_session = RedisSession(
- address=(constants.Redis.host, constants.Redis.port),
- password=constants.Redis.password,
- minsize=1,
- maxsize=20,
- use_fakeredis=constants.Redis.use_fakeredis,
- global_namespace="bot",
- )
- try:
- loop.run_until_complete(redis_session.connect())
- except OSError as e:
- raise StartupError(e)
- return redis_session
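
In the new Bot above, setup_hook() awaits the filter-list cache but only schedules load_extensions(exts) as a task, so a cog that waits for wait_until_guild_available() inside cog_load cannot deadlock the login sequence. A minimal sketch of that pattern, with a placeholder _load_all_extensions coroutine instead of botcore's loader, is:

    import asyncio

    import discord
    from discord.ext import commands


    class SketchBot(commands.Bot):
        """Illustrative subclass; not the project's Bot."""

        async def _load_all_extensions(self) -> None:
            # Placeholder for discovering and loading extension modules.
            await asyncio.sleep(0)

        async def setup_hook(self) -> None:
            # Runs after login, before the gateway connection is established.
            await super().setup_hook()
            # Deliberately not awaited: a cog_load that waits on gateway events
            # would otherwise block the very connection it is waiting for.
            self._extension_task = asyncio.create_task(self._load_all_extensions())


    sketch = SketchBot(command_prefix="!", intents=discord.Intents.default())
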
diff --git a/bot/constants.py b/bot/constants.py
index bfb27a47b..6fa5df1fc 100644
--- a/bot/constants.py
+++ b/bot/constants.py
@@ -488,6 +488,7 @@ class Roles(metaclass=YAMLGetter):
announcements: int
lovefest: int
pyweek_announcements: int
+ revival_of_code: int
contributors: int
help_cooldown: int
@@ -543,6 +544,7 @@ class URLs(metaclass=YAMLGetter):
# Snekbox endpoints
snekbox_eval_api: str
+ snekbox_311_eval_api: str
# Discord API endpoints
discord_api: str
diff --git a/bot/converters.py b/bot/converters.py
index 6f35d2fe4..5800ea044 100644
--- a/bot/converters.py
+++ b/bot/converters.py
@@ -6,22 +6,22 @@ from datetime import datetime, timezone
from ssl import CertificateError
import dateutil.parser
-import disnake
+import discord
from aiohttp import ClientConnectorError
+from botcore.site_api import ResponseCodeError
+from botcore.utils import unqualify
from botcore.utils.regex import DISCORD_INVITE
from dateutil.relativedelta import relativedelta
-from disnake.ext.commands import BadArgument, Bot, Context, Converter, IDConverter, MemberConverter, UserConverter
-from disnake.utils import escape_markdown, snowflake_time
+from discord.ext.commands import BadArgument, Bot, Context, Converter, IDConverter, MemberConverter, UserConverter
+from discord.utils import escape_markdown, snowflake_time
-from bot import exts
-from bot.api import ResponseCodeError
+from bot import exts, instance as bot_instance
from bot.constants import URLs
from bot.errors import InvalidInfraction
from bot.exts.info.doc import _inventory_parser
from bot.exts.info.tags import TagIdentifier
from bot.log import get_logger
from bot.utils import time
-from bot.utils.extensions import EXTENSIONS, unqualify
if t.TYPE_CHECKING:
from bot.exts.info.source import SourceType
@@ -32,25 +32,6 @@ DISCORD_EPOCH_DT = snowflake_time(0)
RE_USER_MENTION = re.compile(r"<@!?([0-9]+)>$")
-def allowed_strings(*values, preserve_case: bool = False) -> t.Callable[[str], str]:
- """
- Return a converter which only allows arguments equal to one of the given values.
-
- Unless preserve_case is True, the argument is converted to lowercase. All values are then
- expected to have already been given in lowercase too.
- """
- def converter(arg: str) -> str:
- if not preserve_case:
- arg = arg.lower()
-
- if arg not in values:
- raise BadArgument(f"Only the following values are allowed:\n```{', '.join(values)}```")
- else:
- return arg
-
- return converter
-
-
class ValidDiscordServerInvite(Converter):
"""
A converter that validates whether a given string is a valid Discord server invite.
@@ -150,13 +131,13 @@ class Extension(Converter):
argument = argument.lower()
- if argument in EXTENSIONS:
+ if argument in bot_instance.all_extensions:
return argument
- elif (qualified_arg := f"{exts.__name__}.{argument}") in EXTENSIONS:
+ elif (qualified_arg := f"{exts.__name__}.{argument}") in bot_instance.all_extensions:
return qualified_arg
matches = []
- for ext in EXTENSIONS:
+ for ext in bot_instance.all_extensions:
if argument == unqualify(ext):
matches.append(ext)
@@ -235,7 +216,7 @@ class Inventory(Converter):
@staticmethod
async def convert(ctx: Context, url: str) -> t.Tuple[str, _inventory_parser.InventoryDict]:
"""Convert url to Intersphinx inventory URL."""
- await ctx.trigger_typing()
+ await ctx.typing()
try:
inventory = await _inventory_parser.fetch_inventory(url)
except _inventory_parser.InvalidHeaderError:
@@ -382,8 +363,8 @@ class Age(DurationDelta):
class OffTopicName(Converter):
"""A converter that ensures an added off-topic name is valid."""
- ALLOWED_CHARACTERS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ!?'`-<>"
- TRANSLATED_CHARACTERS = "𝖠𝖡𝖢𝖣𝖤𝖥𝖦𝖧𝖨𝖩𝖪𝖫𝖬𝖭𝖮𝖯𝖰𝖱𝖲𝖳𝖴𝖵𝖶𝖷𝖸𝖹ǃ?’’-<>"
+ ALLOWED_CHARACTERS = r"ABCDEFGHIJKLMNOPQRSTUVWXYZ!?'`-<>\/"
+ TRANSLATED_CHARACTERS = "𝖠𝖡𝖢𝖣𝖤𝖥𝖦𝖧𝖨𝖩𝖪𝖫𝖬𝖭𝖮𝖯𝖰𝖱𝖲𝖳𝖴𝖵𝖶𝖷𝖸𝖹ǃ?’’-<>⧹⧸"
@classmethod
def translate_name(cls, name: str, *, from_unicode: bool = True) -> str:
@@ -505,14 +486,14 @@ AMBIGUOUS_ARGUMENT_MSG = ("`{argument}` is not a User mention, a User ID or a Us
class UnambiguousUser(UserConverter):
"""
- Converts to a `disnake.User`, but only if a mention, userID or a username (name#discrim) is provided.
+ Converts to a `discord.User`, but only if a mention, userID or a username (name#discrim) is provided.
Unlike the default `UserConverter`, it doesn't allow conversion from a name.
This is useful in cases where that lookup strategy would lead to too much ambiguity.
"""
- async def convert(self, ctx: Context, argument: str) -> disnake.User:
- """Convert the `argument` to a `disnake.User`."""
+ async def convert(self, ctx: Context, argument: str) -> discord.User:
+ """Convert the `argument` to a `discord.User`."""
if _is_an_unambiguous_user_argument(argument):
return await super().convert(ctx, argument)
else:
@@ -521,14 +502,14 @@ class UnambiguousUser(UserConverter):
class UnambiguousMember(MemberConverter):
"""
- Converts to a `disnake.Member`, but only if a mention, userID or a username (name#discrim) is provided.
+ Converts to a `discord.Member`, but only if a mention, userID or a username (name#discrim) is provided.
Unlike the default `MemberConverter`, it doesn't allow conversion from a name or nickname.
This is useful in cases where that lookup strategy would lead to too much ambiguity.
"""
- async def convert(self, ctx: Context, argument: str) -> disnake.Member:
- """Convert the `argument` to a `disnake.Member`."""
+ async def convert(self, ctx: Context, argument: str) -> discord.Member:
+ """Convert the `argument` to a `discord.Member`."""
if _is_an_unambiguous_user_argument(argument):
return await super().convert(ctx, argument)
else:
@@ -588,10 +569,10 @@ if t.TYPE_CHECKING:
OffTopicName = str # noqa: F811
ISODateTime = datetime # noqa: F811
HushDurationConverter = int # noqa: F811
- UnambiguousUser = disnake.User # noqa: F811
- UnambiguousMember = disnake.Member # noqa: F811
+ UnambiguousUser = discord.User # noqa: F811
+ UnambiguousMember = discord.Member # noqa: F811
Infraction = t.Optional[dict] # noqa: F811
Expiry = t.Union[Duration, ISODateTime]
-MemberOrUser = t.Union[disnake.Member, disnake.User]
+MemberOrUser = t.Union[discord.Member, discord.User]
UnambiguousMemberOrUser = t.Union[UnambiguousMember, UnambiguousUser]
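
The Extension converter above now resolves names against bot_instance.all_extensions and falls back to matching the unqualified (final dotted) component via botcore's unqualify. The lookup order it implements reduces to the following standalone sketch, with a local unqualify and a made-up extension set standing in for the live bot state:

    def unqualify(name: str) -> str:
        """Return the last dotted component, e.g. 'bot.exts.utils.ping' -> 'ping'."""
        return name.rsplit(".", maxsplit=1)[-1]


    def resolve_extension(argument: str, all_extensions: set[str], package: str = "bot.exts") -> list[str]:
        """Return candidate fully qualified extension names for a user-supplied argument."""
        argument = argument.lower()
        if argument in all_extensions:  # exact, fully qualified match
            return [argument]
        if (qualified := f"{package}.{argument}") in all_extensions:  # package-relative match
            return [qualified]
        # Fall back to matching only the unqualified module name; more than one
        # result means the argument was ambiguous.
        return [ext for ext in all_extensions if unqualify(ext) == argument]


    extensions = {"bot.exts.utils.ping", "bot.exts.info.help"}
    print(resolve_extension("ping", extensions))  # ['bot.exts.utils.ping']
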
diff --git a/bot/decorators.py b/bot/decorators.py
index 9ae98442c..466770c3a 100644
--- a/bot/decorators.py
+++ b/bot/decorators.py
@@ -4,13 +4,15 @@ import types
import typing as t
from contextlib import suppress
-from disnake import Member, NotFound
-from disnake.ext import commands
-from disnake.ext.commands import Cog, Context
+import arrow
+from botcore.utils import scheduling
+from discord import Member, NotFound
+from discord.ext import commands
+from discord.ext.commands import Cog, Context
from bot.constants import Channels, DEBUG_MODE, RedirectOutput
from bot.log import get_logger
-from bot.utils import function, scheduling
+from bot.utils import function
from bot.utils.checks import ContextCheckFailure, in_whitelist_check
from bot.utils.function import command_wraps
@@ -179,7 +181,7 @@ def respect_role_hierarchy(member_arg: function.Argument) -> t.Callable:
Ensure the highest role of the invoking member is greater than that of the target member.
If the condition fails, a warning is sent to the invoking context. A target which is not an
- instance of disnake.Member will always pass.
+ instance of discord.Member will always pass.
`member_arg` is the keyword name or position index of the parameter of the decorated command
whose value is the target member.
@@ -195,7 +197,7 @@ def respect_role_hierarchy(member_arg: function.Argument) -> t.Callable:
target = function.get_arg_value(member_arg, bound_args)
if not isinstance(target, Member):
- log.trace("The target is not a disnake.Member; skipping role hierarchy check.")
+ log.trace("The target is not a discord.Member; skipping role hierarchy check.")
return await func(*args, **kwargs)
ctx = function.get_arg_value(1, bound_args)
@@ -236,3 +238,35 @@ def mock_in_debug(return_value: t.Any) -> t.Callable:
return await func(*args, **kwargs)
return wrapped
return decorator
+
+
+def ensure_future_timestamp(timestamp_arg: function.Argument) -> t.Callable:
+ """
+ Ensure the timestamp argument is in the future.
+
+ If the condition fails, send a warning to the invoking context.
+
+ `timestamp_arg` is the keyword name or position index of the parameter of the decorated command
+ whose value is the target timestamp.
+
+ This decorator must go before (below) the `command` decorator.
+ """
+ def decorator(func: types.FunctionType) -> types.FunctionType:
+ @command_wraps(func)
+ async def wrapper(*args, **kwargs) -> t.Any:
+ bound_args = function.get_bound_args(func, args, kwargs)
+ target = function.get_arg_value(timestamp_arg, bound_args)
+
+ ctx = function.get_arg_value(1, bound_args)
+
+ try:
+ is_future = target > arrow.utcnow()
+ except TypeError:
+ is_future = True
+ if not is_future:
+ await ctx.send(":x: Provided timestamp is in the past.")
+ return
+
+ return await func(*args, **kwargs)
+ return wrapper
+ return decorator
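
The new ensure_future_timestamp decorator is designed to sit directly below the command decorator, mirroring respect_role_hierarchy. A hypothetical usage sketch (the remindme command, its annotation, and the cog are illustrative, not taken from this diff) would look like:

    import arrow
    from discord.ext import commands

    from bot.decorators import ensure_future_timestamp


    class Reminders(commands.Cog):
        """Illustrative cog showing the decorator ordering described above."""

        @commands.command(name="remindme")          # command decorator above...
        @ensure_future_timestamp(timestamp_arg=2)   # ...timestamp check directly below it
        async def remindme(self, ctx: commands.Context, timestamp: arrow.Arrow) -> None:
            # In the real bot a converter (e.g. Duration/ISODateTime) supplies the
            # timestamp; arrow.Arrow here is only an illustrative annotation.
            await ctx.send(f"Reminder set for {timestamp}.")

Here timestamp_arg=2 refers to the bound-argument position of `timestamp` (self is 0, ctx is 1), matching how the decorator looks up ctx at position 1.
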
diff --git a/bot/errors.py b/bot/errors.py
index 298e7ac2d..078b645f1 100644
--- a/bot/errors.py
+++ b/bot/errors.py
@@ -2,7 +2,7 @@ from __future__ import annotations
from typing import Hashable, TYPE_CHECKING, Union
-from disnake.ext.commands import ConversionError, Converter
+from discord.ext.commands import ConversionError, Converter
if TYPE_CHECKING:
from bot.converters import MemberOrUser
diff --git a/bot/exts/backend/branding/__init__.py b/bot/exts/backend/branding/__init__.py
index 20a747b7f..8460465cb 100644
--- a/bot/exts/backend/branding/__init__.py
+++ b/bot/exts/backend/branding/__init__.py
@@ -2,6 +2,6 @@ from bot.bot import Bot
from bot.exts.backend.branding._cog import Branding
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load Branding cog."""
- bot.add_cog(Branding(bot))
+ await bot.add_cog(Branding(bot))
diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py
index a07e70d58..ff2704835 100644
--- a/bot/exts/backend/branding/_cog.py
+++ b/bot/exts/backend/branding/_cog.py
@@ -1,23 +1,23 @@
import asyncio
import contextlib
import random
+import types
import typing as t
from datetime import timedelta
from enum import Enum
from operator import attrgetter
import async_timeout
-import disnake
+import discord
from arrow import Arrow
from async_rediscache import RedisCache
-from disnake.ext import commands, tasks
+from discord.ext import commands, tasks
from bot.bot import Bot
from bot.constants import Branding as BrandingConfig, Channels, Colours, Guild, MODERATION_ROLES
from bot.decorators import mock_in_debug
from bot.exts.backend.branding._repository import BrandingRepository, Event, RemoteObject
from bot.log import get_logger
-from bot.utils import scheduling
log = get_logger(__name__)
@@ -42,7 +42,7 @@ def compound_hash(objects: t.Iterable[RemoteObject]) -> str:
return "-".join(item.sha for item in objects)
-def make_embed(title: str, description: str, *, success: bool) -> disnake.Embed:
+def make_embed(title: str, description: str, *, success: bool) -> discord.Embed:
"""
Construct simple response embed.
@@ -51,7 +51,7 @@ def make_embed(title: str, description: str, *, success: bool) -> disnake.Embed:
For both `title` and `description`, empty string are valid values ~ fields will be empty.
"""
colour = Colours.soft_green if success else Colours.soft_red
- return disnake.Embed(title=title[:256], description=description[:4096], colour=colour)
+ return discord.Embed(title=title[:256], description=description[:4096], colour=colour)
def extract_event_duration(event: Event) -> str:
@@ -105,19 +105,24 @@ class Branding(commands.Cog):
"""
# RedisCache[
- # "daemon_active": bool | If True, daemon starts on start-up. Controlled via commands.
- # "event_path": str | Current event's path in the branding repo.
- # "event_description": str | Current event's Markdown description.
- # "event_duration": str | Current event's human-readable date range.
- # "banner_hash": str | SHA of the currently applied banner.
- # "icons_hash": str | Compound SHA of all icons in current rotation.
- # "last_rotation_timestamp": float | POSIX UTC timestamp.
+ # "daemon_active": bool | If True, daemon starts on start-up. Controlled via commands.
+ # "event_path": str | Current event's path in the branding repo.
+ # "event_description": str | Current event's Markdown description.
+ # "event_duration": str | Current event's human-readable date range.
+ # "banners_hash": str | Compound SHA of all banners in the current rotation.
+ # "icons_hash": str | Compound SHA of all icons in current rotation.
+ # "last_icon_rotation_timestamp": float | POSIX UTC timestamp.
+ # "last_banner_rotation_timestamp": float | POSIX UTC timestamp.
# ]
cache_information = RedisCache()
- # Icons in current rotation. Keys (str) are download URLs, values (int) track the amount of times each
- # icon has been used in the current rotation.
- cache_icons = RedisCache()
+ # Icons and banners in current rotation.
+ # Keys (str) are download URLs, values (int) track the amount of times each
+ # asset has been used in the current rotation.
+ asset_caches = types.MappingProxyType({
+ AssetType.ICON: RedisCache(namespace="Branding.icon_cache"),
+ AssetType.BANNER: RedisCache(namespace="Branding.banner_cache")
+ })
# All available event names & durations. Cached by the daemon nightly; read by the calendar command.
cache_events = RedisCache()
@@ -127,7 +132,9 @@ class Branding(commands.Cog):
self.bot = bot
self.repository = BrandingRepository(bot)
- scheduling.create_task(self.maybe_start_daemon(), event_loop=self.bot.loop) # Start depending on cache.
+ async def cog_load(self) -> None:
+ """Carry out cog asynchronous initialisation."""
+ await self.maybe_start_daemon() # Start depending on cache.
# region: Internal logic & state management
@@ -147,13 +154,13 @@ class Branding(commands.Cog):
return False
await self.bot.wait_until_guild_available()
- pydis: disnake.Guild = self.bot.get_guild(Guild.id)
+ pydis: discord.Guild = self.bot.get_guild(Guild.id)
timeout = 10 # Seconds.
try:
with async_timeout.timeout(timeout): # Raise after `timeout` seconds.
await pydis.edit(**{asset_type.value: file})
- except disnake.HTTPException:
+ except discord.HTTPException:
log.exception("Asset upload to Discord failed.")
return False
except asyncio.TimeoutError:
@@ -163,107 +170,92 @@ class Branding(commands.Cog):
log.trace("Asset uploaded successfully.")
return True
- async def apply_banner(self, banner: RemoteObject) -> bool:
+ async def rotate_assets(self, asset_type: AssetType) -> bool:
"""
- Apply `banner` to the guild and cache its hash if successful.
+ Choose and apply the next-up asset in rotation.
- Banners should always be applied via this method to ensure that the last hash is cached.
-
- Return a boolean indicating whether the application was successful.
- """
- success = await self.apply_asset(AssetType.BANNER, banner.download_url)
-
- if success:
- await self.cache_information.set("banner_hash", banner.sha)
-
- return success
-
- async def rotate_icons(self) -> bool:
- """
- Choose and apply the next-up icon in rotation.
-
- We keep track of the amount of times each icon has been used. The values in `cache_icons` can be understood
- to be iteration IDs. When an icon is chosen & applied, we bump its count, pushing it into the next iteration.
+ We keep track of the amount of times each asset has been used. The values in the cache can be understood
+ to be iteration IDs. When an asset is chosen & applied, we bump its count, pushing it into the next iteration.
Once the current iteration (lowest count in the cache) depletes, we move onto the next iteration.
- In the case that there is only 1 icon in the rotation and has already been applied, do nothing.
+ In the case that there is only 1 asset in the rotation and it has already been applied, do nothing.
- Return a boolean indicating whether a new icon was applied successfully.
+ Return a boolean indicating whether a new asset was applied successfully.
"""
- log.debug("Rotating icons.")
+ log.debug(f"Rotating {asset_type.value}s.")
- state = await self.cache_icons.to_dict()
- log.trace(f"Total icons in rotation: {len(state)}.")
+ state = await self.asset_caches[asset_type].to_dict()
+ log.trace(f"Total {asset_type.value}s in rotation: {len(state)}.")
if not state: # This would only happen if rotation not initiated, but we can handle gracefully.
- log.warning("Attempted icon rotation with an empty icon cache. This indicates wrong logic.")
+ log.warning(f"Attempted {asset_type.value} rotation with an empty cache. This indicates wrong logic.")
return False
if len(state) == 1 and 1 in state.values():
- log.debug("Aborting icon rotation: only 1 icon is available and has already been applied.")
+ log.debug(f"Aborting {asset_type.value} rotation: only 1 asset is available and has already been applied.")
return False
current_iteration = min(state.values()) # Choose iteration to draw from.
options = [download_url for download_url, times_used in state.items() if times_used == current_iteration]
- log.trace(f"Choosing from {len(options)} icons in iteration {current_iteration}.")
- next_icon = random.choice(options)
+ log.trace(f"Choosing from {len(options)} {asset_type.value}s in iteration {current_iteration}.")
+ next_asset = random.choice(options)
- success = await self.apply_asset(AssetType.ICON, next_icon)
+ success = await self.apply_asset(asset_type, next_asset)
if success:
- await self.cache_icons.increment(next_icon) # Push the icon into the next iteration.
+ await self.asset_caches[asset_type].increment(next_asset) # Push the asset into the next iteration.
timestamp = Arrow.utcnow().timestamp()
- await self.cache_information.set("last_rotation_timestamp", timestamp)
+ await self.cache_information.set(f"last_{asset_type.value}_rotation_timestamp", timestamp)
return success
- async def maybe_rotate_icons(self) -> None:
+ async def maybe_rotate_assets(self, asset_type: AssetType) -> None:
"""
- Call `rotate_icons` if the configured amount of time has passed since last rotation.
+ Call `rotate_assets` if the configured amount of time has passed since last rotation.
We offset the calculated time difference into the future to avoid off-by-a-little-bit errors. Because there
is work to be done before the timestamp is read and written, the next read will likely commence slightly
under 24 hours after the last write.
"""
- log.debug("Checking whether it's time for icons to rotate.")
+ log.debug(f"Checking whether it's time for {asset_type.value}s to rotate.")
- last_rotation_timestamp = await self.cache_information.get("last_rotation_timestamp")
+ last_rotation_timestamp = await self.cache_information.get(f"last_{asset_type.value}_rotation_timestamp")
if last_rotation_timestamp is None: # Maiden case ~ never rotated.
- await self.rotate_icons()
+ await self.rotate_assets(asset_type)
return
last_rotation = Arrow.utcfromtimestamp(last_rotation_timestamp)
difference = (Arrow.utcnow() - last_rotation) + timedelta(minutes=5)
- log.trace(f"Icons last rotated at {last_rotation} (difference: {difference}).")
+ log.trace(f"{asset_type.value.title()}s last rotated at {last_rotation} (difference: {difference}).")
if difference.days >= BrandingConfig.cycle_frequency:
- await self.rotate_icons()
+ await self.rotate_assets(asset_type)
- async def initiate_icon_rotation(self, available_icons: t.List[RemoteObject]) -> None:
+ async def initiate_rotation(self, asset_type: AssetType, available_assets: list[RemoteObject]) -> None:
"""
- Set up a new icon rotation.
+ Set up a new asset rotation.
- This function should be called whenever available icons change. This is generally the case when we enter
+ This function should be called whenever available asset groups change. This is generally the case when we enter
a new event, but potentially also when the assets of an on-going event change. In such cases, a reset
- of `cache_icons` is necessary, because it contains download URLs which may have gotten stale.
+ of the cache is necessary, because it contains download URLs which may have gotten stale.
- This function does not upload a new icon!
+ This function does not upload a new asset!
"""
- log.debug("Initiating new icon rotation.")
+ log.debug(f"Initiating new {asset_type.value} rotation.")
- await self.cache_icons.clear()
+ await self.asset_caches[asset_type].clear()
- new_state = {icon.download_url: 0 for icon in available_icons}
- await self.cache_icons.update(new_state)
+ new_state = {asset.download_url: 0 for asset in available_assets}
+ await self.asset_caches[asset_type].update(new_state)
- log.trace(f"Icon rotation initiated for {len(new_state)} icons.")
+ log.trace(f"{asset_type.value.title()} rotation initiated for {len(new_state)} assets.")
- await self.cache_information.set("icons_hash", compound_hash(available_icons))
+ await self.cache_information.set(f"{asset_type.value}s_hash", compound_hash(available_assets))
async def send_info_embed(self, channel_id: int, *, is_notification: bool) -> None:
"""
@@ -277,7 +269,7 @@ class Branding(commands.Cog):
log.debug(f"Sending event information event to channel: {channel_id} ({is_notification=}).")
await self.bot.wait_until_guild_available()
- channel: t.Optional[disnake.TextChannel] = self.bot.get_channel(channel_id)
+ channel: t.Optional[discord.TextChannel] = self.bot.get_channel(channel_id)
if channel is None:
log.warning(f"Cannot send event information: channel {channel_id} not found!")
@@ -294,7 +286,7 @@ class Branding(commands.Cog):
else:
content = "Python Discord is entering a new event!" if is_notification else None
- embed = disnake.Embed(description=description[:4096], colour=disnake.Colour.og_blurple())
+ embed = discord.Embed(description=description[:4096], colour=discord.Colour.og_blurple())
embed.set_footer(text=duration[:4096])
await channel.send(content=content, embed=embed)
@@ -315,10 +307,12 @@ class Branding(commands.Cog):
"""
log.info(f"Entering event: '{event.path}'.")
- banner_success = await self.apply_banner(event.banner) # Only one asset ~ apply directly.
+ # Prepare and apply new icon and banner rotations
+ await self.initiate_rotation(AssetType.ICON, event.icons)
+ await self.initiate_rotation(AssetType.BANNER, event.banners)
- await self.initiate_icon_rotation(event.icons) # Prepare a new rotation.
- icon_success = await self.rotate_icons() # Apply an icon from the new rotation.
+ icon_success = await self.rotate_assets(AssetType.ICON)
+ banner_success = await self.rotate_assets(AssetType.BANNER)
# This will only be False in the case of a manual same-event re-synchronisation.
event_changed = event.path != await self.cache_information.get("event_path")
@@ -413,7 +407,7 @@ class Branding(commands.Cog):
if should_begin:
self.daemon_loop.start()
- def cog_unload(self) -> None:
+ async def cog_unload(self) -> None:
"""
Cancel the daemon in case of cog unload.
@@ -453,16 +447,19 @@ class Branding(commands.Cog):
log.trace("Daemon main: event has not changed, checking for change in assets.")
- if new_event.banner.sha != await self.cache_information.get("banner_hash"):
+ if compound_hash(new_event.banners) != await self.cache_information.get("banners_hash"):
log.debug("Daemon main: detected banner change.")
- await self.apply_banner(new_event.banner)
+ await self.initiate_rotation(AssetType.BANNER, new_event.banners)
+ await self.rotate_assets(AssetType.BANNER)
+ else:
+ await self.maybe_rotate_assets(AssetType.BANNER)
if compound_hash(new_event.icons) != await self.cache_information.get("icons_hash"):
log.debug("Daemon main: detected icon change.")
- await self.initiate_icon_rotation(new_event.icons)
- await self.rotate_icons()
+ await self.initiate_rotation(AssetType.ICON, new_event.icons)
+ await self.rotate_assets(AssetType.ICON)
else:
- await self.maybe_rotate_icons()
+ await self.maybe_rotate_assets(AssetType.ICON)
@tasks.loop(hours=24)
async def daemon_loop(self) -> None:
@@ -573,7 +570,7 @@ class Branding(commands.Cog):
await ctx.send(embed=resp)
return
- embed = disnake.Embed(title="Current event calendar", colour=disnake.Colour.og_blurple())
+ embed = discord.Embed(title="Current event calendar", colour=discord.Colour.og_blurple())
# Because Discord embeds can only contain up to 25 fields, we only show the first 25.
first_25 = list(available_events.items())[:25]
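
rotate_assets above tracks, per download URL, how many times each asset has been applied; the lowest count defines the current iteration, and applying an asset bumps it into the next one. The selection step reduces to this standalone sketch, with a plain dict standing in for the RedisCache:

    import random


    def choose_next_asset(state: dict[str, int]) -> str | None:
        """Pick the next asset URL from the least-used iteration, or None if rotation is skipped."""
        if not state:
            return None  # rotation was never initiated
        if len(state) == 1 and 1 in state.values():
            return None  # only one asset, already applied; nothing to rotate to
        current_iteration = min(state.values())
        options = [url for url, times_used in state.items() if times_used == current_iteration]
        chosen = random.choice(options)
        state[chosen] += 1  # push the chosen asset into the next iteration
        return chosen


    # Example: two icons, one already used once, so the unused one is picked first.
    print(choose_next_asset({"icon_a.png": 1, "icon_b.png": 0}))  # icon_b.png
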
diff --git a/bot/exts/backend/branding/_repository.py b/bot/exts/backend/branding/_repository.py
index d88ea67f3..e14f0a1ef 100644
--- a/bot/exts/backend/branding/_repository.py
+++ b/bot/exts/backend/branding/_repository.py
@@ -64,8 +64,8 @@ class Event(t.NamedTuple):
path: str # Path from repo root where event lives. This is the event's identity.
meta: MetaFile
- banner: RemoteObject
- icons: t.List[RemoteObject]
+ banners: list[RemoteObject]
+ icons: list[RemoteObject]
def __str__(self) -> str:
return f"<Event at '{self.path}'>"
@@ -163,21 +163,24 @@ class BrandingRepository:
"""
contents = await self.fetch_directory(directory.path)
- missing_assets = {"meta.md", "banner.png", "server_icons"} - contents.keys()
+ missing_assets = {"meta.md", "server_icons", "banners"} - contents.keys()
if missing_assets:
raise BrandingMisconfiguration(f"Directory is missing following assets: {missing_assets}")
server_icons = await self.fetch_directory(contents["server_icons"].path, types=("file",))
+ banners = await self.fetch_directory(contents["banners"].path, types=("file",))
if len(server_icons) == 0:
raise BrandingMisconfiguration("Found no server icons!")
+ if len(banners) == 0:
+ raise BrandingMisconfiguration("Found no server banners!")
meta_bytes = await self.fetch_file(contents["meta.md"].download_url)
meta_file = self.parse_meta_file(meta_bytes)
- return Event(directory.path, meta_file, contents["banner.png"], list(server_icons.values()))
+ return Event(directory.path, meta_file, list(banners.values()), list(server_icons.values()))
async def get_events(self) -> t.List[Event]:
"""
diff --git a/bot/exts/backend/config_verifier.py b/bot/exts/backend/config_verifier.py
index 1ade2bce7..97c8869a1 100644
--- a/bot/exts/backend/config_verifier.py
+++ b/bot/exts/backend/config_verifier.py
@@ -1,9 +1,8 @@
-from disnake.ext.commands import Cog
+from discord.ext.commands import Cog
from bot import constants
from bot.bot import Bot
from bot.log import get_logger
-from bot.utils import scheduling
log = get_logger(__name__)
@@ -13,9 +12,8 @@ class ConfigVerifier(Cog):
def __init__(self, bot: Bot):
self.bot = bot
- self.channel_verify_task = scheduling.create_task(self.verify_channels(), event_loop=self.bot.loop)
- async def verify_channels(self) -> None:
+ async def cog_load(self) -> None:
"""
Verify channels.
@@ -34,6 +32,6 @@ class ConfigVerifier(Cog):
log.warning(f"Configured channels do not exist in server: {', '.join(invalid_channels)}.")
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the ConfigVerifier cog."""
- bot.add_cog(ConfigVerifier(bot))
+ await bot.add_cog(ConfigVerifier(bot))
diff --git a/bot/exts/backend/error_handler.py b/bot/exts/backend/error_handler.py
index 953843a77..761991488 100644
--- a/bot/exts/backend/error_handler.py
+++ b/bot/exts/backend/error_handler.py
@@ -1,10 +1,11 @@
+import copy
import difflib
-from disnake import Embed
-from disnake.ext.commands import ChannelNotFound, Cog, Context, TextChannelConverter, VoiceChannelConverter, errors
+from botcore.site_api import ResponseCodeError
+from discord import Embed
+from discord.ext.commands import ChannelNotFound, Cog, Context, TextChannelConverter, VoiceChannelConverter, errors
from sentry_sdk import push_scope
-from bot.api import ResponseCodeError
from bot.bot import Bot
from bot.constants import Colours, Icons, MODERATION_ROLES
from bot.errors import InvalidInfractedUserError, LockedResourceError
@@ -65,6 +66,8 @@ class ErrorHandler(Cog):
if isinstance(e, errors.CommandNotFound) and not getattr(ctx, "invoked_from_error_handler", False):
if await self.try_silence(ctx):
return
+ if await self.try_run_eval(ctx):
+ return
await self.try_get_tag(ctx) # Try to look for a tag with the command's name
elif isinstance(e, errors.UserInputError):
log.debug(debug_message)
@@ -179,6 +182,30 @@ class ErrorHandler(Cog):
if not any(role.id in MODERATION_ROLES for role in ctx.author.roles):
await self.send_command_suggestion(ctx, ctx.invoked_with)
+ async def try_run_eval(self, ctx: Context) -> bool:
+ """
+ Attempt to run eval command with backticks directly after command.
+
+ For example: !eval```print("hi")```
+
+        Return True if the command was invoked, else False.
+ """
+ msg = copy.copy(ctx.message)
+
+ command, sep, end = msg.content.partition("```")
+ msg.content = command + " " + sep + end
+ new_ctx = await self.bot.get_context(msg)
+
+ eval_command = self.bot.get_command("eval")
+ if eval_command is None or new_ctx.command != eval_command:
+ return False
+
+ log.debug("Running fixed eval command.")
+ new_ctx.invoked_from_error_handler = True
+ await self.bot.invoke(new_ctx)
+
+ return True
+
async def send_command_suggestion(self, ctx: Context, command_name: str) -> None:
"""Sends user similar commands if any can be found."""
# No similar tag found, or tag on cooldown -
@@ -284,8 +311,11 @@ class ErrorHandler(Cog):
await ctx.send("There does not seem to be anything matching your query.")
ctx.bot.stats.incr("errors.api_error_404")
elif e.status == 400:
- content = await e.response.json()
- log.debug(f"API responded with 400 for command {ctx.command}: %r.", content)
+ log.error(
+ "API responded with 400 for command %s: %r.",
+ ctx.command,
+ e.response_json or e.response_text,
+ )
await ctx.send("According to the API, your request is malformed.")
ctx.bot.stats.incr("errors.api_error_400")
elif 500 <= e.status < 600:
@@ -328,6 +358,6 @@ class ErrorHandler(Cog):
log.error(f"Error executing command invoked by {ctx.message.author}: {ctx.message.content}", exc_info=e)
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the ErrorHandler cog."""
- bot.add_cog(ErrorHandler(bot))
+ await bot.add_cog(ErrorHandler(bot))
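The try_run_eval helper added above recovers from CommandNotFound when a user types the eval command with the code block glued to it. It relies on str.partition("```") to re-insert the missing space so the rebuilt message parses as the eval command plus its argument. A small standalone illustration with hypothetical content (the "!" prefix is an assumption here, not taken from the repository config):

# Hypothetical message content; the "!" prefix and snippet are example values only.
content = "!eval```print('hi')```"

command, sep, end = content.partition("```")
fixed = command + " " + sep + end

print(command)  # !eval
print(fixed)    # !eval ```print('hi')``` -- now parseable as a command plus argument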
diff --git a/bot/exts/backend/logging.py b/bot/exts/backend/logging.py
index 040fb5d37..b9504c2eb 100644
--- a/bot/exts/backend/logging.py
+++ b/bot/exts/backend/logging.py
@@ -1,10 +1,10 @@
-from disnake import Embed
-from disnake.ext.commands import Cog
+from botcore.utils import scheduling
+from discord import Embed
+from discord.ext.commands import Cog
from bot.bot import Bot
from bot.constants import Channels, DEBUG_MODE
from bot.log import get_logger
-from bot.utils import scheduling
log = get_logger(__name__)
@@ -36,6 +36,6 @@ class Logging(Cog):
await self.bot.get_channel(Channels.dev_log).send(embed=embed)
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the Logging cog."""
- bot.add_cog(Logging(bot))
+ await bot.add_cog(Logging(bot))
diff --git a/bot/exts/backend/sync/__init__.py b/bot/exts/backend/sync/__init__.py
index 829098f79..1978917e6 100644
--- a/bot/exts/backend/sync/__init__.py
+++ b/bot/exts/backend/sync/__init__.py
@@ -1,8 +1,8 @@
from bot.bot import Bot
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the Sync cog."""
# Defer import to reduce side effects from importing the sync package.
from bot.exts.backend.sync._cog import Sync
- bot.add_cog(Sync(bot))
+ await bot.add_cog(Sync(bot))
diff --git a/bot/exts/backend/sync/_cog.py b/bot/exts/backend/sync/_cog.py
index d08e56077..433ff5024 100644
--- a/bot/exts/backend/sync/_cog.py
+++ b/bot/exts/backend/sync/_cog.py
@@ -1,17 +1,18 @@
+import asyncio
from typing import Any, Dict
-from disnake import Member, Role, User
-from disnake.ext import commands
-from disnake.ext.commands import Cog, Context
+from botcore.site_api import ResponseCodeError
+from discord import Member, Role, User
+from discord.ext import commands
+from discord.ext.commands import Cog, Context
from bot import constants
-from bot.api import ResponseCodeError
from bot.bot import Bot
from bot.exts.backend.sync import _syncers
from bot.log import get_logger
-from bot.utils import scheduling
log = get_logger(__name__)
+MAX_ATTEMPTS = 3
class Sync(Cog):
@@ -19,9 +20,8 @@ class Sync(Cog):
def __init__(self, bot: Bot) -> None:
self.bot = bot
- scheduling.create_task(self.sync_guild(), event_loop=self.bot.loop)
- async def sync_guild(self) -> None:
+ async def cog_load(self) -> None:
"""Syncs the roles/users of the guild with the database."""
await self.bot.wait_until_guild_available()
@@ -29,6 +29,22 @@ class Sync(Cog):
if guild is None:
return
+ attempts = 0
+ while True:
+ attempts += 1
+ if guild.chunked:
+ log.info("Guild was found to be chunked after %d attempt(s).", attempts)
+ break
+
+ if attempts == MAX_ATTEMPTS:
+ log.info("Guild not chunked after %d attempts, calling chunk manually.", MAX_ATTEMPTS)
+ await guild.chunk()
+ break
+
+ log.info("Attempt %d/%d: Guild not yet chunked, checking again in 10s.", attempts, MAX_ATTEMPTS)
+ await asyncio.sleep(10)
+
+ log.info("Starting syncers.")
for syncer in (_syncers.RoleSyncer, _syncers.UserSyncer):
await syncer.sync(guild)
diff --git a/bot/exts/backend/sync/_syncers.py b/bot/exts/backend/sync/_syncers.py
index 48ee3c842..8976245e3 100644
--- a/bot/exts/backend/sync/_syncers.py
+++ b/bot/exts/backend/sync/_syncers.py
@@ -2,14 +2,14 @@ import abc
import typing as t
from collections import namedtuple
-from disnake import Guild
-from disnake.ext.commands import Context
+import discord.errors
+from botcore.site_api import ResponseCodeError
+from discord import Guild
+from discord.ext.commands import Context
from more_itertools import chunked
import bot
-from bot.api import ResponseCodeError
from bot.log import get_logger
-from bot.utils.members import get_or_fetch_member
log = get_logger(__name__)
@@ -154,10 +154,19 @@ class UserSyncer(Syncer):
def maybe_update(db_field: str, guild_value: t.Union[str, int]) -> None:
# Equalize DB user and guild user attributes.
- if db_user[db_field] != guild_value:
- updated_fields[db_field] = guild_value
-
- if guild_user := await get_or_fetch_member(guild, db_user["id"]):
+ if db_user[db_field] != guild_value: # noqa: B023
+ updated_fields[db_field] = guild_value # noqa: B023
+
+ guild_user = guild.get_member(db_user["id"])
+ if not guild_user and db_user["in_guild"]:
+ # The member was in the guild during the last sync.
+ # We try to fetch them to verify cache integrity.
+ try:
+ guild_user = await guild.fetch_member(db_user["id"])
+ except discord.errors.NotFound:
+ guild_user = None
+
+ if guild_user:
seen_guild_users.add(guild_user.id)
maybe_update("name", guild_user.name)
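The # noqa: B023 comments above suppress flake8-bugbear's warning about closures that bind loop variables late; the suppression appears deliberate because maybe_update is invoked within the same loop iteration that defines it. The hazard the rule normally guards against, shown on generic values unrelated to the repository:

# Closures capture variables, not values, so every callback sees the loop's final value.
callbacks = []
for number in range(3):
    callbacks.append(lambda: number)

print([cb() for cb in callbacks])  # [2, 2, 2]

# Binding the value at definition time avoids the surprise.
callbacks = [lambda number=number: number for number in range(3)]
print([cb() for cb in callbacks])  # [0, 1, 2]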
diff --git a/bot/exts/events/code_jams/__init__.py b/bot/exts/events/code_jams/__init__.py
index 16e81e365..2f858d1f9 100644
--- a/bot/exts/events/code_jams/__init__.py
+++ b/bot/exts/events/code_jams/__init__.py
@@ -1,8 +1,8 @@
from bot.bot import Bot
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the CodeJams cog."""
from bot.exts.events.code_jams._cog import CodeJams
- bot.add_cog(CodeJams(bot))
+ await bot.add_cog(CodeJams(bot))
diff --git a/bot/exts/events/code_jams/_channels.py b/bot/exts/events/code_jams/_channels.py
index fc4693bd4..e8cf5f7bf 100644
--- a/bot/exts/events/code_jams/_channels.py
+++ b/bot/exts/events/code_jams/_channels.py
@@ -1,6 +1,6 @@
import typing as t
-import disnake
+import discord
from bot.constants import Categories, Channels, Roles
from bot.log import get_logger
@@ -11,7 +11,7 @@ MAX_CHANNELS = 50
CATEGORY_NAME = "Code Jam"
-async def _get_category(guild: disnake.Guild) -> disnake.CategoryChannel:
+async def _get_category(guild: discord.Guild) -> discord.CategoryChannel:
"""
Return a code jam category.
@@ -24,13 +24,13 @@ async def _get_category(guild: disnake.Guild) -> disnake.CategoryChannel:
return await _create_category(guild)
-async def _create_category(guild: disnake.Guild) -> disnake.CategoryChannel:
+async def _create_category(guild: discord.Guild) -> discord.CategoryChannel:
"""Create a new code jam category and return it."""
log.info("Creating a new code jam category.")
category_overwrites = {
- guild.default_role: disnake.PermissionOverwrite(read_messages=False),
- guild.me: disnake.PermissionOverwrite(read_messages=True)
+ guild.default_role: discord.PermissionOverwrite(read_messages=False),
+ guild.me: discord.PermissionOverwrite(read_messages=True)
}
category = await guild.create_category_channel(
@@ -47,17 +47,17 @@ async def _create_category(guild: disnake.Guild) -> disnake.CategoryChannel:
def _get_overwrites(
- members: list[tuple[disnake.Member, bool]],
- guild: disnake.Guild,
-) -> dict[t.Union[disnake.Member, disnake.Role], disnake.PermissionOverwrite]:
+ members: list[tuple[discord.Member, bool]],
+ guild: discord.Guild,
+) -> dict[t.Union[discord.Member, discord.Role], discord.PermissionOverwrite]:
"""Get code jam team channels permission overwrites."""
team_channel_overwrites = {
- guild.default_role: disnake.PermissionOverwrite(read_messages=False),
- guild.get_role(Roles.code_jam_event_team): disnake.PermissionOverwrite(read_messages=True)
+ guild.default_role: discord.PermissionOverwrite(read_messages=False),
+ guild.get_role(Roles.code_jam_event_team): discord.PermissionOverwrite(read_messages=True)
}
for member, _ in members:
- team_channel_overwrites[member] = disnake.PermissionOverwrite(
+ team_channel_overwrites[member] = discord.PermissionOverwrite(
read_messages=True
)
@@ -65,10 +65,10 @@ def _get_overwrites(
async def create_team_channel(
- guild: disnake.Guild,
+ guild: discord.Guild,
team_name: str,
- members: list[tuple[disnake.Member, bool]],
- team_leaders: disnake.Role
+ members: list[tuple[discord.Member, bool]],
+ team_leaders: discord.Role
) -> None:
"""Create the team's text channel."""
await _add_team_leader_roles(members, team_leaders)
@@ -84,29 +84,29 @@ async def create_team_channel(
)
-async def create_team_leader_channel(guild: disnake.Guild, team_leaders: disnake.Role) -> None:
+async def create_team_leader_channel(guild: discord.Guild, team_leaders: discord.Role) -> None:
"""Create the Team Leader Chat channel for the Code Jam team leaders."""
- category: disnake.CategoryChannel = guild.get_channel(Categories.summer_code_jam)
+ category: discord.CategoryChannel = guild.get_channel(Categories.summer_code_jam)
team_leaders_chat = await category.create_text_channel(
name="team-leaders-chat",
overwrites={
- guild.default_role: disnake.PermissionOverwrite(read_messages=False),
- team_leaders: disnake.PermissionOverwrite(read_messages=True)
+ guild.default_role: discord.PermissionOverwrite(read_messages=False),
+ team_leaders: discord.PermissionOverwrite(read_messages=True)
}
)
await _send_status_update(guild, f"Created {team_leaders_chat.mention} in the {category} category.")
-async def _send_status_update(guild: disnake.Guild, message: str) -> None:
+async def _send_status_update(guild: discord.Guild, message: str) -> None:
"""Inform the events lead with a status update when the command is ran."""
- channel: disnake.TextChannel = guild.get_channel(Channels.code_jam_planning)
+ channel: discord.TextChannel = guild.get_channel(Channels.code_jam_planning)
await channel.send(f"<@&{Roles.events_lead}>\n\n{message}")
-async def _add_team_leader_roles(members: list[tuple[disnake.Member, bool]], team_leaders: disnake.Role) -> None:
+async def _add_team_leader_roles(members: list[tuple[discord.Member, bool]], team_leaders: discord.Role) -> None:
"""Assign the team leader role to the team leaders."""
for member, is_leader in members:
if is_leader:
diff --git a/bot/exts/events/code_jams/_cog.py b/bot/exts/events/code_jams/_cog.py
index 5cb11826d..86c357863 100644
--- a/bot/exts/events/code_jams/_cog.py
+++ b/bot/exts/events/code_jams/_cog.py
@@ -3,16 +3,16 @@ import csv
import typing as t
from collections import defaultdict
-import disnake
-from disnake import Colour, Embed, Guild, Member
-from disnake.ext import commands
+import discord
+from discord import Colour, Embed, Guild, Member
+from discord.ext import commands
from bot.bot import Bot
from bot.constants import Emojis, Roles
from bot.exts.events.code_jams import _channels
from bot.log import get_logger
from bot.utils.members import get_or_fetch_member
-from bot.utils.services import send_to_paste_service
+from bot.utils.services import PasteTooLongError, PasteUploadError, send_to_paste_service
log = get_logger(__name__)
@@ -85,7 +85,7 @@ class CodeJams(commands.Cog):
    A confirmation message is displayed with the categories and channels to be deleted. Pressing the added reaction
deletes those channels.
"""
- def predicate_deletion_emoji_reaction(reaction: disnake.Reaction, user: disnake.User) -> bool:
+ def predicate_deletion_emoji_reaction(reaction: discord.Reaction, user: discord.User) -> bool:
"""Return True if the reaction :boom: was added by the context message author on this message."""
return (
reaction.message.id == message.id
@@ -124,14 +124,14 @@ class CodeJams(commands.Cog):
@staticmethod
async def _build_confirmation_message(
- categories: dict[disnake.CategoryChannel, list[disnake.abc.GuildChannel]]
+ categories: dict[discord.CategoryChannel, list[discord.abc.GuildChannel]]
) -> str:
"""Sends details of the channels to be deleted to the pasting service, and formats the confirmation message."""
- def channel_repr(channel: disnake.abc.GuildChannel) -> str:
+ def channel_repr(channel: discord.abc.GuildChannel) -> str:
"""Formats the channel name and ID and a readable format."""
return f"{channel.name} ({channel.id})"
- def format_category_info(category: disnake.CategoryChannel, channels: list[disnake.abc.GuildChannel]) -> str:
+ def format_category_info(category: discord.CategoryChannel, channels: list[discord.abc.GuildChannel]) -> str:
"""Displays the category and the channels within it in a readable format."""
return f"{channel_repr(category)}:\n" + "\n".join(" - " + channel_repr(channel) for channel in channels)
@@ -139,11 +139,14 @@ class CodeJams(commands.Cog):
format_category_info(category, channels) for category, channels in categories.items()
)
- url = await send_to_paste_service(deletion_details)
- if url is None:
- url = "**Unable to send deletion details to the pasting service.**"
+ try:
+ message = await send_to_paste_service(deletion_details)
+ except PasteTooLongError:
+ message = "**Too long to upload to paste service.**"
+ except PasteUploadError:
+ message = "**Failed to upload to paste service.**"
- return f"Are you sure you want to delete all code jam channels?\n\nThe channels to be deleted: {url}"
+ return f"Are you sure you want to delete all code jam channels?\n\nThe channels to be deleted: {message}"
@codejam.command()
@commands.has_any_role(Roles.admins, Roles.code_jam_event_team)
@@ -187,7 +190,7 @@ class CodeJams(commands.Cog):
await old_team_channel.set_permissions(member, overwrite=None, reason=f"Participant moved to {new_team_name}")
await new_team_channel.set_permissions(
member,
- overwrite=disnake.PermissionOverwrite(read_messages=True),
+ overwrite=discord.PermissionOverwrite(read_messages=True),
reason=f"Participant moved from {old_team_channel.name}"
)
@@ -212,16 +215,16 @@ class CodeJams(commands.Cog):
await ctx.send(f"Removed the participant from `{self.team_name(channel)}`.")
@staticmethod
- def jam_categories(guild: Guild) -> list[disnake.CategoryChannel]:
+ def jam_categories(guild: Guild) -> list[discord.CategoryChannel]:
"""Get all the code jam team categories."""
return [category for category in guild.categories if category.name == _channels.CATEGORY_NAME]
@staticmethod
- def team_channel(guild: Guild, criterion: t.Union[str, Member]) -> t.Optional[disnake.TextChannel]:
+ def team_channel(guild: Guild, criterion: t.Union[str, Member]) -> t.Optional[discord.TextChannel]:
"""Get a team channel through either a participant or the team name."""
for category in CodeJams.jam_categories(guild):
for channel in category.channels:
- if isinstance(channel, disnake.TextChannel):
+ if isinstance(channel, discord.TextChannel):
if (
# If it's a string.
criterion == channel.name or criterion == CodeJams.team_name(channel)
@@ -231,6 +234,6 @@ class CodeJams(commands.Cog):
return channel
@staticmethod
- def team_name(channel: disnake.TextChannel) -> str:
+ def team_name(channel: discord.TextChannel) -> str:
"""Retrieves the team name from the given channel."""
return channel.name.replace("-", " ").title()
diff --git a/bot/exts/filters/antimalware.py b/bot/exts/filters/antimalware.py
index e55ece910..ff39700a6 100644
--- a/bot/exts/filters/antimalware.py
+++ b/bot/exts/filters/antimalware.py
@@ -1,8 +1,8 @@
import typing as t
from os.path import splitext
-from disnake import Embed, Message, NotFound
-from disnake.ext.commands import Cog
+from discord import Embed, Message, NotFound
+from discord.ext.commands import Cog
from bot.bot import Bot
from bot.constants import Channels, Filter, URLs
@@ -101,6 +101,6 @@ class AntiMalware(Cog):
log.info(f"Tried to delete message `{message.id}`, but message could not be found.")
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the AntiMalware cog."""
- bot.add_cog(AntiMalware(bot))
+ await bot.add_cog(AntiMalware(bot))
diff --git a/bot/exts/filters/antispam.py b/bot/exts/filters/antispam.py
index c887cf5fc..842aab384 100644
--- a/bot/exts/filters/antispam.py
+++ b/bot/exts/filters/antispam.py
@@ -8,8 +8,9 @@ from operator import attrgetter, itemgetter
from typing import Dict, Iterable, List, Set
import arrow
-from disnake import Colour, Member, Message, NotFound, Object, TextChannel
-from disnake.ext.commands import Cog
+from botcore.utils import scheduling
+from discord import Colour, Member, Message, MessageType, NotFound, Object, TextChannel
+from discord.ext.commands import Cog
from bot import rules
from bot.bot import Bot
@@ -20,7 +21,7 @@ from bot.converters import Duration
from bot.exts.events.code_jams._channels import CATEGORY_NAME as JAM_CATEGORY_NAME
from bot.exts.moderation.modlog import ModLog
from bot.log import get_logger
-from bot.utils import lock, scheduling
+from bot.utils import lock
from bot.utils.message_cache import MessageCache
from bot.utils.messages import format_user, send_attachments
@@ -134,18 +135,12 @@ class AntiSpam(Cog):
self.max_interval = max_interval_config['interval']
self.cache = MessageCache(AntiSpamConfig.cache_size, newest_first=True)
- scheduling.create_task(
- self.alert_on_validation_error(),
- name="AntiSpam.alert_on_validation_error",
- event_loop=self.bot.loop,
- )
-
@property
def mod_log(self) -> ModLog:
"""Allows for easy access of the ModLog cog."""
return self.bot.get_cog("ModLog")
- async def alert_on_validation_error(self) -> None:
+ async def cog_load(self) -> None:
"""Unloads the cog and alerts admins if configuration validation failed."""
await self.bot.wait_until_guild_available()
if self.validation_errors:
@@ -174,6 +169,7 @@ class AntiSpam(Cog):
or (getattr(message.channel, "category", None) and message.channel.category.name == JAM_CATEGORY_NAME)
or (message.channel.id in Filter.channel_whitelist and not DEBUG_MODE)
or (any(role.id in Filter.role_whitelist for role in message.author.roles) and not DEBUG_MODE)
+ or message.type == MessageType.auto_moderation_action
):
return
@@ -189,13 +185,13 @@ class AntiSpam(Cog):
# Create a list of messages that were sent in the interval that the rule cares about.
latest_interesting_stamp = arrow.utcnow() - timedelta(seconds=rule_config['interval'])
messages_for_rule = list(
- takewhile(lambda msg: msg.created_at > latest_interesting_stamp, relevant_messages)
+ takewhile(lambda msg: msg.created_at > latest_interesting_stamp, relevant_messages) # noqa: B023
)
result = await rule_function(message, messages_for_rule, rule_config)
# If the rule returns `None`, that means the message didn't violate it.
- # If it doesn't, it returns a tuple in the form `(str, Iterable[disnake.Member])`
+ # If it doesn't, it returns a tuple in the form `(str, Iterable[discord.Member])`
# which contains the reason for why the message violated the rule and
# an iterable of all members that violated the rule.
if result is not None:
@@ -265,7 +261,7 @@ class AntiSpam(Cog):
# In the rare case where we found messages matching the
# spam filter across multiple channels, it is possible
# that a single channel will only contain a single message
- # to delete. If that should be the case, disnake will
+ # to delete. If that should be the case, discord.py will
# use the "delete single message" endpoint instead of the
# bulk delete endpoint, and the single message deletion
            # endpoint will complain if you give it a message that does not exist.
@@ -322,7 +318,7 @@ def validate_config(rules_: Mapping = AntiSpamConfig.rules) -> Dict[str, str]:
return validation_errors
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Validate the AntiSpam configs and load the AntiSpam cog."""
validation_errors = validate_config()
- bot.add_cog(AntiSpam(bot, validation_errors))
+ await bot.add_cog(AntiSpam(bot, validation_errors))
diff --git a/bot/exts/filters/filter_lists.py b/bot/exts/filters/filter_lists.py
index 05910973a..c643f9a84 100644
--- a/bot/exts/filters/filter_lists.py
+++ b/bot/exts/filters/filter_lists.py
@@ -1,17 +1,16 @@
import re
from typing import Optional
-from disnake import Colour, Embed
-from disnake.ext.commands import BadArgument, Cog, Context, IDConverter, group, has_any_role
+from botcore.site_api import ResponseCodeError
+from discord import Colour, Embed
+from discord.ext.commands import BadArgument, Cog, Context, IDConverter, group, has_any_role
from bot import constants
-from bot.api import ResponseCodeError
from bot.bot import Bot
from bot.constants import Channels
from bot.converters import ValidDiscordServerInvite, ValidFilterListType
from bot.log import get_logger
from bot.pagination import LinePaginator
-from bot.utils import scheduling
log = get_logger(__name__)
@@ -30,9 +29,8 @@ class FilterLists(Cog):
def __init__(self, bot: Bot) -> None:
self.bot = bot
- scheduling.create_task(self._amend_docstrings(), event_loop=self.bot.loop)
- async def _amend_docstrings(self) -> None:
+ async def cog_load(self) -> None:
"""Add the valid FilterList types to the docstrings, so they'll appear in !help invocations."""
await self.bot.wait_until_guild_available()
@@ -57,17 +55,22 @@ class FilterLists(Cog):
"""Add an item to a filterlist."""
allow_type = "whitelist" if allowed else "blacklist"
- # If this is a server invite, we gotta validate it.
+ # If this is a guild invite, we gotta validate it.
if list_type == "GUILD_INVITE":
guild_data = await self._validate_guild_invite(ctx, content)
content = guild_data.get("id")
- # Unless the user has specified another comment, let's
- # use the server name as the comment so that the list
- # of guild IDs will be more easily readable when we
- # display it.
- if not comment:
- comment = guild_data.get("name")
+ # Some guild invites are autoban filters, which require the mod
+ # to set a comment which includes [autoban].
+            # Having the guild name in the comment is still useful when reviewing
+            # the filter list, so prepend it to the set comment in case a mod forgets.
+ guild_name_part = f'Guild "{guild_data["name"]}"' if "name" in guild_data else None
+
+ comment = " - ".join(
+ comment_part
+ for comment_part in (guild_name_part, comment)
+ if comment_part
+ )
# If it's a file format, let's make sure it has a leading dot.
elif list_type == "FILE_FORMAT" and not content.startswith("."):
@@ -117,7 +120,8 @@ class FilterLists(Cog):
# If it is an autoban trigger we send a warning in #mod-meta
if comment and "[autoban]" in comment:
await self.bot.get_channel(Channels.mod_meta).send(
- f":warning: Heads-up! The new filter `{content}` (`{comment}`) will automatically ban users."
+ f":warning: Heads-up! The new `{list_type}` filter "
+ f"`{content}` (`{comment}`) will automatically ban users."
)
# Insert the item into the cache
@@ -288,6 +292,6 @@ class FilterLists(Cog):
return await has_any_role(*constants.MODERATION_ROLES).predicate(ctx)
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the FilterLists cog."""
- bot.add_cog(FilterLists(bot))
+ await bot.add_cog(FilterLists(bot))
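The guild-invite branch above now builds the stored comment by prepending the invite's guild name (when the API returned one) and joining the non-empty parts with a dash, instead of only falling back to the guild name when no comment was given. With hypothetical values, the assembly behaves like this:

# Hypothetical inputs illustrating the comment assembly above.
guild_data = {"name": "Example Guild"}
comment = "[autoban] raid source"

guild_name_part = f'Guild "{guild_data["name"]}"' if "name" in guild_data else None
comment = " - ".join(part for part in (guild_name_part, comment) if part)

print(comment)  # Guild "Example Guild" - [autoban] raid source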
diff --git a/bot/exts/filters/filtering.py b/bot/exts/filters/filtering.py
index 828e8b262..e4df0b1fd 100644
--- a/bot/exts/filters/filtering.py
+++ b/bot/exts/filters/filtering.py
@@ -1,28 +1,28 @@
import asyncio
import re
import unicodedata
+import urllib.parse
from datetime import timedelta
from typing import Any, Dict, List, Mapping, NamedTuple, Optional, Tuple, Union
import arrow
import dateutil.parser
-import disnake.errors
import regex
import tldextract
from async_rediscache import RedisCache
+from botcore.site_api import ResponseCodeError
+from botcore.utils import scheduling
from botcore.utils.regex import DISCORD_INVITE
from dateutil.relativedelta import relativedelta
-from disnake import Colour, HTTPException, Member, Message, NotFound, TextChannel
-from disnake.ext.commands import Cog
-from disnake.utils import escape_markdown
+from discord import ChannelType, Colour, Embed, Forbidden, HTTPException, Member, Message, NotFound, TextChannel
+from discord.ext.commands import Cog
+from discord.utils import escape_markdown
-from bot.api import ResponseCodeError
from bot.bot import Bot
-from bot.constants import Channels, Colours, Filter, Guild, Icons, URLs
+from bot.constants import Bot as BotConfig, Channels, Colours, Filter, Guild, Icons, URLs
from bot.exts.events.code_jams._channels import CATEGORY_NAME as JAM_CATEGORY_NAME
from bot.exts.moderation.modlog import ModLog
from bot.log import get_logger
-from bot.utils import scheduling
from bot.utils.messages import format_user
log = get_logger(__name__)
@@ -63,14 +63,14 @@ AUTO_BAN_REASON = (
)
AUTO_BAN_DURATION = timedelta(days=4)
-FilterMatch = Union[re.Match, dict, bool, List[disnake.Embed]]
+FilterMatch = Union[re.Match, dict, bool, List[Embed]]
class Stats(NamedTuple):
"""Additional stats on a triggered filter to append to a mod log."""
message_content: str
- additional_embeds: Optional[List[disnake.Embed]]
+ additional_embeds: Optional[List[Embed]]
class Filtering(Cog):
@@ -150,9 +150,7 @@ class Filtering(Cog):
},
}
- scheduling.create_task(self.reschedule_offensive_msg_deletion(), event_loop=self.bot.loop)
-
- def cog_unload(self) -> None:
+ async def cog_unload(self) -> None:
"""Cancel scheduled tasks."""
self.scheduler.cancel_all()
@@ -207,6 +205,11 @@ class Filtering(Cog):
delta = relativedelta(after.edited_at, before.edited_at).microseconds
await self._filter_message(after, delta)
+ @Cog.listener()
+ async def on_voice_state_update(self, member: Member, *_) -> None:
+ """Checks for bad words in usernames when users join, switch or leave a voice channel."""
+ await self.check_bad_words_in_name(member)
+
def get_name_match(self, name: str) -> Optional[re.Match]:
"""Check bad words from passed string (name). Return the first match found."""
normalised_name = unicodedata.normalize("NFKC", name)
@@ -262,15 +265,16 @@ class Filtering(Cog):
title="Username filtering alert",
text=log_string,
channel_id=Channels.mod_alerts,
- thumbnail=member.display_avatar.url
+ thumbnail=member.display_avatar.url,
+ ping_everyone=True
)
# Update time when alert sent
await self.name_alerts.set(member.id, arrow.utcnow().timestamp())
- async def filter_eval(self, result: str, msg: Message) -> bool:
+ async def filter_snekbox_output(self, result: str, msg: Message) -> bool:
"""
- Filter the result of an !eval to see if it violates any of our rules, and then respond accordingly.
+ Filter the result of a snekbox command to see if it violates any of our rules, and then respond accordingly.
Also requires the original message, to check whether to filter and for mod logs.
Returns whether a filter was triggered or not.
@@ -339,7 +343,7 @@ class Filtering(Cog):
match = result
if match:
- is_private = msg.channel.type is disnake.ChannelType.private
+ is_private = msg.channel.type is ChannelType.private
# If this is a filter (not a watchlist) and not in a DM, delete the message.
if _filter["type"] == "filter" and not is_private:
@@ -354,7 +358,7 @@ class Filtering(Cog):
# In addition, to avoid sending two notifications to the user, the
# logs, and mod_alert, we return if the message no longer exists.
await msg.delete()
- except disnake.errors.NotFound:
+ except NotFound:
return
# Notify the user if the filter specifies
@@ -383,10 +387,20 @@ class Filtering(Cog):
log.trace(f"Offensive message {msg.id} will be deleted on {delete_date}")
stats = self._add_stats(filter_name, match, msg.content)
- await self._send_log(filter_name, _filter, msg, stats, reason)
- # If the filter reason contains `[autoban]`, we want to auto-ban the user
- if reason and "[autoban]" in reason.lower():
+ # If the filter reason contains `[autoban]`, we want to auto-ban the user.
+        # Also pass this to _send_log so mods are not pinged for filter matches that are auto-actioned.
+ autoban = reason and "[autoban]" in reason.lower()
+ if not autoban and filter_name == "filter_invites" and isinstance(result, dict):
+ autoban = any(
+ "[autoban]" in invite_info["reason"].lower()
+ for invite_info in result.values()
+ if invite_info.get("reason")
+ )
+
+ await self._send_log(filter_name, _filter, msg, stats, reason, autoban=autoban)
+
+ if autoban:
            # Create a new context, with the author as the bot, and the channel as #mod-alerts.
            # This sends the ban confirmation directly under the watchlist trigger embed, to inform
# mods that the user was auto-banned for the message.
@@ -399,7 +413,7 @@ class Filtering(Cog):
await context.invoke(
context.command,
msg.author,
- arrow.utcnow() + AUTO_BAN_DURATION,
+ (arrow.utcnow() + AUTO_BAN_DURATION).datetime,
reason=AUTO_BAN_REASON
)
@@ -409,14 +423,15 @@ class Filtering(Cog):
self,
filter_name: str,
_filter: Dict[str, Any],
- msg: disnake.Message,
+ msg: Message,
stats: Stats,
reason: Optional[str] = None,
*,
is_eval: bool = False,
+ autoban: bool = False,
) -> None:
"""Send a mod log for a triggered filter."""
- if msg.channel.type is disnake.ChannelType.private:
+ if msg.channel.type is ChannelType.private:
channel_str = "via DM"
ping_everyone = False
else:
@@ -427,11 +442,11 @@ class Filtering(Cog):
content = str(msg.author.id) # quality-of-life improvement for mobile moderators
# If we are going to autoban, we don't want to ping and don't need the user ID
- if reason and "[autoban]" in reason:
+ if autoban:
ping_everyone = False
content = None
- eval_msg = "using !eval " if is_eval else ""
+ eval_msg = f"using {BotConfig.prefix}eval " if is_eval else ""
footer = f"Reason: {reason}" if reason else None
message = (
f"The {filter_name} {_filter['type']} was triggered by {format_user(msg.author)} "
@@ -478,7 +493,7 @@ class Filtering(Cog):
additional_embeds = []
for _, data in match.items():
reason = f"Reason: {data['reason']} | " if data.get('reason') else ""
- embed = disnake.Embed(description=(
+ embed = Embed(description=(
f"**Members:**\n{data['members']}\n"
f"**Active:**\n{data['active']}"
))
@@ -562,6 +577,7 @@ class Filtering(Cog):
If any are detected, a dictionary of invite data is returned, with a key per invite.
If none are detected, False is returned.
+ If we are unable to process an invite, True is returned.
Attempts to catch some of common ways to try to cheat the system.
"""
@@ -574,6 +590,7 @@ class Filtering(Cog):
invites = [m.group("invite") for m in DISCORD_INVITE.finditer(text)]
invite_data = dict()
for invite in invites:
+ invite = urllib.parse.quote_plus(invite.rstrip("/"))
if invite in invite_data:
continue
@@ -626,7 +643,7 @@ class Filtering(Cog):
return invite_data if invite_data else False
@staticmethod
- async def _has_rich_embed(msg: Message) -> Union[bool, List[disnake.Embed]]:
+ async def _has_rich_embed(msg: Message) -> Union[bool, List[Embed]]:
"""Determines if `msg` contains any rich embeds not auto-generated from a URL."""
if msg.embeds:
for embed in msg.embeds:
@@ -662,7 +679,7 @@ class Filtering(Cog):
"""
try:
await filtered_member.send(reason)
- except disnake.errors.Forbidden:
+ except Forbidden:
await channel.send(f"{filtered_member.mention} {reason}")
def schedule_msg_delete(self, msg: dict) -> None:
@@ -670,7 +687,7 @@ class Filtering(Cog):
delete_at = dateutil.parser.isoparse(msg['delete_date'])
self.scheduler.schedule_at(delete_at, msg['id'], self.delete_offensive_msg(msg))
- async def reschedule_offensive_msg_deletion(self) -> None:
+ async def cog_load(self) -> None:
"""Get all the pending message deletion from the API and reschedule them."""
await self.bot.wait_until_ready()
response = await self.bot.api_client.get('bot/offensive-messages',)
@@ -713,6 +730,6 @@ class Filtering(Cog):
return INVISIBLE_RE.sub("", no_zalgo)
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the Filtering cog."""
- bot.add_cog(Filtering(bot))
+ await bot.add_cog(Filtering(bot))
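Among the filtering changes above, each extracted invite code is now stripped of a trailing slash and passed through urllib.parse.quote_plus before being looked up, presumably so that unusual characters cannot break or spoof the invite lookup request. The normalisation on its own, with made-up invite codes:

import urllib.parse

# Made-up invite codes; only the normalisation step from the diff is shown.
for invite in ("pythondiscord/", "äbc123"):
    print(urllib.parse.quote_plus(invite.rstrip("/")))
# pythondiscord
# %C3%A4bc123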
diff --git a/bot/exts/filters/security.py b/bot/exts/filters/security.py
index bbb15542f..27e4d9752 100644
--- a/bot/exts/filters/security.py
+++ b/bot/exts/filters/security.py
@@ -1,4 +1,4 @@
-from disnake.ext.commands import Cog, Context, NoPrivateMessage
+from discord.ext.commands import Cog, Context, NoPrivateMessage
from bot.bot import Bot
from bot.log import get_logger
@@ -25,6 +25,6 @@ class Security(Cog):
return True
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the Security cog."""
- bot.add_cog(Security(bot))
+ await bot.add_cog(Security(bot))
diff --git a/bot/exts/filters/token_remover.py b/bot/exts/filters/token_remover.py
index da42bb0aa..a0d5aa7b6 100644
--- a/bot/exts/filters/token_remover.py
+++ b/bot/exts/filters/token_remover.py
@@ -1,10 +1,9 @@
import base64
-import binascii
import re
import typing as t
-from disnake import Colour, Message, NotFound
-from disnake.ext.commands import Cog
+from discord import Colour, Message, NotFound
+from discord.ext.commands import Cog
from bot import utils
from bot.bot import Bot
@@ -53,7 +52,7 @@ class Token(t.NamedTuple):
class TokenRemover(Cog):
- """Scans messages for potential Discord bot tokens and removes them."""
+ """Scans messages for potential discord.py bot tokens and removes them."""
def __init__(self, bot: Bot):
self.bot = bot
@@ -182,7 +181,7 @@ class TokenRemover(Cog):
# that means it's not a valid user id.
return None
return int(string)
- except (binascii.Error, ValueError):
+ except ValueError:
return None
@staticmethod
@@ -198,7 +197,7 @@ class TokenRemover(Cog):
try:
decoded_bytes = base64.urlsafe_b64decode(b64_content)
timestamp = int.from_bytes(decoded_bytes, byteorder="big")
- except (binascii.Error, ValueError) as e:
+ except ValueError as e:
log.debug(f"Failed to decode token timestamp '{b64_content}': {e}")
return False
@@ -229,6 +228,6 @@ class TokenRemover(Cog):
return True
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the TokenRemover cog."""
- bot.add_cog(TokenRemover(bot))
+ await bot.add_cog(TokenRemover(bot))
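Dropping the binascii import above is safe because binascii.Error, which base64.urlsafe_b64decode raises on malformed input, is a subclass of ValueError, so the remaining except ValueError clauses still catch it:

import base64
import binascii

print(issubclass(binascii.Error, ValueError))  # True

try:
    base64.urlsafe_b64decode("abc")  # three data characters -> invalid padding
except ValueError:
    print("caught")  # the underlying binascii.Error is caught here as well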
diff --git a/bot/exts/filters/webhook_remover.py b/bot/exts/filters/webhook_remover.py
index a5d51700c..b42613804 100644
--- a/bot/exts/filters/webhook_remover.py
+++ b/bot/exts/filters/webhook_remover.py
@@ -1,7 +1,7 @@
import re
-from disnake import Colour, Message, NotFound
-from disnake.ext.commands import Cog
+from discord import Colour, Message, NotFound
+from discord.ext.commands import Cog
from bot.bot import Bot
from bot.constants import Channels, Colours, Event, Icons
@@ -89,6 +89,6 @@ class WebhookRemover(Cog):
await self.on_message(after)
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load `WebhookRemover` cog."""
- bot.add_cog(WebhookRemover(bot))
+ await bot.add_cog(WebhookRemover(bot))
diff --git a/bot/exts/fun/duck_pond.py b/bot/exts/fun/duck_pond.py
index 55196cd65..1815e54f2 100644
--- a/bot/exts/fun/duck_pond.py
+++ b/bot/exts/fun/duck_pond.py
@@ -1,15 +1,14 @@
import asyncio
from typing import Union
-import disnake
-from disnake import Color, Embed, Message, RawReactionActionEvent, TextChannel, errors
-from disnake.ext.commands import Cog, Context, command
+import discord
+from discord import Color, Embed, Message, RawReactionActionEvent, errors
+from discord.ext.commands import Cog, Context, command
from bot import constants
from bot.bot import Bot
from bot.converters import MemberOrUser
from bot.log import get_logger
-from bot.utils import scheduling
from bot.utils.checks import has_any_role
from bot.utils.messages import count_unique_users_reaction, send_attachments
from bot.utils.webhooks import send_webhook
@@ -25,16 +24,15 @@ class DuckPond(Cog):
self.webhook_id = constants.Webhooks.duck_pond
self.webhook = None
self.ducked_messages = []
- scheduling.create_task(self.fetch_webhook(), event_loop=self.bot.loop)
self.relay_lock = None
- async def fetch_webhook(self) -> None:
+ async def cog_load(self) -> None:
"""Fetches the webhook object, so we can post to it."""
await self.bot.wait_until_guild_available()
try:
self.webhook = await self.bot.fetch_webhook(self.webhook_id)
- except disnake.HTTPException:
+ except discord.HTTPException:
log.exception(f"Failed to fetch webhook with id `{self.webhook_id}`")
@staticmethod
@@ -46,17 +44,6 @@ class DuckPond(Cog):
return True
return False
- @staticmethod
- def is_helper_viewable(channel: TextChannel) -> bool:
- """Check if helpers can view a specific channel."""
- guild = channel.guild
- helper_role = guild.get_role(constants.Roles.helpers)
- # check channel overwrites for both the Helper role and @everyone and
- # return True for channels that they have permissions to view.
- helper_overwrites = channel.overwrites_for(helper_role)
- default_overwrites = channel.overwrites_for(guild.default_role)
- return default_overwrites.view_channel is None or helper_overwrites.view_channel is True
-
async def has_green_checkmark(self, message: Message) -> bool:
"""Check if the message has a green checkmark reaction."""
for reaction in message.reactions:
@@ -67,7 +54,7 @@ class DuckPond(Cog):
return False
@staticmethod
- def _is_duck_emoji(emoji: Union[str, disnake.PartialEmoji, disnake.Emoji]) -> bool:
+ def _is_duck_emoji(emoji: Union[str, discord.PartialEmoji, discord.Emoji]) -> bool:
"""Check if the emoji is a valid duck emoji."""
if isinstance(emoji, str):
return emoji == "🦆"
@@ -111,7 +98,7 @@ class DuckPond(Cog):
username=message.author.display_name,
avatar_url=message.author.display_avatar.url
)
- except disnake.HTTPException:
+ except discord.HTTPException:
log.exception("Failed to send an attachment to the webhook")
async def locked_relay(self, message: Message) -> bool:
@@ -133,7 +120,7 @@ class DuckPond(Cog):
await message.add_reaction("✅")
return True
- def _payload_has_duckpond_emoji(self, emoji: disnake.PartialEmoji) -> bool:
+ def _payload_has_duckpond_emoji(self, emoji: discord.PartialEmoji) -> bool:
"""Test if the RawReactionActionEvent payload contains a duckpond emoji."""
if emoji.is_unicode_emoji():
# For unicode PartialEmojis, the `name` attribute is just the string
@@ -165,20 +152,23 @@ class DuckPond(Cog):
if not self._payload_has_duckpond_emoji(payload.emoji):
return
- channel = disnake.utils.get(self.bot.get_all_channels(), id=payload.channel_id)
+ await self.bot.wait_until_guild_available()
+ guild = self.bot.get_guild(payload.guild_id)
+ channel = guild.get_channel_or_thread(payload.channel_id)
if channel is None:
return
# Was the message sent in a channel Helpers can see?
- if not self.is_helper_viewable(channel):
+ helper_role = guild.get_role(constants.Roles.helpers)
+ if not channel.permissions_for(helper_role).view_channel:
return
try:
message = await channel.fetch_message(payload.message_id)
- except disnake.NotFound:
+ except discord.NotFound:
return # Message was deleted.
- member = disnake.utils.get(message.guild.members, id=payload.user_id)
+ member = discord.utils.get(message.guild.members, id=payload.user_id)
if not member:
return # Member left or wasn't in the cache.
@@ -205,7 +195,7 @@ class DuckPond(Cog):
if payload.guild_id != constants.Guild.id:
return
- channel = disnake.utils.get(self.bot.get_all_channels(), id=payload.channel_id)
+ channel = discord.utils.get(self.bot.get_all_channels(), id=payload.channel_id)
if channel is None:
return
@@ -226,6 +216,6 @@ class DuckPond(Cog):
await ctx.message.add_reaction("❌")
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the DuckPond cog."""
- bot.add_cog(DuckPond(bot))
+ await bot.add_cog(DuckPond(bot))
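The reaction handler above now resolves the channel through the payload's guild and checks helper visibility with permissions_for, which accepts a Role in discord.py 2.0, making the removed is_helper_viewable overwrite inspection unnecessary. A sketch of that check in isolation, with a hypothetical role ID constant:

import discord

HELPERS_ROLE_ID = 0  # hypothetical placeholder, not the repository's constant


def helpers_can_view(channel: discord.abc.GuildChannel) -> bool:
    """Return True if the helpers role can view `channel`."""
    helper_role = channel.guild.get_role(HELPERS_ROLE_ID)
    return channel.permissions_for(helper_role).view_channel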
diff --git a/bot/exts/fun/off_topic_names.py b/bot/exts/fun/off_topic_names.py
index d49f71320..5c5fa1dd5 100644
--- a/bot/exts/fun/off_topic_names.py
+++ b/bot/exts/fun/off_topic_names.py
@@ -1,70 +1,97 @@
+import asyncio
+import datetime
import difflib
-from datetime import timedelta
+import json
+import random
+from functools import partial
+from typing import Optional
-import arrow
-from disnake import Colour, Embed
-from disnake.ext.commands import Cog, Context, group, has_any_role
-from disnake.utils import sleep_until
+from botcore.site_api import ResponseCodeError
+from discord import ButtonStyle, Colour, Embed, Interaction
+from discord.ext import tasks
+from discord.ext.commands import Cog, Context, group, has_any_role
+from discord.ui import Button, View
-from bot.api import ResponseCodeError
from bot.bot import Bot
-from bot.constants import Channels, MODERATION_ROLES
+from bot.constants import Bot as BotConfig, Channels, MODERATION_ROLES, NEGATIVE_REPLIES
from bot.converters import OffTopicName
from bot.log import get_logger
from bot.pagination import LinePaginator
-from bot.utils import scheduling
CHANNELS = (Channels.off_topic_0, Channels.off_topic_1, Channels.off_topic_2)
+
+# In case the off-topic channel name format is modified.
+OTN_FORMATTER = "ot{number}-{name}"
+OT_NUMBER_INDEX = 2
+NAME_START_INDEX = 4
+
log = get_logger(__name__)
-async def update_names(bot: Bot) -> None:
- """Background updater task that performs the daily channel name update."""
- while True:
- # Since we truncate the compute timedelta to seconds, we add one second to ensure
- # we go past midnight in the `seconds_to_sleep` set below.
- today_at_midnight = arrow.utcnow().replace(microsecond=0, second=0, minute=0, hour=0)
- next_midnight = today_at_midnight + timedelta(days=1)
- await sleep_until(next_midnight.datetime)
+class OffTopicNames(Cog):
+ """Commands related to managing the off-topic category channel names."""
+
+ def __init__(self, bot: Bot):
+ self.bot = bot
+
+        # Errors on which the task should be restarted, using an exponential back-off algorithm
+ self.update_names.add_exception_type(ResponseCodeError)
+ self.update_names.start()
+
+ async def cog_unload(self) -> None:
+ """
+ Gracefully stop the update_names task.
+
+ Clear the exception types first, so that if the task hits any errors it is not re-attempted.
+ """
+ self.update_names.clear_exception_types()
+ self.update_names.stop()
+
+ @tasks.loop(time=datetime.time(), reconnect=True)
+ async def update_names(self) -> None:
+ """Background updater task that performs the daily channel name update."""
+ await self.bot.wait_until_guild_available()
try:
- channel_0_name, channel_1_name, channel_2_name = await bot.api_client.get(
+ channel_0_name, channel_1_name, channel_2_name = await self.bot.api_client.get(
'bot/off-topic-channel-names', params={'random_items': 3}
)
except ResponseCodeError as e:
- log.error(f"Failed to get new off topic channel names: code {e.response.status}")
- continue
- channel_0, channel_1, channel_2 = (bot.get_channel(channel_id) for channel_id in CHANNELS)
+ log.error(f"Failed to get new off-topic channel names: code {e.response.status}")
+ raise
+
+ channel_0, channel_1, channel_2 = (self.bot.get_channel(channel_id) for channel_id in CHANNELS)
+
+ await channel_0.edit(name=OTN_FORMATTER.format(number=0, name=channel_0_name))
+ await channel_1.edit(name=OTN_FORMATTER.format(number=1, name=channel_1_name))
+ await channel_2.edit(name=OTN_FORMATTER.format(number=2, name=channel_2_name))
- await channel_0.edit(name=f'ot0-{channel_0_name}')
- await channel_1.edit(name=f'ot1-{channel_1_name}')
- await channel_2.edit(name=f'ot2-{channel_2_name}')
log.debug(
"Updated off-topic channel names to"
f" {channel_0_name}, {channel_1_name} and {channel_2_name}"
)
+ async def toggle_ot_name_activity(self, ctx: Context, name: str, active: bool) -> None:
+ """Toggle active attribute for an off-topic name."""
+ data = {
+ "active": active
+ }
+ await self.bot.api_client.patch(f"bot/off-topic-channel-names/{name}", data=data)
+ await ctx.send(f"Off-topic name `{name}` has been {'activated' if active else 'deactivated'}.")
-class OffTopicNames(Cog):
- """Commands related to managing the off-topic category channel names."""
-
- def __init__(self, bot: Bot):
- self.bot = bot
- self.updater_task = None
-
- scheduling.create_task(self.init_offtopic_updater(), event_loop=self.bot.loop)
-
- def cog_unload(self) -> None:
- """Cancel any running updater tasks on cog unload."""
- if self.updater_task is not None:
- self.updater_task.cancel()
-
- async def init_offtopic_updater(self) -> None:
- """Start off-topic channel updating event loop if it hasn't already started."""
- await self.bot.wait_until_guild_available()
- if self.updater_task is None:
- coro = update_names(self.bot)
- self.updater_task = scheduling.create_task(coro, event_loop=self.bot.loop)
+ async def list_ot_names(self, ctx: Context, active: bool = True) -> None:
+ """Send an embed containing active/deactivated off-topic channel names."""
+ result = await self.bot.api_client.get('bot/off-topic-channel-names', params={'active': json.dumps(active)})
+ lines = sorted(f"• {name}" for name in result)
+ embed = Embed(
+ title=f"{'Active' if active else 'Deactivated'} off-topic names (`{len(result)}` total)",
+ colour=Colour.blue()
+ )
+ if result:
+ await LinePaginator.paginate(lines, ctx, embed, max_size=400, empty=False)
+ else:
+ embed.description = "Hmmm, seems like there's nothing here yet."
+ await ctx.send(embed=embed)
@group(name='otname', aliases=('otnames', 'otn'), invoke_without_command=True)
@has_any_role(*MODERATION_ROLES)
@@ -90,7 +117,7 @@ class OffTopicNames(Cog):
)
await ctx.send(
f":x: The channel name `{name}` is too similar to `{match}`, and thus was not added. "
- "Use `!otn forceadd` to override this check."
+ f"Use `{BotConfig.prefix}otn forceadd` to override this check."
)
else:
await self._add_name(ctx, name)
@@ -117,7 +144,111 @@ class OffTopicNames(Cog):
log.info(f"{ctx.author} deleted the off-topic channel name '{name}'")
await ctx.send(f":ok_hand: Removed `{name}` from the names list.")
- @otname_group.command(name='list', aliases=('l',))
+ @otname_group.command(name='activate', aliases=('whitelist',))
+ @has_any_role(*MODERATION_ROLES)
+ async def activate_ot_name(self, ctx: Context, name: OffTopicName) -> None:
+ """Activate an existing off-topic name."""
+ await self.toggle_ot_name_activity(ctx, name, True)
+
+ @otname_group.command(name='deactivate', aliases=('blacklist',))
+ @has_any_role(*MODERATION_ROLES)
+ async def de_activate_ot_name(self, ctx: Context, name: OffTopicName) -> None:
+ """Deactivate a specific off-topic name."""
+ await self.toggle_ot_name_activity(ctx, name, False)
+
+ @otname_group.command(name='reroll')
+ @has_any_role(*MODERATION_ROLES)
+ async def re_roll_command(self, ctx: Context, ot_channel_index: Optional[int] = None) -> None:
+ """
+ Re-roll an off-topic name for a specific off-topic channel and deactivate the current name.
+
+ ot_channel_index: [0, 1, 2, ...]
+ """
+ if ot_channel_index is not None:
+ try:
+ channel = self.bot.get_channel(CHANNELS[ot_channel_index])
+ except IndexError:
+ await ctx.send(f":x: No off-topic channel found with index {ot_channel_index}.")
+ return
+ elif ctx.channel.id in CHANNELS:
+ channel = ctx.channel
+
+ else:
+            await ctx.send("Please specify the channel for which the off-topic name should be re-rolled.")
+ return
+
+ old_channel_name = channel.name
+ old_ot_name = old_channel_name[NAME_START_INDEX:] # ot1-name-of-ot -> name-of-ot
+
+ await self.de_activate_ot_name(ctx, old_ot_name)
+
+ response = await self.bot.api_client.get(
+ 'bot/off-topic-channel-names', params={'random_items': 1}
+ )
+ try:
+ new_channel_name = response[0]
+ except IndexError:
+ await ctx.send("Out of active off-topic names. Add new names to reroll.")
+ return
+
+ async def rename_channel() -> None:
+ """Rename off-topic channel and log events."""
+ await channel.edit(
+ name=OTN_FORMATTER.format(number=old_channel_name[OT_NUMBER_INDEX], name=new_channel_name)
+ )
+ log.info(
+ f"{ctx.author} Off-topic channel re-named from `{old_ot_name}` "
+ f"to `{new_channel_name}`."
+ )
+
+ await ctx.message.reply(
+ f":ok_hand: Off-topic channel re-named from `{old_ot_name}` "
+ f"to `{new_channel_name}`. "
+ )
+
+ try:
+ await asyncio.wait_for(rename_channel(), 3)
+ except asyncio.TimeoutError:
+ # Channel rename endpoint rate limited. The task was cancelled by asyncio.
+ btn_yes = Button(label="Yes", style=ButtonStyle.success)
+ btn_no = Button(label="No", style=ButtonStyle.danger)
+
+ embed = Embed(
+ title=random.choice(NEGATIVE_REPLIES),
+ description=(
+ "Re-naming the channel is being rate-limited. "
+                    "Would you like to schedule an asyncio task to rename the channel within the current bot session?"
+ ),
+ colour=Colour.blurple()
+ )
+
+ async def btn_call_back(schedule: bool, interaction: Interaction) -> None:
+ if ctx.author != interaction.user:
+ log.info("User is not author, skipping.")
+ return
+ message = interaction.message
+
+ embed.description = (
+ "Scheduled a channel re-name process within the current bot session."
+ if schedule
+ else
+ "Channel not re-named due to rate limit. Please try again later."
+ )
+ await message.edit(embed=embed, view=None)
+
+ if schedule:
+ await rename_channel()
+
+ btn_yes.callback = partial(btn_call_back, True)
+ btn_no.callback = partial(btn_call_back, False)
+
+ view = View()
+ view.add_item(btn_yes)
+ view.add_item(btn_no)
+
+ await ctx.message.reply(embed=embed, view=view)
+
+ @otname_group.group(name='list', aliases=('l',), invoke_without_command=True)
@has_any_role(*MODERATION_ROLES)
async def list_command(self, ctx: Context) -> None:
"""
@@ -125,17 +256,19 @@ class OffTopicNames(Cog):
Restricted to Moderator and above to not spoil the surprise.
"""
- result = await self.bot.api_client.get('bot/off-topic-channel-names')
- lines = sorted(f"• {name}" for name in result)
- embed = Embed(
- title=f"Known off-topic names (`{len(result)}` total)",
- colour=Colour.blue()
- )
- if result:
- await LinePaginator.paginate(lines, ctx, embed, max_size=400, empty=False)
- else:
- embed.description = "Hmmm, seems like there's nothing here yet."
- await ctx.send(embed=embed)
+ await self.active_otnames_command(ctx)
+
+ @list_command.command(name='active', aliases=('a',))
+ @has_any_role(*MODERATION_ROLES)
+ async def active_otnames_command(self, ctx: Context) -> None:
+ """List active off-topic channel names."""
+ await self.list_ot_names(ctx, True)
+
+ @list_command.command(name='deactivated', aliases=('d',))
+ @has_any_role(*MODERATION_ROLES)
+ async def deactivated_otnames_command(self, ctx: Context) -> None:
+ """List deactivated off-topic channel names."""
+ await self.list_ot_names(ctx, False)
@otname_group.command(name='search', aliases=('s',))
@has_any_role(*MODERATION_ROLES)
@@ -167,6 +300,6 @@ class OffTopicNames(Cog):
await ctx.send(embed=embed)
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the OffTopicNames cog."""
- bot.add_cog(OffTopicNames(bot))
+ await bot.add_cog(OffTopicNames(bot))
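The rewritten off-topic updater above replaces the hand-rolled sleep-until-midnight loop with discord.ext.tasks.loop(time=...), which in discord.py 2.0 fires once per day at the given time (a naive datetime.time() is treated as midnight UTC) and, with registered exception types, restarts instead of dying on those errors. A minimal sketch of that scheduling pattern, using an illustrative cog rather than the repository's:

import datetime

from discord.ext import commands, tasks


class DailyTask(commands.Cog):
    """Illustrative cog showing the daily tasks.loop pattern used above."""

    def __init__(self, bot: commands.Bot) -> None:
        self.bot = bot
        self.daily_job.start()

    async def cog_unload(self) -> None:
        self.daily_job.stop()

    @tasks.loop(time=datetime.time(), reconnect=True)  # naive time() == midnight, treated as UTC
    async def daily_job(self) -> None:
        await self.bot.wait_until_ready()
        ...  # e.g. fetch fresh names and rename channels, as update_names does above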
diff --git a/bot/exts/help_channels/__init__.py b/bot/exts/help_channels/__init__.py
index beba18aa6..b9c940183 100644
--- a/bot/exts/help_channels/__init__.py
+++ b/bot/exts/help_channels/__init__.py
@@ -28,7 +28,7 @@ def validate_config() -> None:
)
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the HelpChannels cog."""
# Defer import to reduce side effects from importing the help_channels package.
from bot.exts.help_channels._cog import HelpChannels
@@ -37,4 +37,4 @@ def setup(bot: Bot) -> None:
except ValueError as e:
log.error(f"HelpChannels cog will not be loaded due to misconfiguration: {e}")
else:
- bot.add_cog(HelpChannels(bot))
+ await bot.add_cog(HelpChannels(bot))
diff --git a/bot/exts/help_channels/_caches.py b/bot/exts/help_channels/_caches.py
index f4eaf3291..937c4ab57 100644
--- a/bot/exts/help_channels/_caches.py
+++ b/bot/exts/help_channels/_caches.py
@@ -1,35 +1,31 @@
from async_rediscache import RedisCache
# This dictionary maps a help channel to the time it was claimed
-# RedisCache[disnake.TextChannel.id, UtcPosixTimestamp]
+# RedisCache[discord.TextChannel.id, UtcPosixTimestamp]
claim_times = RedisCache(namespace="HelpChannels.claim_times")
# This cache tracks which channels are claimed by which members.
-# RedisCache[disnake.TextChannel.id, t.Union[disnake.User.id, disnake.Member.id]]
+# RedisCache[discord.TextChannel.id, t.Union[discord.User.id, discord.Member.id]]
claimants = RedisCache(namespace="HelpChannels.help_channel_claimants")
# Stores the timestamp of the last message from the claimant of a help channel
-# RedisCache[disnake.TextChannel.id, UtcPosixTimestamp]
+# RedisCache[discord.TextChannel.id, UtcPosixTimestamp]
claimant_last_message_times = RedisCache(namespace="HelpChannels.claimant_last_message_times")
# This cache maps a help channel to the timestamp of the last non-claimant message.
# This cache being empty for a given help channel indicates the question is unanswered.
-# RedisCache[disnake.TextChannel.id, UtcPosixTimestamp]
+# RedisCache[discord.TextChannel.id, UtcPosixTimestamp]
non_claimant_last_message_times = RedisCache(namespace="HelpChannels.non_claimant_last_message_times")
-# This cache maps a help channel to original question message in same channel.
-# RedisCache[disnake.TextChannel.id, disnake.Message.id]
-question_messages = RedisCache(namespace="HelpChannels.question_messages")
-
# This cache keeps track of the dynamic message ID for
# the continuously updated message in the #How-to-get-help channel.
dynamic_message = RedisCache(namespace="HelpChannels.dynamic_message")
# This cache keeps track of who has help-dms on.
-# RedisCache[disnake.User.id, bool]
+# RedisCache[discord.User.id, bool]
help_dm = RedisCache(namespace="HelpChannels.help_dm")
# This cache tracks member who are participating and opted in to help channel dms.
# serialise the set as a comma separated string to allow usage with redis
-# RedisCache[disnake.TextChannel.id, str[set[disnake.User.id]]]
+# RedisCache[discord.TextChannel.id, str[set[discord.User.id]]]
session_participants = RedisCache(namespace="HelpChannels.session_participants")
diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py
index 3c4eaa2b2..d9cebf215 100644
--- a/bot/exts/help_channels/_channel.py
+++ b/bot/exts/help_channels/_channel.py
@@ -4,7 +4,7 @@ from datetime import timedelta
from enum import Enum
import arrow
-import disnake
+import discord
from arrow import Arrow
import bot
@@ -31,7 +31,7 @@ class ClosingReason(Enum):
CLEANUP = "auto.cleanup"
-def get_category_channels(category: disnake.CategoryChannel) -> t.Iterable[disnake.TextChannel]:
+def get_category_channels(category: discord.CategoryChannel) -> t.Iterable[discord.TextChannel]:
"""Yield the text channels of the `category` in an unsorted manner."""
log.trace(f"Getting text channels in the category '{category}' ({category.id}).")
@@ -41,7 +41,7 @@ def get_category_channels(category: disnake.CategoryChannel) -> t.Iterable[disna
yield channel
-async def get_closing_time(channel: disnake.TextChannel, init_done: bool) -> t.Tuple[Arrow, ClosingReason]:
+async def get_closing_time(channel: discord.TextChannel, init_done: bool) -> t.Tuple[Arrow, ClosingReason]:
"""
Return the time at which the given help `channel` should be closed along with the reason.
@@ -116,12 +116,12 @@ async def get_in_use_time(channel_id: int) -> t.Optional[timedelta]:
return arrow.utcnow() - claimed
-def is_excluded_channel(channel: disnake.abc.GuildChannel) -> bool:
+def is_excluded_channel(channel: discord.abc.GuildChannel) -> bool:
"""Check if a channel should be excluded from the help channel system."""
- return not isinstance(channel, disnake.TextChannel) or channel.id in EXCLUDED_CHANNELS
+ return not isinstance(channel, discord.TextChannel) or channel.id in EXCLUDED_CHANNELS
-async def move_to_bottom(channel: disnake.TextChannel, category_id: int, **options) -> None:
+async def move_to_bottom(channel: discord.TextChannel, category_id: int, **options) -> None:
"""
Move the `channel` to the bottom position of `category` and edit channel attributes.
@@ -130,8 +130,8 @@ async def move_to_bottom(channel: disnake.TextChannel, category_id: int, **optio
really ends up at the bottom of the category.
If `options` are provided, the channel will be edited after the move is completed. This is the
- same order of operations that `disnake.TextChannel.edit` uses. For information on available
- options, see the documentation on `disnake.TextChannel.edit`. While possible, position-related
+ same order of operations that `discord.TextChannel.edit` uses. For information on available
+ options, see the documentation on `discord.TextChannel.edit`. While possible, position-related
options should be avoided, as it may interfere with the category move we perform.
"""
# Get a fresh copy of the category from the bot to avoid the cache mismatch issue we had.
@@ -161,7 +161,7 @@ async def move_to_bottom(channel: disnake.TextChannel, category_id: int, **optio
await channel.edit(**options)
-async def ensure_cached_claimant(channel: disnake.TextChannel) -> None:
+async def ensure_cached_claimant(channel: discord.TextChannel) -> None:
"""
Ensure there is a claimant cached for each help channel.
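For context, the `move_to_bottom` contract documented above is what later hunks in this diff rely on; a sketch of a call site (the keyword options are passed through to `channel.edit(**options)` after the category move, and `topic=None` here is only an example):

# Hypothetical call from inside a coroutine: move a help channel under the
# Dormant category and clear its topic in the same operation.
await _channel.move_to_bottom(
    channel=channel,
    category_id=constants.Categories.help_dormant,
    topic=None,
)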
diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py
index fc55fa1df..f1351013a 100644
--- a/bot/exts/help_channels/_cog.py
+++ b/bot/exts/help_channels/_cog.py
@@ -5,16 +5,17 @@ from datetime import timedelta
from operator import attrgetter
import arrow
-import disnake
-import disnake.abc
-from disnake.ext import commands
+import discord
+import discord.abc
+from botcore.utils import members, scheduling
+from discord.ext import commands
from bot import constants
from bot.bot import Bot
from bot.constants import Channels, RedirectOutput
from bot.exts.help_channels import _caches, _channel, _message, _name, _stats
from bot.log import get_logger
-from bot.utils import channel as channel_utils, lock, members, scheduling
+from bot.utils import channel as channel_utils, lock
log = get_logger(__name__)
@@ -66,16 +67,16 @@ class HelpChannels(commands.Cog):
self.bot = bot
self.scheduler = scheduling.Scheduler(self.__class__.__name__)
- self.guild: disnake.Guild = None
- self.cooldown_role: disnake.Role = None
+ self.guild: discord.Guild = None
+ self.cooldown_role: discord.Role = None
# Categories
- self.available_category: disnake.CategoryChannel = None
- self.in_use_category: disnake.CategoryChannel = None
- self.dormant_category: disnake.CategoryChannel = None
+ self.available_category: discord.CategoryChannel = None
+ self.in_use_category: discord.CategoryChannel = None
+ self.dormant_category: discord.CategoryChannel = None
# Queues
- self.channel_queue: asyncio.Queue[disnake.TextChannel] = None
+ self.channel_queue: asyncio.Queue[discord.TextChannel] = None
self.name_queue: t.Deque[str] = None
# Notifications
@@ -84,16 +85,15 @@ class HelpChannels(commands.Cog):
self.last_running_low_notification = arrow.get('1815-12-10T18:00:00.00000+00:00')
self.dynamic_message: t.Optional[int] = None
- self.available_help_channels: t.Set[disnake.TextChannel] = set()
+ self.available_help_channels: t.Set[discord.TextChannel] = set()
# Asyncio stuff
self.queue_tasks: t.List[asyncio.Task] = []
- self.init_task = scheduling.create_task(self.init_cog(), event_loop=self.bot.loop)
+ self.init_done = False
- def cog_unload(self) -> None:
+ async def cog_unload(self) -> None:
"""Cancel the init task and scheduled tasks when the cog unloads."""
log.trace("Cog unload: cancelling the init_cog task")
- self.init_task.cancel()
log.trace("Cog unload: cancelling the channel queue tasks")
for task in self.queue_tasks:
@@ -104,7 +104,7 @@ class HelpChannels(commands.Cog):
@lock.lock_arg(NAMESPACE, "message", attrgetter("channel.id"))
@lock.lock_arg(NAMESPACE, "message", attrgetter("author.id"))
@lock.lock_arg(f"{NAMESPACE}.unclaim", "message", attrgetter("author.id"), wait=True)
- async def claim_channel(self, message: disnake.Message) -> None:
+ async def claim_channel(self, message: discord.Message) -> None:
"""
Claim the channel in which the question `message` was sent.
@@ -116,7 +116,7 @@ class HelpChannels(commands.Cog):
try:
await self.move_to_in_use(message.channel)
- except disnake.DiscordServerError:
+ except discord.DiscordServerError:
try:
await message.channel.send(
"The bot encountered a Discord API error while trying to move this channel, please try again later."
@@ -133,14 +133,14 @@ class HelpChannels(commands.Cog):
self.bot.stats.incr("help.failed_claims.500_on_move")
return
- embed = disnake.Embed(
+ embed = discord.Embed(
description=f"Channel claimed by {message.author.mention}.",
color=constants.Colours.bright_green,
)
await message.channel.send(embed=embed)
- # Handle odd edge case of `message.author` not being a `disnake.Member` (see bot#1839)
- if not isinstance(message.author, disnake.Member):
+ # Handle odd edge case of `message.author` not being a `discord.Member` (see bot#1839)
+ if not isinstance(message.author, discord.Member):
log.debug(f"{message.author} ({message.author.id}) isn't a member. Not giving cooldown role or sending DM.")
else:
await members.handle_role_change(message.author, message.author.add_roles, self.cooldown_role)
@@ -189,7 +189,7 @@ class HelpChannels(commands.Cog):
return queue
- async def create_dormant(self) -> t.Optional[disnake.TextChannel]:
+ async def create_dormant(self) -> t.Optional[discord.TextChannel]:
"""
Create and return a new channel in the Dormant category.
@@ -234,12 +234,12 @@ class HelpChannels(commands.Cog):
May only be invoked by the channel's claimant or by staff.
"""
- # Don't use a disnake check because the check needs to fail silently.
+ # Don't use a discord.py check because the check needs to fail silently.
if await self.close_check(ctx):
log.info(f"Close command invoked by {ctx.author} in #{ctx.channel}.")
await self.unclaim_channel(ctx.channel, closed_on=_channel.ClosingReason.COMMAND)
- async def get_available_candidate(self) -> disnake.TextChannel:
+ async def get_available_candidate(self) -> discord.TextChannel:
"""
Return a dormant channel to turn into an available channel.
@@ -313,11 +313,11 @@ class HelpChannels(commands.Cog):
self.dormant_category = await channel_utils.get_or_fetch_channel(
constants.Categories.help_dormant
)
- except disnake.HTTPException:
+ except discord.HTTPException:
log.exception("Failed to get a category; cog will be removed")
self.bot.remove_cog(self.qualified_name)
- async def init_cog(self) -> None:
+ async def cog_load(self) -> None:
"""Initialise the help channel system."""
log.trace("Waiting for the guild to be available before initialisation.")
await self.bot.wait_until_guild_available()
@@ -353,9 +353,10 @@ class HelpChannels(commands.Cog):
await self.init_available()
_stats.report_counts()
+ self.init_done = True
log.info("Cog is ready!")
- async def move_idle_channel(self, channel: disnake.TextChannel, has_task: bool = True) -> None:
+ async def move_idle_channel(self, channel: discord.TextChannel, has_task: bool = True) -> None:
"""
Make the `channel` dormant if idle or schedule the move if still active.
@@ -364,7 +365,7 @@ class HelpChannels(commands.Cog):
"""
log.trace(f"Handling in-use channel #{channel} ({channel.id}).")
- closing_time, closed_on = await _channel.get_closing_time(channel, self.init_task.done())
+ closing_time, closed_on = await _channel.get_closing_time(channel, self.init_done)
# Closing time is in the past.
# Add 1 second due to POSIX timestamps being lower resolution than datetime objects.
@@ -393,17 +394,17 @@ class HelpChannels(commands.Cog):
log.trace("Making a channel available.")
channel = await self.get_available_candidate()
- log.info(f"Making #{channel} ({channel.id}) available.")
+ channel_str = f"#{channel} ({channel.id})"
+ log.info(f"Making {channel_str} available.")
await _message.send_available_message(channel)
- log.trace(f"Moving #{channel} ({channel.id}) to the Available category.")
+ log.trace(f"Moving {channel_str} to the Available category.")
# Unpin any previously stuck pins
- log.trace(f"Looking for pins stuck in #{channel} ({channel.id}).")
- for message in await channel.pins():
- await _message.pin_wrapper(message.id, channel, pin=False)
- log.debug(f"Removed a stuck pin from #{channel} ({channel.id}). ID: {message.id}")
+ log.trace(f"Looking for pins stuck in {channel_str}.")
+ if stuck_pins := await _message.unpin_all(channel):
+ log.debug(f"Removed {stuck_pins} stuck pins from {channel_str}.")
await _channel.move_to_bottom(
channel=channel,
@@ -416,7 +417,7 @@ class HelpChannels(commands.Cog):
_stats.report_counts()
- async def move_to_dormant(self, channel: disnake.TextChannel) -> None:
+ async def move_to_dormant(self, channel: discord.TextChannel) -> None:
"""Make the `channel` dormant."""
log.info(f"Moving #{channel} ({channel.id}) to the Dormant category.")
await _channel.move_to_bottom(
@@ -425,7 +426,7 @@ class HelpChannels(commands.Cog):
)
log.trace(f"Sending dormant message for #{channel} ({channel.id}).")
- embed = disnake.Embed(
+ embed = discord.Embed(
description=_message.DORMANT_MSG.format(
dormant=self.dormant_category.name,
available=self.available_category.name,
@@ -439,7 +440,7 @@ class HelpChannels(commands.Cog):
_stats.report_counts()
@lock.lock_arg(f"{NAMESPACE}.unclaim", "channel")
- async def unclaim_channel(self, channel: disnake.TextChannel, *, closed_on: _channel.ClosingReason) -> None:
+ async def unclaim_channel(self, channel: discord.TextChannel, *, closed_on: _channel.ClosingReason) -> None:
"""
Unclaim an in-use help `channel` to make it dormant.
@@ -462,7 +463,7 @@ class HelpChannels(commands.Cog):
async def _unclaim_channel(
self,
- channel: disnake.TextChannel,
+ channel: discord.TextChannel,
claimant_id: t.Optional[int],
closed_on: _channel.ClosingReason
) -> None:
@@ -479,7 +480,7 @@ class HelpChannels(commands.Cog):
else:
await members.handle_role_change(claimant, claimant.remove_roles, self.cooldown_role)
- await _message.unpin(channel)
+ await _message.unpin_all(channel)
await _stats.report_complete_session(channel.id, closed_on)
await self.move_to_dormant(channel)
@@ -488,7 +489,7 @@ class HelpChannels(commands.Cog):
if closed_on == _channel.ClosingReason.COMMAND:
self.scheduler.cancel(channel.id)
- async def move_to_in_use(self, channel: disnake.TextChannel) -> None:
+ async def move_to_in_use(self, channel: discord.TextChannel) -> None:
"""Make a channel in-use and schedule it to be made dormant."""
log.info(f"Moving #{channel} ({channel.id}) to the In Use category.")
@@ -504,13 +505,11 @@ class HelpChannels(commands.Cog):
_stats.report_counts()
@commands.Cog.listener()
- async def on_message(self, message: disnake.Message) -> None:
+ async def on_message(self, message: discord.Message) -> None:
"""Move an available channel to the In Use category and replace it with a dormant one."""
if message.author.bot:
return # Ignore messages sent by bots.
- await self.init_task
-
if channel_utils.is_in_category(message.channel, constants.Categories.help_available):
if not _channel.is_excluded_channel(message.channel):
await self.claim_channel(message)
@@ -520,14 +519,12 @@ class HelpChannels(commands.Cog):
await _message.update_message_caches(message)
@commands.Cog.listener()
- async def on_message_delete(self, msg: disnake.Message) -> None:
+ async def on_message_delete(self, msg: discord.Message) -> None:
"""
Reschedule an in-use channel to become dormant sooner if the channel is empty.
The new time for the dormant task is configured with `HelpChannels.deleted_idle_minutes`.
"""
- await self.init_task
-
if not channel_utils.is_in_category(msg.channel, constants.Categories.help_in_use):
return
@@ -542,7 +539,7 @@ class HelpChannels(commands.Cog):
delay = constants.HelpChannels.deleted_idle_minutes * 60
self.scheduler.schedule_later(delay, msg.channel.id, self.move_idle_channel(msg.channel))
- async def wait_for_dormant_channel(self) -> disnake.TextChannel:
+ async def wait_for_dormant_channel(self) -> discord.TextChannel:
"""Wait for a dormant channel to become available in the queue and return it."""
log.trace("Waiting for a dormant channel.")
@@ -566,19 +563,18 @@ class HelpChannels(commands.Cog):
if self.dynamic_message is not None:
try:
log.trace("Help channels have changed, dynamic message has been edited.")
- await self.bot.http.edit_message(
- constants.Channels.how_to_get_help, self.dynamic_message, content=available_channels, files=None
- )
- except disnake.NotFound:
+ await discord.PartialMessage(
+ channel=self.bot.get_channel(constants.Channels.how_to_get_help),
+ id=self.dynamic_message,
+ ).edit(content=available_channels)
+ except discord.NotFound:
pass
else:
return
log.trace("Dynamic message could not be edited or found. Creating a new one.")
- new_dynamic_message = await self.bot.http.send_message(
- constants.Channels.how_to_get_help, available_channels
- )
- self.dynamic_message = new_dynamic_message["id"]
+ new_dynamic_message = await self.bot.get_channel(constants.Channels.how_to_get_help).send(available_channels)
+ self.dynamic_message = new_dynamic_message.id
await _caches.dynamic_message.set("message_id", self.dynamic_message)
@staticmethod
@@ -593,7 +589,7 @@ class HelpChannels(commands.Cog):
@lock.lock_arg(NAMESPACE, "message", attrgetter("channel.id"))
@lock.lock_arg(NAMESPACE, "message", attrgetter("author.id"))
- async def notify_session_participants(self, message: disnake.Message) -> None:
+ async def notify_session_participants(self, message: discord.Message) -> None:
"""
Check if the message author meets the requirements to be notified.
@@ -615,7 +611,7 @@ class HelpChannels(commands.Cog):
if message.author.id not in session_participants:
session_participants.add(message.author.id)
- embed = disnake.Embed(
+ embed = discord.Embed(
title="Currently Helping",
description=f"You're currently helping in {message.channel.mention}",
color=constants.Colours.bright_green,
@@ -625,7 +621,7 @@ class HelpChannels(commands.Cog):
try:
await message.author.send(embed=embed)
- except disnake.Forbidden:
+ except discord.Forbidden:
log.trace(
f"Failed to send helpdm message to {message.author.id}. DMs Closed/Blocked. "
"Removing user from helpdm."
diff --git a/bot/exts/help_channels/_message.py b/bot/exts/help_channels/_message.py
index e08043694..00d57ea40 100644
--- a/bot/exts/help_channels/_message.py
+++ b/bot/exts/help_channels/_message.py
@@ -2,7 +2,7 @@ import textwrap
import typing as t
import arrow
-import disnake
+import discord
from arrow import Arrow
import bot
@@ -27,7 +27,7 @@ For more tips, check out our guide on [asking good questions]({ASKING_GUIDE_URL}
AVAILABLE_TITLE = "Available help channel"
-AVAILABLE_FOOTER = "Closes after a period of inactivity, or when you send !close."
+AVAILABLE_FOOTER = f"Closes after a period of inactivity, or when you send {constants.Bot.prefix}close."
DORMANT_MSG = f"""
This help channel has been marked as **dormant**, and has been moved into the **{{dormant}}** \
@@ -41,7 +41,7 @@ through our guide for **[asking a good question]({ASKING_GUIDE_URL})**.
"""
-async def update_message_caches(message: disnake.Message) -> None:
+async def update_message_caches(message: discord.Message) -> None:
"""Checks the source of new content in a help channel and updates the appropriate cache."""
channel = message.channel
@@ -62,18 +62,18 @@ async def update_message_caches(message: disnake.Message) -> None:
await _caches.non_claimant_last_message_times.set(channel.id, timestamp)
-async def get_last_message(channel: disnake.TextChannel) -> t.Optional[disnake.Message]:
+async def get_last_message(channel: discord.TextChannel) -> t.Optional[discord.Message]:
"""Return the last message sent in the channel or None if no messages exist."""
log.trace(f"Getting the last message in #{channel} ({channel.id}).")
- try:
- return await channel.history(limit=1).next() # noqa: B305
- except disnake.NoMoreItems:
- log.debug(f"No last message available; #{channel} ({channel.id}) has no messages.")
- return None
+ async for message in channel.history(limit=1):
+ return message
+
+ log.debug(f"No last message available; #{channel} ({channel.id}) has no messages.")
+ return None
-async def is_empty(channel: disnake.TextChannel) -> bool:
+async def is_empty(channel: discord.TextChannel) -> bool:
"""Return True if there's an AVAILABLE_MSG and the messages leading up are bot messages."""
log.trace(f"Checking if #{channel} ({channel.id}) is empty.")
@@ -92,13 +92,13 @@ async def is_empty(channel: disnake.TextChannel) -> bool:
return False
-async def dm_on_open(message: disnake.Message) -> None:
+async def dm_on_open(message: discord.Message) -> None:
"""
    DM the claimant with a link to the claimed channel's first message, along with a 100-character preview of the message.
Does nothing if the user has DMs disabled.
"""
- embed = disnake.Embed(
+ embed = discord.Embed(
title="Help channel opened",
description=f"You claimed {message.channel.mention}.",
colour=bot.constants.Colours.bright_green,
@@ -118,7 +118,7 @@ async def dm_on_open(message: disnake.Message) -> None:
try:
await message.author.send(embed=embed)
log.trace(f"Sent DM to {message.author.id} after claiming help channel.")
- except disnake.errors.Forbidden:
+ except discord.errors.Forbidden:
log.trace(
f"Ignoring to send DM to {message.author.id} after claiming help channel: DMs disabled."
)
@@ -146,7 +146,7 @@ async def notify_none_remaining(last_notification: Arrow) -> t.Optional[Arrow]:
log.trace("Notifying about lack of channels.")
mentions = " ".join(f"<@&{role}>" for role in constants.HelpChannels.notify_none_remaining_roles)
- allowed_roles = [disnake.Object(id_) for id_ in constants.HelpChannels.notify_none_remaining_roles]
+ allowed_roles = [discord.Object(id_) for id_ in constants.HelpChannels.notify_none_remaining_roles]
channel = bot.instance.get_channel(constants.HelpChannels.notify_channel)
if channel is None:
@@ -157,7 +157,7 @@ async def notify_none_remaining(last_notification: Arrow) -> t.Optional[Arrow]:
f"{mentions} A new available help channel is needed but there "
"are no more dormant ones. Consider freeing up some in-use channels manually by "
f"using the `{constants.Bot.prefix}dormant` command within the channels.",
- allowed_mentions=disnake.AllowedMentions(everyone=False, roles=allowed_roles)
+ allowed_mentions=discord.AllowedMentions(everyone=False, roles=allowed_roles)
)
except Exception:
            # Handle it here because this feature isn't critical to the functionality of the system.
@@ -213,18 +213,17 @@ async def notify_running_low(number_of_channels_left: int, last_notification: Ar
return arrow.utcnow()
-async def pin(message: disnake.Message) -> None:
- """Pin an initial question `message` and store it in a cache."""
- if await pin_wrapper(message.id, message.channel, pin=True):
- await _caches.question_messages.set(message.channel.id, message.id)
+async def pin(message: discord.Message) -> None:
+ """Pin an initial question `message`."""
+ await _pin_wrapper(message, pin=True)
-async def send_available_message(channel: disnake.TextChannel) -> None:
+async def send_available_message(channel: discord.TextChannel) -> None:
"""Send the available message by editing a dormant message or sending a new message."""
channel_info = f"#{channel} ({channel.id})"
log.trace(f"Sending available message in {channel_info}.")
- embed = disnake.Embed(
+ embed = discord.Embed(
color=constants.Colours.bright_green,
description=AVAILABLE_MSG,
)
@@ -240,51 +239,48 @@ async def send_available_message(channel: disnake.TextChannel) -> None:
await channel.send(embed=embed)
-async def unpin(channel: disnake.TextChannel) -> None:
- """Unpin the initial question message sent in `channel`."""
- msg_id = await _caches.question_messages.pop(channel.id)
- if msg_id is None:
- log.debug(f"#{channel} ({channel.id}) doesn't have a message pinned.")
- else:
- await pin_wrapper(msg_id, channel, pin=False)
+async def unpin_all(channel: discord.TextChannel) -> int:
+ """Unpin all pinned messages in `channel` and return the amount of unpinned messages."""
+ count = 0
+ for message in await channel.pins():
+ if await _pin_wrapper(message, pin=False):
+ count += 1
+
+ return count
-def _match_bot_embed(message: t.Optional[disnake.Message], description: str) -> bool:
+def _match_bot_embed(message: t.Optional[discord.Message], description: str) -> bool:
"""Return `True` if the bot's `message`'s embed description matches `description`."""
if not message or not message.embeds:
return False
bot_msg_desc = message.embeds[0].description
- if bot_msg_desc is disnake.Embed.Empty:
+ if bot_msg_desc is None:
log.trace("Last message was a bot embed but it was empty.")
return False
return message.author == bot.instance.user and bot_msg_desc.strip() == description.strip()
-async def pin_wrapper(msg_id: int, channel: disnake.TextChannel, *, pin: bool) -> bool:
+async def _pin_wrapper(message: discord.Message, *, pin: bool) -> bool:
"""
- Pin message `msg_id` in `channel` if `pin` is True or unpin if it's False.
+ Pin `message` if `pin` is True or unpin if it's False.
Return True if successful and False otherwise.
"""
- channel_str = f"#{channel} ({channel.id})"
- if pin:
- func = bot.instance.http.pin_message
- verb = "pin"
- else:
- func = bot.instance.http.unpin_message
- verb = "unpin"
+ channel_str = f"#{message.channel} ({message.channel.id})"
+ func = message.pin if pin else message.unpin
try:
- await func(channel.id, msg_id)
- except disnake.HTTPException as e:
+ await func()
+ except discord.HTTPException as e:
if e.code == 10008:
- log.debug(f"Message {msg_id} in {channel_str} doesn't exist; can't {verb}.")
+ log.debug(f"Message {message.id} in {channel_str} doesn't exist; can't {func.__name__}.")
else:
log.exception(
- f"Error {verb}ning message {msg_id} in {channel_str}: {e.status} ({e.code})"
+ f"Error {func.__name__}ning message {message.id} in {channel_str}: "
+ f"{e.status} ({e.code})"
)
return False
else:
- log.trace(f"{verb.capitalize()}ned message {msg_id} in {channel_str}.")
+ log.trace(f"{func.__name__.capitalize()}ned message {message.id} in {channel_str}.")
return True
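The `get_last_message` rewrite above uses the fact that falling out of an `async for` without returning signals an empty history; the same first-or-default idiom generalises to any async iterator (a standalone sketch, not part of the diff):

from typing import AsyncIterator, Optional, TypeVar

T = TypeVar("T")

async def first(iterator: AsyncIterator[T], default: Optional[T] = None) -> Optional[T]:
    # Return the first item yielded by `iterator`, or `default` if it yields nothing.
    async for item in iterator:
        return item
    return default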
diff --git a/bot/exts/help_channels/_name.py b/bot/exts/help_channels/_name.py
index 50b250cb5..a9d9b2df1 100644
--- a/bot/exts/help_channels/_name.py
+++ b/bot/exts/help_channels/_name.py
@@ -3,7 +3,7 @@ import typing as t
from collections import deque
from pathlib import Path
-import disnake
+import discord
from bot import constants
from bot.exts.help_channels._channel import MAX_CHANNELS_PER_CATEGORY, get_category_channels
@@ -12,7 +12,7 @@ from bot.log import get_logger
log = get_logger(__name__)
-def create_name_queue(*categories: disnake.CategoryChannel) -> deque:
+def create_name_queue(*categories: discord.CategoryChannel) -> deque:
"""
Return a queue of food names to use for creating new channels.
@@ -50,7 +50,7 @@ def _get_names() -> t.List[str]:
return all_names[:count]
-def _get_used_names(*categories: disnake.CategoryChannel) -> t.Set[str]:
+def _get_used_names(*categories: discord.CategoryChannel) -> t.Set[str]:
"""Return names which are already being used by channels in `categories`."""
log.trace("Getting channel names which are already being used.")
diff --git a/bot/exts/info/code_snippets.py b/bot/exts/info/code_snippets.py
index 68eb52a59..bfe32459e 100644
--- a/bot/exts/info/code_snippets.py
+++ b/bot/exts/info/code_snippets.py
@@ -4,9 +4,9 @@ import textwrap
from typing import Any
from urllib.parse import quote_plus
-import disnake
+import discord
from aiohttp import ClientResponseError
-from disnake.ext.commands import Cog
+from discord.ext.commands import Cog
from bot.bot import Bot
from bot.constants import Channels
@@ -241,7 +241,7 @@ class CodeSnippets(Cog):
return '\n'.join(map(lambda x: x[1], sorted(all_snippets)))
@Cog.listener()
- async def on_message(self, message: disnake.Message) -> None:
+ async def on_message(self, message: discord.Message) -> None:
"""Checks if the message has a snippet link, removes the embed, then sends the snippet contents."""
if message.author.bot:
return
@@ -255,7 +255,7 @@ class CodeSnippets(Cog):
if 0 < len(message_to_send) <= 2000 and message_to_send.count('\n') <= 15:
try:
await message.edit(suppress=True)
- except disnake.NotFound:
+ except discord.NotFound:
# Don't send snippets if the original message was deleted.
return
@@ -275,6 +275,6 @@ class CodeSnippets(Cog):
)
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the CodeSnippets cog."""
- bot.add_cog(CodeSnippets(bot))
+ await bot.add_cog(CodeSnippets(bot))
diff --git a/bot/exts/info/codeblock/__init__.py b/bot/exts/info/codeblock/__init__.py
index 5c55bc5e3..dde45bd59 100644
--- a/bot/exts/info/codeblock/__init__.py
+++ b/bot/exts/info/codeblock/__init__.py
@@ -1,8 +1,8 @@
from bot.bot import Bot
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the CodeBlockCog cog."""
# Defer import to reduce side effects from importing the codeblock package.
from bot.exts.info.codeblock._cog import CodeBlockCog
- bot.add_cog(CodeBlockCog(bot))
+ await bot.add_cog(CodeBlockCog(bot))
diff --git a/bot/exts/info/codeblock/_cog.py b/bot/exts/info/codeblock/_cog.py
index cf8c7d0be..9027105d9 100644
--- a/bot/exts/info/codeblock/_cog.py
+++ b/bot/exts/info/codeblock/_cog.py
@@ -1,9 +1,10 @@
import time
from typing import Optional
-import disnake
-from disnake import Message, RawMessageUpdateEvent
-from disnake.ext.commands import Cog
+import discord
+from botcore.utils import scheduling
+from discord import Message, RawMessageUpdateEvent
+from discord.ext.commands import Cog
from bot import constants
from bot.bot import Bot
@@ -11,7 +12,7 @@ from bot.exts.filters.token_remover import TokenRemover
from bot.exts.filters.webhook_remover import WEBHOOK_URL_RE
from bot.exts.info.codeblock._instructions import get_instructions
from bot.log import get_logger
-from bot.utils import has_lines, scheduling
+from bot.utils import has_lines
from bot.utils.channel import is_help_channel
from bot.utils.messages import wait_for_deletion
@@ -62,9 +63,9 @@ class CodeBlockCog(Cog, name="Code Block"):
self.codeblock_message_ids = {}
@staticmethod
- def create_embed(instructions: str) -> disnake.Embed:
+ def create_embed(instructions: str) -> discord.Embed:
"""Return an embed which displays code block formatting `instructions`."""
- return disnake.Embed(description=instructions)
+ return discord.Embed(description=instructions)
async def get_sent_instructions(self, payload: RawMessageUpdateEvent) -> Optional[Message]:
"""
@@ -78,11 +79,11 @@ class CodeBlockCog(Cog, name="Code Block"):
try:
return await channel.fetch_message(self.codeblock_message_ids[payload.message_id])
- except disnake.NotFound:
+ except discord.NotFound:
log.debug("Could not find instructions message; it was probably deleted.")
return None
- def is_on_cooldown(self, channel: disnake.TextChannel) -> bool:
+ def is_on_cooldown(self, channel: discord.TextChannel) -> bool:
"""
Return True if an embed was sent too recently for `channel`.
@@ -93,7 +94,7 @@ class CodeBlockCog(Cog, name="Code Block"):
cooldown = constants.CodeBlock.cooldown_seconds
return (time.time() - self.channel_cooldowns.get(channel.id, 0)) < cooldown
- def is_valid_channel(self, channel: disnake.TextChannel) -> bool:
+ def is_valid_channel(self, channel: discord.TextChannel) -> bool:
"""Return True if `channel` is a help channel, may be on a cooldown, or is whitelisted."""
log.trace(f"Checking if #{channel} qualifies for code block detection.")
return (
@@ -102,7 +103,7 @@ class CodeBlockCog(Cog, name="Code Block"):
or channel.id in constants.CodeBlock.channel_whitelist
)
- async def send_instructions(self, message: disnake.Message, instructions: str) -> None:
+ async def send_instructions(self, message: discord.Message, instructions: str) -> None:
"""
Send an embed with `instructions` on fixing an incorrect code block in a `message`.
@@ -119,7 +120,7 @@ class CodeBlockCog(Cog, name="Code Block"):
# Increase amount of codeblock correction in stats
self.bot.stats.incr("codeblock_corrections")
- def should_parse(self, message: disnake.Message) -> bool:
+ def should_parse(self, message: discord.Message) -> bool:
"""
Return True if `message` should be parsed.
@@ -185,5 +186,5 @@ class CodeBlockCog(Cog, name="Code Block"):
else:
log.info("Message edited but still has invalid code blocks; editing instructions.")
await bot_message.edit(embed=self.create_embed(instructions))
- except disnake.NotFound:
+ except discord.NotFound:
log.debug("Could not find instructions message; it was probably deleted.")
diff --git a/bot/exts/info/doc/__init__.py b/bot/exts/info/doc/__init__.py
index facdf4d0b..4cfec33d3 100644
--- a/bot/exts/info/doc/__init__.py
+++ b/bot/exts/info/doc/__init__.py
@@ -11,7 +11,7 @@ NAMESPACE = "doc"
doc_cache = DocRedisCache(namespace=NAMESPACE)
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the Doc cog."""
from ._cog import DocCog
- bot.add_cog(DocCog(bot))
+ await bot.add_cog(DocCog(bot))
diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py
index 487a0fd21..41a15fb6e 100644
--- a/bot/exts/info/doc/_batch_parser.py
+++ b/bot/exts/info/doc/_batch_parser.py
@@ -7,13 +7,13 @@ from contextlib import suppress
from operator import attrgetter
from typing import Deque, Dict, List, NamedTuple, Optional, Union
-import disnake
+import discord
+from botcore.utils import scheduling
from bs4 import BeautifulSoup
import bot
from bot.constants import Channels
from bot.log import get_logger
-from bot.utils import scheduling
from . import _cog, doc_cache
from ._parsing import get_symbol_markdown
@@ -48,7 +48,7 @@ class StaleInventoryNotifier:
if await self.symbol_counter.increment_for(doc_item) < 3:
self._warned_urls.add(doc_item.url)
await self._init_task
- embed = disnake.Embed(
+ embed = discord.Embed(
description=f"Doc item `{doc_item.symbol_id=}` present in loaded documentation inventories "
f"not found on [site]({doc_item.url}), inventories may need to be refreshed."
)
diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py
index 77fc61389..c35349c3c 100644
--- a/bot/exts/info/doc/_cog.py
+++ b/bot/exts/info/doc/_cog.py
@@ -6,22 +6,21 @@ import textwrap
from collections import defaultdict
from contextlib import suppress
from types import SimpleNamespace
-from typing import Dict, NamedTuple, Optional, Tuple, Union
+from typing import Dict, Literal, NamedTuple, Optional, Tuple, Union
import aiohttp
-import disnake
-from disnake.ext import commands
+import discord
+from botcore.site_api import ResponseCodeError
+from botcore.utils.scheduling import Scheduler
+from discord.ext import commands
-from bot.api import ResponseCodeError
from bot.bot import Bot
from bot.constants import MODERATION_ROLES, RedirectOutput
-from bot.converters import Inventory, PackageName, ValidURL, allowed_strings
+from bot.converters import Inventory, PackageName, ValidURL
from bot.log import get_logger
from bot.pagination import LinePaginator
-from bot.utils import scheduling
from bot.utils.lock import SharedEvent, lock
from bot.utils.messages import send_denial, wait_for_deletion
-from bot.utils.scheduling import Scheduler
from . import NAMESPACE, PRIORITY_PACKAGES, _batch_parser, doc_cache
from ._inventory_parser import InvalidHeaderError, InventoryDict, fetch_inventory
@@ -78,14 +77,7 @@ class DocCog(commands.Cog):
self.refresh_event.set()
self.symbol_get_event = SharedEvent()
- self.init_refresh_task = scheduling.create_task(
- self.init_refresh_inventory(),
- name="Doc inventory init",
- event_loop=self.bot.loop,
- )
-
- @lock(NAMESPACE, COMMAND_LOCK_SINGLETON, raise_error=True)
- async def init_refresh_inventory(self) -> None:
+ async def cog_load(self) -> None:
"""Refresh documentation inventory on cog initialization."""
await self.bot.wait_until_guild_available()
await self.refresh_inventories()
@@ -275,7 +267,7 @@ class DocCog(commands.Cog):
return "Unable to parse the requested symbol."
return markdown
- async def create_symbol_embed(self, symbol_name: str) -> Optional[disnake.Embed]:
+ async def create_symbol_embed(self, symbol_name: str) -> Optional[discord.Embed]:
"""
Attempt to scrape and fetch the data for the given `symbol_name`, and build an embed from its contents.
@@ -304,8 +296,8 @@ class DocCog(commands.Cog):
else:
footer_text = ""
- embed = disnake.Embed(
- title=disnake.utils.escape_markdown(symbol_name),
+ embed = discord.Embed(
+ title=discord.utils.escape_markdown(symbol_name),
url=f"{doc_item.url}#{doc_item.symbol_id}",
description=await self.get_symbol_markdown(doc_item)
)
@@ -331,9 +323,9 @@ class DocCog(commands.Cog):
!docs getdoc aiohttp.ClientSession
"""
if not symbol_name:
- inventory_embed = disnake.Embed(
+ inventory_embed = discord.Embed(
title=f"All inventories (`{len(self.base_urls)}` total)",
- colour=disnake.Colour.blue()
+ colour=discord.Colour.blue()
)
lines = sorted(f"• [`{name}`]({url})" for name, url in self.base_urls.items())
@@ -355,7 +347,7 @@ class DocCog(commands.Cog):
# Make sure that we won't cause a ghost-ping by deleting the message
if not (ctx.message.mentions or ctx.message.role_mentions):
- with suppress(disnake.NotFound):
+ with suppress(discord.NotFound):
await ctx.message.delete()
await error_message.delete()
@@ -439,7 +431,7 @@ class DocCog(commands.Cog):
async def refresh_command(self, ctx: commands.Context) -> None:
"""Refresh inventories and show the difference."""
old_inventories = set(self.base_urls)
- with ctx.typing():
+ async with ctx.typing():
await self.refresh_inventories()
new_inventories = set(self.base_urls)
@@ -449,7 +441,7 @@ class DocCog(commands.Cog):
if removed := ", ".join(old_inventories - new_inventories):
removed = "- " + removed
- embed = disnake.Embed(
+ embed = discord.Embed(
title="Inventories refreshed",
description=f"```diff\n{added}\n{removed}```" if added or removed else ""
)
@@ -460,7 +452,7 @@ class DocCog(commands.Cog):
async def clear_cache_command(
self,
ctx: commands.Context,
- package_name: Union[PackageName, allowed_strings("*")] # noqa: F722
+ package_name: Union[PackageName, Literal["*"]]
) -> None:
"""Clear the persistent redis cache for `package`."""
if await doc_cache.delete(package_name):
@@ -469,8 +461,7 @@ class DocCog(commands.Cog):
else:
await ctx.send("No keys matching the package found.")
- def cog_unload(self) -> None:
+ async def cog_unload(self) -> None:
"""Clear scheduled inventories, queued symbols and cleanup task on cog unload."""
self.inventory_scheduler.cancel_all()
- self.init_refresh_task.cancel()
- scheduling.create_task(self.item_fetcher.clear(), name="DocCog.item_fetcher unload clear")
+ await self.item_fetcher.clear()
diff --git a/bot/exts/info/doc/_html.py b/bot/exts/info/doc/_html.py
index ca0a0ac4a..497246375 100644
--- a/bot/exts/info/doc/_html.py
+++ b/bot/exts/info/doc/_html.py
@@ -1,4 +1,3 @@
-import re
from functools import partial
from typing import Callable, Container, Iterable, List, Union
@@ -11,7 +10,6 @@ from . import MAX_SIGNATURE_AMOUNT
log = get_logger(__name__)
-_UNWANTED_SIGNATURE_SYMBOLS_RE = re.compile(r"\[source]|\\\\|¶")
_SEARCH_END_TAG_ATTRS = (
"data",
"function",
@@ -129,9 +127,23 @@ def get_signatures(start_signature: PageElement) -> List[str]:
start_signature,
*_find_next_siblings_until_tag(start_signature, ("dd",), limit=2),
)[-MAX_SIGNATURE_AMOUNT:]:
- signature = _UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element.text)
+ for tag in element.find_all(_filter_signature_links, recursive=False):
+ tag.decompose()
+ signature = element.text
if signature:
signatures.append(signature)
return signatures
+
+
+def _filter_signature_links(tag: Tag) -> bool:
+ """Return True if `tag` is a headerlink, or a link to source code; False otherwise."""
+ if tag.name == "a":
+ if "headerlink" in tag.get("class", ()):
+ return True
+
+ if tag.find(class_="viewcode-link"):
+ return True
+
+ return False
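To see what the new `_filter_signature_links` filter strips, here is a small example of the same decompose step (assumes bs4 is installed and the filter from the hunk above is in scope; the signature is a placeholder):

from bs4 import BeautifulSoup

html = (
    '<dt>frobnicate(x)'
    '<a class="headerlink" href="#frobnicate">¶</a>'
    '<a href="_modules/pkg.html"><span class="viewcode-link">[source]</span></a>'
    '</dt>'
)
signature_element = BeautifulSoup(html, "html.parser").dt
for tag in signature_element.find_all(_filter_signature_links, recursive=False):
    tag.decompose()
print(signature_element.text)  # -> "frobnicate(x)", the ¶ and [source] links are gone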
diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py
index 6ab38eb3d..b37aadc01 100644
--- a/bot/exts/info/doc/_parsing.py
+++ b/bot/exts/info/doc/_parsing.py
@@ -255,4 +255,10 @@ def get_symbol_markdown(soup: BeautifulSoup, symbol_data: DocItem) -> Optional[s
else:
signature = get_signatures(symbol_heading)
description = get_dd_description(symbol_heading)
- return _create_markdown(signature, description, symbol_data.url).replace("¶", "").strip()
+
+ for description_element in description:
+ if isinstance(description_element, Tag):
+ for tag in description_element.find_all("a", class_="headerlink"):
+ tag.decompose()
+
+ return _create_markdown(signature, description, symbol_data.url).strip()
diff --git a/bot/exts/info/doc/_redis_cache.py b/bot/exts/info/doc/_redis_cache.py
index 107f2344f..ef9abd981 100644
--- a/bot/exts/info/doc/_redis_cache.py
+++ b/bot/exts/info/doc/_redis_cache.py
@@ -1,14 +1,27 @@
from __future__ import annotations
import datetime
+import fnmatch
+import time
from typing import Optional, TYPE_CHECKING
-from async_rediscache.types.base import RedisObject, namespace_lock
+from async_rediscache.types.base import RedisObject
+
+from bot.log import get_logger
+from bot.utils.lock import lock
if TYPE_CHECKING:
from ._cog import DocItem
-WEEK_SECONDS = datetime.timedelta(weeks=1).total_seconds()
+WEEK_SECONDS = int(datetime.timedelta(weeks=1).total_seconds())
+
+log = get_logger(__name__)
+
+
+def serialize_resource_id_from_doc_item(bound_args: dict) -> str:
+ """Return the redis_key of the DocItem `item` from the bound args of DocRedisCache.set."""
+ item: DocItem = bound_args["item"]
+ return f"doc:{item_key(item)}"
class DocRedisCache(RedisObject):
@@ -16,9 +29,9 @@ class DocRedisCache(RedisObject):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- self._set_expires = set()
+ self._set_expires = dict[str, float]()
- @namespace_lock
+ @lock("DocRedisCache.set", serialize_resource_id_from_doc_item, wait=True)
async def set(self, item: DocItem, value: str) -> None:
"""
Set the Markdown `value` for the symbol `item`.
@@ -28,41 +41,55 @@ class DocRedisCache(RedisObject):
redis_key = f"{self.namespace}:{item_key(item)}"
needs_expire = False
- with await self._get_pool_connection() as connection:
- if redis_key not in self._set_expires:
- # An expire is only set if the key didn't exist before.
- # If this is the first time setting values for this key check if it exists and add it to
- # `_set_expires` to prevent redundant checks for subsequent uses with items from the same page.
- self._set_expires.add(redis_key)
- needs_expire = not await connection.exists(redis_key)
-
- await connection.hset(redis_key, item.symbol_id, value)
- if needs_expire:
- await connection.expire(redis_key, WEEK_SECONDS)
+ set_expire = self._set_expires.get(redis_key)
+ if set_expire is None:
+ # An expire is only set if the key didn't exist before.
+ ttl = await self.redis_session.client.ttl(redis_key)
+ log.debug(f"Checked TTL for `{redis_key}`.")
+
+ if ttl == -1:
+ log.warning(f"Key `{redis_key}` had no expire set.")
+ if ttl < 0: # not set or didn't exist
+ needs_expire = True
+ else:
+ log.debug(f"Key `{redis_key}` has a {ttl} TTL.")
+ self._set_expires[redis_key] = time.monotonic() + ttl - .1 # we need this to expire before redis
+
+ elif time.monotonic() > set_expire:
+ # If we got here the key expired in redis and we can be sure it doesn't exist.
+ needs_expire = True
+ log.debug(f"Key `{redis_key}` expired in internal key cache.")
+
+ await self.redis_session.client.hset(redis_key, item.symbol_id, value)
+ if needs_expire:
+ self._set_expires[redis_key] = time.monotonic() + WEEK_SECONDS
+ await self.redis_session.client.expire(redis_key, WEEK_SECONDS)
+ log.info(f"Set {redis_key} to expire in a week.")
- @namespace_lock
async def get(self, item: DocItem) -> Optional[str]:
"""Return the Markdown content of the symbol `item` if it exists."""
- with await self._get_pool_connection() as connection:
- return await connection.hget(f"{self.namespace}:{item_key(item)}", item.symbol_id, encoding="utf8")
+ return await self.redis_session.client.hget(f"{self.namespace}:{item_key(item)}", item.symbol_id)
- @namespace_lock
async def delete(self, package: str) -> bool:
"""Remove all values for `package`; return True if at least one key was deleted, False otherwise."""
- with await self._get_pool_connection() as connection:
- package_keys = [
- package_key async for package_key in connection.iscan(match=f"{self.namespace}:{package}:*")
- ]
- if package_keys:
- await connection.delete(*package_keys)
- return True
- return False
+ pattern = f"{self.namespace}:{package}:*"
+
+ package_keys = [
+ package_key async for package_key in self.redis_session.client.scan_iter(match=pattern)
+ ]
+ if package_keys:
+ await self.redis_session.client.delete(*package_keys)
+ log.info(f"Deleted keys from redis: {package_keys}.")
+ self._set_expires = {
+ key: expire for key, expire in self._set_expires.items() if not fnmatch.fnmatchcase(key, pattern)
+ }
+ return True
+ return False
class StaleItemCounter(RedisObject):
"""Manage increment counters for stale `DocItem`s."""
- @namespace_lock
async def increment_for(self, item: DocItem) -> int:
"""
Increment the counter for `item` by 1, set it to expire in 3 weeks and return the new value.
@@ -70,21 +97,19 @@ class StaleItemCounter(RedisObject):
If the counter didn't exist, initialize it with 1.
"""
key = f"{self.namespace}:{item_key(item)}:{item.symbol_id}"
- with await self._get_pool_connection() as connection:
- await connection.expire(key, WEEK_SECONDS * 3)
- return int(await connection.incr(key))
+ await self.redis_session.client.expire(key, WEEK_SECONDS * 3)
+ return int(await self.redis_session.client.incr(key))
- @namespace_lock
async def delete(self, package: str) -> bool:
"""Remove all values for `package`; return True if at least one key was deleted, False otherwise."""
- with await self._get_pool_connection() as connection:
- package_keys = [
- package_key async for package_key in connection.iscan(match=f"{self.namespace}:{package}:*")
- ]
- if package_keys:
- await connection.delete(*package_keys)
- return True
- return False
+ package_keys = [
+ package_key
+ async for package_key in self.redis_session.client.scan_iter(match=f"{self.namespace}:{package}:*")
+ ]
+ if package_keys:
+ await self.redis_session.client.delete(*package_keys)
+ return True
+ return False
def item_key(item: DocItem) -> str:
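The `_set_expires` change above turns a plain set into a key → deadline mapping so Redis only needs to be asked for a TTL once per page; stripped of Redis, the decision it makes looks roughly like this (a sketch, names illustrative):

import time

WEEK_SECONDS = 7 * 24 * 60 * 60
_deadlines: dict[str, float] = {}

def needs_expire(redis_key: str, ttl_from_redis: int) -> bool:
    # Mirrors the logic in DocRedisCache.set: EXPIRE is needed when the key is unknown
    # to us and Redis reports no TTL (-1) or a missing key (-2), or when our cached
    # deadline has already passed.
    deadline = _deadlines.get(redis_key)
    if deadline is None:
        if ttl_from_redis < 0:
            _deadlines[redis_key] = time.monotonic() + WEEK_SECONDS
            return True
        # Remember when Redis will drop the key, slightly early to stay on the safe side.
        _deadlines[redis_key] = time.monotonic() + ttl_from_redis - 0.1
        return False
    if time.monotonic() > deadline:
        _deadlines[redis_key] = time.monotonic() + WEEK_SECONDS
        return True
    return False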
diff --git a/bot/exts/info/help.py b/bot/exts/info/help.py
index 597534083..282f8c97a 100644
--- a/bot/exts/info/help.py
+++ b/bot/exts/info/help.py
@@ -6,8 +6,8 @@ from collections import namedtuple
from contextlib import suppress
from typing import List, Optional, Union
-from disnake import ButtonStyle, Colour, Embed, Emoji, HTTPException, Interaction, PartialEmoji, ui
-from disnake.ext.commands import Bot, Cog, Command, CommandError, Context, DisabledCommand, Group, HelpCommand
+from discord import ButtonStyle, Colour, Embed, Emoji, Interaction, PartialEmoji, ui
+from discord.ext.commands import Bot, Cog, Command, CommandError, Context, DisabledCommand, Group, HelpCommand
from rapidfuzz import fuzz, process
from rapidfuzz.utils import default_process
@@ -62,8 +62,8 @@ class SubcommandButton(ui.Button):
embed, subcommand_view = await self.help_command.format_group_help(subcommand)
else:
embed, subcommand_view = await self.help_command.command_formatting(subcommand)
- with suppress(HTTPException):
- await interaction.response.edit_message(embed=embed, view=subcommand_view)
+
+ await interaction.response.edit_message(embed=embed, view=subcommand_view)
class GroupButton(ui.Button):
@@ -96,8 +96,7 @@ class GroupButton(ui.Button):
async def callback(self, interaction: Interaction) -> None:
"""Edits the help embed to that of the parent."""
embed, group_view = await self.help_command.format_group_help(self.command.parent)
- with suppress(HTTPException):
- await interaction.response.edit_message(embed=embed, view=group_view)
+ await interaction.response.edit_message(embed=embed, view=group_view)
class CommandView(ui.View):
@@ -112,7 +111,7 @@ class CommandView(ui.View):
super().__init__()
if command.parent:
- self.children.append(GroupButton(help_command, command, emoji="↩️"))
+ self.add_item(GroupButton(help_command, command, emoji="↩️"))
async def interaction_check(self, interaction: Interaction) -> bool:
"""
@@ -483,12 +482,12 @@ class Help(Cog):
bot.help_command = CustomHelpCommand()
bot.help_command.cog = self
- def cog_unload(self) -> None:
+ async def cog_unload(self) -> None:
"""Reset the help command when the cog is unloaded."""
self.bot.help_command = self.old_help_command
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the Help cog."""
- bot.add_cog(Help(bot))
+ await bot.add_cog(Help(bot))
log.info("Cog loaded: Help")
diff --git a/bot/exts/info/information.py b/bot/exts/info/information.py
index 44a9b8f1a..e7d17c971 100644
--- a/bot/exts/info/information.py
+++ b/bot/exts/info/information.py
@@ -6,12 +6,12 @@ from textwrap import shorten
from typing import Any, DefaultDict, Mapping, Optional, Tuple, Union
import rapidfuzz
-from disnake import AllowedMentions, Colour, Embed, Guild, Message, Role
-from disnake.ext.commands import BucketType, Cog, Context, Greedy, Paginator, command, group, has_any_role
-from disnake.utils import escape_markdown
+from botcore.site_api import ResponseCodeError
+from discord import AllowedMentions, Colour, Embed, Guild, Message, Role
+from discord.ext.commands import BucketType, Cog, Context, Greedy, Paginator, command, group, has_any_role
+from discord.utils import escape_markdown
from bot import constants
-from bot.api import ResponseCodeError
from bot.bot import Bot
from bot.converters import MemberOrUser
from bot.decorators import in_whitelist
@@ -466,7 +466,7 @@ class Information(Cog):
async def send_raw_content(self, ctx: Context, message: Message, json: bool = False) -> None:
"""
- Send information about the raw API response for a `disnake.Message`.
+ Send information about the raw API response for a `discord.Message`.
If `json` is True, send the information in a copy-pasteable Python format.
"""
@@ -552,6 +552,6 @@ class Information(Cog):
await LinePaginator.paginate(final_rules, ctx, rules_embed, max_lines=3)
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the Information cog."""
- bot.add_cog(Information(bot))
+ await bot.add_cog(Information(bot))
diff --git a/bot/exts/info/pep.py b/bot/exts/info/pep.py
index 08c693581..6c659379f 100644
--- a/bot/exts/info/pep.py
+++ b/bot/exts/info/pep.py
@@ -3,19 +3,18 @@ from email.parser import HeaderParser
from io import StringIO
from typing import Dict, Optional, Tuple
-from disnake import Colour, Embed
-from disnake.ext.commands import Cog, Context, command
+from discord import Colour, Embed
+from discord.ext.commands import Cog, Context, command
from bot.bot import Bot
from bot.constants import Keys
from bot.log import get_logger
-from bot.utils import scheduling
from bot.utils.caching import AsyncCache
log = get_logger(__name__)
ICON_URL = "https://www.python.org/static/opengraph-icon-200x200.png"
-BASE_PEP_URL = "http://www.python.org/dev/peps/pep-"
+BASE_PEP_URL = "https://peps.python.org/pep-"
PEPS_LISTING_API_URL = "https://api.github.com/repos/python/peps/contents?ref=main"
pep_cache = AsyncCache()
@@ -33,7 +32,10 @@ class PythonEnhancementProposals(Cog):
self.peps: Dict[int, str] = {}
# To avoid situations where we don't have last datetime, set this to now.
self.last_refreshed_peps: datetime = datetime.now()
- scheduling.create_task(self.refresh_peps_urls(), event_loop=self.bot.loop)
+
+ async def cog_load(self) -> None:
+ """Carry out cog asynchronous initialisation."""
+ await self.refresh_peps_urls()
async def refresh_peps_urls(self) -> None:
"""Refresh PEP URLs listing in every 3 hours."""
@@ -67,7 +69,7 @@ class PythonEnhancementProposals(Cog):
"""Get information embed about PEP 0."""
pep_embed = Embed(
title="**PEP 0 - Index of Python Enhancement Proposals (PEPs)**",
- url="https://www.python.org/dev/peps/"
+ url="https://peps.python.org/"
)
pep_embed.set_thumbnail(url=ICON_URL)
pep_embed.add_field(name="Status", value="Active")
@@ -144,7 +146,7 @@ class PythonEnhancementProposals(Cog):
async def pep_command(self, ctx: Context, pep_number: int) -> None:
"""Fetches information about a PEP and sends it to the channel."""
# Trigger typing in chat to show users that bot is responding
- await ctx.trigger_typing()
+ await ctx.typing()
# Handle PEP 0 directly because it's not in .rst or .txt so it can't be accessed like other PEPs.
if pep_number == 0:
@@ -163,6 +165,6 @@ class PythonEnhancementProposals(Cog):
log.trace(f"Getting PEP {pep_number} failed. Error embed sent.")
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the PEP cog."""
- bot.add_cog(PythonEnhancementProposals(bot))
+ await bot.add_cog(PythonEnhancementProposals(bot))
diff --git a/bot/exts/info/pypi.py b/bot/exts/info/pypi.py
index 0a7705eb0..2d387df3d 100644
--- a/bot/exts/info/pypi.py
+++ b/bot/exts/info/pypi.py
@@ -3,9 +3,9 @@ import random
import re
from contextlib import suppress
-from disnake import Embed, NotFound
-from disnake.ext.commands import Cog, Context, command
-from disnake.utils import escape_markdown
+from discord import Embed, NotFound
+from discord.ext.commands import Cog, Context, command
+from discord.utils import escape_markdown
from bot.bot import Bot
from bot.constants import Colours, NEGATIVE_REPLIES, RedirectOutput
@@ -82,6 +82,6 @@ class PyPi(Cog):
await ctx.send(embed=embed)
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the PyPi cog."""
- bot.add_cog(PyPi(bot))
+ await bot.add_cog(PyPi(bot))
diff --git a/bot/exts/info/python_news.py b/bot/exts/info/python_news.py
index 7603b402b..111b2dcaf 100644
--- a/bot/exts/info/python_news.py
+++ b/bot/exts/info/python_news.py
@@ -2,19 +2,18 @@ import re
import typing as t
from datetime import date, datetime
-import disnake
+import discord
import feedparser
from bs4 import BeautifulSoup
-from disnake.ext.commands import Cog
-from disnake.ext.tasks import loop
+from discord.ext.commands import Cog
+from discord.ext.tasks import loop
from bot import constants
from bot.bot import Bot
from bot.log import get_logger
-from bot.utils import scheduling
from bot.utils.webhooks import send_webhook
-PEPS_RSS_URL = "https://www.python.org/dev/peps/peps.rss/"
+PEPS_RSS_URL = "https://peps.python.org/peps.rss"
RECENT_THREADS_TEMPLATE = "https://mail.python.org/archives/list/{name}@python.org/recent-threads"
THREAD_TEMPLATE_URL = "https://mail.python.org/archives/api/list/{name}@python.org/thread/{id}/"
@@ -40,10 +39,12 @@ class PythonNews(Cog):
def __init__(self, bot: Bot):
self.bot = bot
self.webhook_names = {}
- self.webhook: t.Optional[disnake.Webhook] = None
+ self.webhook: t.Optional[discord.Webhook] = None
- scheduling.create_task(self.get_webhook_names(), event_loop=self.bot.loop)
- scheduling.create_task(self.get_webhook_and_channel(), event_loop=self.bot.loop)
+ async def cog_load(self) -> None:
+ """Carry out cog asynchronous initialisation."""
+ await self.get_webhook_names()
+ await self.get_webhook_and_channel()
async def start_tasks(self) -> None:
"""Start the tasks for fetching new PEPs and mailing list messages."""
@@ -119,7 +120,7 @@ class PythonNews(Cog):
continue
# Build an embed and send a webhook
- embed = disnake.Embed(
+ embed = discord.Embed(
title=self.escape_markdown(new["title"]),
description=self.escape_markdown(new["summary"]),
timestamp=new_datetime,
@@ -189,7 +190,7 @@ class PythonNews(Cog):
link = THREAD_URL.format(id=thread["href"].split("/")[-2], list=maillist)
# Build an embed and send a message to the webhook
- embed = disnake.Embed(
+ embed = discord.Embed(
title=self.escape_markdown(thread_information["subject"]),
description=content[:1000] + f"... [continue reading]({link})" if len(content) > 1000 else content,
timestamp=new_date,
@@ -240,11 +241,11 @@ class PythonNews(Cog):
await self.start_tasks()
- def cog_unload(self) -> None:
+ async def cog_unload(self) -> None:
"""Stop news posting tasks on cog unload."""
self.fetch_new_media.cancel()
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Add `News` cog."""
- bot.add_cog(PythonNews(bot))
+ await bot.add_cog(PythonNews(bot))
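The async `setup`, `cog_load` and `cog_unload` conversions repeated throughout this diff all follow the same discord.py 2.0 extension shape; the bare skeleton is (a sketch, not any specific cog from this repository):

from discord.ext import commands

class Example(commands.Cog):
    def __init__(self, bot: commands.Bot):
        self.bot = bot

    async def cog_load(self) -> None:
        # Async initialisation that previously lived in a manually created init task.
        ...

    async def cog_unload(self) -> None:
        # Async cleanup; discord.py awaits this when the cog is removed.
        ...

async def setup(bot: commands.Bot) -> None:
    await bot.add_cog(Example(bot))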
diff --git a/bot/exts/info/resources.py b/bot/exts/info/resources.py
new file mode 100644
index 000000000..eeb9dd757
--- /dev/null
+++ b/bot/exts/info/resources.py
@@ -0,0 +1,70 @@
+import re
+from typing import Optional
+from urllib.parse import quote
+
+from discord import Embed
+from discord.ext import commands
+
+from bot.bot import Bot
+
+REGEX_CONSECUTIVE_NON_LETTERS = r"[^A-Za-z0-9]+"
+RESOURCE_URL = "https://www.pythondiscord.com/resources/"
+
+
+def to_kebabcase(resource_topic: str) -> str:
+ """
+ Convert any string to kebab-case.
+
+ For example, convert
+ "__Favorite FROOT¤#/$?is----LeMON???" to
+ "favorite-froot-is-lemon"
+
+ Code adopted from:
+ https://github.com/python-discord/site/blob/main/pydis_site/apps/resources/templatetags/to_kebabcase.py
+ """
+ # First, make it lowercase, and just remove any apostrophes.
+ # We remove the apostrophes because "wasnt" is better than "wasn-t"
+ resource_topic = resource_topic.casefold()
+ resource_topic = resource_topic.replace("'", '')
+
+    # Now, replace any non-alphanumerics that remain with a dash.
+ # If there are multiple consecutive non-letters, just replace them with a single dash.
+ # my-favorite-class is better than my-favorite------class
+ resource_topic = re.sub(
+ REGEX_CONSECUTIVE_NON_LETTERS,
+ "-",
+ resource_topic,
+ )
+
+ # Now we use strip to get rid of any leading or trailing dashes.
+ resource_topic = resource_topic.strip("-")
+ return resource_topic
+
+
+class Resources(commands.Cog):
+ """Display information about the Python Discord website Resource page."""
+
+ def __init__(self, bot: Bot):
+ self.bot = bot
+
+ @commands.command(name="resources", aliases=("res",))
+ async def resources_command(self, ctx: commands.Context, *, resource_topic: Optional[str]) -> None:
+ """Display information and a link to the Python Discord website Resources page."""
+ url = RESOURCE_URL
+
+ if resource_topic:
+            # Capture everything before the first newline (letting users add messages below the command), then prepare it for the URL.
+ url = f"{url}?topics={quote(to_kebabcase(resource_topic.splitlines()[0]))}"
+
+ embed = Embed(
+ title="Resources",
+ description=f"The [Resources page]({url}) on our website contains a list "
+ f"of hand-selected learning resources that we "
+ f"regularly recommend to both beginners and experts."
+ )
+ await ctx.send(embed=embed)
+
+
+async def setup(bot: Bot) -> None:
+ """Load the Resources cog."""
+ await bot.add_cog(Resources(bot))
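A quick check of the new `to_kebabcase` helper and the URL it feeds (the topic string is just an example; `to_kebabcase`, `RESOURCE_URL` and `quote` come from the module above):

topic = "Object Oriented Programming"
print(to_kebabcase(topic))
# -> object-oriented-programming
print(f"{RESOURCE_URL}?topics={quote(to_kebabcase(topic))}")
# -> https://www.pythondiscord.com/resources/?topics=object-oriented-programming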
diff --git a/bot/exts/info/source.py b/bot/exts/info/source.py
index 6305a9842..f735cc744 100644
--- a/bot/exts/info/source.py
+++ b/bot/exts/info/source.py
@@ -2,8 +2,8 @@ import inspect
from pathlib import Path
from typing import Optional, Tuple, Union
-from disnake import Embed
-from disnake.ext import commands
+from discord import Embed
+from discord.ext import commands
from bot.bot import Bot
from bot.constants import URLs
@@ -98,6 +98,6 @@ class BotSource(commands.Cog):
return embed
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the BotSource cog."""
- bot.add_cog(BotSource(bot))
+ await bot.add_cog(BotSource(bot))
diff --git a/bot/exts/info/stats.py b/bot/exts/info/stats.py
index 08422b38e..d4001a7bb 100644
--- a/bot/exts/info/stats.py
+++ b/bot/exts/info/stats.py
@@ -1,8 +1,8 @@
import string
-from disnake import Member, Message
-from disnake.ext.commands import Cog, Context
-from disnake.ext.tasks import loop
+from discord import Member, Message
+from discord.ext.commands import Cog, Context
+from discord.ext.tasks import loop
from bot.bot import Bot
from bot.constants import Categories, Channels, Guild
@@ -85,11 +85,11 @@ class Stats(Cog):
self.bot.stats.gauge("boost.amount", g.premium_subscription_count)
self.bot.stats.gauge("boost.tier", g.premium_tier)
- def cog_unload(self) -> None:
+ async def cog_unload(self) -> None:
"""Stop the boost statistic task on unload of the Cog."""
self.update_guild_boost.stop()
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the stats cog."""
- bot.add_cog(Stats(bot))
+ await bot.add_cog(Stats(bot))
diff --git a/bot/exts/info/subscribe.py b/bot/exts/info/subscribe.py
index ddfb238b8..e36ce807c 100644
--- a/bot/exts/info/subscribe.py
+++ b/bot/exts/info/subscribe.py
@@ -1,19 +1,18 @@
import calendar
-import contextlib
import operator
import typing as t
from dataclasses import dataclass
import arrow
-import disnake
-from disnake.ext import commands
-from disnake.interactions import Interaction
+import discord
+from botcore.utils import members
+from discord.ext import commands
+from discord.interactions import Interaction
from bot import constants
from bot.bot import Bot
from bot.decorators import redirect_output
from bot.log import get_logger
-from bot.utils import members, scheduling
@dataclass(frozen=True)
@@ -27,7 +26,7 @@ class AssignableRole:
role_id: int
months_available: t.Optional[tuple[int]]
- name: t.Optional[str] = None # This gets populated within Subscribe.init_cog()
+ name: t.Optional[str] = None # This gets populated within Subscribe.cog_load()
def is_currently_available(self) -> bool:
"""Check if the role is available for the current month."""
@@ -51,6 +50,7 @@ ASSIGNABLE_ROLES = (
AssignableRole(constants.Roles.pyweek_announcements, None),
AssignableRole(constants.Roles.lovefest, (1, 2)),
AssignableRole(constants.Roles.advent_of_code, (11, 12)),
+ AssignableRole(constants.Roles.revival_of_code, (7, 8, 9, 10)),
)
ITEMS_PER_ROW = 3
@@ -59,10 +59,10 @@ DELETE_MESSAGE_AFTER = 300 # Seconds
log = get_logger(__name__)
-class RoleButtonView(disnake.ui.View):
+class RoleButtonView(discord.ui.View):
"""A list of SingleRoleButtons to show to the member."""
- def __init__(self, member: disnake.Member):
+ def __init__(self, member: discord.Member):
super().__init__()
self.interaction_owner = member
@@ -77,13 +77,13 @@ class RoleButtonView(disnake.ui.View):
return True
-class SingleRoleButton(disnake.ui.Button):
+class SingleRoleButton(discord.ui.Button):
"""A button that adds or removes a role from the member depending on it's current state."""
- ADD_STYLE = disnake.ButtonStyle.success
- REMOVE_STYLE = disnake.ButtonStyle.red
- UNAVAILABLE_STYLE = disnake.ButtonStyle.secondary
- LABEL_FORMAT = "{action} role {role_name}"
+ ADD_STYLE = discord.ButtonStyle.success
+ REMOVE_STYLE = discord.ButtonStyle.red
+ UNAVAILABLE_STYLE = discord.ButtonStyle.secondary
+ LABEL_FORMAT = "{action} role {role_name}."
CUSTOM_ID_FORMAT = "subscribe-{role_id}"
def __init__(self, role: AssignableRole, assigned: bool, row: int):
@@ -105,10 +105,9 @@ class SingleRoleButton(disnake.ui.Button):
async def callback(self, interaction: Interaction) -> None:
"""Update the member's role and change button text to reflect current text."""
- if isinstance(interaction.user, disnake.User):
+ if isinstance(interaction.user, discord.User):
log.trace("User %s is not a member", interaction.user)
- with contextlib.suppress(disnake.HTTPException):
- await interaction.delete_original_message()
+ await interaction.message.delete()
self.view.stop()
return
@@ -119,7 +118,7 @@ class SingleRoleButton(disnake.ui.Button):
await members.handle_role_change(
interaction.user,
interaction.user.remove_roles if self.assigned else interaction.user.add_roles,
- disnake.Object(self.role.role_id),
+ discord.Object(self.role.role_id),
)
self.assigned = not self.assigned
@@ -134,8 +133,8 @@ class SingleRoleButton(disnake.ui.Button):
self.style = self.REMOVE_STYLE if self.assigned else self.ADD_STYLE
self.label = self.LABEL_FORMAT.format(action="Remove" if self.assigned else "Add", role_name=self.role.name)
try:
- await interaction.response.edit_message(view=self.view)
- except disnake.HTTPException:
+ await interaction.message.edit(view=self.view)
+ except discord.NotFound:
log.debug("Subscribe message for %s removed before buttons could be updated", interaction.user)
self.view.stop()
@@ -145,11 +144,10 @@ class Subscribe(commands.Cog):
def __init__(self, bot: Bot):
self.bot = bot
- self.init_task = scheduling.create_task(self.init_cog(), event_loop=self.bot.loop)
self.assignable_roles: list[AssignableRole] = []
- self.guild: disnake.Guild = None
+ self.guild: discord.Guild = None
- async def init_cog(self) -> None:
+ async def cog_load(self) -> None:
"""Initialise the cog by resolving the role IDs in ASSIGNABLE_ROLES to role names."""
await self.bot.wait_until_guild_available()
@@ -180,8 +178,6 @@ class Subscribe(commands.Cog):
)
async def subscribe_command(self, ctx: commands.Context, *_) -> None: # We don't actually care about the args
"""Display the member's current state for each role, and allow them to add/remove the roles."""
- await self.init_task
-
button_view = RoleButtonView(ctx.author)
author_roles = [role.id for role in ctx.author.roles]
for index, role in enumerate(self.assignable_roles):
@@ -195,9 +191,9 @@ class Subscribe(commands.Cog):
)
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the Subscribe cog."""
if len(ASSIGNABLE_ROLES) > ITEMS_PER_ROW*5: # Discord limits views to 5 rows of buttons.
log.error("Too many roles for 5 rows, not loading the Subscribe cog.")
else:
- bot.add_cog(Subscribe(bot))
+ await bot.add_cog(Subscribe(bot))
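
The `cog_load` hook used above is the discord.py 2.0 replacement for hand-scheduling an init task: it is awaited automatically once the cog is added, before the extension finishes loading. A rough sketch, using the stock `wait_until_ready` in place of the bot's own `wait_until_guild_available` helper (the cog and role names are illustrative):

    from discord.ext import commands

    class RolesCog(commands.Cog):
        """Illustrative cog that needs asynchronous initialisation."""

        def __init__(self, bot: commands.Bot):
            self.bot = bot
            self.assignable_roles: list[str] = []

        async def cog_load(self) -> None:
            # Awaited automatically right after the cog is added, so commands can
            # rely on the roles being resolved without awaiting an init task first.
            await self.bot.wait_until_ready()
            self.assignable_roles = ["illustrative-role"]
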
diff --git a/bot/exts/info/tags.py b/bot/exts/info/tags.py
index baeb21adb..83d3a9d93 100644
--- a/bot/exts/info/tags.py
+++ b/bot/exts/info/tags.py
@@ -6,10 +6,10 @@ import time
from pathlib import Path
from typing import Callable, Iterable, Literal, NamedTuple, Optional, Union
-import disnake
+import discord
import frontmatter
-from disnake import Embed, Member
-from disnake.ext.commands import Cog, Context, group
+from discord import Embed, Member
+from discord.ext.commands import Cog, Context, group
from bot import constants
from bot.bot import Bot
@@ -81,7 +81,8 @@ class Tag:
self.content = post.content
self.metadata = post.metadata
self._restricted_to: set[int] = set(self.metadata.get("restricted_to", ()))
- self._cooldowns: dict[disnake.TextChannel, float] = {}
+ self._cooldowns: dict[discord.TextChannel, float] = {}
+ self.aliases: list[str] = self.metadata.get("aliases", [])
@property
def embed(self) -> Embed:
@@ -90,18 +91,18 @@ class Tag:
embed.description = self.content
return embed
- def accessible_by(self, member: disnake.Member) -> bool:
+ def accessible_by(self, member: discord.Member) -> bool:
"""Check whether `member` can access the tag."""
return bool(
not self._restricted_to
or self._restricted_to & {role.id for role in member.roles}
)
- def on_cooldown_in(self, channel: disnake.TextChannel) -> bool:
+ def on_cooldown_in(self, channel: discord.TextChannel) -> bool:
"""Check whether the tag is on cooldown in `channel`."""
return self._cooldowns.get(channel, float("-inf")) > time.time()
- def set_cooldown_for(self, channel: disnake.TextChannel) -> None:
+ def set_cooldown_for(self, channel: discord.TextChannel) -> None:
"""Set the tag to be on cooldown in `channel` for `constants.Cooldowns.tags` seconds."""
self._cooldowns[channel] = time.time() + constants.Cooldowns.tags
@@ -149,7 +150,11 @@ class Tags(Cog):
# Files directly under `base_path` have an empty string as the parent directory name
tag_group = parent_dir.name or None
- self.tags[TagIdentifier(tag_group, tag_name)] = Tag(file)
+ tag = Tag(file)
+ self.tags[TagIdentifier(tag_group, tag_name)] = tag
+
+ for alias in tag.aliases:
+ self.tags[TagIdentifier(tag_group, alias)] = tag
def _get_suggestions(self, tag_identifier: TagIdentifier) -> list[tuple[TagIdentifier, Tag]]:
"""Return a list of suggested tags for `tag_identifier`."""
@@ -274,11 +279,16 @@ class Tags(Cog):
if tag.accessible_by(ctx.author)
]
+ # Try exact match, includes checking through alt names
tag = self.tags.get(tag_identifier)
if tag is None and tag_identifier.group is not None:
# Try exact match with only the name
- tag = self.tags.get(TagIdentifier(None, tag_identifier.group))
+ name_only_identifier = TagIdentifier(None, tag_identifier.group)
+ tag = self.tags.get(name_only_identifier)
+ if tag:
+ # Ensure the correct tag information is sent to statsd
+ tag_identifier = name_only_identifier
if tag is None and len(filtered_tags) == 1:
tag_identifier = filtered_tags[0][0]
@@ -344,7 +354,7 @@ class Tags(Cog):
return result_lines
- def accessible_tags_in_group(self, group: str, user: disnake.Member) -> list[str]:
+ def accessible_tags_in_group(self, group: str, user: discord.Member) -> list[str]:
"""Return a formatted list of tags in `group`, that are accessible by `user`."""
return sorted(
f"**\N{RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK}** {identifier}"
@@ -395,6 +405,6 @@ class Tags(Cog):
return True
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the Tags cog."""
- bot.add_cog(Tags(bot))
+ await bot.add_cog(Tags(bot))
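
The alias support added above works by registering extra dictionary keys that point at the same Tag instance, so a lookup by alias returns the identical object as a lookup by the canonical name. A small self-contained illustration (the tag name and alias are made up):

    from typing import Optional

    class Tag:
        """Illustrative stand-in for the real Tag class above."""

        def __init__(self, name: str, content: str, aliases: Optional[list[str]] = None):
            self.name = name
            self.content = content
            self.aliases = aliases or []

    tags: dict[str, Tag] = {}
    tag = Tag("traceback", "How to read a traceback...", aliases=["tb"])

    tags[tag.name] = tag
    for alias in tag.aliases:
        # Aliases map to the same object, so cooldowns and restrictions apply to every name.
        tags[alias] = tag

    assert tags["tb"] is tags["traceback"]
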
diff --git a/bot/exts/moderation/clean.py b/bot/exts/moderation/clean.py
index 2e274b23b..748c018d2 100644
--- a/bot/exts/moderation/clean.py
+++ b/bot/exts/moderation/clean.py
@@ -7,10 +7,10 @@ from datetime import datetime
from itertools import takewhile
from typing import Callable, Iterable, Literal, Optional, TYPE_CHECKING, Union
-from disnake import Colour, Message, NotFound, TextChannel, User, errors
-from disnake.ext.commands import Cog, Context, Converter, Greedy, group, has_any_role
-from disnake.ext.commands.converter import TextChannelConverter
-from disnake.ext.commands.errors import BadArgument
+from discord import Colour, Message, NotFound, TextChannel, Thread, User, errors
+from discord.ext.commands import Cog, Context, Converter, Greedy, command, group, has_any_role
+from discord.ext.commands.converter import TextChannelConverter
+from discord.ext.commands.errors import BadArgument
from bot.bot import Bot
from bot.constants import Channels, CleanMessages, Colours, Emojis, Event, Icons, MODERATION_ROLES
@@ -130,8 +130,8 @@ class Clean(Cog):
else:
if channels == "*":
channels = {
- channel for channel in ctx.guild.channels
- if isinstance(channel, TextChannel)
+ channel for channel in ctx.guild.channels + ctx.guild.threads
+ if isinstance(channel, (TextChannel, Thread))
# Assume that non-public channels are not needed to optimize for speed.
and channel.permissions_for(ctx.guild.default_role).view_channel
}
@@ -179,11 +179,11 @@ class Clean(Cog):
def predicate_range(message: Message) -> bool:
"""Check if the message age is between the two limits."""
- return first_limit <= message.created_at <= second_limit
+ return first_limit < message.created_at < second_limit
def predicate_after(message: Message) -> bool:
- """Check if the message is older than the first limit."""
- return message.created_at >= first_limit
+ """Check if the message is younger than the first limit."""
+ return message.created_at > first_limit
predicates = []
# Set up the correct predicate
@@ -241,9 +241,14 @@ class Clean(Cog):
self,
channels: Iterable[TextChannel],
to_delete: Predicate,
- before: datetime,
- after: Optional[datetime] = None
+ after: datetime,
+ before: Optional[datetime] = None
) -> tuple[defaultdict[TextChannel, list], list]:
+ """
+ Collect the messages for deletion by iterating over the histories of the appropriate channels.
+
+ The clean cog enforces an upper limit on message age through `_validate_input`.
+ """
message_mappings = defaultdict(list)
message_ids = []
@@ -419,8 +424,8 @@ class Clean(Cog):
message_mappings, message_ids = await self._get_messages_from_channels(
channels=deletion_channels,
to_delete=predicate,
- before=second_limit,
- after=first_limit # Remember first is the earlier datetime.
+ after=first_limit, # Remember first is the earlier datetime (the "older" time).
+ before=second_limit
)
if not self.cleaning:
@@ -441,7 +446,10 @@ class Clean(Cog):
f"A log of the deleted messages can be found here {log_url}."
)
if log_url and is_mod_channel(ctx.channel):
- await ctx.reply(success_message)
+ try:
+ await ctx.reply(success_message)
+ except errors.HTTPException:
+ await ctx.send(success_message)
elif log_url:
if mods := self.bot.get_channel(Channels.mods):
await mods.send(f"{ctx.author.mention} {success_message}")
@@ -449,7 +457,7 @@ class Clean(Cog):
# region: Commands
- @group(invoke_without_command=True, name="clean", aliases=["clear", "purge"])
+ @group(invoke_without_command=True, name="clean", aliases=("clear",))
async def clean_group(
self,
ctx: Context,
@@ -459,7 +467,7 @@ class Clean(Cog):
regex: Optional[Regex] = None,
bots_only: Optional[bool] = False,
*,
- channels: CleanChannels = None # "Optional" with disnake silently ignores incorrect input.
+ channels: CleanChannels = None # "Optional" with discord.py silently ignores incorrect input.
) -> None:
"""
Commands for cleaning messages in channels.
@@ -468,7 +476,7 @@ class Clean(Cog):
\u2003• `users`: A series of user mentions, ID's, or names.
\u2003• `first_limit` and `second_limit`: A message, a duration delta, or an ISO datetime.
- At least one limit is required.
+ At least one limit is required. The limits are *exclusive*.
If a message is provided, cleaning will happen in that channel, and channels cannot be provided.
If only one of them is provided, acts as `clean until`. If both are provided, acts as `clean between`.
\u2003• `regex`: A regex pattern the message must contain to be deleted.
@@ -483,34 +491,40 @@ class Clean(Cog):
await self._clean_messages(ctx, channels, bots_only, users, regex, first_limit, second_limit)
- @clean_group.command(name="user", aliases=["users"])
- async def clean_user(
+ @clean_group.command(name="users", aliases=["user"])
+ async def clean_users(
self,
ctx: Context,
- user: User,
+ users: Greedy[User],
message_or_time: CleanLimit,
*,
channels: CleanChannels = None
) -> None:
"""
- Delete messages posted by the provided user, stop cleaning after reaching `message_or_time`.
+ Delete messages posted by the provided users, stop cleaning after reaching `message_or_time`.
`message_or_time` can be either a message to stop at (exclusive), a timedelta for max message age, or an ISO
datetime.
- If a message is specified, `channels` cannot be specified.
+ If a message is specified the cleanup will be limited to the channel the message is in.
+
+ If a timedelta or an ISO datetime is specified, `channels` can be specified to clean across multiple channels.
+ An asterisk can also be used to designate cleanup across all channels.
"""
- await self._clean_messages(ctx, users=[user], channels=channels, first_limit=message_or_time)
+ await self._clean_messages(ctx, users=users, channels=channels, first_limit=message_or_time)
@clean_group.command(name="bots", aliases=["bot"])
async def clean_bots(self, ctx: Context, message_or_time: CleanLimit, *, channels: CleanChannels = None) -> None:
"""
- Delete all messages posted by a bot, stop cleaning after traversing `traverse` messages.
+ Delete all messages posted by a bot, stop cleaning after reaching `message_or_time`.
`message_or_time` can be either a message to stop at (exclusive), a timedelta for max message age, or an ISO
datetime.
- If a message is specified, `channels` cannot be specified.
+ If a message is specified the cleanup will be limited to the channel the message is in.
+
+ If a timedelta or an ISO datetime is specified, `channels` can be specified to clean across multiple channels.
+ An asterisk can also be used to designate cleanup across all channels.
"""
await self._clean_messages(ctx, bots_only=True, channels=channels, first_limit=message_or_time)
@@ -528,11 +542,19 @@ class Clean(Cog):
`message_or_time` can be either a message to stop at (exclusive), a timedelta for max message age, or an ISO
datetime.
- If a message is specified, `channels` cannot be specified.
- The pattern must be provided enclosed in backticks.
- If the pattern contains spaces, it still needs to be enclosed in double quotes on top of that.
- For example: `[0-9]`
+ If a message is specified the cleanup will be limited to the channel the message is in.
+
+ If a timedelta or an ISO datetime is specified, `channels` can be specified to clean across multiple channels.
+ An asterisk can also be used to designate cleanup across all channels.
+
+ The `regex` pattern must be provided enclosed in backticks.
+
+ For example: \\`[0-9]\\`.
+
+ If the `regex` pattern contains spaces, it still needs to be enclosed in double quotes on top of that.
+
+ For example: "\\`[0-9]\\`".
"""
await self._clean_messages(ctx, regex=regex, channels=channels, first_limit=message_or_time)
@@ -547,7 +569,13 @@ class Clean(Cog):
Delete all messages until a certain limit.
         A limit can be either a message, an ISO date-time string, or a time delta.
- If a message is specified, `channel` cannot be specified.
+
+ The limit is *exclusive*.
+
+ If a message is specified the cleanup will be limited to the channel the message is in.
+
+ If a timedelta or an ISO datetime is specified, `channels` can be specified to clean across multiple channels.
+ An asterisk can also be used to designate cleanup across all channels.
"""
await self._clean_messages(
ctx,
@@ -569,8 +597,13 @@ class Clean(Cog):
The range is specified through two limits.
         A limit can be either a message, an ISO date-time string, or a time delta.
+ The limits are *exclusive*.
+
If two messages are specified, they both must be in the same channel.
- If a message is specified, `channel` cannot be specified.
+ The cleanup will be limited to the channel the messages are in.
+
+ If two timedeltas or ISO datetimes are specified, `channels` can be specified to clean across multiple channels.
+ An asterisk can also be used to designate cleanup across all channels.
"""
await self._clean_messages(
ctx,
@@ -591,6 +624,23 @@ class Clean(Cog):
await self._send_expiring_message(ctx, message)
await self._delete_invocation(ctx)
+ @command()
+ async def purge(self, ctx: Context, users: Greedy[User], age: Optional[Union[Age, ISODateTime]] = None) -> None:
+ """
+ Clean messages of `users` from all public channels up to a certain message `age` (10 minutes by default).
+
+ Requires 1 or more users to be specified. For channel-based cleaning, use `clean` instead.
+
+ `age` can be a duration or an ISO 8601 timestamp.
+ """
+ if not users:
+ raise BadArgument("At least one user must be specified.")
+
+ if age is None:
+ age = await Age().convert(ctx, "10M")
+
+ await self._clean_messages(ctx, channels="*", users=users, first_limit=age)
+
# endregion
async def cog_check(self, ctx: Context) -> bool:
@@ -602,6 +652,6 @@ class Clean(Cog):
self.cleaning = False
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the Clean cog."""
- bot.add_cog(Clean(bot))
+ await bot.add_cog(Clean(bot))
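
The "*exclusive*" wording added to the docstrings above matches the switch to strict comparisons in the predicates: a message created exactly at a limit is no longer selected for deletion. A minimal sketch of that boundary behaviour (the timestamps are arbitrary):

    from datetime import datetime, timedelta, timezone

    first_limit = datetime(2022, 8, 1, tzinfo=timezone.utc)
    second_limit = first_limit + timedelta(hours=1)

    def within_range(created_at: datetime) -> bool:
        # Strict comparisons: a message created exactly at either limit is kept, not deleted.
        return first_limit < created_at < second_limit

    assert not within_range(first_limit)
    assert within_range(first_limit + timedelta(minutes=30))
    assert not within_range(second_limit)
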
diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py
index 58e049d4f..7c924ff14 100644
--- a/bot/exts/moderation/defcon.py
+++ b/bot/exts/moderation/defcon.py
@@ -1,3 +1,4 @@
+import asyncio
import traceback
from collections import namedtuple
from datetime import datetime
@@ -5,21 +6,22 @@ from enum import Enum
from typing import Optional, Union
import arrow
-from aioredis import RedisError
from async_rediscache import RedisCache
+from botcore.utils import scheduling
+from botcore.utils.scheduling import Scheduler
from dateutil.relativedelta import relativedelta
-from disnake import Colour, Embed, Forbidden, Member, TextChannel, User
-from disnake.ext import tasks
-from disnake.ext.commands import Cog, Context, group, has_any_role
+from discord import Colour, Embed, Forbidden, Member, TextChannel, User
+from discord.ext import tasks
+from discord.ext.commands import Cog, Context, group, has_any_role
+from redis import RedisError
from bot.bot import Bot
from bot.constants import Channels, Colours, Emojis, Event, Icons, MODERATION_ROLES, Roles
from bot.converters import DurationDelta, Expiry
from bot.exts.moderation.modlog import ModLog
from bot.log import get_logger
-from bot.utils import scheduling, time
+from bot.utils import time
from bot.utils.messages import format_user
-from bot.utils.scheduling import Scheduler
log = get_logger(__name__)
@@ -69,16 +71,18 @@ class Defcon(Cog):
scheduling.create_task(self._sync_settings(), event_loop=self.bot.loop)
- @property
- def mod_log(self) -> ModLog:
+ async def get_mod_log(self) -> ModLog:
"""Get currently loaded ModLog cog instance."""
- return self.bot.get_cog("ModLog")
+ while not (cog := self.bot.get_cog("ModLog")):
+ await asyncio.sleep(1)
+ return cog
@defcon_settings.atomic_transaction
async def _sync_settings(self) -> None:
"""On cog load, try to synchronize DEFCON settings to the API."""
log.trace("Waiting for the guild to become available before syncing.")
await self.bot.wait_until_guild_available()
+
self.channel = await self.bot.fetch_channel(Channels.defcon)
log.trace("Syncing settings.")
@@ -101,7 +105,7 @@ class Defcon(Cog):
self._update_notifier()
log.info(f"DEFCON synchronized: {time.humanize_delta(self.threshold) if self.threshold else '-'}")
- self._update_channel_topic()
+ await self._update_channel_topic()
@Cog.listener()
async def on_member_join(self, member: Member) -> None:
@@ -133,7 +137,7 @@ class Defcon(Cog):
if not message_sent:
message = f"{message}\n\nUnable to send rejection message via DM; they probably have DMs disabled."
- await self.mod_log.send_log_message(
+ await (await self.get_mod_log()).send_log_message(
Icons.defcon_denied, Colours.soft_red, "Entry denied",
message, member.display_avatar.url
)
@@ -209,12 +213,12 @@ class Defcon(Cog):
await role.edit(reason="DEFCON unshutdown", permissions=permissions)
await ctx.send(f"{Action.SERVER_OPEN.value.emoji} Server reopened.")
- def _update_channel_topic(self) -> None:
+ async def _update_channel_topic(self) -> None:
"""Update the #defcon channel topic with the current DEFCON status."""
threshold = time.humanize_delta(self.threshold) if self.threshold else '-'
new_topic = f"{BASE_CHANNEL_TOPIC}\n(Threshold: {threshold})"
- self.mod_log.ignore(Event.guild_channel_update, Channels.defcon)
+ (await self.get_mod_log()).ignore(Event.guild_channel_update, Channels.defcon)
scheduling.create_task(self.channel.edit(topic=new_topic))
@defcon_settings.atomic_transaction
@@ -273,7 +277,7 @@ class Defcon(Cog):
await channel.send(message)
await self._send_defcon_log(action, author)
- self._update_channel_topic()
+ await self._update_channel_topic()
self._log_threshold_stat(threshold)
@@ -301,7 +305,7 @@ class Defcon(Cog):
)
status_msg = f"DEFCON {action.name.lower()}"
- await self.mod_log.send_log_message(info.icon, info.color, status_msg, log_msg)
+ await (await self.get_mod_log()).send_log_message(info.icon, info.color, status_msg, log_msg)
def _update_notifier(self) -> None:
"""Start or stop the notifier according to the DEFCON status."""
@@ -318,13 +322,13 @@ class Defcon(Cog):
"""Routinely notify moderators that DEFCON is active."""
await self.channel.send(f"Defcon is on and is set to {time.humanize_delta(self.threshold)}.")
- def cog_unload(self) -> None:
+ async def cog_unload(self) -> None:
"""Cancel the notifer and threshold removal tasks when the cog unloads."""
log.trace("Cog unload: canceling defcon notifier task.")
self.defcon_notifier.cancel()
self.scheduler.cancel_all()
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the Defcon cog."""
- bot.add_cog(Defcon(bot))
+ await bot.add_cog(Defcon(bot))
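
`get_mod_log` above polls for the ModLog cog instead of assuming it is already loaded, which sidesteps a race now that extensions load asynchronously. The same idea in isolation (the helper name is made up):

    import asyncio

    from discord.ext import commands

    async def wait_for_cog(bot: commands.Bot, name: str) -> commands.Cog:
        """Poll until the named cog is registered; avoids racing concurrently loading extensions."""
        while (cog := bot.get_cog(name)) is None:
            await asyncio.sleep(1)
        return cog
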
diff --git a/bot/exts/moderation/dm_relay.py b/bot/exts/moderation/dm_relay.py
index 28e131eb4..bf0b96a58 100644
--- a/bot/exts/moderation/dm_relay.py
+++ b/bot/exts/moderation/dm_relay.py
@@ -1,11 +1,11 @@
-import disnake
-from disnake.ext.commands import Cog, Context, command, has_any_role
+import discord
+from discord.ext.commands import Cog, Context, command, has_any_role
from bot.bot import Bot
from bot.constants import Emojis, MODERATION_ROLES
from bot.log import get_logger
from bot.utils.channel import is_mod_channel
-from bot.utils.services import send_to_paste_service
+from bot.utils.services import PasteTooLongError, PasteUploadError, send_to_paste_service
log = get_logger(__name__)
@@ -17,7 +17,7 @@ class DMRelay(Cog):
self.bot = bot
@command(aliases=("relay", "dr"))
- async def dmrelay(self, ctx: Context, user: disnake.User, limit: int = 100) -> None:
+ async def dmrelay(self, ctx: Context, user: discord.User, limit: int = 100) -> None:
"""Relays the direct message history between the bot and given user."""
log.trace(f"Relaying DMs with {user.name} ({user.id})")
@@ -53,14 +53,14 @@ class DMRelay(Cog):
f"User: {user} ({user.id})\n"
f"Channel ID: {user.dm_channel.id}\n\n"
)
+ try:
+ message = await send_to_paste_service(metadata + output, extension="txt")
+ except PasteTooLongError:
+ message = f"{Emojis.cross_mark} Too long to upload to paste service."
+ except PasteUploadError:
+ message = f"{Emojis.cross_mark} Failed to upload to paste service."
- paste_link = await send_to_paste_service(metadata + output, extension="txt")
-
- if paste_link is None:
- await ctx.send(f"{Emojis.cross_mark} Failed to upload output to hastebin.")
- return
-
- await ctx.send(paste_link)
+ await ctx.send(message)
async def cog_check(self, ctx: Context) -> bool:
"""Only allow moderators to invoke the commands in this cog in mod channels."""
@@ -68,6 +68,6 @@ class DMRelay(Cog):
and is_mod_channel(ctx.channel))
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the DMRelay cog."""
- bot.add_cog(DMRelay(bot))
+ await bot.add_cog(DMRelay(bot))
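
The paste upload above now reports failure through typed exceptions rather than a None return, and the caller maps each exception to a user-facing message. A hedged sketch with stand-in definitions (the real `PasteTooLongError` and `PasteUploadError` live in bot.utils.services; the size limit here is invented):

    class PasteTooLongError(Exception):
        """Stand-in for the real exception raised when the payload exceeds the size limit."""

    class PasteUploadError(Exception):
        """Stand-in for the real exception raised when the upload request fails."""

    async def upload(contents: str) -> str:
        # Illustrative stub; the real helper POSTs the contents to the paste service.
        if len(contents) > 100_000:
            raise PasteTooLongError
        return "https://paste.example/abc123"

    async def relay(contents: str) -> str:
        try:
            return await upload(contents)
        except PasteTooLongError:
            return "Too long to upload to paste service."
        except PasteUploadError:
            return "Failed to upload to paste service."
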
diff --git a/bot/exts/moderation/incidents.py b/bot/exts/moderation/incidents.py
index c4c03e546..1ddbe9857 100644
--- a/bot/exts/moderation/incidents.py
+++ b/bot/exts/moderation/incidents.py
@@ -1,18 +1,19 @@
import asyncio
import re
-from datetime import datetime
+from datetime import datetime, timezone
from enum import Enum
from typing import Optional
-import disnake
+import discord
from async_rediscache import RedisCache
-from disnake.ext.commands import Cog, Context, MessageConverter, MessageNotFound
+from botcore.utils import scheduling
+from discord.ext.commands import Cog, Context, MessageConverter, MessageNotFound
from bot.bot import Bot
from bot.constants import Channels, Colours, Emojis, Guild, Roles, Webhooks
from bot.log import get_logger
-from bot.utils import scheduling
from bot.utils.messages import format_user, sub_clyde
+from bot.utils.time import TimestampFormats, discord_timestamp
log = get_logger(__name__)
@@ -25,9 +26,9 @@ CRAWL_LIMIT = 50
CRAWL_SLEEP = 2
DISCORD_MESSAGE_LINK_RE = re.compile(
- r"(https?:\/\/(?:(ptb|canary|www)\.)?discord(?:app)?\.com\/channels\/"
+ r"(https?://(?:(ptb|canary|www)\.)?discord(?:app)?\.com/channels/"
r"[0-9]{15,20}"
- r"\/[0-9]{15,20}\/[0-9]{15,20})"
+ r"/[0-9]{15,20}/[0-9]{15,20})"
)
@@ -52,10 +53,10 @@ ALL_SIGNALS: set[str] = {signal.value for signal in Signal}
# An embed coupled with an optional file to be dispatched
# If the file is not None, the embed attempts to show it in its body
-FileEmbed = tuple[disnake.Embed, Optional[disnake.File]]
+FileEmbed = tuple[discord.Embed, Optional[discord.File]]
-async def download_file(attachment: disnake.Attachment) -> Optional[disnake.File]:
+async def download_file(attachment: discord.Attachment) -> Optional[discord.File]:
"""
Download & return `attachment` file.
@@ -65,13 +66,13 @@ async def download_file(attachment: disnake.Attachment) -> Optional[disnake.File
log.debug(f"Attempting to download attachment: {attachment.filename}")
try:
return await attachment.to_file()
- except (disnake.NotFound, disnake.Forbidden) as exc:
+ except (discord.NotFound, discord.Forbidden) as exc:
log.debug(f"Failed to download attachment: {exc}")
except Exception:
log.exception("Failed to download attachment")
-async def make_embed(incident: disnake.Message, outcome: Signal, actioned_by: disnake.Member) -> FileEmbed:
+async def make_embed(incident: discord.Message, outcome: Signal, actioned_by: discord.Member) -> FileEmbed:
"""
Create an embed representation of `incident` for the #incidents-archive channel.
@@ -97,10 +98,20 @@ async def make_embed(incident: disnake.Message, outcome: Signal, actioned_by: di
colour = Colours.soft_red
footer = f"Rejected by {actioned_by}"
- embed = disnake.Embed(
- description=incident.content,
- timestamp=datetime.utcnow(),
+ reported_timestamp = discord_timestamp(incident.created_at)
+ relative_timestamp = discord_timestamp(incident.created_at, TimestampFormats.RELATIVE)
+ reported_on_msg = f"*Reported {reported_timestamp} ({relative_timestamp}).*"
+
+ # If the description will be too long (>4096 total characters), truncate the incident content
+ if len(incident.content) > (allowed_content_chars := 4096-len(reported_on_msg)-2): # -2 for the newlines
+ description = incident.content[:allowed_content_chars-3] + f"...\n\n{reported_on_msg}"
+ else:
+ description = incident.content + f"\n\n{reported_on_msg}"
+
+ embed = discord.Embed(
+ description=description,
colour=colour,
+ timestamp=datetime.now(timezone.utc)
)
embed.set_footer(text=footer, icon_url=actioned_by.display_avatar.url)
@@ -113,12 +124,12 @@ async def make_embed(incident: disnake.Message, outcome: Signal, actioned_by: di
else:
embed.set_author(name="[Failed to relay attachment]", url=attachment.proxy_url) # Embed links the file
else:
- file = disnake.utils.MISSING
+ file = discord.utils.MISSING
return embed, file
-def is_incident(message: disnake.Message) -> bool:
+def is_incident(message: discord.Message) -> bool:
"""True if `message` qualifies as an incident, False otherwise."""
conditions = (
message.channel.id == Channels.incidents, # Message sent in #incidents
@@ -129,12 +140,12 @@ def is_incident(message: disnake.Message) -> bool:
return all(conditions)
-def own_reactions(message: disnake.Message) -> set[str]:
+def own_reactions(message: discord.Message) -> set[str]:
"""Get the set of reactions placed on `message` by the bot itself."""
return {str(reaction.emoji) for reaction in message.reactions if reaction.me}
-def has_signals(message: disnake.Message) -> bool:
+def has_signals(message: discord.Message) -> bool:
"""True if `message` already has all `Signal` reactions, False otherwise."""
return ALL_SIGNALS.issubset(own_reactions(message))
@@ -167,9 +178,9 @@ def shorten_text(text: str) -> str:
return text
-async def make_message_link_embed(ctx: Context, message_link: str) -> Optional[disnake.Embed]:
+async def make_message_link_embed(ctx: Context, message_link: str) -> Optional[discord.Embed]:
"""
- Create an embedded representation of the Discord message link contained in the incident report.
+ Create an embedded representation of the discord message link contained in the incident report.
The Embed would contain the following information -->
Author: @Jason Terror ♦ (736234578745884682)
@@ -179,23 +190,23 @@ async def make_message_link_embed(ctx: Context, message_link: str) -> Optional[d
embed = None
try:
- message: disnake.Message = await MessageConverter().convert(ctx, message_link)
+ message: discord.Message = await MessageConverter().convert(ctx, message_link)
except MessageNotFound:
mod_logs_channel = ctx.bot.get_channel(Channels.mod_log)
- last_100_logs: list[disnake.Message] = await mod_logs_channel.history(limit=100).flatten()
+ last_100_logs: list[discord.Message] = [message async for message in mod_logs_channel.history(limit=100)]
for log_entry in last_100_logs:
if not log_entry.embeds:
continue
- log_embed: disnake.Embed = log_entry.embeds[0]
+ log_embed: discord.Embed = log_entry.embeds[0]
if (
log_embed.author.name == "Message deleted"
and f"[Jump to message]({message_link})" in log_embed.description
):
- embed = disnake.Embed(
- colour=disnake.Colour.dark_gold(),
+ embed = discord.Embed(
+ colour=discord.Colour.dark_gold(),
title="Deleted Message Link",
description=(
f"Found <#{Channels.mod_log}> entry for deleted message: "
@@ -203,12 +214,12 @@ async def make_message_link_embed(ctx: Context, message_link: str) -> Optional[d
)
)
if not embed:
- embed = disnake.Embed(
- colour=disnake.Colour.red(),
+ embed = discord.Embed(
+ colour=discord.Colour.red(),
title="Bad Message Link",
description=f"Message {message_link} not found."
)
- except disnake.DiscordException as e:
+ except discord.DiscordException as e:
log.exception(f"Failed to make message link embed for '{message_link}', raised exception: {e}")
else:
channel = message.channel
@@ -219,12 +230,12 @@ async def make_message_link_embed(ctx: Context, message_link: str) -> Optional[d
)
return
- embed = disnake.Embed(
- colour=disnake.Colour.gold(),
+ embed = discord.Embed(
+ colour=discord.Colour.gold(),
description=(
f"**Author:** {format_user(message.author)}\n"
f"**Channel:** {channel.mention} ({channel.category}"
- f"{f'/#{channel.parent.name} - ' if isinstance(channel, disnake.Thread) else '/#'}"
+ f"{f'/#{channel.parent.name} - ' if isinstance(channel, discord.Thread) else '/#'}"
f"{channel.name})\n"
),
timestamp=message.created_at
@@ -242,7 +253,7 @@ async def make_message_link_embed(ctx: Context, message_link: str) -> Optional[d
return embed
-async def add_signals(incident: disnake.Message) -> None:
+async def add_signals(incident: discord.Message) -> None:
"""
Add `Signal` member emoji to `incident` as reactions.
@@ -257,7 +268,7 @@ async def add_signals(incident: disnake.Message) -> None:
log.trace(f"Adding reaction: {signal_emoji}")
try:
await incident.add_reaction(signal_emoji.value)
- except disnake.NotFound as e:
+ except discord.NotFound as e:
if e.code != 10008:
raise
@@ -300,7 +311,7 @@ class Incidents(Cog):
"""
# This dictionary maps an incident report message to the message link embed's ID
- # RedisCache[disnake.Message.id, disnake.Message.id]
+ # RedisCache[discord.Message.id, discord.Message.id]
message_link_embeds_cache = RedisCache()
def __init__(self, bot: Bot) -> None:
@@ -319,7 +330,7 @@ class Incidents(Cog):
try:
self.incidents_webhook = await self.bot.fetch_webhook(Webhooks.incidents)
- except disnake.HTTPException:
+ except discord.HTTPException:
log.error(f"Failed to fetch incidents webhook with id `{Webhooks.incidents}`.")
async def crawl_incidents(self) -> None:
@@ -335,7 +346,7 @@ class Incidents(Cog):
Behaviour is configured by: `CRAWL_LIMIT`, `CRAWL_SLEEP`.
"""
await self.bot.wait_until_guild_available()
- incidents: disnake.TextChannel = self.bot.get_channel(Channels.incidents)
+ incidents: discord.TextChannel = self.bot.get_channel(Channels.incidents)
log.debug(f"Crawling messages in #incidents: {CRAWL_LIMIT=}, {CRAWL_SLEEP=}")
async for message in incidents.history(limit=CRAWL_LIMIT):
@@ -353,7 +364,7 @@ class Incidents(Cog):
log.debug("Crawl task finished!")
- async def archive(self, incident: disnake.Message, outcome: Signal, actioned_by: disnake.Member) -> bool:
+ async def archive(self, incident: discord.Message, outcome: Signal, actioned_by: discord.Member) -> bool:
"""
Relay an embed representation of `incident` to the #incidents-archive channel.
@@ -381,7 +392,7 @@ class Incidents(Cog):
webhook = await self.bot.fetch_webhook(Webhooks.incidents_archive)
await webhook.send(
embed=embed,
- username=sub_clyde(incident.author.name),
+ username=sub_clyde(incident.author.display_name),
avatar_url=incident.author.display_avatar.url,
file=attachment_file,
)
@@ -392,7 +403,7 @@ class Incidents(Cog):
log.trace("Message archived successfully!")
return True
- def make_confirmation_task(self, incident: disnake.Message, timeout: int = 5) -> asyncio.Task:
+ def make_confirmation_task(self, incident: discord.Message, timeout: int = 5) -> asyncio.Task:
"""
Create a task to wait `timeout` seconds for `incident` to be deleted.
@@ -401,13 +412,13 @@ class Incidents(Cog):
"""
log.trace(f"Confirmation task will wait {timeout=} seconds for {incident.id=} to be deleted")
- def check(payload: disnake.RawReactionActionEvent) -> bool:
+ def check(payload: discord.RawReactionActionEvent) -> bool:
return payload.message_id == incident.id
- coroutine = self.bot.wait_for(event="raw_message_delete", check=check, timeout=timeout)
+ coroutine = self.bot.wait_for("raw_message_delete", check=check, timeout=timeout)
return scheduling.create_task(coroutine, event_loop=self.bot.loop)
- async def process_event(self, reaction: str, incident: disnake.Message, member: disnake.Member) -> None:
+ async def process_event(self, reaction: str, incident: discord.Message, member: discord.Member) -> None:
"""
Process a `reaction_add` event in #incidents.
@@ -430,7 +441,7 @@ class Incidents(Cog):
log.debug(f"Removing invalid reaction: user {member} is not permitted to send signals")
try:
await incident.remove_reaction(reaction, member)
- except disnake.NotFound:
+ except discord.NotFound:
log.trace("Couldn't remove reaction because the reaction or its message was deleted")
return
@@ -440,7 +451,7 @@ class Incidents(Cog):
log.debug(f"Removing invalid reaction: emoji {reaction} is not a valid signal")
try:
await incident.remove_reaction(reaction, member)
- except disnake.NotFound:
+ except discord.NotFound:
log.trace("Couldn't remove reaction because the reaction or its message was deleted")
return
@@ -461,7 +472,7 @@ class Incidents(Cog):
log.trace("Deleting original message")
try:
await incident.delete()
- except disnake.NotFound:
+ except discord.NotFound:
log.trace("Couldn't delete message because it was already deleted")
log.trace(f"Awaiting deletion confirmation: {timeout=} seconds")
@@ -476,9 +487,9 @@ class Incidents(Cog):
         # Delete the message link embeds found in the cache from both the channel and the cache.
await self.delete_msg_link_embed(incident.id)
- async def resolve_message(self, message_id: int) -> Optional[disnake.Message]:
+ async def resolve_message(self, message_id: int) -> Optional[discord.Message]:
"""
- Get `disnake.Message` for `message_id` from cache, or API.
+ Get `discord.Message` for `message_id` from cache, or API.
We first look into the local cache to see if the message is present.
@@ -491,7 +502,7 @@ class Incidents(Cog):
"""
await self.bot.wait_until_guild_available() # First make sure that the cache is ready
log.trace(f"Resolving message for: {message_id=}")
- message: Optional[disnake.Message] = self.bot._connection._get_message(message_id)
+ message: Optional[discord.Message] = self.bot._connection._get_message(message_id)
if message is not None:
log.trace("Message was found in cache")
@@ -500,7 +511,7 @@ class Incidents(Cog):
log.trace("Message not found, attempting to fetch")
try:
message = await self.bot.get_channel(Channels.incidents).fetch_message(message_id)
- except disnake.NotFound:
+ except discord.NotFound:
log.trace("Message doesn't exist, it was likely already relayed")
except Exception:
log.exception(f"Failed to fetch message {message_id}!")
@@ -509,7 +520,7 @@ class Incidents(Cog):
return message
@Cog.listener()
- async def on_raw_reaction_add(self, payload: disnake.RawReactionActionEvent) -> None:
+ async def on_raw_reaction_add(self, payload: discord.RawReactionActionEvent) -> None:
"""
Pre-process `payload` and pass it to `process_event` if appropriate.
@@ -521,11 +532,11 @@ class Incidents(Cog):
Next, we acquire `event_lock` - to prevent racing, events are processed one at a time.
- Once we have the lock, the `disnake.Message` object for this event must be resolved.
+ Once we have the lock, the `discord.Message` object for this event must be resolved.
If the lock was previously held by an event which successfully relayed the incident,
this will fail and we abort the current event.
- Finally, with both the lock and the `disnake.Message` instance in our hands, we delegate
+ Finally, with both the lock and the `discord.Message` instance in our hands, we delegate
to `process_event` to handle the event.
The justification for using a raw listener is the need to receive events for messages
@@ -554,7 +565,7 @@ class Incidents(Cog):
log.trace("Releasing event lock")
@Cog.listener()
- async def on_message(self, message: disnake.Message) -> None:
+ async def on_message(self, message: discord.Message) -> None:
"""
Pass `message` to `add_signals` and `extract_message_links` if it satisfies `is_incident`.
@@ -575,7 +586,7 @@ class Incidents(Cog):
await self.send_message_link_embeds(embed_list, message, self.incidents_webhook)
@Cog.listener()
- async def on_raw_message_delete(self, payload: disnake.RawMessageDeleteEvent) -> None:
+ async def on_raw_message_delete(self, payload: discord.RawMessageDeleteEvent) -> None:
"""
Delete message link embeds for `payload.message_id`.
@@ -584,7 +595,7 @@ class Incidents(Cog):
if self.incidents_webhook:
await self.delete_msg_link_embed(payload.message_id)
- async def extract_message_links(self, message: disnake.Message) -> Optional[list[disnake.Embed]]:
+ async def extract_message_links(self, message: discord.Message) -> Optional[list[discord.Embed]]:
"""
         Check if there are any message links in the text content.
@@ -615,8 +626,8 @@ class Incidents(Cog):
async def send_message_link_embeds(
self,
webhook_embed_list: list,
- message: disnake.Message,
- webhook: disnake.Webhook,
+ message: discord.Message,
+ webhook: discord.Webhook,
) -> Optional[int]:
"""
Send message link embeds to #incidents channel.
@@ -634,7 +645,7 @@ class Incidents(Cog):
avatar_url=message.author.display_avatar.url,
wait=True,
)
- except disnake.DiscordException:
+ except discord.DiscordException:
log.exception(
f"Failed to send message link embed {message.id} to #incidents."
)
@@ -651,13 +662,13 @@ class Incidents(Cog):
if webhook_msg_id:
try:
await self.incidents_webhook.delete_message(webhook_msg_id)
- except disnake.errors.NotFound:
+ except discord.errors.NotFound:
log.trace(f"Incidents message link embed (`{webhook_msg_id}`) has already been deleted, skipping.")
await self.message_link_embeds_cache.delete(message_id)
log.trace("Successfully deleted discord links webhook message.")
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the Incidents cog."""
- bot.add_cog(Incidents(bot))
+ await bot.add_cog(Incidents(bot))
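
The archive embed above appends a "Reported ..." timestamp line and truncates the incident content so the whole description stays within Discord's 4096-character embed description limit. The same arithmetic in isolation (the sample timestamp string is illustrative):

    EMBED_DESCRIPTION_LIMIT = 4096  # Discord's limit on embed descriptions.

    def build_description(content: str, reported_on_msg: str) -> str:
        # Reserve room for the two newlines plus the trailing "Reported ..." line.
        allowed = EMBED_DESCRIPTION_LIMIT - len(reported_on_msg) - 2
        if len(content) > allowed:
            content = content[:allowed - 3] + "..."
        return f"{content}\n\n{reported_on_msg}"

    description = build_description("x" * 5000, "*Reported <t:0> (<t:0:R>).*")
    assert len(description) <= EMBED_DESCRIPTION_LIMIT
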
diff --git a/bot/exts/moderation/infraction/_scheduler.py b/bot/exts/moderation/infraction/_scheduler.py
index 8107b502a..280b0fb0c 100644
--- a/bot/exts/moderation/infraction/_scheduler.py
+++ b/bot/exts/moderation/infraction/_scheduler.py
@@ -5,18 +5,19 @@ from gettext import ngettext
import arrow
import dateutil.parser
-import disnake
-from disnake.ext.commands import Context
+import discord
+from botcore.site_api import ResponseCodeError
+from botcore.utils import scheduling
+from discord.ext.commands import Context
from bot import constants
-from bot.api import ResponseCodeError
from bot.bot import Bot
-from bot.constants import Colours
+from bot.constants import Colours, Roles
from bot.converters import MemberOrUser
from bot.exts.moderation.infraction import _utils
from bot.exts.moderation.modlog import ModLog
from bot.log import get_logger
-from bot.utils import messages, scheduling, time
+from bot.utils import messages, time
from bot.utils.channel import is_mod_channel
log = get_logger(__name__)
@@ -28,10 +29,9 @@ class InfractionScheduler:
def __init__(self, bot: Bot, supported_infractions: t.Container[str]):
self.bot = bot
self.scheduler = scheduling.Scheduler(self.__class__.__name__)
+ self.supported_infractions = supported_infractions
- scheduling.create_task(self.reschedule_infractions(supported_infractions), event_loop=self.bot.loop)
-
- def cog_unload(self) -> None:
+ async def cog_unload(self) -> None:
"""Cancel scheduled tasks."""
self.scheduler.cancel_all()
@@ -40,9 +40,10 @@ class InfractionScheduler:
"""Get the currently loaded ModLog cog instance."""
return self.bot.get_cog("ModLog")
- async def reschedule_infractions(self, supported_infractions: t.Container[str]) -> None:
+ async def cog_load(self) -> None:
"""Schedule expiration for previous infractions."""
await self.bot.wait_until_guild_available()
+ supported_infractions = self.supported_infractions
log.trace(f"Rescheduling infractions for {self.__class__.__name__}.")
@@ -71,7 +72,7 @@ class InfractionScheduler:
)
log.trace("Will reschedule remaining infractions at %s", next_reschedule_point)
- self.scheduler.schedule_at(next_reschedule_point, -1, self.reschedule_infractions(supported_infractions))
+ self.scheduler.schedule_at(next_reschedule_point, -1, self.cog_load())
log.trace("Done rescheduling")
@@ -101,7 +102,7 @@ class InfractionScheduler:
# Allowing mod log since this is a passive action that should be logged.
try:
await apply_coro
- except disnake.HTTPException as e:
+ except discord.HTTPException as e:
# When user joined and then right after this left again before action completed, this can't apply roles
if e.code == 10007 or e.status == 404:
log.info(
@@ -188,7 +189,10 @@ class InfractionScheduler:
f"Infraction #{id_} actor is bot; including the reason in the confirmation message."
)
if reason:
- end_msg = f" (reason: {textwrap.shorten(reason, width=1500, placeholder='...')})"
+ end_msg = (
+ f" (reason: {textwrap.shorten(reason, width=1500, placeholder='...')})."
+ f"\n\nThe <@&{Roles.moderators}> have been alerted for review"
+ )
purge = infraction.get("purge", "")
@@ -200,7 +204,7 @@ class InfractionScheduler:
if expiry:
# Schedule the expiration of the infraction.
self.schedule_expiration(infraction)
- except disnake.HTTPException as e:
+ except discord.HTTPException as e:
# Accordingly display that applying the infraction failed.
# Don't use ctx.message.author; antispam only patches ctx.author.
confirm_msg = ":x: failed to apply"
@@ -209,7 +213,7 @@ class InfractionScheduler:
log_title = "failed to apply"
log_msg = f"Failed to apply {' '.join(infr_type.split('_'))} infraction #{id_} to {user}"
- if isinstance(e, disnake.Forbidden):
+ if isinstance(e, discord.Forbidden):
log.warning(f"{log_msg}: bot lacks permissions.")
elif e.code == 10007 or e.status == 404:
log.info(
@@ -242,7 +246,8 @@ class InfractionScheduler:
# Send a confirmation message to the invoking context.
log.trace(f"Sending infraction #{id_} confirmation message.")
- await ctx.send(f"{dm_result}{confirm_msg}{infr_message}.")
+ mentions = discord.AllowedMentions(users=[user], roles=False)
+ await ctx.send(f"{dm_result}{confirm_msg}{infr_message}.", allowed_mentions=mentions)
# Send a log message to the mod log.
# Don't use ctx.message.author for the actor; antispam only patches ctx.author.
@@ -396,11 +401,11 @@ class InfractionScheduler:
raise ValueError(
f"Attempted to deactivate an unsupported infraction #{id_} ({type_})!"
)
- except disnake.Forbidden:
+ except discord.Forbidden:
log.warning(f"Failed to deactivate infraction #{id_} ({type_}): bot lacks permissions.")
log_text["Failure"] = "The bot lacks permissions to do this (role hierarchy?)"
log_content = mod_role.mention
- except disnake.HTTPException as e:
+ except discord.HTTPException as e:
if e.code == 10007 or e.status == 404:
log.info(
f"Can't pardon {infraction['type']} for user {infraction['user']} because user left the guild."
diff --git a/bot/exts/moderation/infraction/_utils.py b/bot/exts/moderation/infraction/_utils.py
index 36e818ec6..3a2485ec2 100644
--- a/bot/exts/moderation/infraction/_utils.py
+++ b/bot/exts/moderation/infraction/_utils.py
@@ -2,11 +2,11 @@ import typing as t
from datetime import datetime
import arrow
-import disnake
-from disnake.ext.commands import Context
+import discord
+from botcore.site_api import ResponseCodeError
+from discord.ext.commands import Context
import bot
-from bot.api import ResponseCodeError
from bot.constants import Colours, Icons
from bot.converters import MemberOrUser
from bot.errors import InvalidInfractedUserError
@@ -86,7 +86,7 @@ async def post_infraction(
dm_sent: bool = False,
) -> t.Optional[dict]:
"""Posts an infraction to the API."""
- if isinstance(user, (disnake.Member, disnake.User)) and user.bot:
+ if isinstance(user, (discord.Member, discord.User)) and user.bot:
log.trace(f"Posting of {infr_type} infraction for {user} to the API aborted. User is a bot.")
raise InvalidInfractedUserError(user)
@@ -209,7 +209,7 @@ async def notify_infraction(
text += INFRACTION_APPEAL_SERVER_FOOTER if infraction["type"] == 'ban' else INFRACTION_APPEAL_MODMAIL_FOOTER
- embed = disnake.Embed(
+ embed = discord.Embed(
description=text,
colour=Colours.soft_red
)
@@ -238,7 +238,7 @@ async def notify_pardon(
"""DM a user about their pardoned infraction and return True if the DM is successful."""
log.trace(f"Sending {user} a DM about their pardoned infraction.")
- embed = disnake.Embed(
+ embed = discord.Embed(
description=content,
colour=Colours.soft_green
)
@@ -248,7 +248,7 @@ async def notify_pardon(
return await send_private_embed(user, embed)
-async def send_private_embed(user: MemberOrUser, embed: disnake.Embed) -> bool:
+async def send_private_embed(user: MemberOrUser, embed: discord.Embed) -> bool:
"""
A helper method for sending an embed to a user's DMs.
@@ -257,7 +257,7 @@ async def send_private_embed(user: MemberOrUser, embed: disnake.Embed) -> bool:
try:
await user.send(embed=embed)
return True
- except (disnake.HTTPException, disnake.Forbidden, disnake.NotFound):
+ except (discord.HTTPException, discord.Forbidden, discord.NotFound):
log.debug(
f"Infraction-related information could not be sent to user {user} ({user.id}). "
"The user either could not be retrieved or probably disabled their DMs."
diff --git a/bot/exts/moderation/infraction/infractions.py b/bot/exts/moderation/infraction/infractions.py
index 5ff56abde..08a3609a7 100644
--- a/bot/exts/moderation/infraction/infractions.py
+++ b/bot/exts/moderation/infraction/infractions.py
@@ -1,16 +1,18 @@
import textwrap
import typing as t
-import disnake
-from disnake import Member
-from disnake.ext import commands
-from disnake.ext.commands import Context, command
+import arrow
+import discord
+from discord import Member
+from discord.ext import commands
+from discord.ext.commands import Context, command
from bot import constants
from bot.bot import Bot
from bot.constants import Event
from bot.converters import Age, Duration, Expiry, MemberOrUser, UnambiguousMemberOrUser
-from bot.decorators import respect_role_hierarchy
+from bot.decorators import ensure_future_timestamp, respect_role_hierarchy
+from bot.exts.filters.filtering import AUTO_BAN_DURATION, AUTO_BAN_REASON
from bot.exts.moderation.infraction import _utils
from bot.exts.moderation.infraction._scheduler import InfractionScheduler
from bot.log import get_logger
@@ -35,8 +37,8 @@ class Infractions(InfractionScheduler, commands.Cog):
super().__init__(bot, supported_infractions={"ban", "kick", "mute", "note", "warning", "voice_mute"})
self.category = "Moderation"
- self._muted_role = disnake.Object(constants.Roles.muted)
- self._voice_verified_role = disnake.Object(constants.Roles.voice_verified)
+ self._muted_role = discord.Object(constants.Roles.muted)
+ self._voice_verified_role = discord.Object(constants.Roles.voice_verified)
@commands.Cog.listener()
async def on_member_join(self, member: Member) -> None:
@@ -81,6 +83,7 @@ class Infractions(InfractionScheduler, commands.Cog):
await self.apply_kick(ctx, user, reason)
@command()
+ @ensure_future_timestamp(timestamp_arg=3)
async def ban(
self,
ctx: Context,
@@ -97,6 +100,7 @@ class Infractions(InfractionScheduler, commands.Cog):
await self.apply_ban(ctx, user, reason, expires_at=duration)
@command(aliases=("cban", "purgeban", "pban"))
+ @ensure_future_timestamp(timestamp_arg=3)
async def cleanban(
self,
ctx: Context,
@@ -123,7 +127,7 @@ class Infractions(InfractionScheduler, commands.Cog):
log.error("Failed to apply ban to user %d", user.id)
return
- # Calling commands directly skips disnake's convertors, so we need to convert args manually.
+ # Calling commands directly skips discord.py's convertors, so we need to convert args manually.
clean_time = await Age().convert(ctx, "1h")
log_url = await clean_cog._clean_messages(
@@ -149,6 +153,11 @@ class Infractions(InfractionScheduler, commands.Cog):
ctx.send = send
await infr_manage_cog.infraction_append(ctx, infraction, None, reason=f"[Clean log]({log_url})")
+ @command()
+ async def compban(self, ctx: Context, user: UnambiguousMemberOrUser) -> None:
+ """Same as cleanban, but specifically with the ban reason and duration used for compromised accounts."""
+ await self.cleanban(ctx, user, duration=(arrow.utcnow() + AUTO_BAN_DURATION).datetime, reason=AUTO_BAN_REASON)
+
@command(aliases=("vban",))
async def voiceban(self, ctx: Context) -> None:
"""
@@ -161,6 +170,7 @@ class Infractions(InfractionScheduler, commands.Cog):
await ctx.send(":x: This command is not yet implemented. Maybe you meant to use `voicemute`?")
@command(aliases=("vmute",))
+ @ensure_future_timestamp(timestamp_arg=3)
async def voicemute(
self,
ctx: Context,
@@ -180,6 +190,7 @@ class Infractions(InfractionScheduler, commands.Cog):
# region: Temporary infractions
@command(aliases=["mute"])
+ @ensure_future_timestamp(timestamp_arg=3)
async def tempmute(
self, ctx: Context,
user: UnambiguousMemberOrUser,
@@ -213,6 +224,7 @@ class Infractions(InfractionScheduler, commands.Cog):
await self.apply_mute(ctx, user, reason, expires_at=duration)
@command(aliases=("tban",))
+ @ensure_future_timestamp(timestamp_arg=3)
async def tempban(
self,
ctx: Context,
@@ -248,6 +260,7 @@ class Infractions(InfractionScheduler, commands.Cog):
await ctx.send(":x: This command is not yet implemented. Maybe you meant to use `tempvoicemute`?")
@command(aliases=("tempvmute", "tvmute"))
+ @ensure_future_timestamp(timestamp_arg=3)
async def tempvoicemute(
self,
ctx: Context,
@@ -294,6 +307,7 @@ class Infractions(InfractionScheduler, commands.Cog):
# region: Temporary shadow infractions
@command(hidden=True, aliases=["shadowtempban", "stempban", "stban"])
+ @ensure_future_timestamp(timestamp_arg=3)
async def shadow_tempban(
self,
ctx: Context,
@@ -494,7 +508,7 @@ class Infractions(InfractionScheduler, commands.Cog):
async def pardon_mute(
self,
user_id: int,
- guild: disnake.Guild,
+ guild: discord.Guild,
reason: t.Optional[str],
*,
notify: bool = True
@@ -525,16 +539,16 @@ class Infractions(InfractionScheduler, commands.Cog):
return log_text
- async def pardon_ban(self, user_id: int, guild: disnake.Guild, reason: t.Optional[str]) -> t.Dict[str, str]:
+ async def pardon_ban(self, user_id: int, guild: discord.Guild, reason: t.Optional[str]) -> t.Dict[str, str]:
"""Remove a user's ban on the Discord guild and return a log dict."""
- user = disnake.Object(user_id)
+ user = discord.Object(user_id)
log_text = {}
self.mod_log.ignore(Event.member_unban, user_id)
try:
await guild.unban(user, reason=reason)
- except disnake.NotFound:
+ except discord.NotFound:
log.info(f"Failed to unban user {user_id}: no active ban found on Discord")
log_text["Note"] = "No active ban found on Discord."
@@ -543,7 +557,7 @@ class Infractions(InfractionScheduler, commands.Cog):
async def pardon_voice_mute(
self,
user_id: int,
- guild: disnake.Guild,
+ guild: discord.Guild,
*,
notify: bool = True
) -> t.Dict[str, str]:
@@ -597,11 +611,11 @@ class Infractions(InfractionScheduler, commands.Cog):
async def cog_command_error(self, ctx: Context, error: Exception) -> None:
"""Send a notification to the invoking context on a Union failure."""
if isinstance(error, commands.BadUnionArgument):
- if disnake.User in error.converters or Member in error.converters:
+ if discord.User in error.converters or Member in error.converters:
await ctx.send(str(error.errors[0]))
error.handled = True
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the Infractions cog."""
- bot.add_cog(Infractions(bot))
+ await bot.add_cog(Infractions(bot))
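
`pardon_ban` above unbans via `discord.Object(user_id)`, which lets the bot reference a user purely by ID without fetching them first, and treats a missing ban as a non-fatal outcome. A minimal sketch of that call (the helper name is made up):

    import typing as t

    import discord

    async def unban_by_id(guild: discord.Guild, user_id: int, reason: t.Optional[str] = None) -> bool:
        # discord.Object stands in for a full user, so no extra fetch is needed to unban by ID.
        try:
            await guild.unban(discord.Object(user_id), reason=reason)
            return True
        except discord.NotFound:
            return False
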
diff --git a/bot/exts/moderation/infraction/management.py b/bot/exts/moderation/infraction/management.py
index 25420cd7a..a7d7a844a 100644
--- a/bot/exts/moderation/infraction/management.py
+++ b/bot/exts/moderation/infraction/management.py
@@ -1,14 +1,16 @@
+import re
import textwrap
import typing as t
-import disnake
-from disnake.ext import commands
-from disnake.ext.commands import Context
-from disnake.utils import escape_markdown
+import discord
+from discord.ext import commands
+from discord.ext.commands import Context
+from discord.utils import escape_markdown
from bot import constants
from bot.bot import Bot
-from bot.converters import Expiry, Infraction, MemberOrUser, Snowflake, UnambiguousUser, allowed_strings
+from bot.converters import Expiry, Infraction, MemberOrUser, Snowflake, UnambiguousUser
+from bot.decorators import ensure_future_timestamp
from bot.errors import InvalidInfraction
from bot.exts.moderation.infraction import _utils
from bot.exts.moderation.infraction.infractions import Infractions
@@ -52,9 +54,9 @@ class ModManagement(commands.Cog):
await ctx.send_help(ctx.command)
return
- embed = disnake.Embed(
+ embed = discord.Embed(
title=f"Infraction #{infraction['id']}",
- colour=disnake.Colour.orange()
+ colour=discord.Colour.orange()
)
await self.send_infraction_list(ctx, embed, [infraction])
@@ -87,7 +89,7 @@ class ModManagement(commands.Cog):
self,
ctx: Context,
infraction: Infraction,
- duration: t.Union[Expiry, allowed_strings("p", "permanent"), None], # noqa: F821
+ duration: t.Union[Expiry, t.Literal["p", "permanent"], None],
*,
reason: str = None
) -> None:
@@ -122,11 +124,12 @@ class ModManagement(commands.Cog):
await self.infraction_edit(ctx, infraction, duration, reason=reason)
@infraction_group.command(name='edit', aliases=('e',))
+ @ensure_future_timestamp(timestamp_arg=3)
async def infraction_edit(
self,
ctx: Context,
infraction: Infraction,
- duration: t.Union[Expiry, allowed_strings("p", "permanent"), None], # noqa: F821
+ duration: t.Union[Expiry, t.Literal["p", "permanent"], None],
*,
reason: str = None
) -> None:
@@ -222,7 +225,7 @@ class ModManagement(commands.Cog):
await self.mod_log.send_log_message(
icon_url=constants.Icons.pencil,
- colour=disnake.Colour.og_blurple(),
+ colour=discord.Colour.og_blurple(),
title="Infraction edited",
thumbnail=thumbnail,
text=textwrap.dedent(f"""
@@ -240,21 +243,21 @@ class ModManagement(commands.Cog):
async def infraction_search_group(self, ctx: Context, query: t.Union[UnambiguousUser, Snowflake, str]) -> None:
"""Searches for infractions in the database."""
if isinstance(query, int):
- await self.search_user(ctx, disnake.Object(query))
+ await self.search_user(ctx, discord.Object(query))
elif isinstance(query, str):
await self.search_reason(ctx, query)
else:
await self.search_user(ctx, query)
@infraction_search_group.command(name="user", aliases=("member", "userid"))
- async def search_user(self, ctx: Context, user: t.Union[MemberOrUser, disnake.Object]) -> None:
+ async def search_user(self, ctx: Context, user: t.Union[MemberOrUser, discord.Object]) -> None:
"""Search for infractions by member."""
infraction_list = await self.bot.api_client.get(
'bot/infractions/expanded',
params={'user__id': str(user.id)}
)
- if isinstance(user, (disnake.Member, disnake.User)):
+ if isinstance(user, (discord.Member, discord.User)):
user_str = escape_markdown(str(user))
else:
if infraction_list:
@@ -264,24 +267,29 @@ class ModManagement(commands.Cog):
user_str = str(user.id)
formatted_infraction_count = self.format_infraction_count(len(infraction_list))
- embed = disnake.Embed(
+ embed = discord.Embed(
title=f"Infractions for {user_str} ({formatted_infraction_count} total)",
- colour=disnake.Colour.orange()
+ colour=discord.Colour.orange()
)
await self.send_infraction_list(ctx, embed, infraction_list)
@infraction_search_group.command(name="reason", aliases=("match", "regex", "re"))
async def search_reason(self, ctx: Context, reason: str) -> None:
"""Search for infractions by their reason. Use Re2 for matching."""
+ try:
+ re.compile(reason)
+ except re.error as e:
+ raise commands.BadArgument(f"Invalid regular expression in `reason`: {e}")
+
infraction_list = await self.bot.api_client.get(
'bot/infractions/expanded',
params={'search': reason}
)
formatted_infraction_count = self.format_infraction_count(len(infraction_list))
- embed = disnake.Embed(
+ embed = discord.Embed(
title=f"Infractions matching `{reason}` ({formatted_infraction_count} total)",
- colour=disnake.Colour.orange()
+ colour=discord.Colour.orange()
)
await self.send_infraction_list(ctx, embed, infraction_list)
@@ -319,9 +327,9 @@ class ModManagement(commands.Cog):
)
formatted_infraction_count = self.format_infraction_count(len(infraction_list))
- embed = disnake.Embed(
+ embed = discord.Embed(
title=f"Infractions by {actor} ({formatted_infraction_count} total)",
- colour=disnake.Colour.orange()
+ colour=discord.Colour.orange()
)
await self.send_infraction_list(ctx, embed, infraction_list)
@@ -344,7 +352,7 @@ class ModManagement(commands.Cog):
async def send_infraction_list(
self,
ctx: Context,
- embed: disnake.Embed,
+ embed: discord.Embed,
infractions: t.Iterable[t.Dict[str, t.Any]]
) -> None:
"""Send a paginated embed of infractions for the specified user."""
@@ -433,7 +441,7 @@ class ModManagement(commands.Cog):
async def cog_command_error(self, ctx: Context, error: commands.CommandError) -> None:
"""Handles errors for commands within this cog."""
if isinstance(error, commands.BadUnionArgument):
- if disnake.User in error.converters:
+ if discord.User in error.converters:
await ctx.send(str(error.errors[0]))
error.handled = True
@@ -445,6 +453,6 @@ class ModManagement(commands.Cog):
error.handled = True
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the ModManagement cog."""
- bot.add_cog(ModManagement(bot))
+ await bot.add_cog(ModManagement(bot))
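# Editor's note: an illustrative, hypothetical command showing why allowed_strings()
# could be dropped above — discord.py 2.0's argument parsing understands
# typing.Literal annotations natively, so a fixed set of accepted strings no longer
# needs a custom converter. A plain int stands in for the project's Expiry converter.
import typing as t

from discord.ext import commands


class LiteralExample(commands.Cog):
    @commands.command()
    async def expiry(
        self,
        ctx: commands.Context,
        duration: t.Union[int, t.Literal["p", "permanent"], None],
    ) -> None:
        """Accept a number of days, the literal strings "p"/"permanent", or nothing."""
        await ctx.send(f"Parsed duration: {duration!r}")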
diff --git a/bot/exts/moderation/infraction/superstarify.py b/bot/exts/moderation/infraction/superstarify.py
index 41ba52580..0e6aaa1e7 100644
--- a/bot/exts/moderation/infraction/superstarify.py
+++ b/bot/exts/moderation/infraction/superstarify.py
@@ -4,13 +4,14 @@ import textwrap
import typing as t
from pathlib import Path
-from disnake import Embed, Member
-from disnake.ext.commands import Cog, Context, command, has_any_role
-from disnake.utils import escape_markdown
+from discord import Embed, Member
+from discord.ext.commands import Cog, Context, command, has_any_role
+from discord.utils import escape_markdown
from bot import constants
from bot.bot import Bot
from bot.converters import Duration, Expiry
+from bot.decorators import ensure_future_timestamp
from bot.exts.moderation.infraction import _utils
from bot.exts.moderation.infraction._scheduler import InfractionScheduler
from bot.log import get_logger
@@ -103,6 +104,7 @@ class Superstarify(InfractionScheduler, Cog):
await self.reapply_infraction(infraction, action)
@command(name="superstarify", aliases=("force_nick", "star", "starify", "superstar"))
+ @ensure_future_timestamp(timestamp_arg=3)
async def superstarify(
self,
ctx: Context,
@@ -237,6 +239,6 @@ class Superstarify(InfractionScheduler, Cog):
return await has_any_role(*constants.MODERATION_ROLES).predicate(ctx)
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the Superstarify cog."""
- bot.add_cog(Superstarify(bot))
+ await bot.add_cog(Superstarify(bot))
diff --git a/bot/exts/moderation/metabase.py b/bot/exts/moderation/metabase.py
index 482d49b83..c63019882 100644
--- a/bot/exts/moderation/metabase.py
+++ b/bot/exts/moderation/metabase.py
@@ -2,21 +2,21 @@ import csv
import json
from datetime import timedelta
from io import StringIO
-from typing import Dict, List, Optional
+from typing import Dict, List, Literal, Optional
import arrow
from aiohttp.client_exceptions import ClientResponseError
from arrow import Arrow
from async_rediscache import RedisCache
-from disnake.ext.commands import Cog, Context, group, has_any_role
+from botcore.utils.scheduling import Scheduler
+from discord.ext.commands import Cog, Context, group, has_any_role
from bot.bot import Bot
from bot.constants import Metabase as MetabaseConfig, Roles
-from bot.converters import allowed_strings
from bot.log import get_logger
-from bot.utils import scheduling, send_to_paste_service
+from bot.utils import send_to_paste_service
from bot.utils.channel import is_mod_channel
-from bot.utils.scheduling import Scheduler
+from bot.utils.services import PasteTooLongError, PasteUploadError
log = get_logger(__name__)
@@ -40,11 +40,9 @@ class Metabase(Cog):
self.exports: Dict[int, List[Dict]] = {} # Saves the output of each question, so internal eval can access it
- self.init_task = scheduling.create_task(self.init_cog(), event_loop=self.bot.loop)
-
async def cog_command_error(self, ctx: Context, error: Exception) -> None:
"""Handle ClientResponseError errors locally to invalidate token if needed."""
- if not isinstance(error.original, ClientResponseError):
+ if not hasattr(error, "original") or not isinstance(error.original, ClientResponseError):
return
if error.original.status == 403:
@@ -61,7 +59,7 @@ class Metabase(Cog):
await ctx.send(f":x: {ctx.author.mention} Session token is invalid or refresh failed.")
error.handled = True
- async def init_cog(self) -> None:
+ async def cog_load(self) -> None:
"""Initialise the metabase session."""
expiry_time = await self.session_info.get("session_expiry")
if expiry_time:
@@ -110,7 +108,7 @@ class Metabase(Cog):
self,
ctx: Context,
question_id: int,
- extension: allowed_strings("csv", "json") = "csv"
+ extension: Literal["csv", "json"] = "csv"
) -> None:
"""
Extract data from a metabase question.
@@ -125,10 +123,7 @@ class Metabase(Cog):
Valid extensions are: csv and json.
"""
- await ctx.trigger_typing()
-
- # Make sure we have a session token before running anything
- await self.init_task
+ await ctx.typing()
url = f"{MetabaseConfig.base_url}/api/card/{question_id}/query/{extension}"
@@ -146,11 +141,15 @@ class Metabase(Cog):
# Format it nicely for human eyes
out = json.dumps(out, indent=4, sort_keys=True)
- paste_link = await send_to_paste_service(out, extension=extension)
- if paste_link:
- message = f":+1: {ctx.author.mention} Here's your link: {paste_link}"
+ try:
+ paste_link = await send_to_paste_service(out, extension=extension)
+ except PasteTooLongError:
+ message = f":x: {ctx.author.mention} Too long to upload to paste service."
+ except PasteUploadError:
+ message = f":x: {ctx.author.mention} Failed to upload to paste service."
else:
- message = f":x: {ctx.author.mention} Link service is unavailible."
+ message = f":+1: {ctx.author.mention} Here's your link: {paste_link}"
+
await ctx.send(
f"{message}\nYou can also access this data within internal eval by doing: "
f"`bot.get_cog('Metabase').exports[{question_id}]`"
@@ -159,9 +158,7 @@ class Metabase(Cog):
@metabase_group.command(name="publish", aliases=("share",))
async def metabase_publish(self, ctx: Context, question_id: int) -> None:
"""Publically shares the given question and posts the link."""
- await ctx.trigger_typing()
- # Make sure we have a session token before running anything
- await self.init_task
+ await ctx.typing()
url = f"{MetabaseConfig.base_url}/api/card/{question_id}/public_link"
@@ -179,22 +176,14 @@ class Metabase(Cog):
]
return all(checks)
- def cog_unload(self) -> None:
- """
- Cancel the init task and scheduled tasks.
-
- It's important to wait for init_task to be cancelled before cancelling scheduled
- tasks. Otherwise, it's possible for _session_scheduler to schedule another task
- after cancel_all has finished, despite _init_task.cancel being called first.
- This is cause cancel() on its own doesn't block until the task is cancelled.
- """
- self.init_task.cancel()
- self.init_task.add_done_callback(lambda _: self._session_scheduler.cancel_all())
+ async def cog_unload(self) -> None:
+ """Cancel all scheduled tasks."""
+ self._session_scheduler.cancel_all()
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the Metabase cog."""
if not all((MetabaseConfig.username, MetabaseConfig.password)):
log.error("Credentials not provided, cog not loaded.")
return
- bot.add_cog(Metabase(bot))
+ await bot.add_cog(Metabase(bot))
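# Editor's note: a hedged sketch of the lifecycle pattern adopted above. discord.py 2.0
# awaits a cog's cog_load() after it is added and cog_unload() when it is removed, so
# the old scheduling.create_task(self.init_cog(), ...) bootstrap and its cancellation
# dance are no longer needed. The asyncio task list below stands in for the project's
# Scheduler; the cog and its work are illustrative only.
import asyncio

from discord.ext import commands


class LifecycleExample(commands.Cog):
    def __init__(self, bot: commands.Bot):
        self.bot = bot
        self._tasks: list[asyncio.Task] = []

    async def cog_load(self) -> None:
        """Async initialisation that previously had to be kicked off from __init__."""
        self._tasks.append(asyncio.create_task(self._refresh_session()))

    async def cog_unload(self) -> None:
        """Cancel anything still scheduled when the cog is removed."""
        for task in self._tasks:
            task.cancel()

    async def _refresh_session(self) -> None:
        """Placeholder for periodic work such as refreshing the Metabase session."""
        await asyncio.sleep(0)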
diff --git a/bot/exts/moderation/modlog.py b/bot/exts/moderation/modlog.py
index a96638e53..efa87ce25 100644
--- a/bot/exts/moderation/modlog.py
+++ b/bot/exts/moderation/modlog.py
@@ -5,13 +5,15 @@ import typing as t
from datetime import datetime, timezone
from itertools import zip_longest
-import disnake
+import discord
+from botcore.site_api import ResponseCodeError
from dateutil.relativedelta import relativedelta
from deepdiff import DeepDiff
-from disnake import Colour, Message, Thread
-from disnake.abc import GuildChannel
-from disnake.ext.commands import Cog, Context
-from disnake.utils import escape_markdown
+from discord import Colour, Message, Thread
+from discord.abc import GuildChannel
+from discord.ext.commands import Cog, Context
+from discord.utils import escape_markdown, format_dt, snowflake_time
+from sentry_sdk import add_breadcrumb
from bot.bot import Bot
from bot.constants import Categories, Channels, Colours, Emojis, Event, Guild as GuildConstant, Icons, Roles, URLs
@@ -21,7 +23,7 @@ from bot.utils.messages import format_user
log = get_logger(__name__)
-GUILD_CHANNEL = t.Union[disnake.CategoryChannel, disnake.TextChannel, disnake.VoiceChannel]
+GUILD_CHANNEL = t.Union[discord.CategoryChannel, discord.TextChannel, discord.VoiceChannel]
CHANNEL_CHANGES_UNSUPPORTED = ("permissions",)
CHANNEL_CHANGES_SUPPRESSED = ("_overwrites", "position")
@@ -45,7 +47,7 @@ class ModLog(Cog, name="ModLog"):
async def upload_log(
self,
- messages: t.Iterable[disnake.Message],
+ messages: t.Iterable[discord.Message],
actor_id: int,
attachments: t.Iterable[t.List[str]] = None
) -> str:
@@ -53,24 +55,35 @@ class ModLog(Cog, name="ModLog"):
if attachments is None:
attachments = []
- response = await self.bot.api_client.post(
- 'bot/deleted-messages',
- json={
- 'actor': actor_id,
- 'creation': datetime.now(timezone.utc).isoformat(),
- 'deletedmessage_set': [
- {
- 'id': message.id,
- 'author': message.author.id,
- 'channel_id': message.channel.id,
- 'content': message.content.replace("\0", ""), # Null chars cause 400.
- 'embeds': [embed.to_dict() for embed in message.embeds],
- 'attachments': attachment,
- }
- for message, attachment in zip_longest(messages, attachments, fillvalue=[])
- ]
+ deletedmessage_set = [
+ {
+ "id": message.id,
+ "author": message.author.id,
+ "channel_id": message.channel.id,
+ "content": message.content.replace("\0", ""), # Null chars cause 400.
+ "embeds": [embed.to_dict() for embed in message.embeds],
+ "attachments": attachment,
}
- )
+ for message, attachment in zip_longest(messages, attachments, fillvalue=[])
+ ]
+
+ try:
+ response = await self.bot.api_client.post(
+ "bot/deleted-messages",
+ json={
+ "actor": actor_id,
+ "creation": datetime.now(timezone.utc).isoformat(),
+ "deletedmessage_set": deletedmessage_set,
+ }
+ )
+ except ResponseCodeError as e:
+ add_breadcrumb(
+ category="api_error",
+ message=str(e),
+ level="error",
+ data=deletedmessage_set,
+ )
+ raise
return f"{URLs.site_logs_view}/{response['id']}"
@@ -83,22 +96,22 @@ class ModLog(Cog, name="ModLog"):
async def send_log_message(
self,
icon_url: t.Optional[str],
- colour: t.Union[disnake.Colour, int],
+ colour: t.Union[discord.Colour, int],
title: t.Optional[str],
text: str,
- thumbnail: t.Optional[t.Union[str, disnake.Asset]] = None,
+ thumbnail: t.Optional[t.Union[str, discord.Asset]] = None,
channel_id: int = Channels.mod_log,
ping_everyone: bool = False,
- files: t.Optional[t.List[disnake.File]] = None,
+ files: t.Optional[t.List[discord.File]] = None,
content: t.Optional[str] = None,
- additional_embeds: t.Optional[t.List[disnake.Embed]] = None,
+ additional_embeds: t.Optional[t.List[discord.Embed]] = None,
timestamp_override: t.Optional[datetime] = None,
footer: t.Optional[str] = None,
) -> Context:
"""Generate log embed and send to logging channel."""
await self.bot.wait_until_guild_available()
# Truncate string directly here to avoid removing newlines
- embed = disnake.Embed(
+ embed = discord.Embed(
description=text[:4093] + "..." if len(text) > 4096 else text
)
@@ -143,10 +156,10 @@ class ModLog(Cog, name="ModLog"):
if channel.guild.id != GuildConstant.id:
return
- if isinstance(channel, disnake.CategoryChannel):
+ if isinstance(channel, discord.CategoryChannel):
title = "Category created"
message = f"{channel.name} (`{channel.id}`)"
- elif isinstance(channel, disnake.VoiceChannel):
+ elif isinstance(channel, discord.VoiceChannel):
title = "Voice channel created"
if channel.category:
@@ -169,14 +182,14 @@ class ModLog(Cog, name="ModLog"):
if channel.guild.id != GuildConstant.id:
return
- if isinstance(channel, disnake.CategoryChannel):
+ if isinstance(channel, discord.CategoryChannel):
title = "Category deleted"
- elif isinstance(channel, disnake.VoiceChannel):
+ elif isinstance(channel, discord.VoiceChannel):
title = "Voice channel deleted"
else:
title = "Text channel deleted"
- if channel.category and not isinstance(channel, disnake.CategoryChannel):
+ if channel.category and not isinstance(channel, discord.CategoryChannel):
message = f"{channel.category}/{channel.name} (`{channel.id}`)"
else:
message = f"{channel.name} (`{channel.id}`)"
@@ -256,7 +269,7 @@ class ModLog(Cog, name="ModLog"):
)
@Cog.listener()
- async def on_guild_role_create(self, role: disnake.Role) -> None:
+ async def on_guild_role_create(self, role: discord.Role) -> None:
"""Log role create event to mod log."""
if role.guild.id != GuildConstant.id:
return
@@ -267,7 +280,7 @@ class ModLog(Cog, name="ModLog"):
)
@Cog.listener()
- async def on_guild_role_delete(self, role: disnake.Role) -> None:
+ async def on_guild_role_delete(self, role: discord.Role) -> None:
"""Log role delete event to mod log."""
if role.guild.id != GuildConstant.id:
return
@@ -278,7 +291,7 @@ class ModLog(Cog, name="ModLog"):
)
@Cog.listener()
- async def on_guild_role_update(self, before: disnake.Role, after: disnake.Role) -> None:
+ async def on_guild_role_update(self, before: discord.Role, after: discord.Role) -> None:
"""Log role update event to mod log."""
if before.guild.id != GuildConstant.id:
return
@@ -331,7 +344,7 @@ class ModLog(Cog, name="ModLog"):
)
@Cog.listener()
- async def on_guild_update(self, before: disnake.Guild, after: disnake.Guild) -> None:
+ async def on_guild_update(self, before: discord.Guild, after: discord.Guild) -> None:
"""Log guild update event to mod log."""
if before.id != GuildConstant.id:
return
@@ -382,7 +395,7 @@ class ModLog(Cog, name="ModLog"):
)
@Cog.listener()
- async def on_member_ban(self, guild: disnake.Guild, member: disnake.Member) -> None:
+ async def on_member_ban(self, guild: discord.Guild, member: discord.Member) -> None:
"""Log ban event to user log."""
if guild.id != GuildConstant.id:
return
@@ -399,7 +412,7 @@ class ModLog(Cog, name="ModLog"):
)
@Cog.listener()
- async def on_member_join(self, member: disnake.Member) -> None:
+ async def on_member_join(self, member: discord.Member) -> None:
"""Log member join event to user log."""
if member.guild.id != GuildConstant.id:
return
@@ -420,7 +433,7 @@ class ModLog(Cog, name="ModLog"):
)
@Cog.listener()
- async def on_member_remove(self, member: disnake.Member) -> None:
+ async def on_member_remove(self, member: discord.Member) -> None:
"""Log member leave event to user log."""
if member.guild.id != GuildConstant.id:
return
@@ -437,7 +450,7 @@ class ModLog(Cog, name="ModLog"):
)
@Cog.listener()
- async def on_member_unban(self, guild: disnake.Guild, member: disnake.User) -> None:
+ async def on_member_unban(self, guild: discord.Guild, member: discord.User) -> None:
"""Log member unban event to mod log."""
if guild.id != GuildConstant.id:
return
@@ -454,7 +467,7 @@ class ModLog(Cog, name="ModLog"):
)
@staticmethod
- def get_role_diff(before: t.List[disnake.Role], after: t.List[disnake.Role]) -> t.List[str]:
+ def get_role_diff(before: t.List[discord.Role], after: t.List[discord.Role]) -> t.List[str]:
"""Return a list of strings describing the roles added and removed."""
changes = []
before_roles = set(before)
@@ -469,7 +482,7 @@ class ModLog(Cog, name="ModLog"):
return changes
@Cog.listener()
- async def on_member_update(self, before: disnake.Member, after: disnake.Member) -> None:
+ async def on_member_update(self, before: discord.Member, after: discord.Member) -> None:
"""Log member update event to user log."""
if before.guild.id != GuildConstant.id:
return
@@ -539,7 +552,7 @@ class ModLog(Cog, name="ModLog"):
channel = self.bot.get_channel(channel_id)
# Ignore not found channels, DMs, and messages outside of the main guild.
- if not channel or not hasattr(channel, "guild") or channel.guild.id != GuildConstant.id:
+ if not channel or channel.guild is None or channel.guild.id != GuildConstant.id:
return True
# Look at the parent channel of a thread.
@@ -552,7 +565,7 @@ class ModLog(Cog, name="ModLog"):
return channel.id in GuildConstant.modlog_blacklist
- async def log_cached_deleted_message(self, message: disnake.Message) -> None:
+ async def log_cached_deleted_message(self, message: discord.Message) -> None:
"""
Log the message's details to message change log.
@@ -573,6 +586,7 @@ class ModLog(Cog, name="ModLog"):
f"**Author:** {format_user(author)}\n"
f"**Channel:** {channel.category}/#{channel.name} (`{channel.id}`)\n"
f"**Message ID:** `{message.id}`\n"
+ f"**Sent at:** {format_dt(message.created_at)}\n"
f"[Jump to message]({message.jump_url})\n"
"\n"
)
@@ -581,6 +595,7 @@ class ModLog(Cog, name="ModLog"):
f"**Author:** {format_user(author)}\n"
f"**Channel:** #{channel.name} (`{channel.id}`)\n"
f"**Message ID:** `{message.id}`\n"
+ f"**Sent at:** {format_dt(message.created_at)}\n"
f"[Jump to message]({message.jump_url})\n"
"\n"
)
@@ -608,7 +623,7 @@ class ModLog(Cog, name="ModLog"):
channel_id=Channels.message_log
)
- async def log_uncached_deleted_message(self, event: disnake.RawMessageDeleteEvent) -> None:
+ async def log_uncached_deleted_message(self, event: discord.RawMessageDeleteEvent) -> None:
"""
Log the message's details to message change log.
@@ -629,6 +644,7 @@ class ModLog(Cog, name="ModLog"):
response = (
f"**Channel:** {channel.category}/#{channel.name} (`{channel.id}`)\n"
f"**Message ID:** `{event.message_id}`\n"
+ f"**Sent at:** {format_dt(snowflake_time(event.message_id))}\n"
"\n"
"This message was not cached, so the message content cannot be displayed."
)
@@ -636,6 +652,7 @@ class ModLog(Cog, name="ModLog"):
response = (
f"**Channel:** #{channel.name} (`{channel.id}`)\n"
f"**Message ID:** `{event.message_id}`\n"
+ f"**Sent at:** {format_dt(snowflake_time(event.message_id))}\n"
"\n"
"This message was not cached, so the message content cannot be displayed."
)
@@ -648,7 +665,7 @@ class ModLog(Cog, name="ModLog"):
)
@Cog.listener()
- async def on_raw_message_delete(self, event: disnake.RawMessageDeleteEvent) -> None:
+ async def on_raw_message_delete(self, event: discord.RawMessageDeleteEvent) -> None:
"""Log message deletions to message change log."""
if event.cached_message is not None:
await self.log_cached_deleted_message(event.cached_message)
@@ -656,7 +673,7 @@ class ModLog(Cog, name="ModLog"):
await self.log_uncached_deleted_message(event)
@Cog.listener()
- async def on_message_edit(self, msg_before: disnake.Message, msg_after: disnake.Message) -> None:
+ async def on_message_edit(self, msg_before: discord.Message, msg_after: discord.Message) -> None:
"""Log message edit event to message change log."""
if self.is_message_blacklisted(msg_before):
return
@@ -727,7 +744,7 @@ class ModLog(Cog, name="ModLog"):
)
@Cog.listener()
- async def on_raw_message_edit(self, event: disnake.RawMessageUpdateEvent) -> None:
+ async def on_raw_message_edit(self, event: discord.RawMessageUpdateEvent) -> None:
"""Log raw message edit event to message change log."""
if event.guild_id is None:
return # ignore DM edits
@@ -736,7 +753,7 @@ class ModLog(Cog, name="ModLog"):
try:
channel = self.bot.get_channel(int(event.data["channel_id"]))
message = await channel.fetch_message(event.message_id)
- except disnake.NotFound: # Was deleted before we got the event
+ except discord.NotFound: # Was deleted before we got the event
return
if self.is_message_blacklisted(message):
@@ -836,13 +853,8 @@ class ModLog(Cog, name="ModLog"):
)
@Cog.listener()
- async def on_thread_join(self, thread: Thread) -> None:
+ async def on_thread_create(self, thread: Thread) -> None:
"""Log thread creation."""
- # If we are in the thread already we can most probably assume we already logged it?
- # We don't really have a better way of doing this since the API doesn't make any difference between the two
- if thread.me:
- return
-
if self.is_channel_ignored(thread.id):
log.trace("Ignoring creation of thread %s (%d)", thread.mention, thread.id)
return
@@ -860,9 +872,9 @@ class ModLog(Cog, name="ModLog"):
@Cog.listener()
async def on_voice_state_update(
self,
- member: disnake.Member,
- before: disnake.VoiceState,
- after: disnake.VoiceState
+ member: discord.Member,
+ before: discord.VoiceState,
+ after: discord.VoiceState
) -> None:
"""Log member voice state changes to the voice log channel."""
if (
@@ -932,6 +944,6 @@ class ModLog(Cog, name="ModLog"):
)
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the ModLog cog."""
- bot.add_cog(ModLog(bot))
+ await bot.add_cog(ModLog(bot))
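# Editor's note: an illustrative sketch of the error-reporting pattern introduced in
# upload_log() above — attach the request payload as a Sentry breadcrumb before
# re-raising, so the eventual Sentry event carries the data that caused the failure.
# post_deleted_messages() and its api_client argument are hypothetical stand-ins for
# the project's site API call, which catches its own ResponseCodeError.
from sentry_sdk import add_breadcrumb


async def post_deleted_messages(api_client, payload: list[dict]) -> dict:
    """Post the payload, recording it as a breadcrumb if the API rejects it."""
    try:
        return await api_client.post("bot/deleted-messages", json={"deletedmessage_set": payload})
    except Exception as e:
        add_breadcrumb(
            category="api_error",
            message=str(e),
            level="error",
            data=payload,
        )
        raise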
diff --git a/bot/exts/moderation/modpings.py b/bot/exts/moderation/modpings.py
index 51d161d84..7c8e4ac13 100644
--- a/bot/exts/moderation/modpings.py
+++ b/bot/exts/moderation/modpings.py
@@ -3,16 +3,17 @@ import datetime
import arrow
from async_rediscache import RedisCache
+from botcore.utils.scheduling import Scheduler
from dateutil.parser import isoparse, parse as dateutil_parse
-from disnake import Embed, Member
-from disnake.ext.commands import Cog, Context, group, has_any_role
+from discord import Member
+from discord.ext.commands import Cog, Context, group, has_any_role
from bot.bot import Bot
-from bot.constants import Colours, Emojis, Guild, Icons, MODERATION_ROLES, Roles
+from bot.constants import Emojis, Guild, MODERATION_ROLES, Roles
from bot.converters import Expiry
from bot.log import get_logger
-from bot.utils import scheduling, time
-from bot.utils.scheduling import Scheduler
+from bot.utils.members import get_or_fetch_member
+from bot.utils.time import TimestampFormats, discord_timestamp
log = get_logger(__name__)
@@ -22,12 +23,12 @@ MAXIMUM_WORK_LIMIT = 16
class ModPings(Cog):
"""Commands for a moderator to turn moderator pings on and off."""
- # RedisCache[disnake.Member.id, 'Naïve ISO 8601 string']
+ # RedisCache[discord.Member.id, 'Naïve ISO 8601 string']
# The cache's keys are mods who have pings off.
# The cache's values are the times when the role should be re-applied to them, stored in ISO format.
pings_off_mods = RedisCache()
- # RedisCache[disnake.Member.id, 'start timestamp|total worktime in seconds']
+ # RedisCache[discord.Member.id, 'start timestamp|total worktime in seconds']
# The cache's keys are mod's ID
# The cache's values are their pings on schedule timestamp and the total seconds (work time) until pings off
modpings_schedule = RedisCache()
@@ -40,15 +41,10 @@ class ModPings(Cog):
self.guild = None
self.moderators_role = None
- self.modpings_schedule_task = scheduling.create_task(
- self.reschedule_modpings_schedule(),
- event_loop=self.bot.loop
- )
- self.reschedule_task = scheduling.create_task(
- self.reschedule_roles(),
- name="mod-pings-reschedule",
- event_loop=self.bot.loop,
- )
+ async def cog_load(self) -> None:
+ """Schedule both when to reapply role and all mod ping schedules."""
+ # await self.reschedule_modpings_schedule()
+ await self.reschedule_roles()
async def reschedule_roles(self) -> None:
"""Reschedule moderators role re-apply times."""
@@ -62,18 +58,29 @@ class ModPings(Cog):
log.trace("Applying the moderators role to the mod team where necessary.")
for mod in mod_team.members:
- if mod in pings_on: # Make sure that on-duty mods aren't in the cache.
+ if mod in pings_on: # Make sure that on-duty mods aren't in the redis cache.
if mod.id in pings_off:
await self.pings_off_mods.delete(mod.id)
continue
- # Keep the role off only for those in the cache.
+ # Keep the role off only for those in the redis cache.
if mod.id not in pings_off:
await self.reapply_role(mod)
else:
expiry = isoparse(pings_off[mod.id])
self._role_scheduler.schedule_at(expiry, mod.id, self.reapply_role(mod))
+ # At this stage every entry in `pings_off` is expected to have a scheduled task, but that might not be the case
+ # if the discord.py cache is missing members, or if the ID belongs to a former moderator.
+ for mod_id, expiry_iso in pings_off.items():
+ if mod_id not in self._role_scheduler:
+ mod = await get_or_fetch_member(self.guild, mod_id)
+ # Make sure the member is still a moderator and doesn't have the pingable role.
+ if mod is None or mod.get_role(Roles.mod_team) is None or mod.get_role(Roles.moderators) is not None:
+ await self.pings_off_mods.delete(mod_id)
+ else:
+ self._role_scheduler.schedule_at(isoparse(expiry_iso), mod_id, self.reapply_role(mod))
+
async def reschedule_modpings_schedule(self) -> None:
"""Reschedule moderators schedule ping."""
await self.bot.wait_until_guild_available()
@@ -170,9 +177,10 @@ class ModPings(Cog):
self._role_scheduler.cancel(mod.id)
self._role_scheduler.schedule_at(duration, mod.id, self.reapply_role(mod))
- embed = Embed(timestamp=duration, colour=Colours.bright_green)
- embed.set_footer(text="Moderators role has been removed until", icon_url=Icons.green_checkmark)
- await ctx.send(embed=embed)
+ await ctx.send(
+ f"{Emojis.check_mark} Moderators role has been removed "
+ f"until {discord_timestamp(duration, format=TimestampFormats.DAY_TIME)}."
+ )
@modpings_group.command(name='on')
@has_any_role(*MODERATION_ROLES)
@@ -232,8 +240,8 @@ class ModPings(Cog):
await ctx.send(
f"{Emojis.ok_hand} {ctx.author.mention} Scheduled mod pings from "
- f"{time.discord_timestamp(start, time.TimestampFormats.TIME)} to "
- f"{time.discord_timestamp(end, time.TimestampFormats.TIME)}!"
+ f"{discord_timestamp(start, TimestampFormats.TIME)} to "
+ f"{discord_timestamp(end, TimestampFormats.TIME)}!"
)
@schedule_modpings.command(name='delete', aliases=('del', 'd'))
@@ -243,16 +251,13 @@ class ModPings(Cog):
await self.modpings_schedule.delete(ctx.author.id)
await ctx.send(f"{Emojis.ok_hand} {ctx.author.mention} Deleted your modpings schedule!")
- def cog_unload(self) -> None:
+ async def cog_unload(self) -> None:
"""Cancel role tasks when the cog unloads."""
- log.trace("Cog unload: canceling role tasks.")
- self.reschedule_task.cancel()
+ log.trace("Cog unload: cancelling all scheduled tasks.")
self._role_scheduler.cancel_all()
-
- self.modpings_schedule_task.cancel()
self._modpings_scheduler.cancel_all()
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the ModPings cog."""
- bot.add_cog(ModPings(bot))
+ await bot.add_cog(ModPings(bot))
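# Editor's note: a hedged sketch of what the get_or_fetch_member() helper used above
# typically does — try the local member cache first, then fall back to an API fetch and
# return None if the user has left the guild. The project's real helper lives in
# bot.utils.members and may differ in detail.
from typing import Optional

import discord


async def get_or_fetch_member(guild: discord.Guild, member_id: int) -> Optional[discord.Member]:
    """Return the member from cache if present, otherwise fetch them from the API."""
    if (member := guild.get_member(member_id)) is not None:
        return member
    try:
        return await guild.fetch_member(member_id)
    except discord.NotFound:
        return None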
diff --git a/bot/exts/moderation/silence.py b/bot/exts/moderation/silence.py
index 0b677dddb..578551d24 100644
--- a/bot/exts/moderation/silence.py
+++ b/bot/exts/moderation/silence.py
@@ -5,18 +5,17 @@ from datetime import datetime, timedelta, timezone
from typing import Optional, OrderedDict, Union
from async_rediscache import RedisCache
-from disnake import Guild, PermissionOverwrite, TextChannel, Thread, VoiceChannel
-from disnake.ext import commands, tasks
-from disnake.ext.commands import Context
-from disnake.utils import MISSING
+from botcore.utils.scheduling import Scheduler
+from discord import Guild, PermissionOverwrite, TextChannel, Thread, VoiceChannel
+from discord.ext import commands, tasks
+from discord.ext.commands import Context
+from discord.utils import MISSING
from bot import constants
from bot.bot import Bot
from bot.converters import HushDurationConverter
from bot.log import get_logger
-from bot.utils import scheduling
from bot.utils.lock import LockedResourceError, lock, lock_arg
-from bot.utils.scheduling import Scheduler
log = get_logger(__name__)
@@ -56,7 +55,6 @@ class SilenceNotifier(tasks.Loop):
hours=0,
count=None,
reconnect=True,
- loop=None,
time=MISSING
)
self._silenced_channels = {}
@@ -115,9 +113,7 @@ class Silence(commands.Cog):
self.bot = bot
self.scheduler = Scheduler(self.__class__.__name__)
- self._init_task = scheduling.create_task(self._async_init(), event_loop=self.bot.loop)
-
- async def _async_init(self) -> None:
+ async def cog_load(self) -> None:
"""Set instance attributes once the guild is available and reschedule unsilences."""
await self.bot.wait_until_guild_available()
@@ -177,7 +173,6 @@ class Silence(commands.Cog):
Passing a voice channel will attempt to move members out of the channel and back to force sync permissions.
If `kick` is True, members will not be added back to the voice channel, and members will be unable to rejoin.
"""
- await self._init_task
channel, duration = self.parse_silence_args(ctx, duration_or_channel, duration)
channel_info = f"#{channel} ({channel.id})"
@@ -281,7 +276,6 @@ class Silence(commands.Cog):
If the channel was silenced indefinitely, notifications for the channel will stop.
"""
- await self._init_task
if channel is None:
channel = ctx.channel
log.debug(f"Unsilencing channel #{channel} from {ctx.author}'s command.")
@@ -467,21 +461,16 @@ class Silence(commands.Cog):
log.info(f"Rescheduling silence for #{channel} ({channel.id}).")
self.scheduler.schedule_later(delta, channel_id, self._unsilence_wrapper(channel))
- def cog_unload(self) -> None:
- """Cancel the init task and scheduled tasks."""
- # It's important to wait for _init_task (specifically for _reschedule) to be cancelled
- # before cancelling scheduled tasks. Otherwise, it's possible for _reschedule to schedule
- # more tasks after cancel_all has finished, despite _init_task.cancel being called first.
- # This is cause cancel() on its own doesn't block until the task is cancelled.
- self._init_task.cancel()
- self._init_task.add_done_callback(lambda _: self.scheduler.cancel_all())
-
# This cannot be static (must have a __func__ attribute).
async def cog_check(self, ctx: Context) -> bool:
"""Only allow moderators to invoke the commands in this cog."""
return await commands.has_any_role(*constants.MODERATION_ROLES).predicate(ctx)
+ async def cog_unload(self) -> None:
+ """Cancel all scheduled tasks."""
+ self.scheduler.cancel_all()
+
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the Silence cog."""
- bot.add_cog(Silence(bot))
+ await bot.add_cog(Silence(bot))
diff --git a/bot/exts/moderation/slowmode.py b/bot/exts/moderation/slowmode.py
index 7fcafc01c..c43ae8b0c 100644
--- a/bot/exts/moderation/slowmode.py
+++ b/bot/exts/moderation/slowmode.py
@@ -1,8 +1,8 @@
-from typing import Optional
+from typing import Literal, Optional, Union
from dateutil.relativedelta import relativedelta
-from disnake import TextChannel
-from disnake.ext.commands import Cog, Context, group, has_any_role
+from discord import TextChannel, Thread
+from discord.ext.commands import Cog, Context, group, has_any_role
from bot.bot import Bot
from bot.constants import Channels, Emojis, MODERATION_ROLES
@@ -20,6 +20,8 @@ COMMONLY_SLOWMODED_CHANNELS = {
Channels.off_topic_0: "ot0",
}
+MessageHolder = Optional[Union[TextChannel, Thread]]
+
class Slowmode(Cog):
"""Commands for getting and setting slowmode delays of text channels."""
@@ -33,7 +35,7 @@ class Slowmode(Cog):
await ctx.send_help(ctx.command)
@slowmode_group.command(name='get', aliases=['g'])
- async def get_slowmode(self, ctx: Context, channel: Optional[TextChannel]) -> None:
+ async def get_slowmode(self, ctx: Context, channel: MessageHolder) -> None:
"""Get the slowmode delay for a text channel."""
# Use the channel this command was invoked in if one was not given
if channel is None:
@@ -44,7 +46,12 @@ class Slowmode(Cog):
await ctx.send(f'The slowmode delay for {channel.mention} is {humanized_delay}.')
@slowmode_group.command(name='set', aliases=['s'])
- async def set_slowmode(self, ctx: Context, channel: Optional[TextChannel], delay: DurationDelta) -> None:
+ async def set_slowmode(
+ self,
+ ctx: Context,
+ channel: MessageHolder,
+ delay: Union[DurationDelta, Literal["0s", "0seconds"]],
+ ) -> None:
"""Set the slowmode delay for a text channel."""
# Use the channel this command was invoked in if one was not given
if channel is None:
@@ -52,8 +59,10 @@ class Slowmode(Cog):
# Convert `dateutil.relativedelta.relativedelta` to `datetime.timedelta`
# Must do this to get the delta in a particular unit of time
- slowmode_delay = time.relativedelta_to_timedelta(delay).total_seconds()
+ if isinstance(delay, str):
+ delay = relativedelta(seconds=0)
+ slowmode_delay = time.relativedelta_to_timedelta(delay).total_seconds()
humanized_delay = time.humanize_delta(delay)
# Ensure the delay is within discord's limits
@@ -80,7 +89,7 @@ class Slowmode(Cog):
)
@slowmode_group.command(name='reset', aliases=['r'])
- async def reset_slowmode(self, ctx: Context, channel: Optional[TextChannel]) -> None:
+ async def reset_slowmode(self, ctx: Context, channel: MessageHolder) -> None:
"""Reset the slowmode delay for a text channel to 0 seconds."""
await self.set_slowmode(ctx, channel, relativedelta(seconds=0))
@@ -89,6 +98,6 @@ class Slowmode(Cog):
return await has_any_role(*MODERATION_ROLES).predicate(ctx)
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the Slowmode cog."""
- bot.add_cog(Slowmode(bot))
+ await bot.add_cog(Slowmode(bot))
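# Editor's note: an illustrative helper showing the zero-delay handling added to
# set_slowmode above — the literal "0s"/"0seconds" strings are normalised to
# relativedelta(seconds=0) before converting to seconds. The conversion below is a
# generic stand-in for the project's time.relativedelta_to_timedelta().
from datetime import datetime
from typing import Literal, Union

from dateutil.relativedelta import relativedelta


def slowmode_seconds(delay: Union[relativedelta, Literal["0s", "0seconds"]]) -> float:
    """Return the requested slowmode delay in seconds."""
    if isinstance(delay, str):
        delay = relativedelta(seconds=0)
    # relativedelta has no total_seconds(); apply it to a fixed instant to measure it.
    base = datetime(2022, 1, 1)
    return ((base + delay) - base).total_seconds()


assert slowmode_seconds("0s") == 0
assert slowmode_seconds(relativedelta(minutes=2)) == 120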
diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py
index 7afd9f71d..a96e96511 100644
--- a/bot/exts/moderation/stream.py
+++ b/bot/exts/moderation/stream.py
@@ -2,10 +2,11 @@ from datetime import timedelta, timezone
from operator import itemgetter
import arrow
-import disnake
+import discord
from arrow import Arrow
from async_rediscache import RedisCache
-from disnake.ext import commands
+from botcore.utils import scheduling
+from discord.ext import commands
from bot.bot import Bot
from bot.constants import (
@@ -14,7 +15,7 @@ from bot.constants import (
from bot.converters import Expiry
from bot.log import get_logger
from bot.pagination import LinePaginator
-from bot.utils import scheduling, time
+from bot.utils import time
from bot.utils.members import get_or_fetch_member
log = get_logger(__name__)
@@ -24,25 +25,19 @@ class Stream(commands.Cog):
"""Grant and revoke streaming permissions from members."""
# Stores tasks to remove streaming permission
- # RedisCache[disnake.Member.id, UtcPosixTimestamp]
+ # RedisCache[discord.Member.id, UtcPosixTimestamp]
task_cache = RedisCache()
def __init__(self, bot: Bot):
self.bot = bot
self.scheduler = scheduling.Scheduler(self.__class__.__name__)
- self.reload_task = scheduling.create_task(self._reload_tasks_from_redis(), event_loop=self.bot.loop)
- def cog_unload(self) -> None:
- """Cancel all scheduled tasks."""
- self.reload_task.cancel()
- self.reload_task.add_done_callback(lambda _: self.scheduler.cancel_all())
-
- async def _revoke_streaming_permission(self, member: disnake.Member) -> None:
+ async def _revoke_streaming_permission(self, member: discord.Member) -> None:
"""Remove the streaming permission from the given Member."""
await self.task_cache.delete(member.id)
- await member.remove_roles(disnake.Object(Roles.video), reason="Streaming access revoked")
+ await member.remove_roles(discord.Object(Roles.video), reason="Streaming access revoked")
- async def _reload_tasks_from_redis(self) -> None:
+ async def cog_load(self) -> None:
"""Reload outstanding tasks from redis on startup, delete the task if the member has since left the server."""
await self.bot.wait_until_guild_available()
items = await self.task_cache.items()
@@ -66,7 +61,7 @@ class Stream(commands.Cog):
self._revoke_streaming_permission(member)
)
- async def _suspend_stream(self, ctx: commands.Context, member: disnake.Member) -> None:
+ async def _suspend_stream(self, ctx: commands.Context, member: discord.Member) -> None:
"""Suspend a member's stream."""
await self.bot.wait_until_guild_available()
voice_state = member.voice
@@ -90,7 +85,7 @@ class Stream(commands.Cog):
@commands.command(aliases=("streaming",))
@commands.has_any_role(*MODERATION_ROLES)
- async def stream(self, ctx: commands.Context, member: disnake.Member, duration: Expiry = None) -> None:
+ async def stream(self, ctx: commands.Context, member: discord.Member, duration: Expiry = None) -> None:
"""
Temporarily grant streaming permissions to a member for a given duration.
@@ -128,7 +123,7 @@ class Stream(commands.Cog):
self.scheduler.schedule_at(duration, member.id, self._revoke_streaming_permission(member))
await self.task_cache.set(member.id, duration.timestamp())
- await member.add_roles(disnake.Object(Roles.video), reason="Temporary streaming access granted")
+ await member.add_roles(discord.Object(Roles.video), reason="Temporary streaming access granted")
await ctx.send(f"{Emojis.check_mark} {member.mention} can now stream until {time.discord_timestamp(duration)}.")
@@ -142,7 +137,7 @@ class Stream(commands.Cog):
@commands.command(aliases=("pstream",))
@commands.has_any_role(*MODERATION_ROLES)
- async def permanentstream(self, ctx: commands.Context, member: disnake.Member) -> None:
+ async def permanentstream(self, ctx: commands.Context, member: discord.Member) -> None:
"""Permanently grants the given member the permission to stream."""
log.trace(f"Attempting to give permanent streaming permission to {member} ({member.id}).")
@@ -163,13 +158,13 @@ class Stream(commands.Cog):
log.debug(f"{member} ({member.id}) already had permanent streaming permission.")
return
- await member.add_roles(disnake.Object(Roles.video), reason="Permanent streaming access granted")
+ await member.add_roles(discord.Object(Roles.video), reason="Permanent streaming access granted")
await ctx.send(f"{Emojis.check_mark} Permanently granted {member.mention} the permission to stream.")
log.debug(f"Successfully gave {member} ({member.id}) permanent streaming permission.")
@commands.command(aliases=("unstream", "rstream"))
@commands.has_any_role(*MODERATION_ROLES)
- async def revokestream(self, ctx: commands.Context, member: disnake.Member) -> None:
+ async def revokestream(self, ctx: commands.Context, member: discord.Member) -> None:
"""Revoke the permission to stream from the given member."""
log.trace(f"Attempting to remove streaming permission from {member} ({member.id}).")
@@ -222,7 +217,7 @@ class Stream(commands.Cog):
# Only output the message in the pagination
lines = [line[1] for line in streamer_info]
- embed = disnake.Embed(
+ embed = discord.Embed(
title=f"Members with streaming permission (`{len(lines)}` total)",
colour=Colours.soft_green
)
@@ -230,7 +225,11 @@ class Stream(commands.Cog):
else:
await ctx.send("No members with stream permissions found.")
+ async def cog_unload(self) -> None:
+ """Cancel all scheduled tasks."""
+ self.scheduler.cancel_all()
+
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Loads the Stream cog."""
- bot.add_cog(Stream(bot))
+ await bot.add_cog(Stream(bot))
diff --git a/bot/exts/moderation/verification.py b/bot/exts/moderation/verification.py
index c958aa160..306c27e06 100644
--- a/bot/exts/moderation/verification.py
+++ b/bot/exts/moderation/verification.py
@@ -1,7 +1,7 @@
import typing as t
-import disnake
-from disnake.ext.commands import Cog, Context, command, has_any_role
+import discord
+from discord.ext.commands import Cog, Context, command, has_any_role
from bot import constants
from bot.bot import Bot
@@ -51,7 +51,7 @@ async def safe_dm(coro: t.Coroutine) -> None:
"""
try:
await coro
- except disnake.HTTPException as discord_exc:
+ except discord.HTTPException as discord_exc:
log.trace(f"DM dispatch failed on status {discord_exc.status} with code: {discord_exc.code}")
if discord_exc.code != 50_007: # If any reason other than disabled DMs
raise
@@ -72,7 +72,7 @@ class Verification(Cog):
# region: listeners
@Cog.listener()
- async def on_member_join(self, member: disnake.Member) -> None:
+ async def on_member_join(self, member: discord.Member) -> None:
"""Attempt to send initial direct message to each new member."""
if member.guild.id != constants.Guild.id:
return # Only listen for PyDis events
@@ -87,11 +87,11 @@ class Verification(Cog):
log.trace(f"Sending on join message to new member: {member.id}")
try:
await safe_dm(member.send(ON_JOIN_MESSAGE))
- except disnake.HTTPException:
+ except discord.HTTPException:
log.exception("DM dispatch failed on unexpected error code")
@Cog.listener()
- async def on_member_update(self, before: disnake.Member, after: disnake.Member) -> None:
+ async def on_member_update(self, before: discord.Member, after: discord.Member) -> None:
"""Check if we need to send a verification DM to a gated user."""
if before.pending is True and after.pending is False:
try:
@@ -100,7 +100,7 @@ class Verification(Cog):
# our alternate welcome DM which includes info such as our welcome
# video.
await safe_dm(after.send(VERIFIED_MESSAGE))
- except disnake.HTTPException:
+ except discord.HTTPException:
log.exception("DM dispatch failed on unexpected error code")
# endregion
@@ -108,7 +108,7 @@ class Verification(Cog):
@command(name='verify')
@has_any_role(*constants.MODERATION_ROLES)
- async def perform_manual_verification(self, ctx: Context, user: disnake.Member) -> None:
+ async def perform_manual_verification(self, ctx: Context, user: discord.Member) -> None:
"""Command for moderators to verify any user."""
log.trace(f'verify command called by {ctx.author} for {user.id}.')
@@ -127,6 +127,6 @@ class Verification(Cog):
# endregion
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the Verification cog."""
- bot.add_cog(Verification(bot))
+ await bot.add_cog(Verification(bot))
diff --git a/bot/exts/moderation/voice_gate.py b/bot/exts/moderation/voice_gate.py
index 24ae86bdd..90f88d040 100644
--- a/bot/exts/moderation/voice_gate.py
+++ b/bot/exts/moderation/voice_gate.py
@@ -3,14 +3,14 @@ from contextlib import suppress
from datetime import timedelta
import arrow
-import disnake
+import discord
from async_rediscache import RedisCache
-from disnake import Colour, Member, VoiceState
-from disnake.ext.commands import Cog, Context, command
+from botcore.site_api import ResponseCodeError
+from discord import Colour, Member, VoiceState
+from discord.ext.commands import Cog, Context, command
-from bot.api import ResponseCodeError
from bot.bot import Bot
-from bot.constants import Channels, Event, MODERATION_ROLES, Roles, VoiceGate as GateConf
+from bot.constants import Bot as BotConfig, Channels, MODERATION_ROLES, Roles, VoiceGate as GateConf
from bot.decorators import has_no_roles, in_whitelist
from bot.exts.moderation.modlog import ModLog
from bot.log import get_logger
@@ -37,21 +37,21 @@ MESSAGE_FIELD_MAP = {
VOICE_PING = (
"Wondering why you can't talk in the voice channels? "
- "Use the `!voiceverify` command in here to verify. "
+ f"Use the `{BotConfig.prefix}voiceverify` command in here to verify. "
"If you don't yet qualify, you'll be told why!"
)
VOICE_PING_DM = (
"Wondering why you can't talk in the voice channels? "
- "Use the `!voiceverify` command in {channel_mention} to verify. "
- "If you don't yet qualify, you'll be told why!"
+ f"Use the `{BotConfig.prefix}voiceverify` command in "
+ "{channel_mention} to verify. If you don't yet qualify, you'll be told why!"
)
class VoiceGate(Cog):
"""Voice channels verification management."""
- # RedisCache[t.Union[disnake.User.id, disnake.Member.id], t.Union[disnake.Message.id, int]]
+ # RedisCache[t.Union[discord.User.id, discord.Member.id], t.Union[discord.Message.id, int]]
# The cache's keys are the IDs of members who are verified or have joined a voice channel
# The cache's values are either the message ID of the ping message or 0 (NO_MSG) if no message is present
redis_cache = RedisCache()
@@ -75,14 +75,14 @@ class VoiceGate(Cog):
"""
if message_id := await self.redis_cache.get(member_id):
log.trace(f"Removing voice gate reminder message for user: {member_id}")
- with suppress(disnake.NotFound):
+ with suppress(discord.NotFound):
await self.bot.http.delete_message(Channels.voice_gate, message_id)
await self.redis_cache.set(member_id, NO_MSG)
else:
log.trace(f"Voice gate reminder message for user {member_id} was already removed")
@redis_cache.atomic_transaction
- async def _ping_newcomer(self, member: disnake.Member) -> tuple:
+ async def _ping_newcomer(self, member: discord.Member) -> tuple:
"""
See if `member` should be sent a voice verification notification, and send it if so.
@@ -91,7 +91,7 @@ class VoiceGate(Cog):
* The `member` is already voice-verified
Otherwise, the notification message ID is stored in `redis_cache` and return (True, channel).
- channel is either [disnake.TextChannel, disnake.DMChannel].
+ channel is either [discord.TextChannel, discord.DMChannel].
"""
if await self.redis_cache.contains(member.id):
log.trace("User already in cache. Ignore.")
@@ -111,7 +111,7 @@ class VoiceGate(Cog):
try:
message = await member.send(VOICE_PING_DM.format(channel_mention=voice_verification_channel.mention))
- except disnake.Forbidden:
+ except discord.Forbidden:
log.trace("DM failed for Voice ping message. Sending in channel.")
message = await voice_verification_channel.send(f"Hello, {member.mention}! {VOICE_PING}")
@@ -137,7 +137,7 @@ class VoiceGate(Cog):
data = await self.bot.api_client.get(f"bot/users/{ctx.author.id}/metricity_data")
except ResponseCodeError as e:
if e.status == 404:
- embed = disnake.Embed(
+ embed = discord.Embed(
title="Not found",
description=(
"We were unable to find user data for you. "
@@ -148,7 +148,7 @@ class VoiceGate(Cog):
)
log.info(f"Unable to find Metricity data about {ctx.author} ({ctx.author.id})")
else:
- embed = disnake.Embed(
+ embed = discord.Embed(
title="Unexpected response",
description=(
"We encountered an error while attempting to find data for your user. "
@@ -159,7 +159,7 @@ class VoiceGate(Cog):
log.warning(f"Got response code {e.status} while trying to get {ctx.author.id} Metricity data.")
try:
await ctx.author.send(embed=embed)
- except disnake.Forbidden:
+ except discord.Forbidden:
log.info("Could not send user DM. Sending in voice-verify channel and scheduling delete.")
await ctx.send(embed=embed)
@@ -179,7 +179,7 @@ class VoiceGate(Cog):
[self.bot.stats.incr(f"voice_gate.failed.{key}") for key, value in checks.items() if value is True]
if failed:
- embed = disnake.Embed(
+ embed = discord.Embed(
title="Voice Gate failed",
description=FAILED_MESSAGE.format(reasons="\n".join(f'• You {reason}.' for reason in failed_reasons)),
color=Colour.red()
@@ -187,12 +187,11 @@ class VoiceGate(Cog):
try:
await ctx.author.send(embed=embed)
await ctx.send(f"{ctx.author}, please check your DMs.")
- except disnake.Forbidden:
+ except discord.Forbidden:
await ctx.channel.send(ctx.author.mention, embed=embed)
return
- self.mod_log.ignore(Event.member_update, ctx.author.id)
- embed = disnake.Embed(
+ embed = discord.Embed(
title="Voice gate passed",
description="You have been granted permission to use voice channels in Python Discord.",
color=Colour.green()
@@ -204,17 +203,17 @@ class VoiceGate(Cog):
try:
await ctx.author.send(embed=embed)
await ctx.send(f"{ctx.author}, please check your DMs.")
- except disnake.Forbidden:
+ except discord.Forbidden:
await ctx.channel.send(ctx.author.mention, embed=embed)
# wait a little bit so those who don't get DMs see the response in-channel before losing perms to see it.
await asyncio.sleep(3)
- await ctx.author.add_roles(disnake.Object(Roles.voice_verified), reason="Voice Gate passed")
+ await ctx.author.add_roles(discord.Object(Roles.voice_verified), reason="Voice Gate passed")
self.bot.stats.incr("voice_gate.passed")
@Cog.listener()
- async def on_message(self, message: disnake.Message) -> None:
+ async def on_message(self, message: discord.Message) -> None:
"""Delete all non-staff messages from voice gate channel that don't invoke voice verify command."""
# Check is channel voice gate
if message.channel.id != Channels.voice_gate:
@@ -229,7 +228,7 @@ class VoiceGate(Cog):
if message.content.endswith(VOICE_PING):
log.trace("Message is the voice verification ping. Ignore.")
return
- with suppress(disnake.NotFound):
+ with suppress(discord.NotFound):
await message.delete(delay=GateConf.bot_message_delete_delay)
return
@@ -238,11 +237,7 @@ class VoiceGate(Cog):
log.trace(f"Excluding moderator message {message.id} from deletion in #{message.channel}.")
return
- # Ignore deleted voice verification messages
- if ctx.command is not None and ctx.command.name == "voice_verify":
- self.mod_log.ignore(Event.message_delete, message.id)
-
- with suppress(disnake.NotFound):
+ with suppress(discord.NotFound):
await message.delete()
@Cog.listener()
@@ -257,7 +252,7 @@ class VoiceGate(Cog):
log.trace("User not in a voice channel. Ignore.")
return
- if isinstance(after.channel, disnake.StageChannel):
+ if isinstance(after.channel, discord.StageChannel):
log.trace("User joined a stage channel. Ignore.")
return
@@ -267,7 +262,7 @@ class VoiceGate(Cog):
# Schedule the channel ping notification to be deleted after the configured delay, which is
# again delegated to an atomic helper
- if notification_sent and isinstance(message_channel, disnake.TextChannel):
+ if notification_sent and isinstance(message_channel, discord.TextChannel):
await asyncio.sleep(GateConf.voice_ping_delete_delay)
await self._delete_ping(member.id)
@@ -277,6 +272,6 @@ class VoiceGate(Cog):
error.handled = True
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Loads the VoiceGate cog."""
- bot.add_cog(VoiceGate(bot))
+ await bot.add_cog(VoiceGate(bot))
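# Editor's note: a small sketch of the template change above. The command prefix is
# baked in once with an f-string segment, while {channel_mention} is left as a
# str.format() placeholder to be filled when the destination channel is known.
# PREFIX and the mention below are example values, not the bot's real configuration.
PREFIX = "!"  # stand-in for bot.constants.Bot.prefix

VOICE_PING_DM = (
    "Wondering why you can't talk in the voice channels? "
    f"Use the `{PREFIX}voiceverify` command in "
    "{channel_mention} to verify. If you don't yet qualify, you'll be told why!"
)

print(VOICE_PING_DM.format(channel_mention="#voice-verification"))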
diff --git a/bot/exts/moderation/watchchannels/_watchchannel.py b/bot/exts/moderation/watchchannels/_watchchannel.py
index 88669ccaa..46f9c296e 100644
--- a/bot/exts/moderation/watchchannels/_watchchannel.py
+++ b/bot/exts/moderation/watchchannels/_watchchannel.py
@@ -6,11 +6,12 @@ from collections import defaultdict, deque
from dataclasses import dataclass
from typing import Any, Dict, Optional
-import disnake
-from disnake import Color, DMChannel, Embed, HTTPException, Message, errors
-from disnake.ext.commands import Cog, Context
+import discord
+from botcore.site_api import ResponseCodeError
+from botcore.utils import scheduling
+from discord import Color, DMChannel, Embed, HTTPException, Message, errors
+from discord.ext.commands import Cog, Context
-from bot.api import ResponseCodeError
from bot.bot import Bot
from bot.constants import BigBrother as BigBrotherConfig, Guild as GuildConfig, Icons
from bot.exts.filters.token_remover import TokenRemover
@@ -18,7 +19,7 @@ from bot.exts.filters.webhook_remover import WEBHOOK_URL_RE
from bot.exts.moderation.modlog import ModLog
from bot.log import CustomLogger, get_logger
from bot.pagination import LinePaginator
-from bot.utils import CogABCMeta, messages, scheduling, time
+from bot.utils import CogABCMeta, messages, time
from bot.utils.members import get_or_fetch_member
log = get_logger(__name__)
@@ -69,8 +70,6 @@ class WatchChannel(metaclass=CogABCMeta):
self.message_history = MessageHistory()
self.disable_header = disable_header
- self._start = scheduling.create_task(self.start_watchchannel(), event_loop=self.bot.loop)
-
@property
def modlog(self) -> ModLog:
"""Provides access to the ModLog cog for alert purposes."""
@@ -93,7 +92,7 @@ class WatchChannel(metaclass=CogABCMeta):
return True
- async def start_watchchannel(self) -> None:
+ async def cog_load(self) -> None:
"""Starts the watch channel by getting the channel, webhook, and user cache ready."""
await self.bot.wait_until_guild_available()
@@ -104,7 +103,7 @@ class WatchChannel(metaclass=CogABCMeta):
try:
self.webhook = await self.bot.fetch_webhook(self.webhook_id)
- except disnake.HTTPException:
+ except discord.HTTPException:
self.log.exception(f"Failed to fetch webhook with id `{self.webhook_id}`")
if self.channel is None or self.webhook is None:
@@ -217,7 +216,7 @@ class WatchChannel(metaclass=CogABCMeta):
username = messages.sub_clyde(username)
try:
await self.webhook.send(content=content, username=username, avatar_url=avatar_url, embed=embed)
- except disnake.HTTPException as exc:
+ except discord.HTTPException as exc:
self.log.exception(
"Failed to send a message to the webhook",
exc_info=exc
@@ -265,7 +264,7 @@ class WatchChannel(metaclass=CogABCMeta):
username=msg.author.display_name,
avatar_url=msg.author.display_avatar.url
)
- except disnake.HTTPException as exc:
+ except discord.HTTPException as exc:
self.log.exception(
"Failed to send an attachment to the webhook",
exc_info=exc
@@ -374,7 +373,7 @@ class WatchChannel(metaclass=CogABCMeta):
self.message_queue.pop(user_id, None)
self.consumption_queue.pop(user_id, None)
- def cog_unload(self) -> None:
+ async def cog_unload(self) -> None:
"""Takes care of unloading the cog and canceling the consumption task."""
self.log.trace("Unloading the cog")
if self._consume_task and not self._consume_task.done():
diff --git a/bot/exts/moderation/watchchannels/bigbrother.py b/bot/exts/moderation/watchchannels/bigbrother.py
index b0a48ceff..4a746edff 100644
--- a/bot/exts/moderation/watchchannels/bigbrother.py
+++ b/bot/exts/moderation/watchchannels/bigbrother.py
@@ -1,7 +1,7 @@
import textwrap
from collections import ChainMap
-from disnake.ext.commands import Cog, Context, group, has_any_role
+from discord.ext.commands import Cog, Context, group, has_any_role
from bot.bot import Bot
from bot.constants import Channels, MODERATION_ROLES, Webhooks
@@ -22,7 +22,7 @@ class BigBrother(WatchChannel, Cog, name="Big Brother"):
destination=Channels.big_brother_logs,
webhook_id=Webhooks.big_brother,
api_endpoint='bot/infractions',
- api_default_params={'active': 'true', 'type': 'watch', 'ordering': '-inserted_at'},
+ api_default_params={'active': 'true', 'type': 'watch', 'ordering': '-inserted_at', 'limit': 10_000},
logger=log
)
@@ -94,7 +94,7 @@ class BigBrother(WatchChannel, Cog, name="Big Brother"):
await ctx.send(f":x: {user.mention} is already being watched.")
return
- # disnake.User instances don't have a roles attribute
+ # discord.User instances don't have a roles attribute
if hasattr(user, "roles") and any(role.id in MODERATION_ROLES for role in user.roles):
await ctx.send(f":x: I'm sorry {ctx.author}, I'm afraid I can't do that. I must be kind to my masters.")
return
@@ -169,6 +169,6 @@ class BigBrother(WatchChannel, Cog, name="Big Brother"):
await ctx.send(message)
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the BigBrother cog."""
- bot.add_cog(BigBrother(bot))
+ await bot.add_cog(BigBrother(bot))
diff --git a/bot/exts/recruitment/talentpool/__init__.py b/bot/exts/recruitment/talentpool/__init__.py
index 52d27eb99..aa09a1ee2 100644
--- a/bot/exts/recruitment/talentpool/__init__.py
+++ b/bot/exts/recruitment/talentpool/__init__.py
@@ -1,8 +1,8 @@
from bot.bot import Bot
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the TalentPool cog."""
from bot.exts.recruitment.talentpool._cog import TalentPool
- bot.add_cog(TalentPool(bot))
+ await bot.add_cog(TalentPool(bot))
diff --git a/bot/exts/recruitment/talentpool/_cog.py b/bot/exts/recruitment/talentpool/_cog.py
index 3d784ef77..9819152b0 100644
--- a/bot/exts/recruitment/talentpool/_cog.py
+++ b/bot/exts/recruitment/talentpool/_cog.py
@@ -3,19 +3,20 @@ from collections import ChainMap, defaultdict
from io import StringIO
from typing import Optional, Union
-import disnake
+import discord
from async_rediscache import RedisCache
-from disnake import Color, Embed, Member, PartialMessage, RawReactionActionEvent, User
-from disnake.ext.commands import BadArgument, Cog, Context, group, has_any_role
+from botcore.site_api import ResponseCodeError
+from botcore.utils import scheduling
+from discord import Color, Embed, Member, PartialMessage, RawReactionActionEvent, User
+from discord.ext.commands import BadArgument, Cog, Context, group, has_any_role
-from bot.api import ResponseCodeError
from bot.bot import Bot
-from bot.constants import Channels, Emojis, Guild, MODERATION_ROLES, Roles, STAFF_ROLES
+from bot.constants import Bot as BotConfig, Channels, Emojis, Guild, MODERATION_ROLES, Roles, STAFF_ROLES
from bot.converters import MemberOrUser, UnambiguousMemberOrUser
from bot.exts.recruitment.talentpool._review import Reviewer
from bot.log import get_logger
from bot.pagination import LinePaginator
-from bot.utils import scheduling, time
+from bot.utils import time
from bot.utils.members import get_or_fetch_member
AUTOREVIEW_ENABLED_KEY = "autoreview_enabled"
@@ -96,33 +97,33 @@ class TalentPool(Cog, name="Talentpool"):
manually reviewed with the `tp post_review <user_id>` command.
"""
if await self.autoreview_enabled():
- await ctx.send(":x: Autoreview is already enabled")
+ await ctx.send(":x: Autoreview is already enabled.")
return
await self.talentpool_settings.set(AUTOREVIEW_ENABLED_KEY, True)
await self.reviewer.reschedule_reviews()
- await ctx.send(":white_check_mark: Autoreview enabled")
+ await ctx.send(":white_check_mark: Autoreview enabled.")
@nomination_autoreview_group.command(name="disable", aliases=("off",))
@has_any_role(Roles.admins)
async def autoreview_disable(self, ctx: Context) -> None:
"""Disable automatic posting of reviews."""
if not await self.autoreview_enabled():
- await ctx.send(":x: Autoreview is already disabled")
+ await ctx.send(":x: Autoreview is already disabled.")
return
await self.talentpool_settings.set(AUTOREVIEW_ENABLED_KEY, False)
self.reviewer.cancel_all()
- await ctx.send(":white_check_mark: Autoreview disabled")
+ await ctx.send(":white_check_mark: Autoreview disabled.")
@nomination_autoreview_group.command(name="status")
@has_any_role(*MODERATION_ROLES)
async def autoreview_status(self, ctx: Context) -> None:
"""Show whether automatic posting of reviews is enabled or disabled."""
if await self.autoreview_enabled():
- await ctx.send("Autoreview is currently enabled")
+ await ctx.send("Autoreview is currently enabled.")
else:
- await ctx.send("Autoreview is currently disabled")
+ await ctx.send("Autoreview is currently disabled.")
@nomination_group.command(
name="nominees",
@@ -236,10 +237,10 @@ class TalentPool(Cog, name="Talentpool"):
if any(role.id in MODERATION_ROLES for role in ctx.author.roles):
await ctx.send(
f":x: Nominations should be run in the <#{Channels.nominations}> channel. "
- "Use `!tp forcenominate` to override this check."
+ f"Use `{BotConfig.prefix}tp forcenominate` to override this check."
)
else:
- await ctx.send(f":x: Nominations must be run in the <#{Channels.nominations}> channel")
+ await ctx.send(f":x: Nominations must be run in the <#{Channels.nominations}> channel.")
return
await self._nominate_user(ctx, user, reason)
@@ -255,11 +256,11 @@ class TalentPool(Cog, name="Talentpool"):
return
if not await self.refresh_cache():
- await ctx.send(f":x: Failed to update the cache; can't add {user}")
+ await ctx.send(f":x: Failed to update the cache; can't add {user.mention}.")
return
if len(reason) > REASON_MAX_CHARS:
- await ctx.send(f":x: Maximum allowed characters for the reason is {REASON_MAX_CHARS}.")
+ await ctx.send(f":x: The reason's length must not exceed {REASON_MAX_CHARS} characters.")
return
# Manual request with `raise_for_status` as False because we want the actual response
@@ -278,9 +279,9 @@ class TalentPool(Cog, name="Talentpool"):
if resp.status == 400:
if response_data.get('user', False):
- await ctx.send(":x: The specified user can't be found in the database tables")
+ await ctx.send(f":x: {user.mention} can't be found in the database tables.")
elif response_data.get('actor', False):
- await ctx.send(":x: You have already nominated this user")
+ await ctx.send(f":x: You have already nominated {user.mention}.")
return
else:
@@ -291,9 +292,7 @@ class TalentPool(Cog, name="Talentpool"):
if await self.autoreview_enabled() and user.id not in self.reviewer:
self.reviewer.schedule_review(user.id)
- msg = f"✅ The nomination for {user.mention} has been added to the talent pool"
-
- await ctx.send(msg)
+ await ctx.send(f"✅ The nomination for {user.mention} has been added to the talent pool.")
@nomination_group.command(name='history', aliases=('info', 'search'))
@has_any_role(*MODERATION_ROLES)
@@ -307,7 +306,7 @@ class TalentPool(Cog, name="Talentpool"):
}
)
if not result:
- await ctx.send(":warning: This user has never been nominated")
+ await ctx.send(f":warning: {user.mention} has never been nominated.")
return
embed = Embed(
@@ -333,13 +332,13 @@ class TalentPool(Cog, name="Talentpool"):
Providing a `reason` is required.
"""
if len(reason) > REASON_MAX_CHARS:
- await ctx.send(f":x: Maximum allowed characters for the end reason is {REASON_MAX_CHARS}.")
+ await ctx.send(f":x: The reason's length must not exceed {REASON_MAX_CHARS} characters.")
return
if await self.end_nomination(user.id, reason):
- await ctx.send(f":white_check_mark: Successfully un-nominated {user}")
+ await ctx.send(f":white_check_mark: Successfully un-nominated {user.mention}.")
else:
- await ctx.send(":x: The specified user does not have an active nomination")
+ await ctx.send(f":x: {user.mention} doesn't have an active nomination.")
@nomination_group.group(name='edit', aliases=('e',), invoke_without_command=True)
@has_any_role(*STAFF_ROLES)
@@ -374,7 +373,7 @@ class TalentPool(Cog, name="Talentpool"):
if not any(role.id in MODERATION_ROLES for role in ctx.author.roles):
if ctx.channel.id != Channels.nominations:
- await ctx.send(f":x: Nomination edits must be run in the <#{Channels.nominations}> channel")
+ await ctx.send(f":x: Nomination edits must be run in the <#{Channels.nominations}> channel.")
return
if nominator != ctx.author or isinstance(nominee_or_nomination_id, int):
@@ -401,7 +400,7 @@ class TalentPool(Cog, name="Talentpool"):
) -> None:
"""Edit a nomination reason in the database after validating the input."""
if len(reason) > REASON_MAX_CHARS:
- await ctx.send(f":x: Maximum allowed characters for the reason is {REASON_MAX_CHARS}.")
+ await ctx.send(f":x: The reason's length must not exceed {REASON_MAX_CHARS} characters.")
return
if isinstance(target, int):
nomination_id = target
@@ -409,7 +408,7 @@ class TalentPool(Cog, name="Talentpool"):
if nomination := self.cache.get(target.id):
nomination_id = nomination["id"]
else:
- await ctx.send("No active nomination found for that member.")
+ await ctx.send(f":x: {target.mention} doesn't have an active nomination.")
return
try:
@@ -417,13 +416,13 @@ class TalentPool(Cog, name="Talentpool"):
except ResponseCodeError as e:
if e.response.status == 404:
log.trace(f"Nomination API 404: Can't find a nomination with id {nomination_id}")
- await ctx.send(f":x: Can't find a nomination with id `{nomination_id}`")
+ await ctx.send(f":x: Can't find a nomination with id `{nomination_id}`.")
return
else:
raise
if not nomination["active"]:
- await ctx.send(":x: Can't edit the reason of an inactive nomination.")
+ await ctx.send(f":x: <@{nomination['user']}> doesn't have an active nomination.")
return
if not any(entry["actor"] == actor.id for entry in nomination["entries"]):
@@ -437,14 +436,14 @@ class TalentPool(Cog, name="Talentpool"):
json={"actor": actor.id, "reason": reason}
)
await self.refresh_cache() # Update cache
- await ctx.send(":white_check_mark: Successfully updated nomination reason.")
+ await ctx.send(f":white_check_mark: Updated the nomination reason for <@{nomination['user']}>.")
@nomination_edit_group.command(name='end_reason')
@has_any_role(*MODERATION_ROLES)
async def edit_end_reason_command(self, ctx: Context, nomination_id: int, *, reason: str) -> None:
"""Edits the unnominate reason for the nomination with the given `id`."""
if len(reason) > REASON_MAX_CHARS:
- await ctx.send(f":x: Maximum allowed characters for the end reason is {REASON_MAX_CHARS}.")
+ await ctx.send(f":x: The reason's length must not exceed {REASON_MAX_CHARS} characters.")
return
try:
@@ -452,13 +451,15 @@ class TalentPool(Cog, name="Talentpool"):
except ResponseCodeError as e:
if e.response.status == 404:
log.trace(f"Nomination API 404: Can't find a nomination with id {nomination_id}")
- await ctx.send(f":x: Can't find a nomination with id `{nomination_id}`")
+ await ctx.send(f":x: Can't find a nomination with id `{nomination_id}`.")
return
else:
raise
if nomination["active"]:
- await ctx.send(":x: Can't edit the end reason of an active nomination.")
+ await ctx.send(
+ f":x: Can't edit the nomination end reason for <@{nomination['user']}> because it's still active."
+ )
return
log.trace(f"Changing end reason for nomination with id {nomination_id} to {repr(reason)}")
@@ -468,7 +469,7 @@ class TalentPool(Cog, name="Talentpool"):
json={"end_reason": reason}
)
await self.refresh_cache() # Update cache.
- await ctx.send(":white_check_mark: Updated the end reason of the nomination!")
+ await ctx.send(f":white_check_mark: Updated the nomination end reason for <@{nomination['user']}>.")
@nomination_group.command(aliases=('mr',))
@has_any_role(*MODERATION_ROLES)
@@ -483,7 +484,7 @@ class TalentPool(Cog, name="Talentpool"):
async def get_review(self, ctx: Context, user_id: int) -> None:
"""Get the user's review as a markdown file."""
review, _, _ = await self.reviewer.make_review(user_id)
- file = disnake.File(StringIO(review), f"{user_id}_review.md")
+ file = discord.File(StringIO(review), f"{user_id}_review.md")
await ctx.send(file=file)
@nomination_group.command(aliases=('review',))
@@ -602,7 +603,6 @@ class TalentPool(Cog, name="Talentpool"):
return lines.strip()
- def cog_unload(self) -> None:
+ async def cog_unload(self) -> None:
"""Cancels all review tasks on cog unload."""
- super().cog_unload()
self.reviewer.cancel_all()
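
One pattern in the nomination edit commands above is worth calling out: a 404 from the site API becomes a user-facing message while any other status re-raises. A small sketch of that pattern under the same import the diff introduces; the endpoint comes from the hunks above and `api_client` stands in for `bot.api_client`:

from typing import Optional

from botcore.site_api import ResponseCodeError


async def fetch_nomination(api_client, nomination_id: int) -> Optional[dict]:
    """Return the nomination with the given id, or None if the site reports a 404."""
    try:
        return await api_client.get(f"bot/nominations/{nomination_id}")
    except ResponseCodeError as e:
        if e.response.status == 404:
            return None
        raise
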
diff --git a/bot/exts/recruitment/talentpool/_review.py b/bot/exts/recruitment/talentpool/_review.py
index d496d0eb2..b6abdd24f 100644
--- a/bot/exts/recruitment/talentpool/_review.py
+++ b/bot/exts/recruitment/talentpool/_review.py
@@ -9,21 +9,22 @@ from datetime import datetime, timedelta
from typing import List, Optional, Union
import arrow
+from botcore.site_api import ResponseCodeError
+from botcore.utils.scheduling import Scheduler
from dateutil.parser import isoparse
-from disnake import Embed, Emoji, Member, Message, NoMoreItems, NotFound, PartialMessage, TextChannel
-from disnake.ext.commands import Context
+from discord import Embed, Emoji, Member, Message, NotFound, PartialMessage, TextChannel
+from discord.ext.commands import Context
-from bot.api import ResponseCodeError
from bot.bot import Bot
from bot.constants import Channels, Colours, Emojis, Guild, Roles
from bot.log import get_logger
from bot.utils import time
from bot.utils.members import get_or_fetch_member
from bot.utils.messages import count_unique_users_reaction, pin_no_system_message
-from bot.utils.scheduling import Scheduler
if typing.TYPE_CHECKING:
from bot.exts.recruitment.talentpool._cog import TalentPool
+ from bot.exts.utils.thread_bumper import ThreadBumper
log = get_logger(__name__)
@@ -97,12 +98,17 @@ class Reviewer:
thread = await last_message.create_thread(
name=f"Nomination - {nominee}",
)
- await thread.send(fr"<@&{Roles.mod_team}> <@&{Roles.admins}>")
+ message = await thread.send(f"<@&{Roles.mod_team}> <@&{Roles.admins}>")
if update_database:
nomination = self._pool.cache.get(user_id)
await self.bot.api_client.patch(f"bot/nominations/{nomination['id']}", json={"reviewed": True})
+ bump_cog: ThreadBumper = self.bot.get_cog("ThreadBumper")
+ if bump_cog:
+ context = await self.bot.get_context(message)
+ await bump_cog.add_thread_to_bump_list(context, thread)
+
async def make_review(self, user_id: int) -> typing.Tuple[str, Optional[Emoji], Optional[Member]]:
"""Format a generic review of a user and return it with the reviewed emoji and the user themselves."""
log.trace(f"Formatting the review of {user_id}")
@@ -151,12 +157,11 @@ class Reviewer:
# We consider the first message in the nomination to contain the user ping, username#discrim, and fixed text
messages = [message]
if not NOMINATION_MESSAGE_REGEX.search(message.content):
- with contextlib.suppress(NoMoreItems):
- async for new_message in message.channel.history(before=message.created_at):
- messages.append(new_message)
+ async for new_message in message.channel.history(before=message.created_at):
+ messages.append(new_message)
- if NOMINATION_MESSAGE_REGEX.search(new_message.content):
- break
+            if NOMINATION_MESSAGE_REGEX.search(new_message.content):
+                break
log.debug(f"Found {len(messages)} messages: {', '.join(str(m.id) for m in messages)}")
diff --git a/bot/exts/utils/bot.py b/bot/exts/utils/bot.py
index 7d18c0ed3..a312e0584 100644
--- a/bot/exts/utils/bot.py
+++ b/bot/exts/utils/bot.py
@@ -1,10 +1,10 @@
from typing import Optional
-from disnake import Embed, TextChannel
-from disnake.ext.commands import Cog, Context, command, group, has_any_role
+from discord import Embed, TextChannel
+from discord.ext.commands import Cog, Context, command, group, has_any_role
from bot.bot import Bot
-from bot.constants import Guild, MODERATION_ROLES, URLs
+from bot.constants import Bot as BotConfig, Guild, MODERATION_ROLES, URLs
from bot.log import get_logger
log = get_logger(__name__)
@@ -25,7 +25,10 @@ class BotCog(Cog, name="Bot"):
async def about_command(self, ctx: Context) -> None:
"""Get information about the bot."""
embed = Embed(
- description="A utility bot designed just for the Python server! Try `!help` for more info.",
+ description=(
+ "A utility bot designed just for the Python server! "
+ f"Try `{BotConfig.prefix}help` for more info."
+ ),
url="https://github.com/python-discord/bot"
)
@@ -61,6 +64,6 @@ class BotCog(Cog, name="Bot"):
await channel.send(embed=embed)
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the Bot cog."""
- bot.add_cog(BotCog(bot))
+ await bot.add_cog(BotCog(bot))
diff --git a/bot/exts/utils/extensions.py b/bot/exts/utils/extensions.py
index 3d12ae848..90249867f 100644
--- a/bot/exts/utils/extensions.py
+++ b/bot/exts/utils/extensions.py
@@ -2,9 +2,9 @@ import functools
import typing as t
from enum import Enum
-from disnake import Colour, Embed
-from disnake.ext import commands
-from disnake.ext.commands import Context, group
+from discord import Colour, Embed
+from discord.ext import commands
+from discord.ext.commands import Context, group
from bot import exts
from bot.bot import Bot
@@ -12,7 +12,6 @@ from bot.constants import Emojis, MODERATION_ROLES, Roles, URLs
from bot.converters import Extension
from bot.log import get_logger
from bot.pagination import LinePaginator
-from bot.utils.extensions import EXTENSIONS
log = get_logger(__name__)
@@ -35,6 +34,7 @@ class Extensions(commands.Cog):
def __init__(self, bot: Bot):
self.bot = bot
+ self.action_in_progress = False
@group(name="extensions", aliases=("ext", "exts", "c", "cog", "cogs"), invoke_without_command=True)
async def extensions_group(self, ctx: Context) -> None:
@@ -53,10 +53,9 @@ class Extensions(commands.Cog):
return
if "*" in extensions or "**" in extensions:
- extensions = set(EXTENSIONS) - set(self.bot.extensions.keys())
+ extensions = set(self.bot.all_extensions) - set(self.bot.extensions.keys())
- msg = self.batch_manage(Action.LOAD, *extensions)
- await ctx.send(msg)
+ await self.batch_manage(Action.LOAD, ctx, *extensions)
@extensions_group.command(name="unload", aliases=("ul",))
async def unload_command(self, ctx: Context, *extensions: Extension) -> None:
@@ -72,14 +71,12 @@ class Extensions(commands.Cog):
blacklisted = "\n".join(UNLOAD_BLACKLIST & set(extensions))
if blacklisted:
- msg = f":x: The following extension(s) may not be unloaded:```\n{blacklisted}```"
+ await ctx.send(f":x: The following extension(s) may not be unloaded:```\n{blacklisted}```")
else:
if "*" in extensions or "**" in extensions:
extensions = set(self.bot.extensions.keys()) - UNLOAD_BLACKLIST
- msg = self.batch_manage(Action.UNLOAD, *extensions)
-
- await ctx.send(msg)
+ await self.batch_manage(Action.UNLOAD, ctx, *extensions)
@extensions_group.command(name="reload", aliases=("r",), root_aliases=("reload",))
async def reload_command(self, ctx: Context, *extensions: Extension) -> None:
@@ -96,14 +93,12 @@ class Extensions(commands.Cog):
return
if "**" in extensions:
- extensions = EXTENSIONS
+ extensions = self.bot.all_extensions
elif "*" in extensions:
extensions = set(self.bot.extensions.keys()) | set(extensions)
extensions.remove("*")
- msg = self.batch_manage(Action.RELOAD, *extensions)
-
- await ctx.send(msg)
+ await self.batch_manage(Action.RELOAD, ctx, *extensions)
@extensions_group.command(name="list", aliases=("all",))
async def list_command(self, ctx: Context) -> None:
@@ -136,7 +131,7 @@ class Extensions(commands.Cog):
"""Return a mapping of extension names and statuses to their categories."""
categories = {}
- for ext in EXTENSIONS:
+ for ext in self.bot.all_extensions:
if ext in self.bot.extensions:
status = Emojis.status_online
else:
@@ -152,21 +147,31 @@ class Extensions(commands.Cog):
return categories
- def batch_manage(self, action: Action, *extensions: str) -> str:
+ async def batch_manage(self, action: Action, ctx: Context, *extensions: str) -> None:
"""
- Apply an action to multiple extensions and return a message with the results.
+ Apply an action to multiple extensions, giving feedback to the invoker while doing so.
If only one extension is given, it is deferred to `manage()`.
"""
- if len(extensions) == 1:
- msg, _ = self.manage(action, extensions[0])
- return msg
+ if self.action_in_progress:
+ await ctx.send(":x: Another action is in progress, please try again later.")
+ return
verb = action.name.lower()
+
+ self.action_in_progress = True
+ loading_message = await ctx.send(f":hourglass_flowing_sand: {verb} in progress, please wait...")
+
+ if len(extensions) == 1:
+ msg, _ = await self.manage(action, extensions[0])
+ await loading_message.edit(content=msg)
+ self.action_in_progress = False
+ return
+
failures = {}
for extension in extensions:
- _, error = self.manage(action, extension)
+ _, error = await self.manage(action, extension)
if error:
failures[extension] = error
@@ -179,19 +184,20 @@ class Extensions(commands.Cog):
log.debug(f"Batch {verb}ed extensions.")
- return msg
+ await loading_message.edit(content=msg)
+ self.action_in_progress = False
- def manage(self, action: Action, ext: str) -> t.Tuple[str, t.Optional[str]]:
+ async def manage(self, action: Action, ext: str) -> t.Tuple[str, t.Optional[str]]:
"""Apply an action to an extension and return the status message and any error message."""
verb = action.name.lower()
error_msg = None
try:
- action.value(self.bot, ext)
+ await action.value(self.bot, ext)
except (commands.ExtensionAlreadyLoaded, commands.ExtensionNotLoaded):
if action is Action.RELOAD:
# When reloading, just load the extension if it was not loaded.
- return self.manage(Action.LOAD, ext)
+ return await self.manage(Action.LOAD, ext)
msg = f":x: Extension `{ext}` is already {verb}ed."
log.debug(msg[4:])
@@ -216,12 +222,16 @@ class Extensions(commands.Cog):
# This cannot be static (must have a __func__ attribute).
async def cog_command_error(self, ctx: Context, error: Exception) -> None:
- """Handle BadArgument errors locally to prevent the help command from showing."""
+ """Handle errors locally to prevent the error handler cog from interfering when not wanted."""
+ # Safely clear the flag on unexpected errors to avoid deadlocks.
+ self.action_in_progress = False
+
+ # Handle BadArgument errors locally to prevent the help command from showing.
if isinstance(error, commands.BadArgument):
await ctx.send(str(error))
error.handled = True
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the Extensions cog."""
- bot.add_cog(Extensions(bot))
+ await bot.add_cog(Extensions(bot))
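
Because extension loading is asynchronous in discord.py 2.0, `manage` and `batch_manage` above become coroutines, and a simple `action_in_progress` flag serializes invocations. A stripped-down, runnable sketch of that guard, written here with try/finally (the cog itself instead clears the flag in `cog_command_error`):

import asyncio


class ExtensionManager:
    """Toy stand-in for the Extensions cog, showing only the concurrency guard."""

    def __init__(self) -> None:
        self.action_in_progress = False

    async def batch_manage(self, verb: str, *extensions: str) -> str:
        if self.action_in_progress:
            return ":x: Another action is in progress, please try again later."
        self.action_in_progress = True
        try:
            for extension in extensions:
                await asyncio.sleep(0)  # stand-in for `await self.manage(action, extension)`
            return f":ok_hand: {len(extensions)} extension(s) {verb}ed."
        finally:
            self.action_in_progress = False


print(asyncio.run(ExtensionManager().batch_manage("reload", "bot.exts.utils.ping")))
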
diff --git a/bot/exts/utils/internal.py b/bot/exts/utils/internal.py
index 28c1867ad..3125cee75 100644
--- a/bot/exts/utils/internal.py
+++ b/bot/exts/utils/internal.py
@@ -9,13 +9,14 @@ from io import StringIO
from typing import Any, Optional, Tuple
import arrow
-import disnake
-from disnake.ext.commands import Cog, Context, group, has_any_role, is_owner
+import discord
+from discord.ext.commands import Cog, Context, group, has_any_role, is_owner
from bot.bot import Bot
from bot.constants import DEBUG_MODE, Roles
from bot.log import get_logger
from bot.utils import find_nth_occurrence, send_to_paste_service
+from bot.utils.services import PasteTooLongError, PasteUploadError
log = get_logger(__name__)
@@ -42,7 +43,7 @@ class Internal(Cog):
self.socket_event_total += 1
self.socket_events[event_type] += 1
- def _format(self, inp: str, out: Any) -> Tuple[str, Optional[disnake.Embed]]:
+ def _format(self, inp: str, out: Any) -> Tuple[str, Optional[discord.Embed]]:
"""Format the eval output into a string & attempt to format it into an Embed."""
self._ = out
@@ -103,7 +104,7 @@ class Internal(Cog):
res += f"Out[{self.ln}]: "
- if isinstance(out, disnake.Embed):
+ if isinstance(out, discord.Embed):
# We made an embed? Send that as embed
res += "<Embed>"
res = (res, out)
@@ -136,7 +137,7 @@ class Internal(Cog):
return res # Return (text, embed)
- async def _eval(self, ctx: Context, code: str) -> Optional[disnake.Message]:
+ async def _eval(self, ctx: Context, code: str) -> Optional[discord.Message]:
"""Eval the input code string & send an embed to the invoking context."""
self.ln += 1
@@ -154,8 +155,7 @@ class Internal(Cog):
"self": self,
"bot": self.bot,
"inspect": inspect,
- "discord": disnake,
- "disnake": disnake,
+ "discord": discord,
"contextlib": contextlib
}
@@ -195,11 +195,14 @@ async def func(): # (None,) -> Any
truncate_index = newline_truncate_index
if len(out) > truncate_index:
- paste_link = await send_to_paste_service(out, extension="py")
- if paste_link is not None:
- paste_text = f"full contents at {paste_link}"
- else:
+ try:
+ paste_link = await send_to_paste_service(out, extension="py")
+ except PasteTooLongError:
+ paste_text = "too long to upload to paste service."
+ except PasteUploadError:
paste_text = "failed to upload contents to paste service."
+ else:
+ paste_text = f"full contents at {paste_link}"
await ctx.send(
f"```py\n{out[:truncate_index]}\n```"
@@ -241,10 +244,10 @@ async def func(): # (None,) -> Any
per_s = self.socket_event_total / running_s
- stats_embed = disnake.Embed(
+ stats_embed = discord.Embed(
title="WebSocket statistics",
description=f"Receiving {per_s:0.2f} events per second.",
- color=disnake.Color.og_blurple()
+ color=discord.Color.og_blurple()
)
for event_type, count in self.socket_events.most_common(25):
@@ -253,6 +256,6 @@ async def func(): # (None,) -> Any
await ctx.send(embed=stats_embed)
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the Internal cog."""
- bot.add_cog(Internal(bot))
+ await bot.add_cog(Internal(bot))
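
The paste-upload hunk above swaps an `is None` check for exception handling: `send_to_paste_service` now raises `PasteTooLongError` or `PasteUploadError` instead of returning None. A self-contained sketch of the try/except/else flow, with a dummy uploader standing in for `bot.utils.services.send_to_paste_service`:

import asyncio


class PasteTooLongError(Exception):
    """Contents exceed the paste service's size limit (stand-in for bot.utils.services)."""


class PasteUploadError(Exception):
    """The paste service rejected or failed the upload (stand-in for bot.utils.services)."""


async def dummy_send_to_paste_service(contents: str, *, extension: str) -> str:
    if len(contents) > 10_000:
        raise PasteTooLongError
    return f"https://paste.example/abc123.{extension}"  # illustrative URL


async def describe_output(out: str) -> str:
    try:
        paste_link = await dummy_send_to_paste_service(out, extension="py")
    except PasteTooLongError:
        return "too long to upload to paste service."
    except PasteUploadError:
        return "failed to upload contents to paste service."
    else:
        return f"full contents at {paste_link}"


print(asyncio.run(describe_output("print('hello')")))
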
diff --git a/bot/exts/utils/ping.py b/bot/exts/utils/ping.py
index eeb1d5ff5..67a960365 100644
--- a/bot/exts/utils/ping.py
+++ b/bot/exts/utils/ping.py
@@ -1,7 +1,7 @@
import arrow
from aiohttp import client_exceptions
-from disnake import Embed
-from disnake.ext import commands
+from discord import Embed
+from discord.ext import commands
from bot.bot import Bot
from bot.constants import Channels, STAFF_PARTNERS_COMMUNITY_ROLES, URLs
@@ -60,6 +60,6 @@ class Latency(commands.Cog):
await ctx.send(embed=embed)
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the Latency cog."""
- bot.add_cog(Latency(bot))
+ await bot.add_cog(Latency(bot))
diff --git a/bot/exts/utils/reminders.py b/bot/exts/utils/reminders.py
index bf0e9d2ac..45cddd7a2 100644
--- a/bot/exts/utils/reminders.py
+++ b/bot/exts/utils/reminders.py
@@ -4,21 +4,22 @@ import typing as t
from datetime import datetime, timezone
from operator import itemgetter
-import disnake
+import discord
+from botcore.utils import scheduling
+from botcore.utils.scheduling import Scheduler
from dateutil.parser import isoparse
-from disnake.ext.commands import Cog, Context, Greedy, group
+from discord.ext.commands import Cog, Context, Greedy, group
from bot.bot import Bot
from bot.constants import Guild, Icons, MODERATION_ROLES, POSITIVE_REPLIES, Roles, STAFF_PARTNERS_COMMUNITY_ROLES
from bot.converters import Duration, UnambiguousUser
from bot.log import get_logger
from bot.pagination import LinePaginator
-from bot.utils import scheduling, time
+from bot.utils import time
from bot.utils.checks import has_any_role_check, has_no_roles_check
from bot.utils.lock import lock_arg
from bot.utils.members import get_or_fetch_member
from bot.utils.messages import send_denial
-from bot.utils.scheduling import Scheduler
log = get_logger(__name__)
@@ -26,8 +27,8 @@ LOCK_NAMESPACE = "reminder"
WHITELISTED_CHANNELS = Guild.reminder_whitelist
MAXIMUM_REMINDERS = 5
-Mentionable = t.Union[disnake.Member, disnake.Role]
-ReminderMention = t.Union[UnambiguousUser, disnake.Role]
+Mentionable = t.Union[discord.Member, discord.Role]
+ReminderMention = t.Union[UnambiguousUser, discord.Role]
class Reminders(Cog):
@@ -37,13 +38,11 @@ class Reminders(Cog):
self.bot = bot
self.scheduler = Scheduler(self.__class__.__name__)
- scheduling.create_task(self.reschedule_reminders(), event_loop=self.bot.loop)
-
- def cog_unload(self) -> None:
+ async def cog_unload(self) -> None:
"""Cancel scheduled tasks."""
self.scheduler.cancel_all()
- async def reschedule_reminders(self) -> None:
+ async def cog_load(self) -> None:
"""Get all current reminders from the API and reschedule them."""
await self.bot.wait_until_guild_available()
response = await self.bot.api_client.get(
@@ -66,7 +65,7 @@ class Reminders(Cog):
else:
self.schedule_reminder(reminder)
- def ensure_valid_reminder(self, reminder: dict) -> t.Tuple[bool, disnake.TextChannel]:
+ def ensure_valid_reminder(self, reminder: dict) -> t.Tuple[bool, discord.TextChannel]:
"""Ensure reminder channel can be fetched otherwise delete the reminder."""
channel = self.bot.get_channel(reminder['channel_id'])
is_valid = True
@@ -87,9 +86,9 @@ class Reminders(Cog):
reminder_id: t.Union[str, int]
) -> None:
"""Send an embed confirming the reminder change was made successfully."""
- embed = disnake.Embed(
+ embed = discord.Embed(
description=on_success,
- colour=disnake.Colour.green(),
+ colour=discord.Colour.green(),
title=random.choice(POSITIVE_REPLIES)
)
@@ -113,7 +112,7 @@ class Reminders(Cog):
if await has_no_roles_check(ctx, *STAFF_PARTNERS_COMMUNITY_ROLES):
return False, "members/roles"
elif await has_no_roles_check(ctx, *MODERATION_ROLES):
- return all(isinstance(mention, (disnake.User, disnake.Member)) for mention in mentions), "roles"
+ return all(isinstance(mention, (discord.User, discord.Member)) for mention in mentions), "roles"
else:
return True, ""
@@ -173,15 +172,15 @@ class Reminders(Cog):
if not is_valid:
# No need to cancel the task too; it'll simply be done once this coroutine returns.
return
- embed = disnake.Embed()
+ embed = discord.Embed()
if expected_time:
- embed.colour = disnake.Colour.red()
+ embed.colour = discord.Colour.red()
embed.set_author(
icon_url=Icons.remind_red,
name="Sorry, your reminder should have arrived earlier!"
)
else:
- embed.colour = disnake.Colour.og_blurple()
+ embed.colour = discord.Colour.og_blurple()
embed.set_author(
icon_url=Icons.remind_blurple,
name="It has arrived!"
@@ -200,7 +199,7 @@ class Reminders(Cog):
partial_message = channel.get_partial_message(int(jump_url.split("/")[-1]))
try:
await partial_message.reply(content=f"{additional_mentions}", embed=embed)
- except disnake.HTTPException as e:
+ except discord.HTTPException as e:
log.info(
f"There was an error when trying to reply to a reminder invocation message, {e}, "
"fall back to using jump_url"
@@ -284,7 +283,7 @@ class Reminders(Cog):
# If `content` isn't provided then we try to get message content of a replied message
if not content:
if reference := ctx.message.reference:
- if isinstance((resolved_message := reference.resolved), disnake.Message):
+ if isinstance((resolved_message := reference.resolved), discord.Message):
content = resolved_message.content
# If we weren't able to get the content of a replied message
if content is None:
@@ -361,8 +360,8 @@ class Reminders(Cog):
lines.append(text)
- embed = disnake.Embed()
- embed.colour = disnake.Colour.og_blurple()
+ embed = discord.Embed()
+ embed.colour = discord.Colour.og_blurple()
embed.title = f"Reminders for {ctx.author}"
# Remind the user that they have no reminders :^)
@@ -372,7 +371,7 @@ class Reminders(Cog):
return
# Construct the embed and paginate it.
- embed.colour = disnake.Colour.og_blurple()
+ embed.colour = discord.Colour.og_blurple()
await LinePaginator.paginate(
lines,
@@ -486,6 +485,6 @@ class Reminders(Cog):
return True
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the Reminders cog."""
- bot.add_cog(Reminders(bot))
+ await bot.add_cog(Reminders(bot))
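
Reminders shows why the lifecycle change matters beyond the rename: rescheduling used to be a fire-and-forget task created in `__init__`, whereas `cog_load` is awaited by `add_cog`, so a failure to fetch reminders now fails the extension load instead of dying silently in a background task. A small sketch of that shape, with the API fetch replaced by a stand-in:

import asyncio

from discord.ext import commands


class RemindersSketch(commands.Cog):
    """Sketch only; the real cog schedules each fetched reminder with botcore's Scheduler."""

    def __init__(self, bot: commands.Bot):
        self.bot = bot
        self.pending: list[dict] = []
        # Old pattern: scheduling.create_task(self.reschedule_reminders(), event_loop=bot.loop)

    async def cog_load(self) -> None:
        """Awaited during extension setup, so fetch errors surface at load time."""
        self.pending = await self.fetch_reminders()

    async def fetch_reminders(self) -> list[dict]:
        await asyncio.sleep(0)  # stand-in for bot.api_client.get("bot/reminders", params=...)
        return []
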
diff --git a/bot/exts/utils/snekbox.py b/bot/exts/utils/snekbox.py
index 5f82c1cc8..8e961b67c 100644
--- a/bot/exts/utils/snekbox.py
+++ b/bot/exts/utils/snekbox.py
@@ -1,107 +1,251 @@
import asyncio
import contextlib
-import datetime
import re
-import textwrap
from functools import partial
+from operator import attrgetter
from signal import Signals
-from typing import Optional, Tuple
+from textwrap import dedent
+from typing import Literal, Optional, Tuple
+from botcore.utils import interactions
from botcore.utils.regex import FORMATTED_CODE_REGEX, RAW_CODE_REGEX
-from disnake import AllowedMentions, HTTPException, Message, NotFound, Reaction, User
-from disnake.ext.commands import Cog, Context, command, guild_only
+from discord import AllowedMentions, HTTPException, Interaction, Message, NotFound, Reaction, User, enums, ui
+from discord.ext.commands import Cog, Command, Context, Converter, command, guild_only
from bot.bot import Bot
-from bot.constants import Categories, Channels, Roles, URLs
+from bot.constants import Categories, Channels, MODERATION_ROLES, Roles, URLs
from bot.decorators import redirect_output
from bot.log import get_logger
-from bot.utils import scheduling, send_to_paste_service
-from bot.utils.messages import wait_for_deletion
+from bot.utils import send_to_paste_service
+from bot.utils.lock import LockedResourceError, lock_arg
+from bot.utils.services import PasteTooLongError, PasteUploadError
log = get_logger(__name__)
ESCAPE_REGEX = re.compile("[`\u202E\u200B]{3,}")
-MAX_PASTE_LEN = 10000
+# The timeit command should only output the very last line, so all other output should be suppressed.
+# This will be used as the setup code along with any setup code provided.
+TIMEIT_SETUP_WRAPPER = """
+import atexit
+import sys
+from collections import deque
-# `!eval` command whitelists and blacklists.
-NO_EVAL_CHANNELS = (Channels.python_general,)
-NO_EVAL_CATEGORIES = ()
-EVAL_ROLES = (Roles.helpers, Roles.moderators, Roles.admins, Roles.owners, Roles.python_community, Roles.partners)
+if not hasattr(sys, "_setup_finished"):
+ class Writer(deque):
+ '''A single-item deque wrapper for sys.stdout that will return the last line when read() is called.'''
-SIGKILL = 9
+        def __init__(self):
+            super().__init__(maxlen=1)
-REEVAL_EMOJI = '\U0001f501' # :repeat:
-REEVAL_TIMEOUT = 30
+        def write(self, string):
+            '''Append the line to the queue if it is not empty.'''
+            if string.strip():
+                self.append(string)
+        def read(self):
+            '''This method will be called when print() is called.
-class Snekbox(Cog):
-    """Safe evaluation of Python code using Snekbox."""
+            The queue is emptied as we don't need the output later.
+            '''
+            return self.pop()
-    def __init__(self, bot: Bot):
-        self.bot = bot
-        self.jobs = {}
+        def flush(self):
+            '''This method will be called eventually, but we don't need to do anything here.'''
+            pass
- async def post_eval(self, code: str) -> dict:
- """Send a POST request to the Snekbox API to evaluate code and return the results."""
- url = URLs.snekbox_eval_api
- data = {"input": code}
- async with self.bot.http_session.post(url, json=data, raise_for_status=True) as resp:
- return await resp.json()
+ sys.stdout = Writer()
- async def upload_output(self, output: str) -> Optional[str]:
- """Upload the eval output to a paste service and return a URL to it if successful."""
- log.trace("Uploading full output to paste service...")
+ def print_last_line():
+ if sys.stdout: # If the deque is empty (i.e. an error happened), calling read() will raise an error
+ # Use sys.__stdout__ here because sys.stdout is set to a Writer() instance
+ print(sys.stdout.read(), file=sys.__stdout__)
- if len(output) > MAX_PASTE_LEN:
- log.info("Full output is too long to upload")
- return "too long to upload"
- return await send_to_paste_service(output, extension="txt")
+ atexit.register(print_last_line) # When exiting, print the last line (hopefully it will be the timeit output)
+ sys._setup_finished = None
+{setup}
+"""
- @staticmethod
- def prepare_input(code: str) -> str:
+MAX_PASTE_LENGTH = 10_000
+
+# The Snekbox commands' whitelists and blacklists.
+NO_SNEKBOX_CHANNELS = (Channels.python_general,)
+NO_SNEKBOX_CATEGORIES = ()
+SNEKBOX_ROLES = (Roles.helpers, Roles.moderators, Roles.admins, Roles.owners, Roles.python_community, Roles.partners)
+
+SIGKILL = 9
+
+REDO_EMOJI = '\U0001f501' # :repeat:
+REDO_TIMEOUT = 30
+
+
+class CodeblockConverter(Converter):
+ """Attempts to extract code from a codeblock, if provided."""
+
+ @classmethod
+ async def convert(cls, ctx: Context, code: str) -> list[str]:
"""
Extract code from the Markdown, format it, and insert it into the code template.
If there is any code block, ignore text outside the code block.
Use the first code block, but prefer a fenced code block.
If there are several fenced code blocks, concatenate only the fenced code blocks.
+
+ Return a list of code blocks if any, otherwise return a list with a single string of code.
"""
if match := list(FORMATTED_CODE_REGEX.finditer(code)):
blocks = [block for block in match if block.group("block")]
if len(blocks) > 1:
- code = '\n'.join(block.group("code") for block in blocks)
+ codeblocks = [block.group("code") for block in blocks]
info = "several code blocks"
else:
match = match[0] if len(blocks) == 0 else blocks[0]
code, block, lang, delim = match.group("code", "block", "lang", "delim")
+ codeblocks = [dedent(code)]
if block:
info = (f"'{lang}' highlighted" if lang else "plain") + " code block"
else:
info = f"{delim}-enclosed inline code"
else:
- code = RAW_CODE_REGEX.fullmatch(code).group("code")
+ codeblocks = [dedent(RAW_CODE_REGEX.fullmatch(code).group("code"))]
info = "unformatted or badly formatted code"
- code = textwrap.dedent(code)
+ code = "\n".join(codeblocks)
log.trace(f"Extracted {info} for evaluation:\n{code}")
- return code
+ return codeblocks
+
+
+class PythonVersionSwitcherButton(ui.Button):
+ """A button that allows users to re-run their eval command in a different Python version."""
+
+ def __init__(
+ self,
+ job_name: str,
+ version_to_switch_to: Literal["3.10", "3.11"],
+ snekbox_cog: "Snekbox",
+ ctx: Context,
+ code: str
+ ) -> None:
+ self.version_to_switch_to = version_to_switch_to
+ super().__init__(label=f"Run in {self.version_to_switch_to}", style=enums.ButtonStyle.primary)
+
+ self.snekbox_cog = snekbox_cog
+ self.ctx = ctx
+ self.job_name = job_name
+ self.code = code
+
+ async def callback(self, interaction: Interaction) -> None:
+ """
+ Tell snekbox to re-run the user's code in the alternative Python version.
+
+ Use a task calling snekbox, as run_job is blocking while it waits for edit/reaction on the message.
+ """
+ # Defer response here so that the Discord UI doesn't mark this interaction as failed if the job
+ # takes too long to run.
+ await interaction.response.defer()
+
+ with contextlib.suppress(NotFound):
+ # Suppress this delete to cover the case where a user re-runs code and very quickly clicks the button.
+ # The log arg on send_job will stop the actual job from running.
+ await interaction.message.delete()
+
+ await self.snekbox_cog.run_job(self.job_name, self.ctx, self.version_to_switch_to, self.code)
+
+
+class Snekbox(Cog):
+ """Safe evaluation of Python code using Snekbox."""
+
+ def __init__(self, bot: Bot):
+ self.bot = bot
+ self.jobs = {}
+
+ def build_python_version_switcher_view(
+ self,
+ job_name: str,
+ current_python_version: Literal["3.10", "3.11"],
+ ctx: Context,
+ code: str
+    ) -> ui.View:
+ """Return a view that allows the user to change what version of Python their code is run on."""
+ if current_python_version == "3.10":
+ alt_python_version = "3.11"
+ else:
+ alt_python_version = "3.10"
+
+ view = interactions.ViewWithUserAndRoleCheck(
+ allowed_users=(ctx.author.id,),
+ allowed_roles=MODERATION_ROLES,
+ )
+ view.add_item(PythonVersionSwitcherButton(job_name, alt_python_version, self, ctx, code))
+ view.add_item(interactions.DeleteMessageButton())
+
+ return view
+
+ async def post_job(
+ self,
+ code: str,
+ python_version: Literal["3.10", "3.11"],
+ *,
+ args: Optional[list[str]] = None
+ ) -> dict:
+ """Send a POST request to the Snekbox API to evaluate code and return the results."""
+ if python_version == "3.10":
+ url = URLs.snekbox_eval_api
+ else:
+ url = URLs.snekbox_311_eval_api
+
+ data = {"input": code}
+
+ if args is not None:
+ data["args"] = args
+
+ async with self.bot.http_session.post(url, json=data, raise_for_status=True) as resp:
+ return await resp.json()
+
+ async def upload_output(self, output: str) -> Optional[str]:
+ """Upload the job's output to a paste service and return a URL to it if successful."""
+ log.trace("Uploading full output to paste service...")
+
+ try:
+ return await send_to_paste_service(output, extension="txt", max_length=MAX_PASTE_LENGTH)
+ except PasteTooLongError:
+ return "too long to upload"
+ except PasteUploadError:
+ return "unable to upload"
+
+ @staticmethod
+ def prepare_timeit_input(codeblocks: list[str]) -> tuple[str, list[str]]:
+ """
+ Join the codeblocks into a single string, then return the code and the arguments in a tuple.
+
+ If there are multiple codeblocks, insert the first one into the wrapped setup code.
+ """
+ args = ["-m", "timeit"]
+ setup = ""
+ if len(codeblocks) > 1:
+ setup = codeblocks.pop(0)
+
+ code = "\n".join(codeblocks)
+
+ args.extend(["-s", TIMEIT_SETUP_WRAPPER.format(setup=setup)])
+
+ return code, args
@staticmethod
- def get_results_message(results: dict) -> Tuple[str, str]:
+ def get_results_message(results: dict, job_name: str, python_version: Literal["3.10", "3.11"]) -> Tuple[str, str]:
"""Return a user-friendly message and error corresponding to the process's return code."""
stdout, returncode = results["stdout"], results["returncode"]
- msg = f"Your eval job has completed with return code {returncode}"
+ msg = f"Your {python_version} {job_name} job has completed with return code {returncode}"
error = ""
if returncode is None:
- msg = "Your eval job has failed"
+ msg = f"Your {python_version} {job_name} job has failed"
error = stdout.strip()
elif returncode == 128 + SIGKILL:
- msg = "Your eval job timed out or ran out of memory"
+ msg = f"Your {python_version} {job_name} job timed out or ran out of memory"
elif returncode == 255:
- msg = "Your eval job has failed"
+ msg = f"Your {python_version} {job_name} job has failed"
error = "A fatal NsJail error occurred"
else:
# Try to append signal's name if one exists
@@ -130,8 +274,6 @@ class Snekbox(Cog):
Prepend each line with a line number. Truncate if there are over 10 lines or 1000 characters
and upload the full output to a paste service.
"""
- log.trace("Formatting output...")
-
output = output.rstrip("\n")
original_output = output # To be uploaded to a pasting service if needed
paste_link = None
@@ -171,19 +313,29 @@ class Snekbox(Cog):
return output, paste_link
- async def send_eval(self, ctx: Context, code: str) -> Message:
+ @lock_arg("snekbox.send_job", "ctx", attrgetter("author.id"), raise_error=True)
+ async def send_job(
+ self,
+ ctx: Context,
+ python_version: Literal["3.10", "3.11"],
+ code: str,
+ *,
+ args: Optional[list[str]] = None,
+ job_name: str
+ ) -> Message:
"""
Evaluate code, format it, and send the output to the corresponding channel.
Return the bot response.
"""
async with ctx.typing():
- results = await self.post_eval(code)
- msg, error = self.get_results_message(results)
+ results = await self.post_job(code, python_version, args=args)
+ msg, error = self.get_results_message(results, job_name, python_version)
if error:
output, paste_link = error, None
else:
+ log.trace("Formatting output...")
output, paste_link = await self.format_output(results["stdout"])
icon = self.get_status_emoji(results)
@@ -191,7 +343,7 @@ class Snekbox(Cog):
if paste_link:
msg = f"{msg}\nFull output: {paste_link}"
- # Collect stats of eval fails + successes
+ # Collect stats of job fails + successes
if icon == ":x:":
self.bot.stats.incr("snekbox.python.fail")
else:
@@ -200,81 +352,150 @@ class Snekbox(Cog):
filter_cog = self.bot.get_cog("Filtering")
filter_triggered = False
if filter_cog:
- filter_triggered = await filter_cog.filter_eval(msg, ctx.message)
+ filter_triggered = await filter_cog.filter_snekbox_output(msg, ctx.message)
if filter_triggered:
response = await ctx.send("Attempt to circumvent filter detected. Moderator team has been alerted.")
else:
allowed_mentions = AllowedMentions(everyone=False, roles=False, users=[ctx.author])
- response = await ctx.send(msg, allowed_mentions=allowed_mentions)
- scheduling.create_task(wait_for_deletion(response, (ctx.author.id,)), event_loop=self.bot.loop)
+ view = self.build_python_version_switcher_view(job_name, python_version, ctx, code)
+ response = await ctx.send(msg, allowed_mentions=allowed_mentions, view=view)
+ view.message = response
- log.info(f"{ctx.author}'s job had a return code of {results['returncode']}")
+ log.info(f"{ctx.author}'s {job_name} job had a return code of {results['returncode']}")
return response
- async def continue_eval(self, ctx: Context, response: Message) -> Optional[str]:
+ async def continue_job(
+ self, ctx: Context, response: Message, job_name: str
+ ) -> tuple[Optional[str], Optional[list[str]]]:
"""
- Check if the eval session should continue.
+ Check if the job's session should continue.
- Return the new code to evaluate or None if the eval session should be terminated.
+ If the code is to be re-evaluated, return the new code, and the args if the command is the timeit command.
+ Otherwise return (None, None) if the job's session should be terminated.
"""
- _predicate_eval_message_edit = partial(predicate_eval_message_edit, ctx)
- _predicate_emoji_reaction = partial(predicate_eval_emoji_reaction, ctx)
+ _predicate_message_edit = partial(predicate_message_edit, ctx)
+ _predicate_emoji_reaction = partial(predicate_emoji_reaction, ctx)
with contextlib.suppress(NotFound):
try:
_, new_message = await self.bot.wait_for(
'message_edit',
- check=_predicate_eval_message_edit,
- timeout=REEVAL_TIMEOUT
+ check=_predicate_message_edit,
+ timeout=REDO_TIMEOUT
)
- await ctx.message.add_reaction(REEVAL_EMOJI)
+ await ctx.message.add_reaction(REDO_EMOJI)
await self.bot.wait_for(
'reaction_add',
check=_predicate_emoji_reaction,
timeout=10
)
- code = await self.get_code(new_message)
- await ctx.message.clear_reaction(REEVAL_EMOJI)
+ # Ensure the response that's about to be edited is still the most recent.
+ # This could have already been updated via a button press to switch to an alt Python version.
+ if self.jobs[ctx.message.id] != response.id:
+ return None, None
+
+ code = await self.get_code(new_message, ctx.command)
+ await ctx.message.clear_reaction(REDO_EMOJI)
with contextlib.suppress(HTTPException):
await response.delete()
+ if code is None:
+ return None, None
+
except asyncio.TimeoutError:
- await ctx.message.clear_reaction(REEVAL_EMOJI)
- return None
+ await ctx.message.clear_reaction(REDO_EMOJI)
+ return None, None
+
+ codeblocks = await CodeblockConverter.convert(ctx, code)
+
+ if job_name == "timeit":
+ return self.prepare_timeit_input(codeblocks)
+ else:
+ return "\n".join(codeblocks), None
- return code
+ return None, None
- async def get_code(self, message: Message) -> Optional[str]:
+ async def get_code(self, message: Message, command: Command) -> Optional[str]:
"""
Return the code from `message` to be evaluated.
- If the message is an invocation of the eval command, return the first argument or None if it
+ If the message is an invocation of the command, return the first argument or None if it
doesn't exist. Otherwise, return the full content of the message.
"""
log.trace(f"Getting context for message {message.id}.")
new_ctx = await self.bot.get_context(message)
- if new_ctx.command is self.eval_command:
- log.trace(f"Message {message.id} invokes eval command.")
+ if new_ctx.command is command:
+ log.trace(f"Message {message.id} invokes {command} command.")
split = message.content.split(maxsplit=1)
code = split[1] if len(split) > 1 else None
else:
- log.trace(f"Message {message.id} does not invoke eval command.")
+ log.trace(f"Message {message.id} does not invoke {command} command.")
code = message.content
return code
- @command(name="eval", aliases=("e",))
+ async def run_job(
+ self,
+ job_name: str,
+ ctx: Context,
+ python_version: Literal["3.10", "3.11"],
+ code: str,
+ *,
+ args: Optional[list[str]] = None,
+ ) -> None:
+ """Handles checks, stats and re-evaluation of a snekbox job."""
+ if Roles.helpers in (role.id for role in ctx.author.roles):
+ self.bot.stats.incr("snekbox_usages.roles.helpers")
+ else:
+ self.bot.stats.incr("snekbox_usages.roles.developers")
+
+ if ctx.channel.category_id == Categories.help_in_use:
+ self.bot.stats.incr("snekbox_usages.channels.help")
+ elif ctx.channel.id == Channels.bot_commands:
+ self.bot.stats.incr("snekbox_usages.channels.bot_commands")
+ else:
+ self.bot.stats.incr("snekbox_usages.channels.topical")
+
+ log.info(f"Received code from {ctx.author} for evaluation:\n{code}")
+
+ while True:
+ try:
+ response = await self.send_job(ctx, python_version, code, args=args, job_name=job_name)
+ except LockedResourceError:
+ await ctx.send(
+ f"{ctx.author.mention} You've already got a job running - "
+ "please wait for it to finish!"
+ )
+ return
+
+ # Store the bot's response message id per invocation, to ensure the `wait_for`s in `continue_job`
+ # don't trigger if the response has already been replaced by a new response.
+ # This can happen when a button is pressed and then original code is edited and re-run.
+ self.jobs[ctx.message.id] = response.id
+
+ code, args = await self.continue_job(ctx, response, job_name)
+ if not code:
+ break
+ log.info(f"Re-evaluating code from message {ctx.message.id}:\n{code}")
+
+ @command(name="eval", aliases=("e",), usage="[python_version] <code, ...>")
@guild_only()
@redirect_output(
destination_channel=Channels.bot_commands,
- bypass_roles=EVAL_ROLES,
- categories=NO_EVAL_CATEGORIES,
- channels=NO_EVAL_CHANNELS,
+ bypass_roles=SNEKBOX_ROLES,
+ categories=NO_SNEKBOX_CATEGORIES,
+ channels=NO_SNEKBOX_CHANNELS,
ping_user=False
)
- async def eval_command(self, ctx: Context, *, code: str = None) -> None:
+ async def eval_command(
+ self,
+ ctx: Context,
+ python_version: Optional[Literal["3.10", "3.11"]],
+ *,
+ code: CodeblockConverter
+ ) -> None:
"""
Run Python code and get the results.
@@ -282,58 +503,68 @@ class Snekbox(Cog):
block. Code can be re-evaluated by editing the original message within 10 seconds and
clicking the reaction that subsequently appears.
+ If multiple codeblocks are in a message, all of them will be joined and evaluated,
+ ignoring the text outside of them.
+
+ By default your code is run on Python's 3.11 beta release, to assist with testing. If you
+ run into issues related to this Python version, you can request the bot to use Python
+ 3.10 by specifying the `python_version` arg and setting it to `3.10`.
+
We've done our best to make this sandboxed, but do let us know if you manage to find an
issue with it!
"""
- if ctx.author.id in self.jobs:
- await ctx.send(
- f"{ctx.author.mention} You've already got a job running - "
- "please wait for it to finish!"
- )
- return
+ python_version = python_version or "3.11"
+ await self.run_job("eval", ctx, python_version, "\n".join(code))
- if not code: # None or empty string
- await ctx.send_help(ctx.command)
- return
+ @command(name="timeit", aliases=("ti",), usage="[python_version] [setup_code] <code, ...>")
+ @guild_only()
+ @redirect_output(
+ destination_channel=Channels.bot_commands,
+ bypass_roles=SNEKBOX_ROLES,
+ categories=NO_SNEKBOX_CATEGORIES,
+ channels=NO_SNEKBOX_CHANNELS,
+ ping_user=False
+ )
+ async def timeit_command(
+ self,
+ ctx: Context,
+ python_version: Optional[Literal["3.10", "3.11"]],
+ *,
+ code: CodeblockConverter
+ ) -> None:
+ """
+    Profile Python code to find execution time.
- if Roles.helpers in (role.id for role in ctx.author.roles):
- self.bot.stats.incr("snekbox_usages.roles.helpers")
- else:
- self.bot.stats.incr("snekbox_usages.roles.developers")
+ This command supports multiple lines of code, including code wrapped inside a formatted code
+ block. Code can be re-evaluated by editing the original message within 10 seconds and
+ clicking the reaction that subsequently appears.
- if ctx.channel.category_id == Categories.help_in_use:
- self.bot.stats.incr("snekbox_usages.channels.help")
- elif ctx.channel.id == Channels.bot_commands:
- self.bot.stats.incr("snekbox_usages.channels.bot_commands")
- else:
- self.bot.stats.incr("snekbox_usages.channels.topical")
+ If multiple formatted codeblocks are provided, the first one will be the setup code, which will
+ not be timed. The remaining codeblocks will be joined together and timed.
- log.info(f"Received code from {ctx.author} for evaluation:\n{code}")
+ By default your code is run on Python's 3.11 beta release, to assist with testing. If you
+ run into issues related to this Python version, you can request the bot to use Python
+ 3.10 by specifying the `python_version` arg and setting it to `3.10`.
- while True:
- self.jobs[ctx.author.id] = datetime.datetime.now()
- code = self.prepare_input(code)
- try:
- response = await self.send_eval(ctx, code)
- finally:
- del self.jobs[ctx.author.id]
+ We've done our best to make this sandboxed, but do let us know if you manage to find an
+ issue with it!
+ """
+ python_version = python_version or "3.11"
+ code, args = self.prepare_timeit_input(code)
- code = await self.continue_eval(ctx, response)
- if not code:
- break
- log.info(f"Re-evaluating code from message {ctx.message.id}:\n{code}")
+ await self.run_job("timeit", ctx, python_version, code=code, args=args)
-def predicate_eval_message_edit(ctx: Context, old_msg: Message, new_msg: Message) -> bool:
+def predicate_message_edit(ctx: Context, old_msg: Message, new_msg: Message) -> bool:
"""Return True if the edited message is the context message and the content was indeed modified."""
return new_msg.id == ctx.message.id and old_msg.content != new_msg.content
-def predicate_eval_emoji_reaction(ctx: Context, reaction: Reaction, user: User) -> bool:
- """Return True if the reaction REEVAL_EMOJI was added by the context message author on this message."""
- return reaction.message.id == ctx.message.id and user.id == ctx.author.id and str(reaction) == REEVAL_EMOJI
+def predicate_emoji_reaction(ctx: Context, reaction: Reaction, user: User) -> bool:
+ """Return True if the reaction REDO_EMOJI was added by the context message author on this message."""
+ return reaction.message.id == ctx.message.id and user.id == ctx.author.id and str(reaction) == REDO_EMOJI
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the Snekbox cog."""
- bot.add_cog(Snekbox(bot))
+ await bot.add_cog(Snekbox(bot))
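
The new timeit command passes `-m timeit` arguments straight through to snekbox, so `prepare_timeit_input` only has to turn the user's codeblocks into that argv: an optional first block becomes `-s` setup code and the rest is the timed statement. A simplified, runnable illustration of the argument shape (the real method also wraps the setup in TIMEIT_SETUP_WRAPPER, omitted here):

def prepare_timeit_input(codeblocks: list[str]) -> tuple[str, list[str]]:
    """Return (code, args) for a `python -m timeit` run; the setup wrapper is omitted."""
    args = ["-m", "timeit"]
    setup = codeblocks.pop(0) if len(codeblocks) > 1 else ""
    code = "\n".join(codeblocks)
    args.extend(["-s", setup])
    return code, args


code, args = prepare_timeit_input(["import math", "math.sqrt(2)"])
print(args)  # ['-m', 'timeit', '-s', 'import math']
print(code)  # math.sqrt(2)
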
diff --git a/bot/exts/utils/thread_bumper.py b/bot/exts/utils/thread_bumper.py
index d37b3b51c..a2f208484 100644
--- a/bot/exts/utils/thread_bumper.py
+++ b/bot/exts/utils/thread_bumper.py
@@ -1,29 +1,43 @@
import typing as t
-import disnake
-from async_rediscache import RedisCache
-from disnake.ext import commands
+import discord
+from botcore.site_api import ResponseCodeError
+from discord.ext import commands
from bot import constants
from bot.bot import Bot
from bot.log import get_logger
from bot.pagination import LinePaginator
-from bot.utils import channel, scheduling
+from bot.utils import channel
log = get_logger(__name__)
+THREAD_BUMP_ENDPOINT = "bot/bumped-threads"
class ThreadBumper(commands.Cog):
"""Cog that allow users to add the current thread to a list that get reopened on archive."""
- # RedisCache[disnake.Thread.id, "sentinel"]
- threads_to_bump = RedisCache()
-
def __init__(self, bot: Bot):
self.bot = bot
- self.init_task = scheduling.create_task(self.ensure_bumped_threads_are_active(), event_loop=self.bot.loop)
- async def unarchive_threads_not_manually_archived(self, threads: list[disnake.Thread]) -> None:
+ async def thread_exists_in_site(self, thread_id: int) -> bool:
+ """Return whether the given thread_id exists in the site api's bump list."""
+ # If the thread exists, site returns a 204 with no content.
+ # Due to this, `api_client.request()` cannot be used, as it always attempts to decode the response as json.
+ # Instead, call the site manually using the api_client's session, to use the auth token logic in the wrapper.
+
+ async with self.bot.api_client.session.get(
+ f"{self.bot.api_client._url_for(THREAD_BUMP_ENDPOINT)}/{thread_id}"
+ ) as response:
+ if response.status == 204:
+ return True
+ elif response.status == 404:
+ return False
+ else:
+ # A status other than 204/404 is undefined behaviour from site. Raise error for investigation.
+                raise ResponseCodeError(response, await response.text())
+
+ async def unarchive_threads_not_manually_archived(self, threads: list[discord.Thread]) -> None:
"""
Iterate through and unarchive any threads that weren't manually archived recently.
@@ -35,7 +49,7 @@ class ThreadBumper(commands.Cog):
guild = self.bot.get_guild(constants.Guild.id)
recent_manually_archived_thread_ids = []
- async for thread_update in guild.audit_logs(limit=200, action=disnake.AuditLogAction.thread_update):
+ async for thread_update in guild.audit_logs(limit=200, action=discord.AuditLogAction.thread_update):
if getattr(thread_update.after, "archived", False):
recent_manually_archived_thread_ids.append(thread_update.target.id)
@@ -46,27 +60,32 @@ class ThreadBumper(commands.Cog):
thread.name,
thread.id
)
- await self.threads_to_bump.delete(thread.id)
+ await self.bot.api_client.delete(f"{THREAD_BUMP_ENDPOINT}/{thread.id}")
else:
await thread.edit(archived=False)
- async def ensure_bumped_threads_are_active(self) -> None:
+ async def cog_load(self) -> None:
"""Ensure bumped threads are active, since threads could have been archived while the bot was down."""
await self.bot.wait_until_guild_available()
threads_to_maybe_bump = []
- for thread_id, _ in await self.threads_to_bump.items():
+ for thread_id in await self.bot.api_client.get(THREAD_BUMP_ENDPOINT):
try:
thread = await channel.get_or_fetch_channel(thread_id)
- except disnake.NotFound:
+ except discord.NotFound:
log.info("Thread %d has been deleted, removing from bumped threads.", thread_id)
- await self.threads_to_bump.delete(thread_id)
+ await self.bot.api_client.delete(f"{THREAD_BUMP_ENDPOINT}/{thread_id}")
+ continue
+
+ if not isinstance(thread, discord.Thread):
+ await self.bot.api_client.delete(f"{THREAD_BUMP_ENDPOINT}/{thread_id}")
continue
if thread.archived:
threads_to_maybe_bump.append(thread)
- await self.unarchive_threads_not_manually_archived(threads_to_maybe_bump)
+ if threads_to_maybe_bump:
+ await self.unarchive_threads_not_manually_archived(threads_to_maybe_bump)
@commands.group(name="bump")
async def thread_bump_group(self, ctx: commands.Context) -> None:
@@ -75,64 +94,56 @@ class ThreadBumper(commands.Cog):
await ctx.send_help(ctx.command)
@thread_bump_group.command(name="add", aliases=("a",))
- async def add_thread_to_bump_list(self, ctx: commands.Context, thread: t.Optional[disnake.Thread]) -> None:
+ async def add_thread_to_bump_list(self, ctx: commands.Context, thread: t.Optional[discord.Thread]) -> None:
"""Add a thread to the bump list."""
- await self.init_task
-
if not thread:
- if isinstance(ctx.channel, disnake.Thread):
+ if isinstance(ctx.channel, discord.Thread):
thread = ctx.channel
else:
raise commands.BadArgument("You must provide a thread, or run this command within a thread.")
- if await self.threads_to_bump.contains(thread.id):
+ if await self.thread_exists_in_site(thread.id):
raise commands.BadArgument("This thread is already in the bump list.")
- await self.threads_to_bump.set(thread.id, "sentinel")
+ await self.bot.api_client.post(THREAD_BUMP_ENDPOINT, data={"thread_id": thread.id})
await ctx.send(f":ok_hand:{thread.mention} has been added to the bump list.")
@thread_bump_group.command(name="remove", aliases=("r", "rem", "d", "del", "delete"))
- async def remove_thread_from_bump_list(self, ctx: commands.Context, thread: t.Optional[disnake.Thread]) -> None:
+ async def remove_thread_from_bump_list(self, ctx: commands.Context, thread: t.Optional[discord.Thread]) -> None:
"""Remove a thread from the bump list."""
- await self.init_task
-
if not thread:
- if isinstance(ctx.channel, disnake.Thread):
+ if isinstance(ctx.channel, discord.Thread):
thread = ctx.channel
else:
raise commands.BadArgument("You must provide a thread, or run this command within a thread.")
- if not await self.threads_to_bump.contains(thread.id):
+ if not await self.thread_exists_in_site(thread.id):
raise commands.BadArgument("This thread is not in the bump list.")
- await self.threads_to_bump.delete(thread.id)
+ await self.bot.api_client.delete(f"{THREAD_BUMP_ENDPOINT}/{thread.id}")
await ctx.send(f":ok_hand: {thread.mention} has been removed from the bump list.")
@thread_bump_group.command(name="list", aliases=("get",))
async def list_all_threads_in_bump_list(self, ctx: commands.Context) -> None:
"""List all the threads in the bump list."""
- await self.init_task
-
- lines = [f"<#{k}>" for k, _ in await self.threads_to_bump.items()]
- embed = disnake.Embed(
+ lines = [f"<#{thread_id}>" for thread_id in await self.bot.api_client.get(THREAD_BUMP_ENDPOINT)]
+ embed = discord.Embed(
title="Threads in the bump list",
colour=constants.Colours.blue
)
- await LinePaginator.paginate(lines, ctx, embed)
+ await LinePaginator.paginate(lines, ctx, embed, max_lines=10)
@commands.Cog.listener()
- async def on_thread_update(self, _: disnake.Thread, after: disnake.Thread) -> None:
+ async def on_thread_update(self, _: discord.Thread, after: discord.Thread) -> None:
"""
Listen for thread updates and check if the thread has been archived.
If the thread has been archived, and is in the bump list, un-archive it.
"""
- await self.init_task
-
if not after.archived:
return
- if await self.threads_to_bump.contains(after.id):
+ if await self.thread_exists_in_site(after.id):
await self.unarchive_threads_not_manually_archived([after])
async def cog_check(self, ctx: commands.Context) -> bool:
@@ -142,6 +153,6 @@ class ThreadBumper(commands.Cog):
).predicate(ctx)
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the ThreadBumper cog."""
- bot.add_cog(ThreadBumper(bot))
+ await bot.add_cog(ThreadBumper(bot))
diff --git a/bot/exts/utils/utils.py b/bot/exts/utils/utils.py
index 77be3315c..67dd27728 100644
--- a/bot/exts/utils/utils.py
+++ b/bot/exts/utils/utils.py
@@ -3,14 +3,14 @@ import re
import unicodedata
from typing import Tuple, Union
-from disnake import Colour, Embed, utils
-from disnake.ext.commands import BadArgument, Cog, Context, clean_content, command, has_any_role
-from disnake.utils import snowflake_time
+from discord import Colour, Embed, utils
+from discord.ext.commands import BadArgument, Cog, Context, clean_content, command, has_any_role
+from discord.utils import snowflake_time
from bot.bot import Bot
from bot.constants import Channels, MODERATION_ROLES, Roles, STAFF_PARTNERS_COMMUNITY_ROLES
from bot.converters import Snowflake
-from bot.decorators import in_whitelist
+from bot.decorators import in_whitelist, not_in_blacklist
from bot.log import get_logger
from bot.pagination import LinePaginator
from bot.utils import messages, time
@@ -48,7 +48,7 @@ class Utils(Cog):
self.bot = bot
@command()
- @in_whitelist(channels=(Channels.bot_commands, Channels.discord_bots), roles=STAFF_PARTNERS_COMMUNITY_ROLES)
+ @not_in_blacklist(channels=(Channels.python_general,), override_roles=STAFF_PARTNERS_COMMUNITY_ROLES)
async def charinfo(self, ctx: Context, *, characters: str) -> None:
"""Shows you information on up to 50 unicode characters."""
match = re.match(r"<(a?):(\w+):(\d+)>", characters)
@@ -206,6 +206,6 @@ class Utils(Cog):
await message.add_reaction(reaction)
-def setup(bot: Bot) -> None:
+async def setup(bot: Bot) -> None:
"""Load the Utils cog."""
- bot.add_cog(Utils(bot))
+ await bot.add_cog(Utils(bot))
diff --git a/bot/log.py b/bot/log.py
index 0b1d1aca6..100cd06f6 100644
--- a/bot/log.py
+++ b/bot/log.py
@@ -74,7 +74,7 @@ def setup() -> None:
coloredlogs.install(level=TRACE_LEVEL, logger=root_log, stream=sys.stdout)
root_log.setLevel(logging.DEBUG if constants.DEBUG_MODE else logging.INFO)
- get_logger("disnake").setLevel(logging.WARNING)
+ get_logger("discord").setLevel(logging.WARNING)
get_logger("websockets").setLevel(logging.WARNING)
get_logger("chardet").setLevel(logging.WARNING)
get_logger("async_rediscache").setLevel(logging.WARNING)
diff --git a/bot/pagination.py b/bot/pagination.py
index 1a014daa1..8f4353eb1 100644
--- a/bot/pagination.py
+++ b/bot/pagination.py
@@ -3,9 +3,9 @@ import typing as t
from contextlib import suppress
from functools import partial
-import disnake
-from disnake.abc import User
-from disnake.ext.commands import Context, Paginator
+import discord
+from discord.abc import User
+from discord.ext.commands import Context, Paginator
from bot import constants
from bot.log import get_logger
@@ -55,7 +55,7 @@ class LinePaginator(Paginator):
linesep: str = "\n"
) -> None:
"""
- This function overrides the Paginator.__init__ from inside disnake.ext.commands.
+ This function overrides the Paginator.__init__ from inside discord.ext.commands.
It overrides in order to allow us to configure the maximum number of lines per page.
"""
@@ -99,7 +99,7 @@ class LinePaginator(Paginator):
effort to avoid breaking up single lines across pages, while keeping the total length of the
page at a reasonable size.
- This function overrides the `Paginator.add_line` from inside `disnake.ext.commands`.
+ This function overrides the `Paginator.add_line` from inside `discord.ext.commands`.
It overrides in order to allow us to configure the maximum number of lines per page.
"""
@@ -192,7 +192,7 @@ class LinePaginator(Paginator):
cls,
lines: t.List[str],
ctx: Context,
- embed: disnake.Embed,
+ embed: discord.Embed,
prefix: str = "",
suffix: str = "",
max_lines: t.Optional[int] = None,
@@ -204,7 +204,7 @@ class LinePaginator(Paginator):
footer_text: str = None,
url: str = None,
exception_on_empty_embed: bool = False,
- ) -> t.Optional[disnake.Message]:
+ ) -> t.Optional[discord.Message]:
"""
Use a paginator and set of reactions to provide pagination over a set of lines.
@@ -219,7 +219,7 @@ class LinePaginator(Paginator):
to any user with a moderation role.
Example:
- >>> embed = disnake.Embed()
+ >>> embed = discord.Embed()
>>> embed.set_author(name="Some Operation", url=url, icon_url=icon)
>>> await LinePaginator.paginate([line for line in lines], ctx, embed)
"""
@@ -367,5 +367,5 @@ class LinePaginator(Paginator):
await message.edit(embed=embed)
log.debug("Ending pagination and clearing reactions.")
- with suppress(disnake.NotFound):
+ with suppress(discord.NotFound):
await message.clear_reactions()
diff --git a/bot/resources/media/print-return.gif b/bot/resources/media/print-return.gif
new file mode 100644
index 000000000..5d99329dc
--- /dev/null
+++ b/bot/resources/media/print-return.gif
Binary files differ
diff --git a/bot/resources/tags/dashmpip.md b/bot/resources/tags/dashmpip.md
new file mode 100644
index 000000000..0dd866aeb
--- /dev/null
+++ b/bot/resources/tags/dashmpip.md
@@ -0,0 +1,12 @@
+---
+aliases: ["minusmpip"]
+embed:
+ title: "Install packages with `python -m pip`"
+---
+When trying to install a package via `pip`, it's recommended to invoke pip as a module: `python -m pip install your_package`.
+
+**Why would we use `python -m pip` instead of `pip`?**
+Invoking pip as a module ensures you know *which* pip you're using. This is helpful if you have multiple Python versions installed, since you always know which Python version you're installing packages for.
+
+**Note**
+The exact `python` command you invoke can vary. It may be `python3` or `py`; make sure it's the right one for your system.
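The same guarantee is available from inside a Python program; a minimal sketch (not part of the tag, and `requests` is only an example package) that uses `sys.executable` so pip always runs under the interpreter executing the script:
```py
# Hedged sketch: invoke pip for the exact interpreter running this script.
import subprocess
import sys

# Equivalent to `python -m pip install requests` for *this* Python.
subprocess.check_call([sys.executable, "-m", "pip", "install", "requests"])
```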
diff --git a/bot/resources/tags/dictcomps.md b/bot/resources/tags/dictcomps.md
index 6c8018761..75fbe0f8a 100644
--- a/bot/resources/tags/dictcomps.md
+++ b/bot/resources/tags/dictcomps.md
@@ -11,4 +11,4 @@ One can use a dict comp to change an existing dictionary using its `items` metho
>>> {key.upper(): value * 2 for key, value in first_dict.items()}
{'I': 2, 'LOVE': 8, 'PYTHON': 12}
```
-For more information and examples, check out [PEP 274](https://www.python.org/dev/peps/pep-0274/)
+For more information and examples, check out [PEP 274](https://peps.python.org/pep-0274/)
diff --git a/bot/resources/tags/docstring.md b/bot/resources/tags/docstring.md
index 20043131e..6e9d9aa09 100644
--- a/bot/resources/tags/docstring.md
+++ b/bot/resources/tags/docstring.md
@@ -15,4 +15,4 @@ You can get the docstring by using the [`inspect.getdoc`](https://docs.python.or
For the last example, you can print it by doing this: `print(inspect.getdoc(greet))`.
-For more details about what a docstring is and its usage, check out this guide by [Real Python](https://realpython.com/documenting-python-code/#docstrings-background), or the [official docstring specification](https://www.python.org/dev/peps/pep-0257/#what-is-a-docstring).
+For more details about what a docstring is and its usage, check out this guide by [Real Python](https://realpython.com/documenting-python-code/#docstrings-background), or the [official docstring specification](https://peps.python.org/pep-0257/#what-is-a-docstring).
diff --git a/bot/resources/tags/enumerate.md b/bot/resources/tags/enumerate.md
index dd984af52..da9c86a36 100644
--- a/bot/resources/tags/enumerate.md
+++ b/bot/resources/tags/enumerate.md
@@ -10,4 +10,4 @@ into beautiful, _pythonic_ code:
for index, item in enumerate(my_list):
print(f"{index}: {item}")
```
-For more information, check out [the official docs](https://docs.python.org/3/library/functions.html#enumerate), or [PEP 279](https://www.python.org/dev/peps/pep-0279/).
+For more information, check out [the official docs](https://docs.python.org/3/library/functions.html#enumerate), or [PEP 279](https://peps.python.org/pep-0279/).
diff --git a/bot/resources/tags/f-strings.md b/bot/resources/tags/f-strings.md
index 5ccafe723..ab6ec75c9 100644
--- a/bot/resources/tags/f-strings.md
+++ b/bot/resources/tags/f-strings.md
@@ -1,3 +1,6 @@
+---
+aliases: ["fstrings", "fstring", "f-string"]
+---
Creating a Python string with your variables using the `+` operator can be difficult to write and read. F-strings (*format-strings*) make it easy to insert values into a string. If you put an `f` in front of the first quote, you can then put Python expressions between curly braces in the string.
```py
diff --git a/bot/resources/tags/indent.md b/bot/resources/tags/indent.md
index dec8407b0..4c3cdd126 100644
--- a/bot/resources/tags/indent.md
+++ b/bot/resources/tags/indent.md
@@ -16,9 +16,9 @@ The first line is not indented. The next two lines are indented to be inside of
**Indentation is used after:**
**1.** [Compound statements](https://docs.python.org/3/reference/compound_stmts.html) (eg. `if`, `while`, `for`, `try`, `with`, `def`, `class`, and their counterparts)
-**2.** [Continuation lines](https://www.python.org/dev/peps/pep-0008/#indentation)
+**2.** [Continuation lines](https://peps.python.org/pep-0008/#indentation)
**More Info**
-**1.** [Indentation style guide](https://www.python.org/dev/peps/pep-0008/#indentation)
-**2.** [Tabs or Spaces?](https://www.python.org/dev/peps/pep-0008/#tabs-or-spaces)
+**1.** [Indentation style guide](https://peps.python.org/pep-0008/#indentation)
+**2.** [Tabs or Spaces?](https://peps.python.org/pep-0008/#tabs-or-spaces)
**3.** [Official docs on indentation](https://docs.python.org/3/reference/lexical_analysis.html#indentation)
diff --git a/bot/resources/tags/intents.md b/bot/resources/tags/intents.md
index 464caf0ba..aa49d59ae 100644
--- a/bot/resources/tags/intents.md
+++ b/bot/resources/tags/intents.md
@@ -1,6 +1,6 @@
**Using intents in discord.py**
-Intents are a feature of Discord that tells the gateway exactly which events to send your bot. By default, discord.py has all intents enabled, except for the `Members` and `Presences` intents, which are needed for events such as `on_member` and to get members' statuses.
+Intents are a feature of Discord that tells the gateway exactly which events to send your bot. By default, discord.py has all intents enabled except for `Members`, `Message Content`, and `Presences`. These are needed for features such as `on_member` events, access to message content, and members' statuses.
To enable one of these intents, you need to first go to the [Discord developer portal](https://discord.com/developers/applications), then to the bot page of your bot's application. Scroll down to the `Privileged Gateway Intents` section, then enable the intents that you need.
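For reference, a minimal discord.py 2.x sketch (not part of the tag; the prefix is an arbitrary example) showing how the privileged intents are switched on in code once they have been enabled in the developer portal:
```py
# Hedged sketch: request the privileged intents when constructing the bot.
import discord
from discord.ext import commands

intents = discord.Intents.default()
intents.members = True          # member events such as on_member_join
intents.message_content = True  # reading message content
intents.presences = True        # members' statuses

bot = commands.Bot(command_prefix="!", intents=intents)
```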
diff --git a/bot/resources/tags/or-gotcha.md b/bot/resources/tags/or-gotcha.md
index d75a73d78..25ade8620 100644
--- a/bot/resources/tags/or-gotcha.md
+++ b/bot/resources/tags/or-gotcha.md
@@ -1,5 +1,6 @@
When checking if something is equal to one thing or another, you might think that this is possible:
```py
+# Incorrect...
if favorite_fruit == 'grapefruit' or 'lemon':
print("That's a weird favorite fruit to have.")
```
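The tag's correction falls outside this hunk; for comparison, a minimal sketch of the usual fix, comparing against each value explicitly or testing membership:
```py
# Hedged sketch: both forms actually check the value of favorite_fruit.
favorite_fruit = "apple"

if favorite_fruit == "grapefruit" or favorite_fruit == "lemon":
    print("That's a weird favorite fruit to have.")

if favorite_fruit in ("grapefruit", "lemon"):  # equivalent, and shorter
    print("That's a weird favorite fruit to have.")
```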
diff --git a/bot/resources/tags/paste.md b/bot/resources/tags/paste.md
index 8c3c2985d..d2d54d48e 100644
--- a/bot/resources/tags/paste.md
+++ b/bot/resources/tags/paste.md
@@ -1,6 +1,6 @@
**Pasting large amounts of code**
-If your code is too long to fit in a codeblock in discord, you can paste your code here:
+If your code is too long to fit in a codeblock in Discord, you can paste your code here:
https://paste.pythondiscord.com/
After pasting your code, **save** it by clicking the floppy disk icon in the top right, or by typing `ctrl + S`. After doing that, the URL should **change**. Copy the URL and post it here so others can see it.
diff --git a/bot/resources/tags/pathlib.md b/bot/resources/tags/pathlib.md
index dfeb7ecac..24ca895d8 100644
--- a/bot/resources/tags/pathlib.md
+++ b/bot/resources/tags/pathlib.md
@@ -18,4 +18,4 @@ Python 3 comes with a new module named `Pathlib`. Since Python 3.6, `pathlib.Pat
• [**Why you should use pathlib** - Trey Hunner](https://treyhunner.com/2018/12/why-you-should-be-using-pathlib/)
• [**Answering concerns about pathlib** - Trey Hunner](https://treyhunner.com/2019/01/no-really-pathlib-is-great/)
• [**Official Documentation**](https://docs.python.org/3/library/pathlib.html)
-• [**PEP 519** - Adding a file system path protocol](https://www.python.org/dev/peps/pep-0519/)
+• [**PEP 519** - Adding a file system path protocol](https://peps.python.org/pep-0519/)
diff --git a/bot/resources/tags/pep8.md b/bot/resources/tags/pep8.md
index 57b176122..a2510d697 100644
--- a/bot/resources/tags/pep8.md
+++ b/bot/resources/tags/pep8.md
@@ -1,5 +1,5 @@
**PEP 8** is the official style guide for Python. It includes comprehensive guidelines for code formatting, variable naming, and making your code easy to read. Professional Python developers are usually required to follow the guidelines, and will often use code-linters like flake8 to verify that the code they're writing complies with the style guide.
More information:
-• [PEP 8 document](https://www.python.org/dev/peps/pep-0008)
+• [PEP 8 document](https://peps.python.org/pep-0008/)
• [Our PEP 8 song!](https://www.youtube.com/watch?v=hgI0p1zf31k) :notes:
diff --git a/bot/resources/tags/positional-keyword.md b/bot/resources/tags/positional-keyword.md
index dd6ddfc4b..d6b4e0cd4 100644
--- a/bot/resources/tags/positional-keyword.md
+++ b/bot/resources/tags/positional-keyword.md
@@ -19,7 +19,7 @@ def sum(a, b=1):
sum(1, b=5)
sum(1, 5) # same as above
```
-[Somtimes this is forced](https://www.python.org/dev/peps/pep-0570/#history-of-positional-only-parameter-semantics-in-python), in the case of the `pow()` function.
+[Sometimes this is forced](https://peps.python.org/pep-0570/#history-of-positional-only-parameter-semantics-in-python), as in the case of the `pow()` function.
The reverse is also true:
```py
@@ -33,6 +33,6 @@ The reverse is also true:
```
**More info**
-• [Keyword only arguments](https://www.python.org/dev/peps/pep-3102/)
-• [Positional only arguments](https://www.python.org/dev/peps/pep-0570/)
+• [Keyword only arguments](https://peps.python.org/pep-3102/)
+• [Positional only arguments](https://peps.python.org/pep-0570/)
• `!tags param-arg` (Parameters vs. Arguments)
diff --git a/bot/resources/tags/print-return.md b/bot/resources/tags/print-return.md
new file mode 100644
index 000000000..89d37053f
--- /dev/null
+++ b/bot/resources/tags/print-return.md
@@ -0,0 +1,9 @@
+---
+embed:
+ title: Print and Return
+ image:
+ url: https://raw.githubusercontent.com/python-discord/bot/main/bot/resources/media/print-return.gif
+---
+Here's a handy animation demonstrating how `print` and `return` differ in behavior.
+
+See also: `!tags return`
diff --git a/bot/resources/tags/quotes.md b/bot/resources/tags/quotes.md
index 8421748a1..99ce93f61 100644
--- a/bot/resources/tags/quotes.md
+++ b/bot/resources/tags/quotes.md
@@ -16,5 +16,5 @@ Example:
If you need both single and double quotes inside your string, use the version that would result in the least amount of escapes. In the case of a tie, use the quotation you use the most.
**References:**
-• [pep-8 on quotes](https://www.python.org/dev/peps/pep-0008/#string-quotes)
-• [convention for triple quoted strings](https://www.python.org/dev/peps/pep-0257/)
+• [pep-8 on quotes](https://peps.python.org/pep-0008/#string-quotes)
+• [convention for triple quoted strings](https://peps.python.org/pep-0257/)
diff --git a/bot/resources/tags/regex.md b/bot/resources/tags/regex.md
index 35fee45a9..ae7960b37 100644
--- a/bot/resources/tags/regex.md
+++ b/bot/resources/tags/regex.md
@@ -5,9 +5,9 @@ Regular expressions (regex) are a tool for finding patterns in strings. The stan
We can use regex to pull out all the numbers in a sentence:
```py
>>> import re
->>> x = "On Oct 18 1963 a cat was launched aboard rocket #47"
+>>> text = "On Oct 18 1963 a cat was launched aboard rocket #47"
>>> regex_pattern = r"[0-9]{1,3}" # Matches 1-3 digits
->>> re.findall(regex_pattern, foo)
+>>> re.findall(regex_pattern, text)
['18', '196', '3', '47'] # Notice the year is cut off
```
**See Also**
diff --git a/bot/resources/tags/resources.md b/bot/resources/tags/resources.md
deleted file mode 100644
index 201e0eb1e..000000000
--- a/bot/resources/tags/resources.md
+++ /dev/null
@@ -1,6 +0,0 @@
----
-embed:
- title: "Resources"
----
-
-The [Resources page](https://www.pythondiscord.com/resources/) on our website contains a list of hand-selected learning resources that we regularly recommend to both beginners and experts.
diff --git a/bot/resources/tags/sql-fstring.md b/bot/resources/tags/sql-fstring.md
index 94dd870fd..538a0aa87 100644
--- a/bot/resources/tags/sql-fstring.md
+++ b/bot/resources/tags/sql-fstring.md
@@ -13,4 +13,4 @@ Note: Different database libraries support different placeholder styles, e.g. `%
**See Also**
• [Extended Example with SQLite](https://docs.python.org/3/library/sqlite3.html) (search for "Instead, use the DB-API's parameter substitution")
-• [PEP-249](https://www.python.org/dev/peps/pep-0249) - A specification of how database libraries in Python should work
+• [PEP-249](https://peps.python.org/pep-0249/) - A specification of how database libraries in Python should work
diff --git a/bot/resources/tags/star-imports.md b/bot/resources/tags/star-imports.md
index 3b1b6a858..6e20e2b09 100644
--- a/bot/resources/tags/star-imports.md
+++ b/bot/resources/tags/star-imports.md
@@ -36,4 +36,4 @@ Conclusion: Namespaces are one honking great idea -- let's do more of those! *[3
**[1]** If the module defines the variable `__all__`, the names defined in `__all__` will get imported by the wildcard import, otherwise all the names in the module get imported (except for names with a leading underscore)
**[2]** [Namespaces and scopes](https://www.programiz.com/python-programming/namespace)
-**[3]** [Zen of Python](https://www.python.org/dev/peps/pep-0020/)
+**[3]** [Zen of Python](https://peps.python.org/pep-0020/)
diff --git a/bot/resources/tags/type-hint.md b/bot/resources/tags/type-hint.md
new file mode 100644
index 000000000..f4a12f125
--- /dev/null
+++ b/bot/resources/tags/type-hint.md
@@ -0,0 +1,19 @@
+**Type Hints**
+
+A type hint indicates what type a variable is expected to be.
+```python
+def add(a: int, b: int) -> int:
+ return a + b
+```
+The type hints indicate that for our `add` function the parameters `a` and `b` should be integers, and the function should return an integer when called.
+
+It's important to note these are just hints and are not enforced at runtime.
+
+```python
+add("hello ", "world")
+```
+The above code won't error even though it doesn't follow the function's type hints; the two strings will be concatenated as normal.
+
+Third-party tools like [mypy](https://mypy.readthedocs.io/en/stable/introduction.html) can validate your code to ensure it is type hinted correctly. This can help you identify potentially buggy code; for example, mypy would flag the second example, as our `add` function is not intended to concatenate strings.
+
+[mypy's documentation](https://mypy.readthedocs.io/en/stable/builtin_types.html) contains useful information on type hinting, and for more information check out [this documentation page](https://typing.readthedocs.io/en/latest/index.html).
diff --git a/bot/resources/tags/with.md b/bot/resources/tags/with.md
index 62d5612f2..83f160b4f 100644
--- a/bot/resources/tags/with.md
+++ b/bot/resources/tags/with.md
@@ -5,4 +5,4 @@ with open("test.txt", "r") as file:
```
The above code automatically closes `file` when the `with` block exits, so you never have to manually do a `file.close()`. Most connection types, including file readers and database connections, support this.
-For more information, read [the official docs](https://docs.python.org/3/reference/compound_stmts.html#with), watch [Corey Schafer\'s context manager video](https://www.youtube.com/watch?v=-aKFBoZpiqA), or see [PEP 343](https://www.python.org/dev/peps/pep-0343/).
+For more information, read [the official docs](https://docs.python.org/3/reference/compound_stmts.html#with), watch [Corey Schafer\'s context manager video](https://www.youtube.com/watch?v=-aKFBoZpiqA), or see [PEP 343](https://peps.python.org/pep-0343/).
diff --git a/bot/rules/attachments.py b/bot/rules/attachments.py
index 9c890e569..8903c385c 100644
--- a/bot/rules/attachments.py
+++ b/bot/rules/attachments.py
@@ -1,6 +1,6 @@
from typing import Dict, Iterable, List, Optional, Tuple
-from disnake import Member, Message
+from discord import Member, Message
async def apply(
diff --git a/bot/rules/burst.py b/bot/rules/burst.py
index a943cfdeb..25c5a2f33 100644
--- a/bot/rules/burst.py
+++ b/bot/rules/burst.py
@@ -1,6 +1,6 @@
from typing import Dict, Iterable, List, Optional, Tuple
-from disnake import Member, Message
+from discord import Member, Message
async def apply(
diff --git a/bot/rules/burst_shared.py b/bot/rules/burst_shared.py
index dee857e18..bbe9271b3 100644
--- a/bot/rules/burst_shared.py
+++ b/bot/rules/burst_shared.py
@@ -1,6 +1,6 @@
from typing import Dict, Iterable, List, Optional, Tuple
-from disnake import Member, Message
+from discord import Member, Message
async def apply(
diff --git a/bot/rules/chars.py b/bot/rules/chars.py
index 6d2f6eb83..1f587422c 100644
--- a/bot/rules/chars.py
+++ b/bot/rules/chars.py
@@ -1,6 +1,6 @@
from typing import Dict, Iterable, List, Optional, Tuple
-from disnake import Member, Message
+from discord import Member, Message
async def apply(
diff --git a/bot/rules/discord_emojis.py b/bot/rules/discord_emojis.py
index 4fe4e88f9..d979ac5e7 100644
--- a/bot/rules/discord_emojis.py
+++ b/bot/rules/discord_emojis.py
@@ -1,7 +1,7 @@
import re
from typing import Dict, Iterable, List, Optional, Tuple
-from disnake import Member, Message
+from discord import Member, Message
from emoji import demojize
DISCORD_EMOJI_RE = re.compile(r"<:\w+:\d+>|:\w+:")
diff --git a/bot/rules/duplicates.py b/bot/rules/duplicates.py
index 77e393db0..8e4fbc12d 100644
--- a/bot/rules/duplicates.py
+++ b/bot/rules/duplicates.py
@@ -1,6 +1,6 @@
from typing import Dict, Iterable, List, Optional, Tuple
-from disnake import Member, Message
+from discord import Member, Message
async def apply(
diff --git a/bot/rules/links.py b/bot/rules/links.py
index 92c13b3f4..c46b783c5 100644
--- a/bot/rules/links.py
+++ b/bot/rules/links.py
@@ -1,7 +1,7 @@
import re
from typing import Dict, Iterable, List, Optional, Tuple
-from disnake import Member, Message
+from discord import Member, Message
LINK_RE = re.compile(r"(https?://[^\s]+)")
diff --git a/bot/rules/mentions.py b/bot/rules/mentions.py
index 7ee66be31..ca1d0c01c 100644
--- a/bot/rules/mentions.py
+++ b/bot/rules/mentions.py
@@ -1,23 +1,65 @@
from typing import Dict, Iterable, List, Optional, Tuple
-from disnake import Member, Message
+from discord import DeletedReferencedMessage, Member, Message, MessageType, NotFound
+
+import bot
+from bot.log import get_logger
+
+log = get_logger(__name__)
async def apply(
last_message: Message, recent_messages: List[Message], config: Dict[str, int]
) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]:
- """Detects total mentions exceeding the limit sent by a single user."""
+ """
+ Detects when the total mentions sent by a single user exceed the limit.
+
+ Excludes mentions of bots, the author themselves, and replied-to users.
+
+ In very rare cases, it may not be possible to determine that a
+ mention belongs to a reply, in which case it is not ignored.
+ """
relevant_messages = tuple(
msg
for msg in recent_messages
if msg.author == last_message.author
)
+ # We use `msg.mentions` to determine who was mentioned, as it is supplied by the API itself.
+ # Additionally, `msg.mentions` includes the user replied to, even if the mention doesn't occur in the body.
+ # To exclude users who are only mentioned via a reply, we check whether the message has a reference.
+ #
+ # While we could use regex to parse the message content and get a list of
+ # the mentions ourselves, that solution is very prone to breaking.
+ # We would need to deal with codeblocks, escaped markdown, and any discrepancies between
+ # our implementation and Discord's markdown parser, which would cause false positives or false negatives.
+ total_recent_mentions = 0
+ for msg in relevant_messages:
+ # Check if the message is a reply and, if so, try to get its author,
+ # since mentions of the user being replied to are ignored.
+ reply_author = None
- total_recent_mentions = sum(
- not user.bot
- for msg in relevant_messages
- for user in msg.mentions
- )
+ if msg.type == MessageType.reply:
+ ref = msg.reference
+
+ if not (resolved := ref.resolved):
+ # It is possible, in a very unusual situation, for a message to have a reference
+ # that is both not in the cache and deleted while running this function.
+ # In such a situation, the fetch below will throw an error, which we catch.
+ try:
+ resolved = await bot.instance.get_partial_messageable(ref.channel_id).fetch_message(
+ ref.message_id
+ )
+ except NotFound:
+ log.info('Could not fetch the reference message as it has been deleted.')
+
+ if resolved and not isinstance(resolved, DeletedReferencedMessage):
+ reply_author = resolved.author
+
+ for user in msg.mentions:
+ # Don't count bot or self mentions, or the user being replied to (if applicable)
+ if user.bot or user in {msg.author, reply_author}:
+ continue
+ total_recent_mentions += 1
if total_recent_mentions > config['max']:
return (
diff --git a/bot/rules/newlines.py b/bot/rules/newlines.py
index 45266648e..4e66e1359 100644
--- a/bot/rules/newlines.py
+++ b/bot/rules/newlines.py
@@ -1,7 +1,7 @@
import re
from typing import Dict, Iterable, List, Optional, Tuple
-from disnake import Member, Message
+from discord import Member, Message
async def apply(
diff --git a/bot/rules/role_mentions.py b/bot/rules/role_mentions.py
index 1f7a6a74d..0649540b6 100644
--- a/bot/rules/role_mentions.py
+++ b/bot/rules/role_mentions.py
@@ -1,6 +1,6 @@
from typing import Dict, Iterable, List, Optional, Tuple
-from disnake import Member, Message
+from discord import Member, Message
async def apply(
diff --git a/bot/utils/__init__.py b/bot/utils/__init__.py
index 13533a467..567821126 100644
--- a/bot/utils/__init__.py
+++ b/bot/utils/__init__.py
@@ -1,4 +1,12 @@
from bot.utils.helpers import CogABCMeta, find_nth_occurrence, has_lines, pad_base64
-from bot.utils.services import send_to_paste_service
+from bot.utils.services import PasteTooLongError, PasteUploadError, send_to_paste_service
-__all__ = ['CogABCMeta', 'find_nth_occurrence', 'has_lines', 'pad_base64', 'send_to_paste_service']
+__all__ = [
+ 'CogABCMeta',
+ 'find_nth_occurrence',
+ 'has_lines',
+ 'pad_base64',
+ 'send_to_paste_service',
+ 'PasteUploadError',
+ 'PasteTooLongError',
+]
diff --git a/bot/utils/channel.py b/bot/utils/channel.py
index ee0c87311..954a10e56 100644
--- a/bot/utils/channel.py
+++ b/bot/utils/channel.py
@@ -1,6 +1,6 @@
from typing import Union
-import disnake
+import discord
import bot
from bot import constants
@@ -10,7 +10,7 @@ from bot.log import get_logger
log = get_logger(__name__)
-def is_help_channel(channel: disnake.TextChannel) -> bool:
+def is_help_channel(channel: discord.TextChannel) -> bool:
"""Return True if `channel` is in one of the help categories (excluding dormant)."""
log.trace(f"Checking if #{channel} is a help channel.")
categories = (Categories.help_available, Categories.help_in_use)
@@ -18,9 +18,9 @@ def is_help_channel(channel: disnake.TextChannel) -> bool:
return any(is_in_category(channel, category) for category in categories)
-def is_mod_channel(channel: Union[disnake.TextChannel, disnake.Thread]) -> bool:
+def is_mod_channel(channel: Union[discord.TextChannel, discord.Thread]) -> bool:
"""True if channel, or channel.parent for threads, is considered a mod channel."""
- if isinstance(channel, disnake.Thread):
+ if isinstance(channel, discord.Thread):
channel = channel.parent
if channel.id in constants.MODERATION_CHANNELS:
@@ -36,11 +36,11 @@ def is_mod_channel(channel: Union[disnake.TextChannel, disnake.Thread]) -> bool:
return False
-def is_staff_channel(channel: disnake.TextChannel) -> bool:
+def is_staff_channel(channel: discord.TextChannel) -> bool:
"""True if `channel` is considered a staff channel."""
guild = bot.instance.get_guild(constants.Guild.id)
- if channel.type is disnake.ChannelType.category:
+ if channel.type is discord.ChannelType.category:
return False
# Channel is staff-only if staff have explicit read allow perms
@@ -52,12 +52,12 @@ def is_staff_channel(channel: disnake.TextChannel) -> bool:
)
-def is_in_category(channel: disnake.TextChannel, category_id: int) -> bool:
+def is_in_category(channel: discord.TextChannel, category_id: int) -> bool:
"""Return True if `channel` is within a category with `category_id`."""
return getattr(channel, "category_id", None) == category_id
-async def get_or_fetch_channel(channel_id: int) -> disnake.abc.GuildChannel:
+async def get_or_fetch_channel(channel_id: int) -> discord.abc.GuildChannel:
"""Attempt to get or fetch a channel and return it."""
log.trace(f"Getting the channel {channel_id}.")
diff --git a/bot/utils/checks.py b/bot/utils/checks.py
index 9aa9bdc14..188285684 100644
--- a/bot/utils/checks.py
+++ b/bot/utils/checks.py
@@ -1,6 +1,6 @@
from typing import Callable, Container, Iterable, Optional, Union
-from disnake.ext.commands import (
+from discord.ext.commands import (
BucketType, CheckFailure, Cog, Command, CommandOnCooldown, Context, Cooldown, CooldownMapping, NoPrivateMessage,
has_any_role
)
@@ -135,7 +135,7 @@ def cooldown_with_role_bypass(rate: int, per: float, type: BucketType = BucketTy
if any(role.id in bypass for role in ctx.author.roles):
return
- # cooldown logic, taken from disnake's internals
+ # cooldown logic, taken from discord.py internals
current = ctx.message.created_at.timestamp()
bucket = buckets.get_bucket(ctx.message)
retry_after = bucket.update_rate_limit(current)
diff --git a/bot/utils/extensions.py b/bot/utils/extensions.py
deleted file mode 100644
index 50350ea8d..000000000
--- a/bot/utils/extensions.py
+++ /dev/null
@@ -1,34 +0,0 @@
-import importlib
-import inspect
-import pkgutil
-from typing import Iterator, NoReturn
-
-from bot import exts
-
-
-def unqualify(name: str) -> str:
- """Return an unqualified name given a qualified module/package `name`."""
- return name.rsplit(".", maxsplit=1)[-1]
-
-
-def walk_extensions() -> Iterator[str]:
- """Yield extension names from the bot.exts subpackage."""
-
- def on_error(name: str) -> NoReturn:
- raise ImportError(name=name) # pragma: no cover
-
- for module in pkgutil.walk_packages(exts.__path__, f"{exts.__name__}.", onerror=on_error):
- if unqualify(module.name).startswith("_"):
- # Ignore module/package names starting with an underscore.
- continue
-
- if module.ispkg:
- imported = importlib.import_module(module.name)
- if not inspect.isfunction(getattr(imported, "setup", None)):
- # If it lacks a setup function, it's not an extension.
- continue
-
- yield module.name
-
-
-EXTENSIONS = frozenset(walk_extensions())
diff --git a/bot/utils/function.py b/bot/utils/function.py
index bb6d8afe3..55115d7d3 100644
--- a/bot/utils/function.py
+++ b/bot/utils/function.py
@@ -94,7 +94,7 @@ def update_wrapper_globals(
"""
Update globals of `wrapper` with the globals from `wrapped`.
- For forwardrefs in command annotations disnake uses the __global__ attribute of the function
+ For forwardrefs in command annotations, discord.py uses the __globals__ attribute of the function
to resolve their values, with decorators that replace the function this breaks because they have
their own globals.
@@ -103,7 +103,7 @@ def update_wrapper_globals(
An exception will be raised in case `wrapper` and `wrapped` share a global name that is used by
`wrapped`'s typehints and is not in `ignored_conflict_names`,
- as this can cause incorrect objects being used by disnake's converters.
+ as this can cause incorrect objects to be used by discord.py's converters.
"""
annotation_global_names = (
ann.split(".", maxsplit=1)[0] for ann in wrapped.__annotations__.values() if isinstance(ann, str)
@@ -136,7 +136,7 @@ def command_wraps(
*,
ignored_conflict_names: t.Set[str] = frozenset(),
) -> t.Callable[[types.FunctionType], types.FunctionType]:
- """Update the decorated function to look like `wrapped` and update globals for disnake forwardref evaluation."""
+ """Update the decorated function to look like `wrapped` and update globals for discordpy forwardref evaluation."""
def decorator(wrapper: types.FunctionType) -> types.FunctionType:
return functools.update_wrapper(
update_wrapper_globals(wrapper, wrapped, ignored_conflict_names=ignored_conflict_names),
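To make the docstring's rationale concrete, a small self-contained sketch (names invented for illustration, with `typing.get_type_hints` standing in for the converter machinery) of why a forward reference only resolves against the right globals:
```py
# Hedged sketch: a string annotation is evaluated against a globals dict,
# so it only resolves when that dict contains the referenced name.
import typing


class Member:  # stand-in for a converter target such as discord.Member
    pass


def command(member: "Member") -> None:
    pass


# Resolving with globals that contain "Member" works...
print(typing.get_type_hints(command, globalns={"Member": Member}))

# ...but a wrapper defined in another module has different globals,
# and the same annotation then fails to resolve.
try:
    typing.get_type_hints(command, globalns={})
except NameError as exc:
    print(f"Unresolved forward reference: {exc}")
```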
diff --git a/bot/utils/helpers.py b/bot/utils/helpers.py
index 859f53fdb..3501a3933 100644
--- a/bot/utils/helpers.py
+++ b/bot/utils/helpers.py
@@ -1,7 +1,7 @@
from abc import ABCMeta
from typing import Optional
-from disnake.ext.commands import CogMeta
+from discord.ext.commands import CogMeta
class CogABCMeta(CogMeta, ABCMeta):
diff --git a/bot/utils/members.py b/bot/utils/members.py
index d46baae5b..693286045 100644
--- a/bot/utils/members.py
+++ b/bot/utils/members.py
@@ -1,13 +1,13 @@
import typing as t
-import disnake
+import discord
from bot.log import get_logger
log = get_logger(__name__)
-async def get_or_fetch_member(guild: disnake.Guild, member_id: int) -> t.Optional[disnake.Member]:
+async def get_or_fetch_member(guild: discord.Guild, member_id: int) -> t.Optional[discord.Member]:
"""
Attempt to get a member from cache; on failure fetch from the API.
@@ -18,7 +18,7 @@ async def get_or_fetch_member(guild: disnake.Guild, member_id: int) -> t.Optiona
else:
try:
member = await guild.fetch_member(member_id)
- except disnake.errors.NotFound:
+ except discord.errors.NotFound:
log.trace("Failed to fetch %d from API.", member_id)
return None
log.trace("%s fetched from API.", member)
@@ -26,23 +26,23 @@ async def get_or_fetch_member(guild: disnake.Guild, member_id: int) -> t.Optiona
async def handle_role_change(
- member: disnake.Member,
+ member: discord.Member,
coro: t.Callable[..., t.Coroutine],
- role: disnake.Role
+ role: discord.Role
) -> None:
"""
Change `member`'s cooldown role via awaiting `coro` and handle errors.
- `coro` is intended to be `disnake.Member.add_roles` or `disnake.Member.remove_roles`.
+ `coro` is intended to be `discord.Member.add_roles` or `discord.Member.remove_roles`.
"""
try:
await coro(role)
- except disnake.NotFound:
+ except discord.NotFound:
log.debug(f"Failed to change role for {member} ({member.id}): member not found")
- except disnake.Forbidden:
+ except discord.Forbidden:
log.debug(
f"Forbidden to change role for {member} ({member.id}); "
f"possibly due to role hierarchy"
)
- except disnake.HTTPException as e:
+ except discord.HTTPException as e:
log.error(f"Failed to change role for {member} ({member.id}): {e.status} {e.code}")
diff --git a/bot/utils/message_cache.py b/bot/utils/message_cache.py
index edf2111e9..f68d280c9 100644
--- a/bot/utils/message_cache.py
+++ b/bot/utils/message_cache.py
@@ -1,7 +1,7 @@
import typing as t
from math import ceil
-from disnake import Message
+from discord import Message
class MessageCache:
diff --git a/bot/utils/messages.py b/bot/utils/messages.py
index 0bdb00a29..a5ed84351 100644
--- a/bot/utils/messages.py
+++ b/bot/utils/messages.py
@@ -5,20 +5,20 @@ from functools import partial
from io import BytesIO
from typing import Callable, List, Optional, Sequence, Union
-import disnake
-from disnake.ext.commands import Context
+import discord
+from botcore.utils import scheduling
+from discord.ext.commands import Context
import bot
from bot.constants import Emojis, MODERATION_ROLES, NEGATIVE_REPLIES
from bot.log import get_logger
-from bot.utils import scheduling
log = get_logger(__name__)
def reaction_check(
- reaction: disnake.Reaction,
- user: disnake.abc.User,
+ reaction: discord.Reaction,
+ user: discord.abc.User,
*,
message_id: int,
allowed_emoji: Sequence[str],
@@ -51,14 +51,14 @@ def reaction_check(
log.trace(f"Removing reaction {reaction} by {user} on {reaction.message.id}: disallowed user.")
scheduling.create_task(
reaction.message.remove_reaction(reaction.emoji, user),
- suppressed_exceptions=(disnake.HTTPException,),
+ suppressed_exceptions=(discord.HTTPException,),
name=f"remove_reaction-{reaction}-{reaction.message.id}-{user}"
)
return False
async def wait_for_deletion(
- message: disnake.Message,
+ message: discord.Message,
user_ids: Sequence[int],
deletion_emojis: Sequence[str] = (Emojis.trashcan,),
timeout: float = 60 * 5,
@@ -82,7 +82,7 @@ async def wait_for_deletion(
for emoji in deletion_emojis:
try:
await message.add_reaction(emoji)
- except disnake.NotFound:
+ except discord.NotFound:
log.trace(f"Aborting wait_for_deletion: message {message.id} deleted prematurely.")
return
@@ -101,13 +101,13 @@ async def wait_for_deletion(
await message.clear_reactions()
else:
await message.delete()
- except disnake.NotFound:
+ except discord.NotFound:
log.trace(f"wait_for_deletion: message {message.id} deleted prematurely.")
async def send_attachments(
- message: disnake.Message,
- destination: Union[disnake.TextChannel, disnake.Webhook],
+ message: discord.Message,
+ destination: Union[discord.TextChannel, discord.Webhook],
link_large: bool = True,
use_cached: bool = False,
**kwargs
@@ -140,9 +140,9 @@ async def send_attachments(
if attachment.size <= destination.guild.filesize_limit - 512:
with BytesIO() as file:
await attachment.save(file, use_cached=use_cached)
- attachment_file = disnake.File(file, filename=attachment.filename)
+ attachment_file = discord.File(file, filename=attachment.filename)
- if isinstance(destination, disnake.TextChannel):
+ if isinstance(destination, discord.TextChannel):
msg = await destination.send(file=attachment_file, **kwargs)
urls.append(msg.attachments[0].url)
else:
@@ -151,7 +151,7 @@ async def send_attachments(
large.append(attachment)
else:
log.info(f"{failure_msg} because it's too large.")
- except disnake.HTTPException as e:
+ except discord.HTTPException as e:
if link_large and e.status == 413:
large.append(attachment)
else:
@@ -159,10 +159,10 @@ async def send_attachments(
if link_large and large:
desc = "\n".join(f"[{attachment.filename}]({attachment.url})" for attachment in large)
- embed = disnake.Embed(description=desc)
+ embed = discord.Embed(description=desc)
embed.set_footer(text="Attachments exceed upload size limit.")
- if isinstance(destination, disnake.TextChannel):
+ if isinstance(destination, discord.TextChannel):
await destination.send(embed=embed, **kwargs)
else:
await destination.send(embed=embed, **webhook_send_kwargs)
@@ -171,9 +171,9 @@ async def send_attachments(
async def count_unique_users_reaction(
- message: disnake.Message,
- reaction_predicate: Callable[[disnake.Reaction], bool] = lambda _: True,
- user_predicate: Callable[[disnake.User], bool] = lambda _: True,
+ message: discord.Message,
+ reaction_predicate: Callable[[discord.Reaction], bool] = lambda _: True,
+ user_predicate: Callable[[discord.User], bool] = lambda _: True,
count_bots: bool = True
) -> int:
"""
@@ -193,7 +193,7 @@ async def count_unique_users_reaction(
return len(unique_users)
-async def pin_no_system_message(message: disnake.Message) -> bool:
+async def pin_no_system_message(message: discord.Message) -> bool:
"""Pin the given message, wait a couple of seconds and try to delete the system message."""
await message.pin()
@@ -201,7 +201,7 @@ async def pin_no_system_message(message: disnake.Message) -> bool:
await asyncio.sleep(2)
# Search for the system message in the last 10 messages
async for historical_message in message.channel.history(limit=10):
- if historical_message.type == disnake.MessageType.pins_add:
+ if historical_message.type == discord.MessageType.pins_add:
await historical_message.delete()
return True
@@ -225,16 +225,16 @@ def sub_clyde(username: Optional[str]) -> Optional[str]:
return username # Empty string or None
-async def send_denial(ctx: Context, reason: str) -> disnake.Message:
+async def send_denial(ctx: Context, reason: str) -> discord.Message:
"""Send an embed denying the user with the given reason."""
- embed = disnake.Embed()
- embed.colour = disnake.Colour.red()
+ embed = discord.Embed()
+ embed.colour = discord.Colour.red()
embed.title = random.choice(NEGATIVE_REPLIES)
embed.description = reason
return await ctx.send(embed=embed)
-def format_user(user: disnake.abc.User) -> str:
+def format_user(user: discord.abc.User) -> str:
"""Return a string for `user` which has their mention and ID."""
return f"{user.mention} (`{user.id}`)"
diff --git a/bot/utils/scheduling.py b/bot/utils/scheduling.py
deleted file mode 100644
index 23acacf74..000000000
--- a/bot/utils/scheduling.py
+++ /dev/null
@@ -1,194 +0,0 @@
-import asyncio
-import contextlib
-import inspect
-import typing as t
-from datetime import datetime
-from functools import partial
-
-from arrow import Arrow
-
-from bot.log import get_logger
-
-
-class Scheduler:
- """
- Schedule the execution of coroutines and keep track of them.
-
- When instantiating a Scheduler, a name must be provided. This name is used to distinguish the
- instance's log messages from other instances. Using the name of the class or module containing
- the instance is suggested.
-
- Coroutines can be scheduled immediately with `schedule` or in the future with `schedule_at`
- or `schedule_later`. A unique ID is required to be given in order to keep track of the
- resulting Tasks. Any scheduled task can be cancelled prematurely using `cancel` by providing
- the same ID used to schedule it. The `in` operator is supported for checking if a task with a
- given ID is currently scheduled.
-
- Any exception raised in a scheduled task is logged when the task is done.
- """
-
- def __init__(self, name: str):
- self.name = name
-
- self._log = get_logger(f"{__name__}.{name}")
- self._scheduled_tasks: t.Dict[t.Hashable, asyncio.Task] = {}
-
- def __contains__(self, task_id: t.Hashable) -> bool:
- """Return True if a task with the given `task_id` is currently scheduled."""
- return task_id in self._scheduled_tasks
-
- def schedule(self, task_id: t.Hashable, coroutine: t.Coroutine) -> None:
- """
- Schedule the execution of a `coroutine`.
-
- If a task with `task_id` already exists, close `coroutine` instead of scheduling it. This
- prevents unawaited coroutine warnings. Don't pass a coroutine that'll be re-used elsewhere.
- """
- self._log.trace(f"Scheduling task #{task_id}...")
-
- msg = f"Cannot schedule an already started coroutine for #{task_id}"
- assert inspect.getcoroutinestate(coroutine) == "CORO_CREATED", msg
-
- if task_id in self._scheduled_tasks:
- self._log.debug(f"Did not schedule task #{task_id}; task was already scheduled.")
- coroutine.close()
- return
-
- task = asyncio.create_task(coroutine, name=f"{self.name}_{task_id}")
- task.add_done_callback(partial(self._task_done_callback, task_id))
-
- self._scheduled_tasks[task_id] = task
- self._log.debug(f"Scheduled task #{task_id} {id(task)}.")
-
- def schedule_at(self, time: t.Union[datetime, Arrow], task_id: t.Hashable, coroutine: t.Coroutine) -> None:
- """
- Schedule `coroutine` to be executed at the given `time`.
-
- If `time` is timezone aware, then use that timezone to calculate now() when subtracting.
- If `time` is naïve, then use UTC.
-
- If `time` is in the past, schedule `coroutine` immediately.
-
- If a task with `task_id` already exists, close `coroutine` instead of scheduling it. This
- prevents unawaited coroutine warnings. Don't pass a coroutine that'll be re-used elsewhere.
- """
- now_datetime = datetime.now(time.tzinfo) if time.tzinfo else datetime.utcnow()
- delay = (time - now_datetime).total_seconds()
- if delay > 0:
- coroutine = self._await_later(delay, task_id, coroutine)
-
- self.schedule(task_id, coroutine)
-
- def schedule_later(self, delay: t.Union[int, float], task_id: t.Hashable, coroutine: t.Coroutine) -> None:
- """
- Schedule `coroutine` to be executed after the given `delay` number of seconds.
-
- If a task with `task_id` already exists, close `coroutine` instead of scheduling it. This
- prevents unawaited coroutine warnings. Don't pass a coroutine that'll be re-used elsewhere.
- """
- self.schedule(task_id, self._await_later(delay, task_id, coroutine))
-
- def cancel(self, task_id: t.Hashable) -> None:
- """Unschedule the task identified by `task_id`. Log a warning if the task doesn't exist."""
- self._log.trace(f"Cancelling task #{task_id}...")
-
- try:
- task = self._scheduled_tasks.pop(task_id)
- except KeyError:
- self._log.warning(f"Failed to unschedule {task_id} (no task found).")
- else:
- task.cancel()
-
- self._log.debug(f"Unscheduled task #{task_id} {id(task)}.")
-
- def cancel_all(self) -> None:
- """Unschedule all known tasks."""
- self._log.debug("Unscheduling all tasks")
-
- for task_id in self._scheduled_tasks.copy():
- self.cancel(task_id)
-
- async def _await_later(self, delay: t.Union[int, float], task_id: t.Hashable, coroutine: t.Coroutine) -> None:
- """Await `coroutine` after the given `delay` number of seconds."""
- try:
- self._log.trace(f"Waiting {delay} seconds before awaiting coroutine for #{task_id}.")
- await asyncio.sleep(delay)
-
- # Use asyncio.shield to prevent the coroutine from cancelling itself.
- self._log.trace(f"Done waiting for #{task_id}; now awaiting the coroutine.")
- await asyncio.shield(coroutine)
- finally:
- # Close it to prevent unawaited coroutine warnings,
- # which would happen if the task was cancelled during the sleep.
- # Only close it if it's not been awaited yet. This check is important because the
- # coroutine may cancel this task, which would also trigger the finally block.
- state = inspect.getcoroutinestate(coroutine)
- if state == "CORO_CREATED":
- self._log.debug(f"Explicitly closing the coroutine for #{task_id}.")
- coroutine.close()
- else:
- self._log.debug(f"Finally block reached for #{task_id}; {state=}")
-
- def _task_done_callback(self, task_id: t.Hashable, done_task: asyncio.Task) -> None:
- """
- Delete the task and raise its exception if one exists.
-
- If `done_task` and the task associated with `task_id` are different, then the latter
- will not be deleted. In this case, a new task was likely rescheduled with the same ID.
- """
- self._log.trace(f"Performing done callback for task #{task_id} {id(done_task)}.")
-
- scheduled_task = self._scheduled_tasks.get(task_id)
-
- if scheduled_task and done_task is scheduled_task:
- # A task for the ID exists and is the same as the done task.
- # Since this is the done callback, the task is already done so no need to cancel it.
- self._log.trace(f"Deleting task #{task_id} {id(done_task)}.")
- del self._scheduled_tasks[task_id]
- elif scheduled_task:
- # A new task was likely rescheduled with the same ID.
- self._log.debug(
- f"The scheduled task #{task_id} {id(scheduled_task)} "
- f"and the done task {id(done_task)} differ."
- )
- elif not done_task.cancelled():
- self._log.warning(
- f"Task #{task_id} not found while handling task {id(done_task)}! "
- f"A task somehow got unscheduled improperly (i.e. deleted but not cancelled)."
- )
-
- with contextlib.suppress(asyncio.CancelledError):
- exception = done_task.exception()
- # Log the exception if one exists.
- if exception:
- self._log.error(f"Error in task #{task_id} {id(done_task)}!", exc_info=exception)
-
-
-def create_task(
- coro: t.Awaitable,
- *,
- suppressed_exceptions: tuple[t.Type[Exception]] = (),
- event_loop: t.Optional[asyncio.AbstractEventLoop] = None,
- **kwargs,
-) -> asyncio.Task:
- """
- Wrapper for creating asyncio `Task`s which logs exceptions raised in the task.
-
- If the loop kwarg is provided, the task is created from that event loop, otherwise the running loop is used.
- """
- if event_loop is not None:
- task = event_loop.create_task(coro, **kwargs)
- else:
- task = asyncio.create_task(coro, **kwargs)
- task.add_done_callback(partial(_log_task_exception, suppressed_exceptions=suppressed_exceptions))
- return task
-
-
-def _log_task_exception(task: asyncio.Task, *, suppressed_exceptions: t.Tuple[t.Type[Exception]]) -> None:
- """Retrieve and log the exception raised in `task` if one exists."""
- with contextlib.suppress(asyncio.CancelledError):
- exception = task.exception()
- # Log the exception if one exists.
- if exception and not isinstance(exception, suppressed_exceptions):
- log = get_logger(__name__)
- log.error(f"Error in task {task.get_name()} {id(task)}!", exc_info=exception)
diff --git a/bot/utils/services.py b/bot/utils/services.py
index 439c8d500..a752ac0ec 100644
--- a/bot/utils/services.py
+++ b/bot/utils/services.py
@@ -1,5 +1,3 @@
-from typing import Optional
-
from aiohttp import ClientConnectorError
import bot
@@ -9,18 +7,40 @@ from bot.log import get_logger
log = get_logger(__name__)
FAILED_REQUEST_ATTEMPTS = 3
+MAX_PASTE_LENGTH = 100_000
+
+
+class PasteUploadError(Exception):
+ """Raised when an error is encountered uploading to the paste service."""
+
+
+class PasteTooLongError(Exception):
+ """Raised when content is too large to upload to the paste service."""
-async def send_to_paste_service(contents: str, *, extension: str = "") -> Optional[str]:
+async def send_to_paste_service(contents: str, *, extension: str = "", max_length: int = MAX_PASTE_LENGTH) -> str:
"""
Upload `contents` to the paste service.
- `extension` is added to the output URL
+ Add `extension` to the output URL. Use `max_length` to cap the allowed length of `contents`
+ below the maximum allowed by the paste service.
- When an error occurs, `None` is returned, otherwise the generated URL with the suffix.
+ Raise `ValueError` if `max_length` is greater than the maximum allowed by the paste service.
+ Raise `PasteTooLongError` if `contents` is too long to upload, and `PasteUploadError` if uploading fails.
+
+ Return the generated URL with the extension.
"""
+ if max_length > MAX_PASTE_LENGTH:
+ raise ValueError(f"`max_length` must not be greater than {MAX_PASTE_LENGTH}")
+
extension = extension and f".{extension}"
- log.debug(f"Sending contents of size {len(contents.encode())} bytes to paste service.")
+
+ contents_size = len(contents.encode())
+ if contents_size > max_length:
+ log.info("Contents too large to send to paste service.")
+ raise PasteTooLongError(f"Contents of size {contents_size} greater than maximum size {max_length}")
+
+ log.debug(f"Sending contents of size {contents_size} bytes to paste service.")
paste_url = URLs.paste_service.format(key="documents")
for attempt in range(1, FAILED_REQUEST_ATTEMPTS + 1):
try:
@@ -59,3 +79,5 @@ async def send_to_paste_service(contents: str, *, extension: str = "") -> Option
f"Got unexpected JSON response from paste service: {response_json}\n"
f"trying again ({attempt}/{FAILED_REQUEST_ATTEMPTS})."
)
+
+ raise PasteUploadError("Failed to upload contents to paste service")
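Since `send_to_paste_service` now raises instead of returning `None`, callers handle the failure modes explicitly; a hedged usage sketch (the `upload_output` helper is invented for illustration):
```py
# Hedged sketch: handling the new exceptions at a call site.
from bot.utils import PasteTooLongError, PasteUploadError, send_to_paste_service


async def upload_output(output: str) -> str:
    """Upload `output` to the paste service and return a user-facing message."""
    try:
        url = await send_to_paste_service(output, extension="txt")
    except PasteTooLongError:
        return "Your output is too long to upload to the paste service."
    except PasteUploadError:
        return "Uploading your output to the paste service failed."
    return f"Full output: {url}"
```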
diff --git a/bot/utils/webhooks.py b/bot/utils/webhooks.py
index 8ef929b79..9c916b63a 100644
--- a/bot/utils/webhooks.py
+++ b/bot/utils/webhooks.py
@@ -1,7 +1,7 @@
from typing import Optional
-import disnake
-from disnake import Embed
+import discord
+from discord import Embed
from bot.log import get_logger
from bot.utils.messages import sub_clyde
@@ -10,13 +10,13 @@ log = get_logger(__name__)
async def send_webhook(
- webhook: disnake.Webhook,
+ webhook: discord.Webhook,
content: Optional[str] = None,
username: Optional[str] = None,
avatar_url: Optional[str] = None,
embed: Optional[Embed] = None,
wait: Optional[bool] = False
-) -> disnake.Message:
+) -> discord.Message:
"""
Send a message using the provided webhook.
@@ -30,5 +30,5 @@ async def send_webhook(
embed=embed,
wait=wait,
)
- except disnake.HTTPException:
+ except discord.HTTPException:
log.exception("Failed to send a message to the webhook!")
diff --git a/config-default.yml b/config-default.yml
index 95d5b913d..604555783 100644
--- a/config-default.yml
+++ b/config-default.yml
@@ -269,6 +269,7 @@ guild:
announcements: 463658397560995840
lovefest: 542431903886606399
pyweek_announcements: 897568414044938310
+ revival_of_code: 988801794668908655
contributors: 295488872404484098
help_cooldown: 699189276025421825
@@ -383,6 +384,7 @@ urls:
# Snekbox
snekbox_eval_api: !ENV ["SNEKBOX_EVAL_API", "http://snekbox.default.svc.cluster.local/eval"]
+ snekbox_311_eval_api: !ENV ["SNEKBOX_311_EVAL_API", "http://snekbox-311.default.svc.cluster.local/eval"]
# Discord API URLs
discord_api: &DISCORD_API "https://discordapp.com/api/v7/"
diff --git a/docker-compose.yml b/docker-compose.yml
index ce78f65aa..be7370d6b 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -61,6 +61,18 @@ services:
ports:
- "127.0.0.1:8060:8060"
privileged: true
+ profiles:
+ - "3.10"
+
+ snekbox-311:
+ << : *logging
+ << : *restart_policy
+ image: ghcr.io/python-discord/snekbox:3.11-dev
+ init: true
+ ipc: none
+ ports:
+ - "127.0.0.1:8065:8060"
+ privileged: true
web:
<< : *logging
@@ -96,7 +108,7 @@ services:
depends_on:
- web
- redis
- - snekbox
+ - snekbox-311
env_file:
- .env
environment:
diff --git a/poetry.lock b/poetry.lock
index 3074f3745..1191549fc 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,28 +1,13 @@
[[package]]
-name = "aio-pika"
-version = "6.8.2"
-description = "Wrapper for the aiormq for asyncio and humans."
-category = "main"
-optional = false
-python-versions = ">=3.5, <4"
-
-[package.dependencies]
-aiormq = ">=3.2.3,<4"
-yarl = "*"
-
-[package.extras]
-develop = ["aiomisc (>=10.1.6,<10.2.0)", "async-generator", "coverage (!=4.3)", "coveralls", "pylava", "pytest", "pytest-cov", "shortuuid", "nox", "sphinx", "sphinx-autobuild", "timeout-decorator", "tox (>=2.4)"]
-
-[[package]]
name = "aiodns"
-version = "2.0.0"
+version = "3.0.0"
description = "Simple DNS resolver for asyncio"
category = "main"
optional = false
python-versions = "*"
[package.dependencies]
-pycares = ">=3.0.0"
+pycares = ">=4.0.0"
[[package]]
name = "aiohttp"
@@ -45,33 +30,6 @@ yarl = ">=1.0,<2.0"
speedups = ["aiodns", "brotli", "cchardet"]
[[package]]
-name = "aioredis"
-version = "1.3.1"
-description = "asyncio (PEP 3156) Redis support"
-category = "main"
-optional = false
-python-versions = "*"
-
-[package.dependencies]
-async-timeout = "*"
-hiredis = "*"
-
-[[package]]
-name = "aiormq"
-version = "3.3.1"
-description = "Pure python AMQP asynchronous client library"
-category = "main"
-optional = false
-python-versions = ">3.5.*"
-
-[package.dependencies]
-pamqp = "2.3.0"
-yarl = "*"
-
-[package.extras]
-develop = ["aiomisc (>=11.0,<12.0)", "async-generator", "coverage (!=4.3)", "coveralls", "pylava", "pytest", "pytest-cov", "tox (>=2.4)"]
-
-[[package]]
name = "aiosignal"
version = "1.2.0"
description = "aiosignal: a list of registered asynchronous callbacks"
@@ -84,7 +42,7 @@ frozenlist = ">=1.1.0"
[[package]]
name = "arrow"
-version = "1.0.3"
+version = "1.2.2"
description = "Better dates & times for Python"
category = "main"
optional = false
@@ -95,18 +53,18 @@ python-dateutil = ">=2.7.0"
[[package]]
name = "async-rediscache"
-version = "0.1.4"
+version = "1.0.0rc2"
description = "An easy to use asynchronous Redis cache"
category = "main"
optional = false
python-versions = "~=3.7"
[package.dependencies]
-aioredis = ">=1"
-fakeredis = {version = ">=1.3.1", optional = true, markers = "extra == \"fakeredis\""}
+fakeredis = {version = ">=1.7.1", extras = ["lua"], optional = true, markers = "extra == \"fakeredis\""}
+redis = ">=4.2,<5.0"
[package.extras]
-fakeredis = ["fakeredis (>=1.3.1)"]
+fakeredis = ["fakeredis[lua] (>=1.7.1)"]
[[package]]
name = "async-timeout"
@@ -118,7 +76,7 @@ python-versions = ">=3.6"
[[package]]
name = "atomicwrites"
-version = "1.4.0"
+version = "1.4.1"
description = "Atomic file writes."
category = "dev"
optional = false
@@ -140,11 +98,11 @@ tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>
[[package]]
name = "beautifulsoup4"
-version = "4.10.0"
+version = "4.11.1"
description = "Screen-scraping library"
category = "main"
optional = false
-python-versions = ">3.0.0"
+python-versions = ">=3.6.0"
[package.dependencies]
soupsieve = ">1.2"
@@ -155,29 +113,35 @@ lxml = ["lxml"]
[[package]]
name = "bot-core"
-version = "3.0.0"
+version = "8.0.0"
description = "Bot-Core provides the core functionality and utilities for the bots of the Python Discord community."
category = "main"
optional = false
-python-versions = "3.9.*"
+python-versions = "3.10.*"
[package.dependencies]
-disnake = ">=2,<3"
+aiodns = "3.0.0"
+async-rediscache = {version = "1.0.0rc2", extras = ["fakeredis"], optional = true, markers = "extra == \"async-rediscache\""}
+"discord.py" = {url = "https://github.com/Rapptz/discord.py/archive/0eb3d26343969a25ffc43ba72eca42538d2e7e7a.zip"}
+statsd = "3.3.0"
+
+[package.extras]
+async-rediscache = ["async-rediscache[fakeredis] (==1.0.0rc2)"]
[package.source]
type = "url"
-url = "https://github.com/python-discord/bot-core/archive/refs/tags/v3.0.0.zip"
+url = "https://github.com/python-discord/bot-core/archive/refs/tags/v8.0.0.zip"
[[package]]
name = "certifi"
-version = "2021.10.8"
+version = "2022.6.15"
description = "Python package for providing Mozilla's CA Bundle."
category = "main"
optional = false
-python-versions = "*"
+python-versions = ">=3.6"
[[package]]
name = "cffi"
-version = "1.15.0"
+version = "1.15.1"
description = "Foreign Function Interface for Python calling C code."
category = "main"
optional = false
@@ -196,18 +160,18 @@ python-versions = ">=3.6.1"
[[package]]
name = "charset-normalizer"
-version = "2.0.12"
+version = "2.1.0"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
category = "main"
optional = false
-python-versions = ">=3.5.0"
+python-versions = ">=3.6.0"
[package.extras]
unicode_backport = ["unicodedata2"]
[[package]]
name = "colorama"
-version = "0.4.4"
+version = "0.4.5"
description = "Cross-platform colored terminal text."
category = "main"
optional = false
@@ -215,42 +179,45 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "coloredlogs"
-version = "14.3"
+version = "15.0.1"
description = "Colored terminal output for Python's logging module"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[package.dependencies]
-humanfriendly = ">=7.1"
+humanfriendly = ">=9.1"
[package.extras]
cron = ["capturer (>=2.4)"]
[[package]]
name = "coverage"
-version = "5.5"
+version = "6.4.2"
description = "Code coverage measurement for Python"
category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
+python-versions = ">=3.7"
+
+[package.dependencies]
+tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}
[package.extras]
-toml = ["toml"]
+toml = ["tomli"]
[[package]]
name = "deepdiff"
-version = "4.3.2"
+version = "5.8.1"
description = "Deep Difference and Search of any Python object/data."
category = "main"
optional = false
-python-versions = ">=3.5"
+python-versions = ">=3.6"
[package.dependencies]
-ordered-set = ">=3.1.1"
+ordered-set = ">=4.1.0,<4.2.0"
[package.extras]
-murmur = ["mmh3"]
+cli = ["click (==8.0.3)", "pyyaml (==5.4.1)", "toml (==0.10.2)", "clevercsv (==0.7.1)"]
[[package]]
name = "deprecated"
@@ -267,25 +234,29 @@ wrapt = ">=1.10,<2"
dev = ["tox", "bump2version (<1)", "sphinx (<2)", "importlib-metadata (<3)", "importlib-resources (<4)", "configparser (<5)", "sphinxcontrib-websupport (<2)", "zipp (<2)", "PyTest (<5)", "PyTest-Cov (<2.6)", "pytest", "pytest-cov"]
[[package]]
-name = "disnake"
-version = "2.4.0"
+name = "discord.py"
+version = "2.0.0a0"
description = "A Python wrapper for the Discord API"
category = "main"
optional = false
python-versions = ">=3.8.0"
[package.dependencies]
-aiohttp = ">=3.7.0,<3.9.0"
+aiohttp = ">=3.7.4,<4"
[package.extras]
-discord = ["discord-disnake"]
-docs = ["sphinx (>=4.4.0,<4.5.0)", "sphinxcontrib-trio (==1.1.2)", "sphinx-hoverxref (>=1.0.0,<1.1.0)", "sphinx-autobuild (==2021.3.14)"]
+docs = ["sphinx (==4.4.0)", "sphinxcontrib-trio (==1.1.2)", "sphinxcontrib-websupport", "typing-extensions"]
speed = ["orjson (>=3.5.4)", "aiodns (>=1.1)", "brotli", "cchardet"]
-voice = ["PyNaCl (>=1.3.0,<1.5)"]
+test = ["coverage", "pytest", "pytest-asyncio", "pytest-cov", "pytest-mock"]
+voice = ["PyNaCl (>=1.3.0,<1.6)"]
+
+[package.source]
+type = "url"
+url = "https://github.com/Rapptz/discord.py/archive/0eb3d26343969a25ffc43ba72eca42538d2e7e7a.zip"
[[package]]
name = "distlib"
-version = "0.3.4"
+version = "0.3.5"
description = "Distribution utilities"
category = "dev"
optional = false
@@ -293,7 +264,7 @@ python-versions = "*"
[[package]]
name = "emoji"
-version = "0.6.0"
+version = "2.0.0"
description = "Emoji for Python"
category = "main"
optional = false
@@ -315,25 +286,25 @@ testing = ["pre-commit"]
[[package]]
name = "fakeredis"
-version = "1.7.1"
+version = "1.8.2"
description = "Fake implementation of redis API for testing purposes."
category = "main"
optional = false
-python-versions = ">=3.5"
+python-versions = ">=3.7,<4.0"
[package.dependencies]
-packaging = "*"
-redis = "<4.2.0"
-six = ">=1.12"
-sortedcontainers = "*"
+lupa = {version = ">=1.13,<2.0", optional = true, markers = "extra == \"lua\""}
+redis = "<4.4"
+six = ">=1.16.0,<2.0.0"
+sortedcontainers = ">=2.4.0,<3.0.0"
[package.extras]
-aioredis = ["aioredis"]
-lua = ["lupa"]
+aioredis = ["aioredis (>=2.0.1,<3.0.0)"]
+lua = ["lupa (>=1.13,<2.0)"]
[[package]]
name = "feedparser"
-version = "6.0.8"
+version = "6.0.10"
description = "Universal feed parser, handles RSS 0.9x, RSS 1.0, RSS 2.0, CDF, Atom 0.3, and Atom 1.0 feeds"
category = "main"
optional = false
@@ -344,7 +315,7 @@ sgmllib3k = "*"
[[package]]
name = "filelock"
-version = "3.6.0"
+version = "3.7.1"
description = "A platform independent file lock."
category = "main"
optional = false
@@ -356,31 +327,32 @@ testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-co
[[package]]
name = "flake8"
-version = "3.9.2"
+version = "4.0.1"
description = "the modular source code checker: pep8 pyflakes and co"
category = "dev"
optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+python-versions = ">=3.6"
[package.dependencies]
mccabe = ">=0.6.0,<0.7.0"
-pycodestyle = ">=2.7.0,<2.8.0"
-pyflakes = ">=2.3.0,<2.4.0"
+pycodestyle = ">=2.8.0,<2.9.0"
+pyflakes = ">=2.4.0,<2.5.0"
[[package]]
name = "flake8-annotations"
-version = "2.7.0"
+version = "2.9.0"
description = "Flake8 Type Annotation Checks"
category = "dev"
optional = false
-python-versions = ">=3.6.2,<4.0.0"
+python-versions = ">=3.7,<4.0"
[package.dependencies]
-flake8 = ">=3.7,<5.0"
+attrs = ">=21.4,<22.0"
+flake8 = ">=3.7"
[[package]]
name = "flake8-bugbear"
-version = "20.11.1"
+version = "22.7.1"
description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle."
category = "dev"
optional = false
@@ -391,7 +363,7 @@ attrs = ">=19.2.0"
flake8 = ">=3.0.0"
[package.extras]
-dev = ["coverage", "black", "hypothesis", "hypothesmith"]
+dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit"]
[[package]]
name = "flake8-docstrings"
@@ -407,7 +379,7 @@ pydocstyle = ">=2.1"
[[package]]
name = "flake8-isort"
-version = "4.1.1"
+version = "4.1.2.post0"
description = "flake8 plugin that integrates isort ."
category = "dev"
optional = false
@@ -416,23 +388,11 @@ python-versions = "*"
[package.dependencies]
flake8 = ">=3.2.1,<5"
isort = ">=4.3.5,<6"
-testfixtures = ">=6.8.0,<7"
[package.extras]
test = ["pytest-cov"]
[[package]]
-name = "flake8-polyfill"
-version = "1.0.2"
-description = "Polyfill package for Flake8 plugins"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[package.dependencies]
-flake8 = "*"
-
-[[package]]
name = "flake8-string-format"
version = "0.3.0"
description = "string format checker, plugin for flake8"
@@ -445,7 +405,7 @@ flake8 = "*"
[[package]]
name = "flake8-tidy-imports"
-version = "4.6.0"
+version = "4.8.0"
description = "A flake8 plugin that helps you write tidier imports."
category = "dev"
optional = false
@@ -467,21 +427,13 @@ pycodestyle = ">=2.0.0,<3.0.0"
[[package]]
name = "frozenlist"
-version = "1.3.0"
+version = "1.3.1"
description = "A list-like structure which implements collections.abc.MutableSequence"
category = "main"
optional = false
python-versions = ">=3.7"
[[package]]
-name = "hiredis"
-version = "2.0.0"
-description = "Python wrapper for hiredis"
-category = "main"
-optional = false
-python-versions = ">=3.6"
-
-[[package]]
name = "humanfriendly"
version = "10.0"
description = "Human friendly output for text interfaces using Python"
@@ -494,7 +446,7 @@ pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_ve
[[package]]
name = "identify"
-version = "2.4.11"
+version = "2.5.3"
description = "File identification library for Python"
category = "dev"
optional = false
@@ -534,8 +486,24 @@ colors = ["colorama (>=0.4.3,<0.5.0)"]
plugins = ["setuptools"]
[[package]]
+name = "jarowinkler"
+version = "1.2.0"
+description = "library for fast approximate string matching using Jaro and Jaro-Winkler similarity"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[[package]]
+name = "lupa"
+version = "1.13"
+description = "Python wrapper around Lua and LuaJIT"
+category = "main"
+optional = false
+python-versions = "*"
+
+[[package]]
name = "lxml"
-version = "4.8.0"
+version = "4.9.1"
description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API."
category = "main"
optional = false
@@ -569,7 +537,7 @@ python-versions = "*"
[[package]]
name = "more-itertools"
-version = "8.12.0"
+version = "8.13.0"
description = "More routines for operating on iterables, beyond itertools"
category = "main"
optional = false
@@ -593,11 +561,11 @@ python-versions = ">=3.7"
[[package]]
name = "nodeenv"
-version = "1.6.0"
+version = "1.7.0"
description = "Node.js virtual environment builder"
category = "dev"
optional = false
-python-versions = "*"
+python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*"
[[package]]
name = "ordered-set"
@@ -622,19 +590,8 @@ python-versions = ">=3.6"
pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
[[package]]
-name = "pamqp"
-version = "2.3.0"
-description = "RabbitMQ Focused AMQP low-level library"
-category = "main"
-optional = false
-python-versions = "*"
-
-[package.extras]
-codegen = ["lxml"]
-
-[[package]]
name = "pep8-naming"
-version = "0.12.1"
+version = "0.13.1"
description = "Check PEP-8 naming conventions, plugin for flake8"
category = "dev"
optional = false
@@ -642,11 +599,10 @@ python-versions = "*"
[package.dependencies]
flake8 = ">=3.9.1"
-flake8-polyfill = ">=1.0.2,<2"
[[package]]
name = "pip-licenses"
-version = "3.5.3"
+version = "3.5.4"
description = "Dump the software license list of Python packages installed with pip."
category = "dev"
optional = false
@@ -660,15 +616,15 @@ test = ["docutils", "pytest-cov", "pytest-pycodestyle", "pytest-runner"]
[[package]]
name = "platformdirs"
-version = "2.5.1"
+version = "2.5.2"
description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
category = "dev"
optional = false
python-versions = ">=3.7"
[package.extras]
-docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"]
-test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"]
+docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"]
+test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"]
[[package]]
name = "pluggy"
@@ -679,16 +635,16 @@ optional = false
python-versions = ">=3.6"
[package.extras]
-dev = ["pre-commit", "tox"]
-testing = ["pytest", "pytest-benchmark"]
+testing = ["pytest-benchmark", "pytest"]
+dev = ["tox", "pre-commit"]
[[package]]
name = "pre-commit"
-version = "2.17.0"
+version = "2.20.0"
description = "A framework for managing and maintaining multi-language pre-commit hooks."
category = "dev"
optional = false
-python-versions = ">=3.6.1"
+python-versions = ">=3.7"
[package.dependencies]
cfgv = ">=2.0.0"
@@ -700,14 +656,14 @@ virtualenv = ">=20.0.8"
[[package]]
name = "psutil"
-version = "5.9.0"
+version = "5.9.1"
description = "Cross-platform lib for process and system monitoring in Python."
category = "dev"
optional = false
-python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[package.extras]
-test = ["ipaddress", "mock", "unittest2", "enum34", "pywin32", "wmi"]
+test = ["ipaddress", "mock", "enum34", "pywin32", "wmi"]
[[package]]
name = "ptable"
@@ -727,7 +683,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "pycares"
-version = "4.1.2"
+version = "4.2.1"
description = "Python interface for c-ares"
category = "main"
optional = false
@@ -741,11 +697,11 @@ idna = ["idna (>=2.1)"]
[[package]]
name = "pycodestyle"
-version = "2.7.0"
+version = "2.8.0"
description = "Python style guide checker"
category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "pycparser"
@@ -771,7 +727,7 @@ toml = ["toml"]
[[package]]
name = "pyflakes"
-version = "2.3.1"
+version = "2.4.0"
description = "passive checker of Python programs"
category = "dev"
optional = false
@@ -779,14 +735,14 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "pyparsing"
-version = "3.0.7"
-description = "Python parsing module"
+version = "3.0.9"
+description = "pyparsing module - Classes and methods to define and execute parsing grammars"
category = "main"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.6.8"
[package.extras]
-diagrams = ["jinja2", "railroad-diagrams"]
+diagrams = ["railroad-diagrams", "jinja2"]
[[package]]
name = "pyreadline3"
@@ -798,11 +754,11 @@ python-versions = "*"
[[package]]
name = "pytest"
-version = "6.2.5"
+version = "7.1.2"
description = "pytest: simple powerful testing with Python"
category = "dev"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
[package.dependencies]
atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
@@ -812,26 +768,25 @@ iniconfig = "*"
packaging = "*"
pluggy = ">=0.12,<2.0"
py = ">=1.8.2"
-toml = "*"
+tomli = ">=1.0.0"
[package.extras]
-testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"]
+testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
[[package]]
name = "pytest-cov"
-version = "2.12.1"
+version = "3.0.0"
description = "Pytest plugin for measuring coverage."
category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = ">=3.6"
[package.dependencies]
-coverage = ">=5.2.1"
+coverage = {version = ">=5.2.1", extras = ["toml"]}
pytest = ">=4.6"
-toml = "*"
[package.extras]
-testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"]
+testing = ["virtualenv", "pytest-xdist", "six", "process-tests", "hunter", "fields"]
[[package]]
name = "pytest-forked"
@@ -847,7 +802,7 @@ pytest = ">=3.10"
[[package]]
name = "pytest-xdist"
-version = "2.3.0"
+version = "2.5.0"
description = "pytest xdist plugin for distributed testing and loop-on-failing modes"
category = "dev"
optional = false
@@ -855,12 +810,12 @@ python-versions = ">=3.6"
[package.dependencies]
execnet = ">=1.1"
-psutil = {version = ">=3.0", optional = true, markers = "extra == \"psutil\""}
-pytest = ">=6.0.0"
+pytest = ">=6.2.0"
pytest-forked = "*"
[package.extras]
psutil = ["psutil (>=3.0)"]
+setproctitle = ["setproctitle"]
testing = ["filelock"]
[[package]]
@@ -876,11 +831,11 @@ six = ">=1.5"
[[package]]
name = "python-dotenv"
-version = "0.17.1"
+version = "0.20.0"
description = "Read key-value pairs from a .env file and set them as environment variables"
category = "dev"
optional = false
-python-versions = "*"
+python-versions = ">=3.5"
[package.extras]
cli = ["click (>=5.0)"]
@@ -897,37 +852,41 @@ python-versions = "*"
PyYAML = "*"
[package.extras]
+test = ["pyaml", "toml", "pytest"]
docs = ["sphinx"]
-test = ["pytest", "toml", "pyaml"]
[[package]]
name = "pyyaml"
-version = "5.4.1"
+version = "6.0"
description = "YAML parser and emitter for Python"
category = "main"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+python-versions = ">=3.6"
[[package]]
name = "rapidfuzz"
-version = "1.9.1"
+version = "2.3.0"
description = "rapid fuzzy string matching"
category = "main"
optional = false
-python-versions = ">=2.7"
+python-versions = ">=3.6"
+
+[package.dependencies]
+jarowinkler = ">=1.2.0,<2.0.0"
[package.extras]
full = ["numpy"]
[[package]]
name = "redis"
-version = "4.1.4"
+version = "4.3.4"
description = "Python client for Redis database and key-value store"
category = "main"
optional = false
python-versions = ">=3.6"
[package.dependencies]
+async-timeout = ">=4.0.2"
deprecated = ">=1.2.3"
packaging = ">=20.4"
@@ -937,29 +896,29 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"
[[package]]
name = "regex"
-version = "2021.4.4"
+version = "2022.7.25"
description = "Alternative regular expression module, to replace re."
category = "main"
optional = false
-python-versions = "*"
+python-versions = ">=3.6"
[[package]]
name = "requests"
-version = "2.27.1"
+version = "2.28.1"
description = "Python HTTP for Humans."
category = "main"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+python-versions = ">=3.7, <4"
[package.dependencies]
certifi = ">=2017.4.17"
-charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""}
-idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""}
+charset-normalizer = ">=2,<3"
+idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<1.27"
[package.extras]
-socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"]
-use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"]
+socks = ["PySocks (>=1.5.6,!=1.5.7)"]
+use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "requests-file"
@@ -975,7 +934,7 @@ six = "*"
[[package]]
name = "sentry-sdk"
-version = "1.5.6"
+version = "1.8.0"
description = "Python client for Sentry (https://sentry.io)"
category = "main"
optional = false
@@ -993,6 +952,7 @@ celery = ["celery (>=3)"]
chalice = ["chalice (>=1.16.0)"]
django = ["django (>=1.8)"]
falcon = ["falcon (>=1.4)"]
+fastapi = ["fastapi (>=0.79.0)"]
flask = ["flask (>=0.11)", "blinker (>=1.1)"]
httpx = ["httpx (>=0.16.0)"]
pure_eval = ["pure-eval", "executing", "asttokens"]
@@ -1001,6 +961,7 @@ quart = ["quart (>=0.16.1)", "blinker (>=1.1)"]
rq = ["rq (>=0.6)"]
sanic = ["sanic (>=0.8)"]
sqlalchemy = ["sqlalchemy (>=1.2)"]
+starlette = ["starlette (>=0.19.1)"]
tornado = ["tornado (>=5)"]
[[package]]
@@ -1037,7 +998,7 @@ python-versions = "*"
[[package]]
name = "soupsieve"
-version = "2.3.1"
+version = "2.3.2.post1"
description = "A modern CSS selector implementation for Beautiful Soup."
category = "main"
optional = false
@@ -1053,34 +1014,22 @@ python-versions = "*"
[[package]]
name = "taskipy"
-version = "1.7.0"
+version = "1.10.2"
description = "tasks runner for python projects"
category = "dev"
optional = false
python-versions = ">=3.6,<4.0"
[package.dependencies]
-mslex = ">=0.3.0,<0.4.0"
+colorama = ">=0.4.4,<0.5.0"
+mslex = {version = ">=0.3.0,<0.4.0", markers = "sys_platform == \"win32\""}
psutil = ">=5.7.2,<6.0.0"
-toml = ">=0.10.0,<0.11.0"
-
-[[package]]
-name = "testfixtures"
-version = "6.18.5"
-description = "A collection of helpers and mock objects for unit tests and doc tests."
-category = "dev"
-optional = false
-python-versions = "*"
-
-[package.extras]
-build = ["setuptools-git", "wheel", "twine"]
-docs = ["sphinx", "zope.component", "sybil", "twisted", "mock", "django (<2)", "django"]
-test = ["pytest (>=3.6)", "pytest-cov", "pytest-django", "zope.component", "sybil", "twisted", "mock", "django (<2)", "django"]
+tomli = {version = ">=2.0.1,<3.0.0", markers = "python_version >= \"3.7\" and python_version < \"4.0\""}
[[package]]
name = "tldextract"
-version = "3.2.0"
-description = "Accurately separate the TLD from the registered domain and subdomains of a URL, using the Public Suffix List. By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well."
+version = "3.3.1"
+description = "Accurately separates a URL's subdomain, domain, and public suffix, using the Public Suffix List (PSL). By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well."
category = "main"
optional = false
python-versions = ">=3.7"
@@ -1100,39 +1049,46 @@ optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
+name = "tomli"
+version = "2.0.1"
+description = "A lil' TOML parser"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[[package]]
name = "urllib3"
-version = "1.26.8"
+version = "1.26.11"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "main"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4"
[package.extras]
-brotli = ["brotlipy (>=0.6.0)"]
+brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"]
secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[[package]]
name = "virtualenv"
-version = "20.13.2"
+version = "20.16.2"
description = "Virtual Python Environment builder"
category = "dev"
optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+python-versions = ">=3.6"
[package.dependencies]
distlib = ">=0.3.1,<1"
filelock = ">=3.2,<4"
platformdirs = ">=2,<3"
-six = ">=1.9.0,<2"
[package.extras]
docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"]
-testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"]
+testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "packaging (>=20.0)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)"]
[[package]]
name = "wrapt"
-version = "1.13.3"
+version = "1.14.1"
description = "Module for decorators, wrappers and monkey patching."
category = "main"
optional = false
@@ -1140,11 +1096,11 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
[[package]]
name = "yarl"
-version = "1.7.2"
+version = "1.8.1"
description = "Yet another URL library"
category = "main"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
[package.dependencies]
idna = ">=2.0"
@@ -1152,1066 +1108,101 @@ multidict = ">=4.0"
[metadata]
lock-version = "1.1"
-python-versions = "3.9.*"
-content-hash = "e4828d46fc4ce002fed010986558a26a2edecf410ba7884f42f96e77d91a3844"
+python-versions = "3.10.*"
+content-hash = "b0dc5e1339805bf94be5f1b6a8454f8722d4eae645b8188ff62cd7b3c925f7e6"
[metadata.files]
-aio-pika = [
- {file = "aio-pika-6.8.2.tar.gz", hash = "sha256:d89658148def0d8b8d795868a753fe2906f8d8fccee53e4a1b5093ddd3d2dc5c"},
- {file = "aio_pika-6.8.2-py3-none-any.whl", hash = "sha256:4bf23e54bceb86b789d4b4a72ed65f2d83ede429d5f343de838ca72e54f00475"},
-]
-aiodns = [
- {file = "aiodns-2.0.0-py2.py3-none-any.whl", hash = "sha256:aaa5ac584f40fe778013df0aa6544bf157799bd3f608364b451840ed2c8688de"},
- {file = "aiodns-2.0.0.tar.gz", hash = "sha256:815fdef4607474295d68da46978a54481dd1e7be153c7d60f9e72773cd38d77d"},
-]
-aiohttp = [
- {file = "aiohttp-3.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1ed0b6477896559f17b9eaeb6d38e07f7f9ffe40b9f0f9627ae8b9926ae260a8"},
- {file = "aiohttp-3.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7dadf3c307b31e0e61689cbf9e06be7a867c563d5a63ce9dca578f956609abf8"},
- {file = "aiohttp-3.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a79004bb58748f31ae1cbe9fa891054baaa46fb106c2dc7af9f8e3304dc30316"},
- {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12de6add4038df8f72fac606dff775791a60f113a725c960f2bab01d8b8e6b15"},
- {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f0d5f33feb5f69ddd57a4a4bd3d56c719a141080b445cbf18f238973c5c9923"},
- {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eaba923151d9deea315be1f3e2b31cc39a6d1d2f682f942905951f4e40200922"},
- {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:099ebd2c37ac74cce10a3527d2b49af80243e2a4fa39e7bce41617fbc35fa3c1"},
- {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2e5d962cf7e1d426aa0e528a7e198658cdc8aa4fe87f781d039ad75dcd52c516"},
- {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fa0ffcace9b3aa34d205d8130f7873fcfefcb6a4dd3dd705b0dab69af6712642"},
- {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:61bfc23df345d8c9716d03717c2ed5e27374e0fe6f659ea64edcd27b4b044cf7"},
- {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:31560d268ff62143e92423ef183680b9829b1b482c011713ae941997921eebc8"},
- {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:01d7bdb774a9acc838e6b8f1d114f45303841b89b95984cbb7d80ea41172a9e3"},
- {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:97ef77eb6b044134c0b3a96e16abcb05ecce892965a2124c566af0fd60f717e2"},
- {file = "aiohttp-3.8.1-cp310-cp310-win32.whl", hash = "sha256:c2aef4703f1f2ddc6df17519885dbfa3514929149d3ff900b73f45998f2532fa"},
- {file = "aiohttp-3.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:713ac174a629d39b7c6a3aa757b337599798da4c1157114a314e4e391cd28e32"},
- {file = "aiohttp-3.8.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:473d93d4450880fe278696549f2e7aed8cd23708c3c1997981464475f32137db"},
- {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b5eeae8e019e7aad8af8bb314fb908dd2e028b3cdaad87ec05095394cce632"},
- {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3af642b43ce56c24d063325dd2cf20ee012d2b9ba4c3c008755a301aaea720ad"},
- {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3630c3ef435c0a7c549ba170a0633a56e92629aeed0e707fec832dee313fb7a"},
- {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4a4a4e30bf1edcad13fb0804300557aedd07a92cabc74382fdd0ba6ca2661091"},
- {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6f8b01295e26c68b3a1b90efb7a89029110d3a4139270b24fda961893216c440"},
- {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a25fa703a527158aaf10dafd956f7d42ac6d30ec80e9a70846253dd13e2f067b"},
- {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5bfde62d1d2641a1f5173b8c8c2d96ceb4854f54a44c23102e2ccc7e02f003ec"},
- {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:51467000f3647d519272392f484126aa716f747859794ac9924a7aafa86cd411"},
- {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:03a6d5349c9ee8f79ab3ff3694d6ce1cfc3ced1c9d36200cb8f08ba06bd3b782"},
- {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:102e487eeb82afac440581e5d7f8f44560b36cf0bdd11abc51a46c1cd88914d4"},
- {file = "aiohttp-3.8.1-cp36-cp36m-win32.whl", hash = "sha256:4aed991a28ea3ce320dc8ce655875e1e00a11bdd29fe9444dd4f88c30d558602"},
- {file = "aiohttp-3.8.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b0e20cddbd676ab8a64c774fefa0ad787cc506afd844de95da56060348021e96"},
- {file = "aiohttp-3.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:37951ad2f4a6df6506750a23f7cbabad24c73c65f23f72e95897bb2cecbae676"},
- {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c23b1ad869653bc818e972b7a3a79852d0e494e9ab7e1a701a3decc49c20d51"},
- {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15b09b06dae900777833fe7fc4b4aa426556ce95847a3e8d7548e2d19e34edb8"},
- {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:477c3ea0ba410b2b56b7efb072c36fa91b1e6fc331761798fa3f28bb224830dd"},
- {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2f2f69dca064926e79997f45b2f34e202b320fd3782f17a91941f7eb85502ee2"},
- {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ef9612483cb35171d51d9173647eed5d0069eaa2ee812793a75373447d487aa4"},
- {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6d69f36d445c45cda7b3b26afef2fc34ef5ac0cdc75584a87ef307ee3c8c6d00"},
- {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:55c3d1072704d27401c92339144d199d9de7b52627f724a949fc7d5fc56d8b93"},
- {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b9d00268fcb9f66fbcc7cd9fe423741d90c75ee029a1d15c09b22d23253c0a44"},
- {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:07b05cd3305e8a73112103c834e91cd27ce5b4bd07850c4b4dbd1877d3f45be7"},
- {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c34dc4958b232ef6188c4318cb7b2c2d80521c9a56c52449f8f93ab7bc2a8a1c"},
- {file = "aiohttp-3.8.1-cp37-cp37m-win32.whl", hash = "sha256:d2f9b69293c33aaa53d923032fe227feac867f81682f002ce33ffae978f0a9a9"},
- {file = "aiohttp-3.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6ae828d3a003f03ae31915c31fa684b9890ea44c9c989056fea96e3d12a9fa17"},
- {file = "aiohttp-3.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0c7ebbbde809ff4e970824b2b6cb7e4222be6b95a296e46c03cf050878fc1785"},
- {file = "aiohttp-3.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8b7ef7cbd4fec9a1e811a5de813311ed4f7ac7d93e0fda233c9b3e1428f7dd7b"},
- {file = "aiohttp-3.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c3d6a4d0619e09dcd61021debf7059955c2004fa29f48788a3dfaf9c9901a7cd"},
- {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:718626a174e7e467f0558954f94af117b7d4695d48eb980146016afa4b580b2e"},
- {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:589c72667a5febd36f1315aa6e5f56dd4aa4862df295cb51c769d16142ddd7cd"},
- {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ed076098b171573161eb146afcb9129b5ff63308960aeca4b676d9d3c35e700"},
- {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:086f92daf51a032d062ec5f58af5ca6a44d082c35299c96376a41cbb33034675"},
- {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:11691cf4dc5b94236ccc609b70fec991234e7ef8d4c02dd0c9668d1e486f5abf"},
- {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:31d1e1c0dbf19ebccbfd62eff461518dcb1e307b195e93bba60c965a4dcf1ba0"},
- {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:11a67c0d562e07067c4e86bffc1553f2cf5b664d6111c894671b2b8712f3aba5"},
- {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:bb01ba6b0d3f6c68b89fce7305080145d4877ad3acaed424bae4d4ee75faa950"},
- {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:44db35a9e15d6fe5c40d74952e803b1d96e964f683b5a78c3cc64eb177878155"},
- {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:844a9b460871ee0a0b0b68a64890dae9c415e513db0f4a7e3cab41a0f2fedf33"},
- {file = "aiohttp-3.8.1-cp38-cp38-win32.whl", hash = "sha256:7d08744e9bae2ca9c382581f7dce1273fe3c9bae94ff572c3626e8da5b193c6a"},
- {file = "aiohttp-3.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:04d48b8ce6ab3cf2097b1855e1505181bdd05586ca275f2505514a6e274e8e75"},
- {file = "aiohttp-3.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f5315a2eb0239185af1bddb1abf472d877fede3cc8d143c6cddad37678293237"},
- {file = "aiohttp-3.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a996d01ca39b8dfe77440f3cd600825d05841088fd6bc0144cc6c2ec14cc5f74"},
- {file = "aiohttp-3.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:13487abd2f761d4be7c8ff9080de2671e53fff69711d46de703c310c4c9317ca"},
- {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea302f34477fda3f85560a06d9ebdc7fa41e82420e892fc50b577e35fc6a50b2"},
- {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2f635ce61a89c5732537a7896b6319a8fcfa23ba09bec36e1b1ac0ab31270d2"},
- {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e999f2d0e12eea01caeecb17b653f3713d758f6dcc770417cf29ef08d3931421"},
- {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0770e2806a30e744b4e21c9d73b7bee18a1cfa3c47991ee2e5a65b887c49d5cf"},
- {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d15367ce87c8e9e09b0f989bfd72dc641bcd04ba091c68cd305312d00962addd"},
- {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6c7cefb4b0640703eb1069835c02486669312bf2f12b48a748e0a7756d0de33d"},
- {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:71927042ed6365a09a98a6377501af5c9f0a4d38083652bcd2281a06a5976724"},
- {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:28d490af82bc6b7ce53ff31337a18a10498303fe66f701ab65ef27e143c3b0ef"},
- {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:b6613280ccedf24354406caf785db748bebbddcf31408b20c0b48cb86af76866"},
- {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:81e3d8c34c623ca4e36c46524a3530e99c0bc95ed068fd6e9b55cb721d408fb2"},
- {file = "aiohttp-3.8.1-cp39-cp39-win32.whl", hash = "sha256:7187a76598bdb895af0adbd2fb7474d7f6025d170bc0a1130242da817ce9e7d1"},
- {file = "aiohttp-3.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:1c182cb873bc91b411e184dab7a2b664d4fea2743df0e4d57402f7f3fa644bac"},
- {file = "aiohttp-3.8.1.tar.gz", hash = "sha256:fc5471e1a54de15ef71c1bc6ebe80d4dc681ea600e68bfd1cbce40427f0b7578"},
-]
-aioredis = [
- {file = "aioredis-1.3.1-py3-none-any.whl", hash = "sha256:b61808d7e97b7cd5a92ed574937a079c9387fdadd22bfbfa7ad2fd319ecc26e3"},
- {file = "aioredis-1.3.1.tar.gz", hash = "sha256:15f8af30b044c771aee6787e5ec24694c048184c7b9e54c3b60c750a4b93273a"},
-]
-aiormq = [
- {file = "aiormq-3.3.1-py3-none-any.whl", hash = "sha256:e584dac13a242589aaf42470fd3006cb0dc5aed6506cbd20357c7ec8bbe4a89e"},
- {file = "aiormq-3.3.1.tar.gz", hash = "sha256:8218dd9f7198d6e7935855468326bbacf0089f926c70baa8dd92944cb2496573"},
-]
-aiosignal = [
- {file = "aiosignal-1.2.0-py3-none-any.whl", hash = "sha256:26e62109036cd181df6e6ad646f91f0dcfd05fe16d0cb924138ff2ab75d64e3a"},
- {file = "aiosignal-1.2.0.tar.gz", hash = "sha256:78ed67db6c7b7ced4f98e495e572106d5c432a93e1ddd1bf475e1dc05f5b7df2"},
-]
-arrow = [
- {file = "arrow-1.0.3-py3-none-any.whl", hash = "sha256:3515630f11a15c61dcb4cdd245883270dd334c83f3e639824e65a4b79cc48543"},
- {file = "arrow-1.0.3.tar.gz", hash = "sha256:399c9c8ae732270e1aa58ead835a79a40d7be8aa109c579898eb41029b5a231d"},
-]
-async-rediscache = [
- {file = "async-rediscache-0.1.4.tar.gz", hash = "sha256:6be8a657d724ccbcfb1946d29a80c3478c5f9ecd2f78a0a26d2f4013a622258f"},
- {file = "async_rediscache-0.1.4-py3-none-any.whl", hash = "sha256:c25e4fff73f64d20645254783c3224a4c49e083e3fab67c44f17af944c5e26af"},
-]
-async-timeout = [
- {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"},
- {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"},
-]
-atomicwrites = [
- {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
- {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
-]
-attrs = [
- {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"},
- {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
-]
-beautifulsoup4 = [
- {file = "beautifulsoup4-4.10.0-py3-none-any.whl", hash = "sha256:9a315ce70049920ea4572a4055bc4bd700c940521d36fc858205ad4fcde149bf"},
- {file = "beautifulsoup4-4.10.0.tar.gz", hash = "sha256:c23ad23c521d818955a4151a67d81580319d4bf548d3d49f4223ae041ff98891"},
-]
+aiodns = []
+aiohttp = []
+aiosignal = []
+arrow = []
+async-rediscache = []
+async-timeout = []
+atomicwrites = []
+attrs = []
+beautifulsoup4 = []
bot-core = []
-certifi = [
- {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"},
- {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"},
-]
-cffi = [
- {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"},
- {file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"},
- {file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"},
- {file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"},
- {file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"},
- {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"},
- {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"},
- {file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"},
- {file = "cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"},
- {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"},
- {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"},
- {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"},
- {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"},
- {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"},
- {file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"},
- {file = "cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"},
- {file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"},
- {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"},
- {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"},
- {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"},
- {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"},
- {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"},
- {file = "cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"},
- {file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"},
- {file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"},
- {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"},
- {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"},
- {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"},
- {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"},
- {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"},
- {file = "cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"},
- {file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"},
- {file = "cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"},
- {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"},
- {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"},
- {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"},
- {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"},
- {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"},
- {file = "cffi-1.15.0-cp38-cp38-win32.whl", hash = "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"},
- {file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"},
- {file = "cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"},
- {file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"},
- {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"},
- {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"},
- {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"},
- {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"},
- {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"},
- {file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"},
- {file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"},
- {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"},
-]
-cfgv = [
- {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"},
- {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"},
-]
-charset-normalizer = [
- {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"},
- {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"},
-]
-colorama = [
- {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
- {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
-]
-coloredlogs = [
- {file = "coloredlogs-14.3-py2.py3-none-any.whl", hash = "sha256:e244a892f9d97ffd2c60f15bf1d2582ef7f9ac0f848d132249004184785702b3"},
- {file = "coloredlogs-14.3.tar.gz", hash = "sha256:7ef1a7219870c7f02c218a2f2877ce68f2f8e087bb3a55bd6fbaa2a4362b4d52"},
-]
-coverage = [
- {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"},
- {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"},
- {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"},
- {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"},
- {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"},
- {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"},
- {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"},
- {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"},
- {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"},
- {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"},
- {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"},
- {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"},
- {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"},
- {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"},
- {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"},
- {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"},
- {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"},
- {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"},
- {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"},
- {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"},
- {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"},
- {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"},
- {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"},
- {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"},
- {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"},
- {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"},
- {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"},
- {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"},
- {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"},
- {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"},
- {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"},
- {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"},
- {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"},
- {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"},
- {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"},
- {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"},
- {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"},
- {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"},
- {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"},
- {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"},
- {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"},
- {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"},
- {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"},
- {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"},
- {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"},
- {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"},
- {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"},
- {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"},
- {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"},
- {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"},
- {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"},
- {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"},
-]
-deepdiff = [
- {file = "deepdiff-4.3.2-py3-none-any.whl", hash = "sha256:59fc1e3e7a28dd0147b0f2b00e3e27181f0f0ef4286b251d5f214a5bcd9a9bc4"},
- {file = "deepdiff-4.3.2.tar.gz", hash = "sha256:91360be1d9d93b1d9c13ae9c5048fa83d9cff17a88eb30afaa0d7ff2d0fee17d"},
-]
-deprecated = [
- {file = "Deprecated-1.2.13-py2.py3-none-any.whl", hash = "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"},
- {file = "Deprecated-1.2.13.tar.gz", hash = "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"},
-]
-disnake = [
- {file = "disnake-2.4.0-py3-none-any.whl", hash = "sha256:390250a55ed8bbcc8c5753a72fb8fff2376a30295476edfebd0d2301855fb919"},
- {file = "disnake-2.4.0.tar.gz", hash = "sha256:d7a9c83d5cbfcec42441dae1d96744f82c2a22403934db5d8862a8279ca4989c"},
-]
-distlib = [
- {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"},
- {file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"},
-]
-emoji = [
- {file = "emoji-0.6.0.tar.gz", hash = "sha256:e42da4f8d648f8ef10691bc246f682a1ec6b18373abfd9be10ec0b398823bd11"},
-]
-execnet = [
- {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"},
- {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"},
-]
-fakeredis = [
- {file = "fakeredis-1.7.1-py3-none-any.whl", hash = "sha256:be3668e50f6b57d5fc4abfd27f9f655bed07a2c5aecfc8b15d0aad59f997c1ba"},
- {file = "fakeredis-1.7.1.tar.gz", hash = "sha256:7c2c4ba1b42e0a75337c54b777bf0671056b4569650e3ff927e4b9b385afc8ec"},
-]
-feedparser = [
- {file = "feedparser-6.0.8-py3-none-any.whl", hash = "sha256:1b7f57841d9cf85074deb316ed2c795091a238adb79846bc46dccdaf80f9c59a"},
- {file = "feedparser-6.0.8.tar.gz", hash = "sha256:5ce0410a05ab248c8c7cfca3a0ea2203968ee9ff4486067379af4827a59f9661"},
-]
-filelock = [
- {file = "filelock-3.6.0-py3-none-any.whl", hash = "sha256:f8314284bfffbdcfa0ff3d7992b023d4c628ced6feb957351d4c48d059f56bc0"},
- {file = "filelock-3.6.0.tar.gz", hash = "sha256:9cd540a9352e432c7246a48fe4e8712b10acb1df2ad1f30e8c070b82ae1fed85"},
-]
-flake8 = [
- {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"},
- {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"},
-]
-flake8-annotations = [
- {file = "flake8-annotations-2.7.0.tar.gz", hash = "sha256:52e53c05b0c06cac1c2dec192ea2c36e85081238add3bd99421d56f574b9479b"},
- {file = "flake8_annotations-2.7.0-py3-none-any.whl", hash = "sha256:3edfbbfb58e404868834fe6ec3eaf49c139f64f0701259f707d043185545151e"},
-]
-flake8-bugbear = [
- {file = "flake8-bugbear-20.11.1.tar.gz", hash = "sha256:528020129fea2dea33a466b9d64ab650aa3e5f9ffc788b70ea4bc6cf18283538"},
- {file = "flake8_bugbear-20.11.1-py36.py37.py38-none-any.whl", hash = "sha256:f35b8135ece7a014bc0aee5b5d485334ac30a6da48494998cc1fabf7ec70d703"},
-]
-flake8-docstrings = [
- {file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"},
- {file = "flake8_docstrings-1.6.0-py2.py3-none-any.whl", hash = "sha256:99cac583d6c7e32dd28bbfbef120a7c0d1b6dde4adb5a9fd441c4227a6534bde"},
-]
-flake8-isort = [
- {file = "flake8-isort-4.1.1.tar.gz", hash = "sha256:d814304ab70e6e58859bc5c3e221e2e6e71c958e7005239202fee19c24f82717"},
- {file = "flake8_isort-4.1.1-py3-none-any.whl", hash = "sha256:c4e8b6dcb7be9b71a02e6e5d4196cefcef0f3447be51e82730fb336fff164949"},
-]
-flake8-polyfill = [
- {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"},
- {file = "flake8_polyfill-1.0.2-py2.py3-none-any.whl", hash = "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9"},
-]
-flake8-string-format = [
- {file = "flake8-string-format-0.3.0.tar.gz", hash = "sha256:65f3da786a1461ef77fca3780b314edb2853c377f2e35069723348c8917deaa2"},
- {file = "flake8_string_format-0.3.0-py2.py3-none-any.whl", hash = "sha256:812ff431f10576a74c89be4e85b8e075a705be39bc40c4b4278b5b13e2afa9af"},
-]
-flake8-tidy-imports = [
- {file = "flake8-tidy-imports-4.6.0.tar.gz", hash = "sha256:3e193d8c4bb4492408a90e956d888b27eed14c698387c9b38230da3dad78058f"},
- {file = "flake8_tidy_imports-4.6.0-py3-none-any.whl", hash = "sha256:6ae9f55d628156e19d19f4c359dd5d3e95431a9bd514f5e2748c53c1398c66b2"},
-]
-flake8-todo = [
- {file = "flake8-todo-0.7.tar.gz", hash = "sha256:6e4c5491ff838c06fe5a771b0e95ee15fc005ca57196011011280fc834a85915"},
-]
-frozenlist = [
- {file = "frozenlist-1.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2257aaba9660f78c7b1d8fea963b68f3feffb1a9d5d05a18401ca9eb3e8d0a3"},
- {file = "frozenlist-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4a44ebbf601d7bac77976d429e9bdb5a4614f9f4027777f9e54fd765196e9d3b"},
- {file = "frozenlist-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:45334234ec30fc4ea677f43171b18a27505bfb2dba9aca4398a62692c0ea8868"},
- {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47be22dc27ed933d55ee55845d34a3e4e9f6fee93039e7f8ebadb0c2f60d403f"},
- {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03a7dd1bfce30216a3f51a84e6dd0e4a573d23ca50f0346634916ff105ba6e6b"},
- {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:691ddf6dc50480ce49f68441f1d16a4c3325887453837036e0fb94736eae1e58"},
- {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bde99812f237f79eaf3f04ebffd74f6718bbd216101b35ac7955c2d47c17da02"},
- {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a202458d1298ced3768f5a7d44301e7c86defac162ace0ab7434c2e961166e8"},
- {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9e3e9e365991f8cc5f5edc1fd65b58b41d0514a6a7ad95ef5c7f34eb49b3d3e"},
- {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:04cb491c4b1c051734d41ea2552fde292f5f3a9c911363f74f39c23659c4af78"},
- {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:436496321dad302b8b27ca955364a439ed1f0999311c393dccb243e451ff66aa"},
- {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:754728d65f1acc61e0f4df784456106e35afb7bf39cfe37227ab00436fb38676"},
- {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6eb275c6385dd72594758cbe96c07cdb9bd6becf84235f4a594bdf21e3596c9d"},
- {file = "frozenlist-1.3.0-cp310-cp310-win32.whl", hash = "sha256:e30b2f9683812eb30cf3f0a8e9f79f8d590a7999f731cf39f9105a7c4a39489d"},
- {file = "frozenlist-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f7353ba3367473d1d616ee727945f439e027f0bb16ac1a750219a8344d1d5d3c"},
- {file = "frozenlist-1.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88aafd445a233dbbf8a65a62bc3249a0acd0d81ab18f6feb461cc5a938610d24"},
- {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4406cfabef8f07b3b3af0f50f70938ec06d9f0fc26cbdeaab431cbc3ca3caeaa"},
- {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8cf829bd2e2956066dd4de43fd8ec881d87842a06708c035b37ef632930505a2"},
- {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:603b9091bd70fae7be28bdb8aa5c9990f4241aa33abb673390a7f7329296695f"},
- {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25af28b560e0c76fa41f550eacb389905633e7ac02d6eb3c09017fa1c8cdfde1"},
- {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c7a8a9fc9383b52c410a2ec952521906d355d18fccc927fca52ab575ee8b93"},
- {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:65bc6e2fece04e2145ab6e3c47428d1bbc05aede61ae365b2c1bddd94906e478"},
- {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3f7c935c7b58b0d78c0beea0c7358e165f95f1fd8a7e98baa40d22a05b4a8141"},
- {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd89acd1b8bb4f31b47072615d72e7f53a948d302b7c1d1455e42622de180eae"},
- {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:6983a31698490825171be44ffbafeaa930ddf590d3f051e397143a5045513b01"},
- {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:adac9700675cf99e3615eb6a0eb5e9f5a4143c7d42c05cea2e7f71c27a3d0846"},
- {file = "frozenlist-1.3.0-cp37-cp37m-win32.whl", hash = "sha256:0c36e78b9509e97042ef869c0e1e6ef6429e55817c12d78245eb915e1cca7468"},
- {file = "frozenlist-1.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:57f4d3f03a18facacb2a6bcd21bccd011e3b75d463dc49f838fd699d074fabd1"},
- {file = "frozenlist-1.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8c905a5186d77111f02144fab5b849ab524f1e876a1e75205cd1386a9be4b00a"},
- {file = "frozenlist-1.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b5009062d78a8c6890d50b4e53b0ddda31841b3935c1937e2ed8c1bda1c7fb9d"},
- {file = "frozenlist-1.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2fdc3cd845e5a1f71a0c3518528bfdbfe2efaf9886d6f49eacc5ee4fd9a10953"},
- {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92e650bd09b5dda929523b9f8e7f99b24deac61240ecc1a32aeba487afcd970f"},
- {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40dff8962b8eba91fd3848d857203f0bd704b5f1fa2b3fc9af64901a190bba08"},
- {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:768efd082074bb203c934e83a61654ed4931ef02412c2fbdecea0cff7ecd0274"},
- {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:006d3595e7d4108a12025ddf415ae0f6c9e736e726a5db0183326fd191b14c5e"},
- {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:871d42623ae15eb0b0e9df65baeee6976b2e161d0ba93155411d58ff27483ad8"},
- {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aff388be97ef2677ae185e72dc500d19ecaf31b698986800d3fc4f399a5e30a5"},
- {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9f892d6a94ec5c7b785e548e42722e6f3a52f5f32a8461e82ac3e67a3bd073f1"},
- {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:e982878792c971cbd60ee510c4ee5bf089a8246226dea1f2138aa0bb67aff148"},
- {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c6c321dd013e8fc20735b92cb4892c115f5cdb82c817b1e5b07f6b95d952b2f0"},
- {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:30530930410855c451bea83f7b272fb1c495ed9d5cc72895ac29e91279401db3"},
- {file = "frozenlist-1.3.0-cp38-cp38-win32.whl", hash = "sha256:40ec383bc194accba825fbb7d0ef3dda5736ceab2375462f1d8672d9f6b68d07"},
- {file = "frozenlist-1.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:f20baa05eaa2bcd5404c445ec51aed1c268d62600362dc6cfe04fae34a424bd9"},
- {file = "frozenlist-1.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0437fe763fb5d4adad1756050cbf855bbb2bf0d9385c7bb13d7a10b0dd550486"},
- {file = "frozenlist-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b684c68077b84522b5c7eafc1dc735bfa5b341fb011d5552ebe0968e22ed641c"},
- {file = "frozenlist-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93641a51f89473837333b2f8100f3f89795295b858cd4c7d4a1f18e299dc0a4f"},
- {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6d32ff213aef0fd0bcf803bffe15cfa2d4fde237d1d4838e62aec242a8362fa"},
- {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31977f84828b5bb856ca1eb07bf7e3a34f33a5cddce981d880240ba06639b94d"},
- {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3c62964192a1c0c30b49f403495911298810bada64e4f03249ca35a33ca0417a"},
- {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4eda49bea3602812518765810af732229b4291d2695ed24a0a20e098c45a707b"},
- {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acb267b09a509c1df5a4ca04140da96016f40d2ed183cdc356d237286c971b51"},
- {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e1e26ac0a253a2907d654a37e390904426d5ae5483150ce3adedb35c8c06614a"},
- {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f96293d6f982c58ebebb428c50163d010c2f05de0cde99fd681bfdc18d4b2dc2"},
- {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e84cb61b0ac40a0c3e0e8b79c575161c5300d1d89e13c0e02f76193982f066ed"},
- {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:ff9310f05b9d9c5c4dd472983dc956901ee6cb2c3ec1ab116ecdde25f3ce4951"},
- {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d26b650b71fdc88065b7a21f8ace70175bcf3b5bdba5ea22df4bfd893e795a3b"},
- {file = "frozenlist-1.3.0-cp39-cp39-win32.whl", hash = "sha256:01a73627448b1f2145bddb6e6c2259988bb8aee0fb361776ff8604b99616cd08"},
- {file = "frozenlist-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:772965f773757a6026dea111a15e6e2678fbd6216180f82a48a40b27de1ee2ab"},
- {file = "frozenlist-1.3.0.tar.gz", hash = "sha256:ce6f2ba0edb7b0c1d8976565298ad2deba6f8064d2bebb6ffce2ca896eb35b0b"},
-]
-hiredis = [
- {file = "hiredis-2.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b4c8b0bc5841e578d5fb32a16e0c305359b987b850a06964bd5a62739d688048"},
- {file = "hiredis-2.0.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0adea425b764a08270820531ec2218d0508f8ae15a448568109ffcae050fee26"},
- {file = "hiredis-2.0.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3d55e36715ff06cdc0ab62f9591607c4324297b6b6ce5b58cb9928b3defe30ea"},
- {file = "hiredis-2.0.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:5d2a48c80cf5a338d58aae3c16872f4d452345e18350143b3bf7216d33ba7b99"},
- {file = "hiredis-2.0.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:240ce6dc19835971f38caf94b5738092cb1e641f8150a9ef9251b7825506cb05"},
- {file = "hiredis-2.0.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:5dc7a94bb11096bc4bffd41a3c4f2b958257085c01522aa81140c68b8bf1630a"},
- {file = "hiredis-2.0.0-cp36-cp36m-win32.whl", hash = "sha256:139705ce59d94eef2ceae9fd2ad58710b02aee91e7fa0ccb485665ca0ecbec63"},
- {file = "hiredis-2.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:c39c46d9e44447181cd502a35aad2bb178dbf1b1f86cf4db639d7b9614f837c6"},
- {file = "hiredis-2.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:adf4dd19d8875ac147bf926c727215a0faf21490b22c053db464e0bf0deb0485"},
- {file = "hiredis-2.0.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0f41827028901814c709e744060843c77e78a3aca1e0d6875d2562372fcb405a"},
- {file = "hiredis-2.0.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:508999bec4422e646b05c95c598b64bdbef1edf0d2b715450a078ba21b385bcc"},
- {file = "hiredis-2.0.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:0d5109337e1db373a892fdcf78eb145ffb6bbd66bb51989ec36117b9f7f9b579"},
- {file = "hiredis-2.0.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:04026461eae67fdefa1949b7332e488224eac9e8f2b5c58c98b54d29af22093e"},
- {file = "hiredis-2.0.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:a00514362df15af041cc06e97aebabf2895e0a7c42c83c21894be12b84402d79"},
- {file = "hiredis-2.0.0-cp37-cp37m-win32.whl", hash = "sha256:09004096e953d7ebd508cded79f6b21e05dff5d7361771f59269425108e703bc"},
- {file = "hiredis-2.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f8196f739092a78e4f6b1b2172679ed3343c39c61a3e9d722ce6fcf1dac2824a"},
- {file = "hiredis-2.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:294a6697dfa41a8cba4c365dd3715abc54d29a86a40ec6405d677ca853307cfb"},
- {file = "hiredis-2.0.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:3dddf681284fe16d047d3ad37415b2e9ccdc6c8986c8062dbe51ab9a358b50a5"},
- {file = "hiredis-2.0.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:dcef843f8de4e2ff5e35e96ec2a4abbdf403bd0f732ead127bd27e51f38ac298"},
- {file = "hiredis-2.0.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:87c7c10d186f1743a8fd6a971ab6525d60abd5d5d200f31e073cd5e94d7e7a9d"},
- {file = "hiredis-2.0.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:7f0055f1809b911ab347a25d786deff5e10e9cf083c3c3fd2dd04e8612e8d9db"},
- {file = "hiredis-2.0.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:11d119507bb54e81f375e638225a2c057dda748f2b1deef05c2b1a5d42686048"},
- {file = "hiredis-2.0.0-cp38-cp38-win32.whl", hash = "sha256:7492af15f71f75ee93d2a618ca53fea8be85e7b625e323315169977fae752426"},
- {file = "hiredis-2.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:65d653df249a2f95673976e4e9dd7ce10de61cfc6e64fa7eeaa6891a9559c581"},
- {file = "hiredis-2.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae8427a5e9062ba66fc2c62fb19a72276cf12c780e8db2b0956ea909c48acff5"},
- {file = "hiredis-2.0.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:3f5f7e3a4ab824e3de1e1700f05ad76ee465f5f11f5db61c4b297ec29e692b2e"},
- {file = "hiredis-2.0.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:e3447d9e074abf0e3cd85aef8131e01ab93f9f0e86654db7ac8a3f73c63706ce"},
- {file = "hiredis-2.0.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:8b42c0dc927b8d7c0eb59f97e6e34408e53bc489f9f90e66e568f329bff3e443"},
- {file = "hiredis-2.0.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:b84f29971f0ad4adaee391c6364e6f780d5aae7e9226d41964b26b49376071d0"},
- {file = "hiredis-2.0.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:0b39ec237459922c6544d071cdcf92cbb5bc6685a30e7c6d985d8a3e3a75326e"},
- {file = "hiredis-2.0.0-cp39-cp39-win32.whl", hash = "sha256:a7928283143a401e72a4fad43ecc85b35c27ae699cf5d54d39e1e72d97460e1d"},
- {file = "hiredis-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:a4ee8000454ad4486fb9f28b0cab7fa1cd796fc36d639882d0b34109b5b3aec9"},
- {file = "hiredis-2.0.0-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1f03d4dadd595f7a69a75709bc81902673fa31964c75f93af74feac2f134cc54"},
- {file = "hiredis-2.0.0-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = "sha256:04927a4c651a0e9ec11c68e4427d917e44ff101f761cd3b5bc76f86aaa431d27"},
- {file = "hiredis-2.0.0-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:a39efc3ade8c1fb27c097fd112baf09d7fd70b8cb10ef1de4da6efbe066d381d"},
- {file = "hiredis-2.0.0-pp36-pypy36_pp73-win32.whl", hash = "sha256:07bbf9bdcb82239f319b1f09e8ef4bdfaec50ed7d7ea51a56438f39193271163"},
- {file = "hiredis-2.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:807b3096205c7cec861c8803a6738e33ed86c9aae76cac0e19454245a6bbbc0a"},
- {file = "hiredis-2.0.0-pp37-pypy37_pp73-manylinux1_x86_64.whl", hash = "sha256:1233e303645f468e399ec906b6b48ab7cd8391aae2d08daadbb5cad6ace4bd87"},
- {file = "hiredis-2.0.0-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:cb2126603091902767d96bcb74093bd8b14982f41809f85c9b96e519c7e1dc41"},
- {file = "hiredis-2.0.0-pp37-pypy37_pp73-win32.whl", hash = "sha256:f52010e0a44e3d8530437e7da38d11fb822acfb0d5b12e9cd5ba655509937ca0"},
- {file = "hiredis-2.0.0.tar.gz", hash = "sha256:81d6d8e39695f2c37954d1011c0480ef7cf444d4e3ae24bc5e89ee5de360139a"},
-]
-humanfriendly = [
- {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"},
- {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"},
-]
-identify = [
- {file = "identify-2.4.11-py2.py3-none-any.whl", hash = "sha256:fd906823ed1db23c7a48f9b176a1d71cb8abede1e21ebe614bac7bdd688d9213"},
- {file = "identify-2.4.11.tar.gz", hash = "sha256:2986942d3974c8f2e5019a190523b0b0e2a07cb8e89bf236727fb4b26f27f8fd"},
-]
-idna = [
- {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
- {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
-]
-iniconfig = [
- {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
- {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
-]
-isort = [
- {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"},
- {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"},
-]
-lxml = [
- {file = "lxml-4.8.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:e1ab2fac607842ac36864e358c42feb0960ae62c34aa4caaf12ada0a1fb5d99b"},
- {file = "lxml-4.8.0-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28d1af847786f68bec57961f31221125c29d6f52d9187c01cd34dc14e2b29430"},
- {file = "lxml-4.8.0-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b92d40121dcbd74831b690a75533da703750f7041b4bf951befc657c37e5695a"},
- {file = "lxml-4.8.0-cp27-cp27m-win32.whl", hash = "sha256:e01f9531ba5420838c801c21c1b0f45dbc9607cb22ea2cf132844453bec863a5"},
- {file = "lxml-4.8.0-cp27-cp27m-win_amd64.whl", hash = "sha256:6259b511b0f2527e6d55ad87acc1c07b3cbffc3d5e050d7e7bcfa151b8202df9"},
- {file = "lxml-4.8.0-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1010042bfcac2b2dc6098260a2ed022968dbdfaf285fc65a3acf8e4eb1ffd1bc"},
- {file = "lxml-4.8.0-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fa56bb08b3dd8eac3a8c5b7d075c94e74f755fd9d8a04543ae8d37b1612dd170"},
- {file = "lxml-4.8.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:31ba2cbc64516dcdd6c24418daa7abff989ddf3ba6d3ea6f6ce6f2ed6e754ec9"},
- {file = "lxml-4.8.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:31499847fc5f73ee17dbe1b8e24c6dafc4e8d5b48803d17d22988976b0171f03"},
- {file = "lxml-4.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:5f7d7d9afc7b293147e2d506a4596641d60181a35279ef3aa5778d0d9d9123fe"},
- {file = "lxml-4.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a3c5f1a719aa11866ffc530d54ad965063a8cbbecae6515acbd5f0fae8f48eaa"},
- {file = "lxml-4.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6268e27873a3d191849204d00d03f65c0e343b3bcb518a6eaae05677c95621d1"},
- {file = "lxml-4.8.0-cp310-cp310-win32.whl", hash = "sha256:330bff92c26d4aee79c5bc4d9967858bdbe73fdbdbacb5daf623a03a914fe05b"},
- {file = "lxml-4.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:b2582b238e1658c4061ebe1b4df53c435190d22457642377fd0cb30685cdfb76"},
- {file = "lxml-4.8.0-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a2bfc7e2a0601b475477c954bf167dee6d0f55cb167e3f3e7cefad906e7759f6"},
- {file = "lxml-4.8.0-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a1547ff4b8a833511eeaceacbcd17b043214fcdb385148f9c1bc5556ca9623e2"},
- {file = "lxml-4.8.0-cp35-cp35m-win32.whl", hash = "sha256:a9f1c3489736ff8e1c7652e9dc39f80cff820f23624f23d9eab6e122ac99b150"},
- {file = "lxml-4.8.0-cp35-cp35m-win_amd64.whl", hash = "sha256:530f278849031b0eb12f46cca0e5db01cfe5177ab13bd6878c6e739319bae654"},
- {file = "lxml-4.8.0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:078306d19a33920004addeb5f4630781aaeabb6a8d01398045fcde085091a169"},
- {file = "lxml-4.8.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:86545e351e879d0b72b620db6a3b96346921fa87b3d366d6c074e5a9a0b8dadb"},
- {file = "lxml-4.8.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24f5c5ae618395ed871b3d8ebfcbb36e3f1091fd847bf54c4de623f9107942f3"},
- {file = "lxml-4.8.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:bbab6faf6568484707acc052f4dfc3802bdb0cafe079383fbaa23f1cdae9ecd4"},
- {file = "lxml-4.8.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7993232bd4044392c47779a3c7e8889fea6883be46281d45a81451acfd704d7e"},
- {file = "lxml-4.8.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6d6483b1229470e1d8835e52e0ff3c6973b9b97b24cd1c116dca90b57a2cc613"},
- {file = "lxml-4.8.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ad4332a532e2d5acb231a2e5d33f943750091ee435daffca3fec0a53224e7e33"},
- {file = "lxml-4.8.0-cp36-cp36m-win32.whl", hash = "sha256:db3535733f59e5605a88a706824dfcb9bd06725e709ecb017e165fc1d6e7d429"},
- {file = "lxml-4.8.0-cp36-cp36m-win_amd64.whl", hash = "sha256:5f148b0c6133fb928503cfcdfdba395010f997aa44bcf6474fcdd0c5398d9b63"},
- {file = "lxml-4.8.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:8a31f24e2a0b6317f33aafbb2f0895c0bce772980ae60c2c640d82caac49628a"},
- {file = "lxml-4.8.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:719544565c2937c21a6f76d520e6e52b726d132815adb3447ccffbe9f44203c4"},
- {file = "lxml-4.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:c0b88ed1ae66777a798dc54f627e32d3b81c8009967c63993c450ee4cbcbec15"},
- {file = "lxml-4.8.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fa9b7c450be85bfc6cd39f6df8c5b8cbd76b5d6fc1f69efec80203f9894b885f"},
- {file = "lxml-4.8.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e9f84ed9f4d50b74fbc77298ee5c870f67cb7e91dcdc1a6915cb1ff6a317476c"},
- {file = "lxml-4.8.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1d650812b52d98679ed6c6b3b55cbb8fe5a5460a0aef29aeb08dc0b44577df85"},
- {file = "lxml-4.8.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:80bbaddf2baab7e6de4bc47405e34948e694a9efe0861c61cdc23aa774fcb141"},
- {file = "lxml-4.8.0-cp37-cp37m-win32.whl", hash = "sha256:6f7b82934c08e28a2d537d870293236b1000d94d0b4583825ab9649aef7ddf63"},
- {file = "lxml-4.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e1fd7d2fe11f1cb63d3336d147c852f6d07de0d0020d704c6031b46a30b02ca8"},
- {file = "lxml-4.8.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:5045ee1ccd45a89c4daec1160217d363fcd23811e26734688007c26f28c9e9e7"},
- {file = "lxml-4.8.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0c1978ff1fd81ed9dcbba4f91cf09faf1f8082c9d72eb122e92294716c605428"},
- {file = "lxml-4.8.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cbf2ff155b19dc4d4100f7442f6a697938bf4493f8d3b0c51d45568d5666b5"},
- {file = "lxml-4.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ce13d6291a5f47c1c8dbd375baa78551053bc6b5e5c0e9bb8e39c0a8359fd52f"},
- {file = "lxml-4.8.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e11527dc23d5ef44d76fef11213215c34f36af1608074561fcc561d983aeb870"},
- {file = "lxml-4.8.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:60d2f60bd5a2a979df28ab309352cdcf8181bda0cca4529769a945f09aba06f9"},
- {file = "lxml-4.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:62f93eac69ec0f4be98d1b96f4d6b964855b8255c345c17ff12c20b93f247b68"},
- {file = "lxml-4.8.0-cp38-cp38-win32.whl", hash = "sha256:20b8a746a026017acf07da39fdb10aa80ad9877046c9182442bf80c84a1c4696"},
- {file = "lxml-4.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:891dc8f522d7059ff0024cd3ae79fd224752676447f9c678f2a5c14b84d9a939"},
- {file = "lxml-4.8.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:b6fc2e2fb6f532cf48b5fed57567ef286addcef38c28874458a41b7837a57807"},
- {file = "lxml-4.8.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:74eb65ec61e3c7c019d7169387d1b6ffcfea1b9ec5894d116a9a903636e4a0b1"},
- {file = "lxml-4.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:627e79894770783c129cc5e89b947e52aa26e8e0557c7e205368a809da4b7939"},
- {file = "lxml-4.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:545bd39c9481f2e3f2727c78c169425efbfb3fbba6e7db4f46a80ebb249819ca"},
- {file = "lxml-4.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5a58d0b12f5053e270510bf12f753a76aaf3d74c453c00942ed7d2c804ca845c"},
- {file = "lxml-4.8.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ec4b4e75fc68da9dc0ed73dcdb431c25c57775383fec325d23a770a64e7ebc87"},
- {file = "lxml-4.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5804e04feb4e61babf3911c2a974a5b86f66ee227cc5006230b00ac6d285b3a9"},
- {file = "lxml-4.8.0-cp39-cp39-win32.whl", hash = "sha256:aa0cf4922da7a3c905d000b35065df6184c0dc1d866dd3b86fd961905bbad2ea"},
- {file = "lxml-4.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:dd10383f1d6b7edf247d0960a3db274c07e96cf3a3fc7c41c8448f93eac3fb1c"},
- {file = "lxml-4.8.0-pp37-pypy37_pp73-macosx_10_14_x86_64.whl", hash = "sha256:2403a6d6fb61c285969b71f4a3527873fe93fd0abe0832d858a17fe68c8fa507"},
- {file = "lxml-4.8.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:986b7a96228c9b4942ec420eff37556c5777bfba6758edcb95421e4a614b57f9"},
- {file = "lxml-4.8.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6fe4ef4402df0250b75ba876c3795510d782def5c1e63890bde02d622570d39e"},
- {file = "lxml-4.8.0-pp38-pypy38_pp73-macosx_10_14_x86_64.whl", hash = "sha256:f10ce66fcdeb3543df51d423ede7e238be98412232fca5daec3e54bcd16b8da0"},
- {file = "lxml-4.8.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:730766072fd5dcb219dd2b95c4c49752a54f00157f322bc6d71f7d2a31fecd79"},
- {file = "lxml-4.8.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8b99ec73073b37f9ebe8caf399001848fced9c08064effdbfc4da2b5a8d07b93"},
- {file = "lxml-4.8.0.tar.gz", hash = "sha256:f63f62fc60e6228a4ca9abae28228f35e1bd3ce675013d1dfb828688d50c6e23"},
-]
-markdownify = [
- {file = "markdownify-0.6.1-py3-none-any.whl", hash = "sha256:7489fd5c601536996a376c4afbcd1dd034db7690af807120681461e82fbc0acc"},
- {file = "markdownify-0.6.1.tar.gz", hash = "sha256:31d7c13ac2ada8bfc7535a25fee6622ca720e1b5f2d4a9cbc429d167c21f886d"},
-]
-mccabe = [
- {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
- {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
-]
-more-itertools = [
- {file = "more-itertools-8.12.0.tar.gz", hash = "sha256:7dc6ad46f05f545f900dd59e8dfb4e84a4827b97b3cfecb175ea0c7d247f6064"},
- {file = "more_itertools-8.12.0-py3-none-any.whl", hash = "sha256:43e6dd9942dffd72661a2c4ef383ad7da1e6a3e968a927ad7a6083ab410a688b"},
-]
-mslex = [
- {file = "mslex-0.3.0-py2.py3-none-any.whl", hash = "sha256:380cb14abf8fabf40e56df5c8b21a6d533dc5cbdcfe42406bbf08dda8f42e42a"},
- {file = "mslex-0.3.0.tar.gz", hash = "sha256:4a1ac3f25025cad78ad2fe499dd16d42759f7a3801645399cce5c404415daa97"},
-]
-multidict = [
- {file = "multidict-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b9e95a740109c6047602f4db4da9949e6c5945cefbad34a1299775ddc9a62e2"},
- {file = "multidict-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac0e27844758d7177989ce406acc6a83c16ed4524ebc363c1f748cba184d89d3"},
- {file = "multidict-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:041b81a5f6b38244b34dc18c7b6aba91f9cdaf854d9a39e5ff0b58e2b5773b9c"},
- {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fdda29a3c7e76a064f2477c9aab1ba96fd94e02e386f1e665bca1807fc5386f"},
- {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3368bf2398b0e0fcbf46d85795adc4c259299fec50c1416d0f77c0a843a3eed9"},
- {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4f052ee022928d34fe1f4d2bc743f32609fb79ed9c49a1710a5ad6b2198db20"},
- {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:225383a6603c086e6cef0f2f05564acb4f4d5f019a4e3e983f572b8530f70c88"},
- {file = "multidict-6.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50bd442726e288e884f7be9071016c15a8742eb689a593a0cac49ea093eef0a7"},
- {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:47e6a7e923e9cada7c139531feac59448f1f47727a79076c0b1ee80274cd8eee"},
- {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0556a1d4ea2d949efe5fd76a09b4a82e3a4a30700553a6725535098d8d9fb672"},
- {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:626fe10ac87851f4cffecee161fc6f8f9853f0f6f1035b59337a51d29ff3b4f9"},
- {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:8064b7c6f0af936a741ea1efd18690bacfbae4078c0c385d7c3f611d11f0cf87"},
- {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2d36e929d7f6a16d4eb11b250719c39560dd70545356365b494249e2186bc389"},
- {file = "multidict-6.0.2-cp310-cp310-win32.whl", hash = "sha256:fcb91630817aa8b9bc4a74023e4198480587269c272c58b3279875ed7235c293"},
- {file = "multidict-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:8cbf0132f3de7cc6c6ce00147cc78e6439ea736cee6bca4f068bcf892b0fd658"},
- {file = "multidict-6.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:05f6949d6169878a03e607a21e3b862eaf8e356590e8bdae4227eedadacf6e51"},
- {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2c2e459f7050aeb7c1b1276763364884595d47000c1cddb51764c0d8976e608"},
- {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0509e469d48940147e1235d994cd849a8f8195e0bca65f8f5439c56e17872a3"},
- {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:514fe2b8d750d6cdb4712346a2c5084a80220821a3e91f3f71eec11cf8d28fd4"},
- {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19adcfc2a7197cdc3987044e3f415168fc5dc1f720c932eb1ef4f71a2067e08b"},
- {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9d153e7f1f9ba0b23ad1568b3b9e17301e23b042c23870f9ee0522dc5cc79e8"},
- {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:aef9cc3d9c7d63d924adac329c33835e0243b5052a6dfcbf7732a921c6e918ba"},
- {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4571f1beddff25f3e925eea34268422622963cd8dc395bb8778eb28418248e43"},
- {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:d48b8ee1d4068561ce8033d2c344cf5232cb29ee1a0206a7b828c79cbc5982b8"},
- {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:45183c96ddf61bf96d2684d9fbaf6f3564d86b34cb125761f9a0ef9e36c1d55b"},
- {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:75bdf08716edde767b09e76829db8c1e5ca9d8bb0a8d4bd94ae1eafe3dac5e15"},
- {file = "multidict-6.0.2-cp37-cp37m-win32.whl", hash = "sha256:a45e1135cb07086833ce969555df39149680e5471c04dfd6a915abd2fc3f6dbc"},
- {file = "multidict-6.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6f3cdef8a247d1eafa649085812f8a310e728bdf3900ff6c434eafb2d443b23a"},
- {file = "multidict-6.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0327292e745a880459ef71be14e709aaea2f783f3537588fb4ed09b6c01bca60"},
- {file = "multidict-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e875b6086e325bab7e680e4316d667fc0e5e174bb5611eb16b3ea121c8951b86"},
- {file = "multidict-6.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feea820722e69451743a3d56ad74948b68bf456984d63c1a92e8347b7b88452d"},
- {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cc57c68cb9139c7cd6fc39f211b02198e69fb90ce4bc4a094cf5fe0d20fd8b0"},
- {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:497988d6b6ec6ed6f87030ec03280b696ca47dbf0648045e4e1d28b80346560d"},
- {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:89171b2c769e03a953d5969b2f272efa931426355b6c0cb508022976a17fd376"},
- {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:684133b1e1fe91eda8fa7447f137c9490a064c6b7f392aa857bba83a28cfb693"},
- {file = "multidict-6.0.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd9fc9c4849a07f3635ccffa895d57abce554b467d611a5009ba4f39b78a8849"},
- {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e07c8e79d6e6fd37b42f3250dba122053fddb319e84b55dd3a8d6446e1a7ee49"},
- {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4070613ea2227da2bfb2c35a6041e4371b0af6b0be57f424fe2318b42a748516"},
- {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:47fbeedbf94bed6547d3aa632075d804867a352d86688c04e606971595460227"},
- {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5774d9218d77befa7b70d836004a768fb9aa4fdb53c97498f4d8d3f67bb9cfa9"},
- {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2957489cba47c2539a8eb7ab32ff49101439ccf78eab724c828c1a54ff3ff98d"},
- {file = "multidict-6.0.2-cp38-cp38-win32.whl", hash = "sha256:e5b20e9599ba74391ca0cfbd7b328fcc20976823ba19bc573983a25b32e92b57"},
- {file = "multidict-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:8004dca28e15b86d1b1372515f32eb6f814bdf6f00952699bdeb541691091f96"},
- {file = "multidict-6.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2e4a0785b84fb59e43c18a015ffc575ba93f7d1dbd272b4cdad9f5134b8a006c"},
- {file = "multidict-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6701bf8a5d03a43375909ac91b6980aea74b0f5402fbe9428fc3f6edf5d9677e"},
- {file = "multidict-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a007b1638e148c3cfb6bf0bdc4f82776cef0ac487191d093cdc316905e504071"},
- {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07a017cfa00c9890011628eab2503bee5872f27144936a52eaab449be5eaf032"},
- {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c207fff63adcdf5a485969131dc70e4b194327666b7e8a87a97fbc4fd80a53b2"},
- {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:373ba9d1d061c76462d74e7de1c0c8e267e9791ee8cfefcf6b0b2495762c370c"},
- {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfba7c6d5d7c9099ba21f84662b037a0ffd4a5e6b26ac07d19e423e6fdf965a9"},
- {file = "multidict-6.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19d9bad105dfb34eb539c97b132057a4e709919ec4dd883ece5838bcbf262b80"},
- {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:de989b195c3d636ba000ee4281cd03bb1234635b124bf4cd89eeee9ca8fcb09d"},
- {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7c40b7bbece294ae3a87c1bc2abff0ff9beef41d14188cda94ada7bcea99b0fb"},
- {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:d16cce709ebfadc91278a1c005e3c17dd5f71f5098bfae1035149785ea6e9c68"},
- {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:a2c34a93e1d2aa35fbf1485e5010337c72c6791407d03aa5f4eed920343dd360"},
- {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:feba80698173761cddd814fa22e88b0661e98cb810f9f986c54aa34d281e4937"},
- {file = "multidict-6.0.2-cp39-cp39-win32.whl", hash = "sha256:23b616fdc3c74c9fe01d76ce0d1ce872d2d396d8fa8e4899398ad64fb5aa214a"},
- {file = "multidict-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:4bae31803d708f6f15fd98be6a6ac0b6958fcf68fda3c77a048a4f9073704aae"},
- {file = "multidict-6.0.2.tar.gz", hash = "sha256:5ff3bd75f38e4c43f1f470f2df7a4d430b821c4ce22be384e1459cb57d6bb013"},
-]
-nodeenv = [
- {file = "nodeenv-1.6.0-py2.py3-none-any.whl", hash = "sha256:621e6b7076565ddcacd2db0294c0381e01fd28945ab36bcf00f41c5daf63bef7"},
- {file = "nodeenv-1.6.0.tar.gz", hash = "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b"},
-]
-ordered-set = [
- {file = "ordered-set-4.1.0.tar.gz", hash = "sha256:694a8e44c87657c59292ede72891eb91d34131f6531463aab3009191c77364a8"},
- {file = "ordered_set-4.1.0-py3-none-any.whl", hash = "sha256:046e1132c71fcf3330438a539928932caf51ddbc582496833e23de611de14562"},
-]
-packaging = [
- {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
- {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
-]
-pamqp = [
- {file = "pamqp-2.3.0-py2.py3-none-any.whl", hash = "sha256:2f81b5c186f668a67f165193925b6bfd83db4363a6222f599517f29ecee60b02"},
- {file = "pamqp-2.3.0.tar.gz", hash = "sha256:5cd0f5a85e89f20d5f8e19285a1507788031cfca4a9ea6f067e3cf18f5e294e8"},
-]
-pep8-naming = [
- {file = "pep8-naming-0.12.1.tar.gz", hash = "sha256:bb2455947757d162aa4cad55dba4ce029005cd1692f2899a21d51d8630ca7841"},
- {file = "pep8_naming-0.12.1-py2.py3-none-any.whl", hash = "sha256:4a8daeaeb33cfcde779309fc0c9c0a68a3bbe2ad8a8308b763c5068f86eb9f37"},
-]
-pip-licenses = [
- {file = "pip-licenses-3.5.3.tar.gz", hash = "sha256:f44860e00957b791c6c6005a3328f2d5eaeee96ddb8e7d87d4b0aa25b02252e4"},
- {file = "pip_licenses-3.5.3-py3-none-any.whl", hash = "sha256:59c148d6a03784bf945d232c0dc0e9de4272a3675acaa0361ad7712398ca86ba"},
-]
-platformdirs = [
- {file = "platformdirs-2.5.1-py3-none-any.whl", hash = "sha256:bcae7cab893c2d310a711b70b24efb93334febe65f8de776ee320b517471e227"},
- {file = "platformdirs-2.5.1.tar.gz", hash = "sha256:7535e70dfa32e84d4b34996ea99c5e432fa29a708d0f4e394bbcb2a8faa4f16d"},
-]
-pluggy = [
- {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
- {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
-]
-pre-commit = [
- {file = "pre_commit-2.17.0-py2.py3-none-any.whl", hash = "sha256:725fa7459782d7bec5ead072810e47351de01709be838c2ce1726b9591dad616"},
- {file = "pre_commit-2.17.0.tar.gz", hash = "sha256:c1a8040ff15ad3d648c70cc3e55b93e4d2d5b687320955505587fd79bbaed06a"},
-]
-psutil = [
- {file = "psutil-5.9.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:55ce319452e3d139e25d6c3f85a1acf12d1607ddedea5e35fb47a552c051161b"},
- {file = "psutil-5.9.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:7336292a13a80eb93c21f36bde4328aa748a04b68c13d01dfddd67fc13fd0618"},
- {file = "psutil-5.9.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:cb8d10461c1ceee0c25a64f2dd54872b70b89c26419e147a05a10b753ad36ec2"},
- {file = "psutil-5.9.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:7641300de73e4909e5d148e90cc3142fb890079e1525a840cf0dfd39195239fd"},
- {file = "psutil-5.9.0-cp27-none-win32.whl", hash = "sha256:ea42d747c5f71b5ccaa6897b216a7dadb9f52c72a0fe2b872ef7d3e1eacf3ba3"},
- {file = "psutil-5.9.0-cp27-none-win_amd64.whl", hash = "sha256:ef216cc9feb60634bda2f341a9559ac594e2eeaadd0ba187a4c2eb5b5d40b91c"},
- {file = "psutil-5.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90a58b9fcae2dbfe4ba852b57bd4a1dded6b990a33d6428c7614b7d48eccb492"},
- {file = "psutil-5.9.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff0d41f8b3e9ebb6b6110057e40019a432e96aae2008951121ba4e56040b84f3"},
- {file = "psutil-5.9.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:742c34fff804f34f62659279ed5c5b723bb0195e9d7bd9907591de9f8f6558e2"},
- {file = "psutil-5.9.0-cp310-cp310-win32.whl", hash = "sha256:8293942e4ce0c5689821f65ce6522ce4786d02af57f13c0195b40e1edb1db61d"},
- {file = "psutil-5.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:9b51917c1af3fa35a3f2dabd7ba96a2a4f19df3dec911da73875e1edaf22a40b"},
- {file = "psutil-5.9.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e9805fed4f2a81de98ae5fe38b75a74c6e6ad2df8a5c479594c7629a1fe35f56"},
- {file = "psutil-5.9.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c51f1af02334e4b516ec221ee26b8fdf105032418ca5a5ab9737e8c87dafe203"},
- {file = "psutil-5.9.0-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32acf55cb9a8cbfb29167cd005951df81b567099295291bcfd1027365b36591d"},
- {file = "psutil-5.9.0-cp36-cp36m-win32.whl", hash = "sha256:e5c783d0b1ad6ca8a5d3e7b680468c9c926b804be83a3a8e95141b05c39c9f64"},
- {file = "psutil-5.9.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d62a2796e08dd024b8179bd441cb714e0f81226c352c802fca0fd3f89eeacd94"},
- {file = "psutil-5.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3d00a664e31921009a84367266b35ba0aac04a2a6cad09c550a89041034d19a0"},
- {file = "psutil-5.9.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7779be4025c540d1d65a2de3f30caeacc49ae7a2152108adeaf42c7534a115ce"},
- {file = "psutil-5.9.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:072664401ae6e7c1bfb878c65d7282d4b4391f1bc9a56d5e03b5a490403271b5"},
- {file = "psutil-5.9.0-cp37-cp37m-win32.whl", hash = "sha256:df2c8bd48fb83a8408c8390b143c6a6fa10cb1a674ca664954de193fdcab36a9"},
- {file = "psutil-5.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1d7b433519b9a38192dfda962dd8f44446668c009833e1429a52424624f408b4"},
- {file = "psutil-5.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c3400cae15bdb449d518545cbd5b649117de54e3596ded84aacabfbb3297ead2"},
- {file = "psutil-5.9.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2237f35c4bbae932ee98902a08050a27821f8f6dfa880a47195e5993af4702d"},
- {file = "psutil-5.9.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1070a9b287846a21a5d572d6dddd369517510b68710fca56b0e9e02fd24bed9a"},
- {file = "psutil-5.9.0-cp38-cp38-win32.whl", hash = "sha256:76cebf84aac1d6da5b63df11fe0d377b46b7b500d892284068bacccf12f20666"},
- {file = "psutil-5.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:3151a58f0fbd8942ba94f7c31c7e6b310d2989f4da74fcbf28b934374e9bf841"},
- {file = "psutil-5.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:539e429da49c5d27d5a58e3563886057f8fc3868a5547b4f1876d9c0f007bccf"},
- {file = "psutil-5.9.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58c7d923dc209225600aec73aa2c4ae8ea33b1ab31bc11ef8a5933b027476f07"},
- {file = "psutil-5.9.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3611e87eea393f779a35b192b46a164b1d01167c9d323dda9b1e527ea69d697d"},
- {file = "psutil-5.9.0-cp39-cp39-win32.whl", hash = "sha256:4e2fb92e3aeae3ec3b7b66c528981fd327fb93fd906a77215200404444ec1845"},
- {file = "psutil-5.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:7d190ee2eaef7831163f254dc58f6d2e2a22e27382b936aab51c835fc080c3d3"},
- {file = "psutil-5.9.0.tar.gz", hash = "sha256:869842dbd66bb80c3217158e629d6fceaecc3a3166d3d1faee515b05dd26ca25"},
-]
-ptable = [
- {file = "PTable-0.9.2.tar.gz", hash = "sha256:aa7fc151cb40f2dabcd2275ba6f7fd0ff8577a86be3365cd3fb297cbe09cc292"},
-]
-py = [
- {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
- {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
-]
-pycares = [
- {file = "pycares-4.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:71b99b9e041ae3356b859822c511f286f84c8889ec9ed1fbf6ac30fb4da13e4c"},
- {file = "pycares-4.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c000942f5fc64e6e046aa61aa53b629b576ba11607d108909727c3c8f211a157"},
- {file = "pycares-4.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b0e50ddc78252f2e2b6b5f2c73e5b2449dfb6bea7a5a0e21dfd1e2bcc9e17382"},
- {file = "pycares-4.1.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6831e963a910b0a8cbdd2750ffcdf5f2bb0edb3f53ca69ff18484de2cc3807c4"},
- {file = "pycares-4.1.2-cp310-cp310-win32.whl", hash = "sha256:ad7b28e1b6bc68edd3d678373fa3af84e39d287090434f25055d21b4716b2fc6"},
- {file = "pycares-4.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:27a6f09dbfb69bb79609724c0f90dfaa7c215876a7cd9f12d585574d1f922112"},
- {file = "pycares-4.1.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e5a060f5fa90ae245aa99a4a8ad13ec39c2340400de037c7e8d27b081e1a3c64"},
- {file = "pycares-4.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:056330275dea42b7199494047a745e1d9785d39fb8c4cd469dca043532240b80"},
- {file = "pycares-4.1.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0aa897543a786daba74ec5e19638bd38b2b432d179a0e248eac1e62de5756207"},
- {file = "pycares-4.1.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cbceaa9b2c416aa931627466d3240aecfc905c292c842252e3d77b8630072505"},
- {file = "pycares-4.1.2-cp36-cp36m-win32.whl", hash = "sha256:112e1385c451069112d6b5ea1f9c378544f3c6b89882ff964e9a64be3336d7e4"},
- {file = "pycares-4.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:c6680f7fdc0f1163e8f6c2a11d11b9a0b524a61000d2a71f9ccd410f154fb171"},
- {file = "pycares-4.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58a41a2baabcd95266db776c510d349d417919407f03510fc87ac7488730d913"},
- {file = "pycares-4.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a810d01c9a426ee8b0f36969c2aef5fb966712be9d7e466920beb328cd9cefa3"},
- {file = "pycares-4.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b266cec81dcea2c3efbbd3dda00af8d7eb0693ae9e47e8706518334b21f27d4a"},
- {file = "pycares-4.1.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8319afe4838e09df267c421ca93da408f770b945ec6217dda72f1f6a493e37e4"},
- {file = "pycares-4.1.2-cp37-cp37m-win32.whl", hash = "sha256:4d5da840aa0d9b15fa51107f09270c563a348cb77b14ae9653d0bbdbe326fcc2"},
- {file = "pycares-4.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:5632f21d92cc0225ba5ff906e4e5dec415ef0b3df322c461d138190681cd5d89"},
- {file = "pycares-4.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8fd1ff17a26bb004f0f6bb902ba7dddd810059096ae0cc3b45e4f5be46315d19"},
- {file = "pycares-4.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:439799be4b7576e907139a7f9b3c8a01b90d3e38af4af9cd1fc6c1ee9a42b9e6"},
- {file = "pycares-4.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:40079ed58efa91747c50aac4edf8ecc7e570132ab57dc0a4030eb0d016a6cab8"},
- {file = "pycares-4.1.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e190471a015f8225fa38069617192e06122771cce2b169ac7a60bfdbd3d4ab2"},
- {file = "pycares-4.1.2-cp38-cp38-win32.whl", hash = "sha256:2b837315ed08c7df009b67725fe1f50489e99de9089f58ec1b243dc612f172aa"},
- {file = "pycares-4.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:c7eba3c8354b730a54d23237d0b6445a2f68570fa68d0848887da23a3f3b71f3"},
- {file = "pycares-4.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2f5f84fe9f83eab9cd68544b165b74ba6e3412d029cc9ab20098d9c332869fc5"},
- {file = "pycares-4.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569eef8597b5e02b1bc4644b9f272160304d8c9985357d7ecfcd054da97c0771"},
- {file = "pycares-4.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e1489aa25d14dbf7176110ead937c01176ed5a0ebefd3b092bbd6b202241814c"},
- {file = "pycares-4.1.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dc942692fca0e27081b7bb414bb971d34609c80df5e953f6d0c62ecc8019acd9"},
- {file = "pycares-4.1.2-cp39-cp39-win32.whl", hash = "sha256:ed71dc4290d9c3353945965604ef1f6a4de631733e9819a7ebc747220b27e641"},
- {file = "pycares-4.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:ec00f3594ee775665167b1a1630edceefb1b1283af9ac57480dba2fb6fd6c360"},
- {file = "pycares-4.1.2.tar.gz", hash = "sha256:03490be0e7b51a0c8073f877bec347eff31003f64f57d9518d419d9369452837"},
-]
-pycodestyle = [
- {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"},
- {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"},
-]
-pycparser = [
- {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
- {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
-]
-pydocstyle = [
- {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"},
- {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"},
-]
-pyflakes = [
- {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"},
- {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"},
-]
-pyparsing = [
- {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"},
- {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"},
-]
-pyreadline3 = [
- {file = "pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb"},
- {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"},
-]
-pytest = [
- {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"},
- {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"},
-]
-pytest-cov = [
- {file = "pytest-cov-2.12.1.tar.gz", hash = "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7"},
- {file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"},
-]
-pytest-forked = [
- {file = "pytest-forked-1.4.0.tar.gz", hash = "sha256:8b67587c8f98cbbadfdd804539ed5455b6ed03802203485dd2f53c1422d7440e"},
- {file = "pytest_forked-1.4.0-py3-none-any.whl", hash = "sha256:bbbb6717efc886b9d64537b41fb1497cfaf3c9601276be8da2cccfea5a3c8ad8"},
-]
-pytest-xdist = [
- {file = "pytest-xdist-2.3.0.tar.gz", hash = "sha256:e8ecde2f85d88fbcadb7d28cb33da0fa29bca5cf7d5967fa89fc0e97e5299ea5"},
- {file = "pytest_xdist-2.3.0-py3-none-any.whl", hash = "sha256:ed3d7da961070fce2a01818b51f6888327fb88df4379edeb6b9d990e789d9c8d"},
-]
-python-dateutil = [
- {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
- {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
-]
-python-dotenv = [
- {file = "python-dotenv-0.17.1.tar.gz", hash = "sha256:b1ae5e9643d5ed987fc57cc2583021e38db531946518130777734f9589b3141f"},
- {file = "python_dotenv-0.17.1-py2.py3-none-any.whl", hash = "sha256:00aa34e92d992e9f8383730816359647f358f4a3be1ba45e5a5cefd27ee91544"},
-]
-python-frontmatter = [
- {file = "python-frontmatter-1.0.0.tar.gz", hash = "sha256:e98152e977225ddafea6f01f40b4b0f1de175766322004c826ca99842d19a7cd"},
- {file = "python_frontmatter-1.0.0-py3-none-any.whl", hash = "sha256:766ae75f1b301ffc5fe3494339147e0fd80bc3deff3d7590a93991978b579b08"},
-]
-pyyaml = [
- {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"},
- {file = "PyYAML-5.4.1-cp27-cp27m-win32.whl", hash = "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393"},
- {file = "PyYAML-5.4.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8"},
- {file = "PyYAML-5.4.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185"},
- {file = "PyYAML-5.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253"},
- {file = "PyYAML-5.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc"},
- {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347"},
- {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541"},
- {file = "PyYAML-5.4.1-cp36-cp36m-win32.whl", hash = "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5"},
- {file = "PyYAML-5.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df"},
- {file = "PyYAML-5.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018"},
- {file = "PyYAML-5.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63"},
- {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa"},
- {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0"},
- {file = "PyYAML-5.4.1-cp37-cp37m-win32.whl", hash = "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b"},
- {file = "PyYAML-5.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf"},
- {file = "PyYAML-5.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46"},
- {file = "PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb"},
- {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247"},
- {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc"},
- {file = "PyYAML-5.4.1-cp38-cp38-win32.whl", hash = "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc"},
- {file = "PyYAML-5.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696"},
- {file = "PyYAML-5.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77"},
- {file = "PyYAML-5.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183"},
- {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122"},
- {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6"},
- {file = "PyYAML-5.4.1-cp39-cp39-win32.whl", hash = "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10"},
- {file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"},
- {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"},
-]
-rapidfuzz = [
- {file = "rapidfuzz-1.9.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:68227a8b25291d6a2140aef049271ea30a77be5ef672a58e582a55a5cc1fce93"},
- {file = "rapidfuzz-1.9.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:c33541995b96ff40025c1456b8c74b7dd2ab9cbf91943fc35a7bb621f48940e2"},
- {file = "rapidfuzz-1.9.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:c2fafbbf97a4632822248f4201601b691e2eac5fdb30e5d7a96d07a6d058a7d4"},
- {file = "rapidfuzz-1.9.1-cp27-cp27m-win32.whl", hash = "sha256:364795f617a99e1dbb55ac3947ab8366588b72531cb2d6152666287d20610706"},
- {file = "rapidfuzz-1.9.1-cp27-cp27m-win_amd64.whl", hash = "sha256:f171d9e66144b0647f9b998ef10bdd919a640e4b1357250c8ef6259deb5ffe0d"},
- {file = "rapidfuzz-1.9.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:c83801a7c5209663aa120b815a4f2c39e95fe8e0b774ec58a1e0affd6a2fcfc6"},
- {file = "rapidfuzz-1.9.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:67e61c2baa6bb1848c4a33752f1781124dcc90bf3f31b18b44db1ae4e4e26634"},
- {file = "rapidfuzz-1.9.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8ab7eb003a18991347174910f11d38ff40399081185d9e3199ec277535f7828b"},
- {file = "rapidfuzz-1.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5ad450badf06ddf98a246140b5059ba895ee8445e8102a5a289908327f551f81"},
- {file = "rapidfuzz-1.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:402b2174bded62a793c5f7d9aec16bc32c661402360a934819ae72b54cfbce1e"},
- {file = "rapidfuzz-1.9.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:92066ccb054efc2e17afb4049c98b550969653cd58f71dd756cfcc8e6864630a"},
- {file = "rapidfuzz-1.9.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8dc0bf1814accee08a9c9bace6672ef06eae6b0446fce88e3e97e23dfaf3ea10"},
- {file = "rapidfuzz-1.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdbd387efb8478605951344f327dd03bf053c138d757369a43404305b99e55db"},
- {file = "rapidfuzz-1.9.1-cp310-cp310-win32.whl", hash = "sha256:b1c54807e556dbcc6caf4ce0f24446c01b195f3cc46e2a6e74b82d3a21eaa45d"},
- {file = "rapidfuzz-1.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:ac3273364cd1619cab3bf0ba731efea5405833f9eba362da7dcd70bd42073d8e"},
- {file = "rapidfuzz-1.9.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:d9faf62606c08a0a6992dd480c72b6a068733ae02688dc35f2e36ba0d44673f4"},
- {file = "rapidfuzz-1.9.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:f6a56a48be047637b1b0b2459a11cf7cd5aa7bbe16a439bd4f73b4af39e620e4"},
- {file = "rapidfuzz-1.9.1-cp35-cp35m-win32.whl", hash = "sha256:aa91609979e9d2700f0ff100df99b36e7d700b70169ee385d43d5de9e471ae97"},
- {file = "rapidfuzz-1.9.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b4cfdd0915ab4cec86c2ff6bab9f01b03454f3de0963c37f9f219df2ddf42b95"},
- {file = "rapidfuzz-1.9.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c6bfa4ad0158a093cd304f795ceefdc3861ae6942a61432b2a50858be6de88ca"},
- {file = "rapidfuzz-1.9.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:eb0ea02295d9278bd2dcd2df4760b0f2887b6c3f2f374005ec5af320d8d3a37e"},
- {file = "rapidfuzz-1.9.1-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d5187cd5cd6273e9fee07de493a42a2153134a4914df74cb1abb0744551c548a"},
- {file = "rapidfuzz-1.9.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6e5b8af63f9c05b64454460759ed84a715d581d598ec4484f4ec512f398e8b1"},
- {file = "rapidfuzz-1.9.1-cp36-cp36m-win32.whl", hash = "sha256:36137f88f2b28115af506118e64e11c816611eab2434293af7fdacd1290ffb9d"},
- {file = "rapidfuzz-1.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:fcc420cad46be7c9887110edf04cdee545f26dbf22650a443d89790fc35f7b88"},
- {file = "rapidfuzz-1.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b06de314f426aebff8a44319016bbe2b22f7848c84e44224f80b0690b7b08b18"},
- {file = "rapidfuzz-1.9.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e5de44e719faea79e45322b037f0d4a141d750b80d2204fa68f43a42a24f0fbc"},
- {file = "rapidfuzz-1.9.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f9439df09a782afd01b67005a3b110c70bbf9e1cf06d2ac9b293ce2d02d3c549"},
- {file = "rapidfuzz-1.9.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e903d4702647465721e2d0431c95f04fd56a06577f06f41e2960c83fd63c1bad"},
- {file = "rapidfuzz-1.9.1-cp37-cp37m-win32.whl", hash = "sha256:a5298f4ac1975edcbb15583eab659a44b33aebaf3bccf172e185cfea68771c08"},
- {file = "rapidfuzz-1.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:103193a01921b54fcdad6b01cfda3a68e00aeafca236b7ecd5b1b2c2e7e96337"},
- {file = "rapidfuzz-1.9.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1d98a3187040dca855e02179a35c137f72ef83ce243783d44ea59efa86b94b3a"},
- {file = "rapidfuzz-1.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cb92bf7fc911b787055a88d9295ca3b4fe8576e3b59271f070f1b1b181eb087d"},
- {file = "rapidfuzz-1.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3f014a0f5f8159a94c6ee884fedd1c30e07fb866a5d76ff2c18091bc6363b76f"},
- {file = "rapidfuzz-1.9.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:31474074a99f72289ac325fbd77983e7d355d48860bfe7a4f6f6396fdb24410a"},
- {file = "rapidfuzz-1.9.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec67d79af5a2d7b0cf67b570a5579710e461cadda4120478e813b63491f394dd"},
- {file = "rapidfuzz-1.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ebc0d3d15ed32f98f0052cf6e3e9c9b8010fb93c04fb74d2022e3c51ec540e2"},
- {file = "rapidfuzz-1.9.1-cp38-cp38-win32.whl", hash = "sha256:477ab1a3044bab89db45caabc562b158f68765ecaa638b73ba17e92f09dfa5ff"},
- {file = "rapidfuzz-1.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:8e872763dc0367d7544aa585d2e8b27af233323b8a7cd2f9b78cafa05bae5018"},
- {file = "rapidfuzz-1.9.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8401c41e219ae36ca7a88762776a6270511650d4cc70d024ae61561e96d67e47"},
- {file = "rapidfuzz-1.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ea10bd8e0436801c3264f7084a5ea194f12ba9fe1ba898aa4a2107d276501292"},
- {file = "rapidfuzz-1.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:433737914b46c1ffa0c678eceae1c260dc6b7fb5b6cad4c725d3e3607c764b32"},
- {file = "rapidfuzz-1.9.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8c3b08e90e45acbc469d1f456681643256e952bf84ec7714f58979baba0c8a1c"},
- {file = "rapidfuzz-1.9.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bbcd265b3c86176e5db4cbba7b4364d7333c214ee80e2d259c7085929934ca9d"},
- {file = "rapidfuzz-1.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d69fabcd635783cd842e7d5ee4b77164314c5124b82df5a0c436ab3d698f8a9"},
- {file = "rapidfuzz-1.9.1-cp39-cp39-win32.whl", hash = "sha256:01f16b6f3fa5d1a26c12f5da5de0032f1e12c919d876005b57492a8ec9a5c043"},
- {file = "rapidfuzz-1.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:0bcc5bbfdbe6068cc2cf0029ab6cde08dceac498d232fa3a61dd34fbfa0b3f36"},
- {file = "rapidfuzz-1.9.1-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:de869c8f4e8edb9b2f7b8232a04896645501defcbd9d85bc0202ff3ec6285f6b"},
- {file = "rapidfuzz-1.9.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:db5978e970fb0955974d51021da4b929e2e4890fef17792989ee32658e2b159c"},
- {file = "rapidfuzz-1.9.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:33479f75f36ac3a1d8421365d4fa906e013490790730a89caba31d06e6f71738"},
- {file = "rapidfuzz-1.9.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:af991cb333ec526d894923163050931b3a870b7694bf7687aaa6154d341a98f5"},
- {file = "rapidfuzz-1.9.1.tar.gz", hash = "sha256:bd7a4fe33ba49db3417f0f57a8af02462554f1296dedcf35b026cd3525efef74"},
-]
-redis = [
- {file = "redis-4.1.4-py3-none-any.whl", hash = "sha256:04629f8e42be942c4f7d1812f2094568f04c612865ad19ad3ace3005da70631a"},
- {file = "redis-4.1.4.tar.gz", hash = "sha256:1d9a0cdf89fdd93f84261733e24f55a7bbd413a9b219fdaf56e3e728ca9a2306"},
-]
-regex = [
- {file = "regex-2021.4.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000"},
- {file = "regex-2021.4.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711"},
- {file = "regex-2021.4.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11"},
- {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968"},
- {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0"},
- {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4"},
- {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a"},
- {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7"},
- {file = "regex-2021.4.4-cp36-cp36m-win32.whl", hash = "sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29"},
- {file = "regex-2021.4.4-cp36-cp36m-win_amd64.whl", hash = "sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79"},
- {file = "regex-2021.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8"},
- {file = "regex-2021.4.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31"},
- {file = "regex-2021.4.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a"},
- {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5"},
- {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82"},
- {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765"},
- {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e"},
- {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439"},
- {file = "regex-2021.4.4-cp37-cp37m-win32.whl", hash = "sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d"},
- {file = "regex-2021.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3"},
- {file = "regex-2021.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500"},
- {file = "regex-2021.4.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14"},
- {file = "regex-2021.4.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480"},
- {file = "regex-2021.4.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc"},
- {file = "regex-2021.4.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093"},
- {file = "regex-2021.4.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10"},
- {file = "regex-2021.4.4-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f"},
- {file = "regex-2021.4.4-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87"},
- {file = "regex-2021.4.4-cp38-cp38-win32.whl", hash = "sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac"},
- {file = "regex-2021.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2"},
- {file = "regex-2021.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17"},
- {file = "regex-2021.4.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605"},
- {file = "regex-2021.4.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9"},
- {file = "regex-2021.4.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7"},
- {file = "regex-2021.4.4-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8"},
- {file = "regex-2021.4.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed"},
- {file = "regex-2021.4.4-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c"},
- {file = "regex-2021.4.4-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042"},
- {file = "regex-2021.4.4-cp39-cp39-win32.whl", hash = "sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6"},
- {file = "regex-2021.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07"},
- {file = "regex-2021.4.4.tar.gz", hash = "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb"},
-]
-requests = [
- {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"},
- {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"},
-]
-requests-file = [
- {file = "requests-file-1.5.1.tar.gz", hash = "sha256:07d74208d3389d01c38ab89ef403af0cfec63957d53a0081d8eca738d0247d8e"},
- {file = "requests_file-1.5.1-py2.py3-none-any.whl", hash = "sha256:dfe5dae75c12481f68ba353183c53a65e6044c923e64c24b2209f6c7570ca953"},
-]
-sentry-sdk = [
- {file = "sentry-sdk-1.5.6.tar.gz", hash = "sha256:ac2a50128409d57655279817aedcb7800cace1f76b266f3dd62055d5afd6e098"},
- {file = "sentry_sdk-1.5.6-py2.py3-none-any.whl", hash = "sha256:1ab34e3851a34aeb3d1af1a0f77cec73978c4e9698e5210d050e4932953cb241"},
-]
-sgmllib3k = [
- {file = "sgmllib3k-1.0.0.tar.gz", hash = "sha256:7868fb1c8bfa764c1ac563d3cf369c381d1325d36124933a726f29fcdaa812e9"},
-]
-six = [
- {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
- {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
-]
-snowballstemmer = [
- {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"},
- {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"},
-]
-sortedcontainers = [
- {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"},
- {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"},
-]
-soupsieve = [
- {file = "soupsieve-2.3.1-py3-none-any.whl", hash = "sha256:1a3cca2617c6b38c0343ed661b1fa5de5637f257d4fe22bd9f1338010a1efefb"},
- {file = "soupsieve-2.3.1.tar.gz", hash = "sha256:b8d49b1cd4f037c7082a9683dfa1801aa2597fb11c3a1155b7a5b94829b4f1f9"},
-]
-statsd = [
- {file = "statsd-3.3.0-py2.py3-none-any.whl", hash = "sha256:c610fb80347fca0ef62666d241bce64184bd7cc1efe582f9690e045c25535eaa"},
- {file = "statsd-3.3.0.tar.gz", hash = "sha256:e3e6db4c246f7c59003e51c9720a51a7f39a396541cb9b147ff4b14d15b5dd1f"},
-]
-taskipy = [
- {file = "taskipy-1.7.0-py3-none-any.whl", hash = "sha256:9e284c10898e9dee01a3e72220b94b192b1daa0f560271503a6df1da53d03844"},
- {file = "taskipy-1.7.0.tar.gz", hash = "sha256:960e480b1004971e76454ecd1a0484e640744a30073a1069894a311467f85ed8"},
-]
-testfixtures = [
- {file = "testfixtures-6.18.5-py2.py3-none-any.whl", hash = "sha256:7de200e24f50a4a5d6da7019fb1197aaf5abd475efb2ec2422fdcf2f2eb98c1d"},
- {file = "testfixtures-6.18.5.tar.gz", hash = "sha256:02dae883f567f5b70fd3ad3c9eefb95912e78ac90be6c7444b5e2f46bf572c84"},
-]
-tldextract = [
- {file = "tldextract-3.2.0-py3-none-any.whl", hash = "sha256:427703b65db54644f7b81d3dcb79bf355c1a7c28a12944e5cc6787531ccc828a"},
- {file = "tldextract-3.2.0.tar.gz", hash = "sha256:3d4b6a2105600b7d0290ea237bf30b6b0dc763e50fcbe40e849a019bd6dbcbff"},
-]
-toml = [
- {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
- {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
-]
-urllib3 = [
- {file = "urllib3-1.26.8-py2.py3-none-any.whl", hash = "sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed"},
- {file = "urllib3-1.26.8.tar.gz", hash = "sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"},
-]
-virtualenv = [
- {file = "virtualenv-20.13.2-py2.py3-none-any.whl", hash = "sha256:e7b34c9474e6476ee208c43a4d9ac1510b041c68347eabfe9a9ea0c86aa0a46b"},
- {file = "virtualenv-20.13.2.tar.gz", hash = "sha256:01f5f80744d24a3743ce61858123488e91cb2dd1d3bdf92adaf1bba39ffdedf0"},
-]
-wrapt = [
- {file = "wrapt-1.13.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:e05e60ff3b2b0342153be4d1b597bbcfd8330890056b9619f4ad6b8d5c96a81a"},
- {file = "wrapt-1.13.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:85148f4225287b6a0665eef08a178c15097366d46b210574a658c1ff5b377489"},
- {file = "wrapt-1.13.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:2dded5496e8f1592ec27079b28b6ad2a1ef0b9296d270f77b8e4a3a796cf6909"},
- {file = "wrapt-1.13.3-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:e94b7d9deaa4cc7bac9198a58a7240aaf87fe56c6277ee25fa5b3aa1edebd229"},
- {file = "wrapt-1.13.3-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:498e6217523111d07cd67e87a791f5e9ee769f9241fcf8a379696e25806965af"},
- {file = "wrapt-1.13.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:ec7e20258ecc5174029a0f391e1b948bf2906cd64c198a9b8b281b811cbc04de"},
- {file = "wrapt-1.13.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:87883690cae293541e08ba2da22cacaae0a092e0ed56bbba8d018cc486fbafbb"},
- {file = "wrapt-1.13.3-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:f99c0489258086308aad4ae57da9e8ecf9e1f3f30fa35d5e170b4d4896554d80"},
- {file = "wrapt-1.13.3-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6a03d9917aee887690aa3f1747ce634e610f6db6f6b332b35c2dd89412912bca"},
- {file = "wrapt-1.13.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:936503cb0a6ed28dbfa87e8fcd0a56458822144e9d11a49ccee6d9a8adb2ac44"},
- {file = "wrapt-1.13.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f9c51d9af9abb899bd34ace878fbec8bf357b3194a10c4e8e0a25512826ef056"},
- {file = "wrapt-1.13.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:220a869982ea9023e163ba915077816ca439489de6d2c09089b219f4e11b6785"},
- {file = "wrapt-1.13.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0877fe981fd76b183711d767500e6b3111378ed2043c145e21816ee589d91096"},
- {file = "wrapt-1.13.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:43e69ffe47e3609a6aec0fe723001c60c65305784d964f5007d5b4fb1bc6bf33"},
- {file = "wrapt-1.13.3-cp310-cp310-win32.whl", hash = "sha256:78dea98c81915bbf510eb6a3c9c24915e4660302937b9ae05a0947164248020f"},
- {file = "wrapt-1.13.3-cp310-cp310-win_amd64.whl", hash = "sha256:ea3e746e29d4000cd98d572f3ee2a6050a4f784bb536f4ac1f035987fc1ed83e"},
- {file = "wrapt-1.13.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:8c73c1a2ec7c98d7eaded149f6d225a692caa1bd7b2401a14125446e9e90410d"},
- {file = "wrapt-1.13.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:086218a72ec7d986a3eddb7707c8c4526d677c7b35e355875a0fe2918b059179"},
- {file = "wrapt-1.13.3-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:e92d0d4fa68ea0c02d39f1e2f9cb5bc4b4a71e8c442207433d8db47ee79d7aa3"},
- {file = "wrapt-1.13.3-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:d4a5f6146cfa5c7ba0134249665acd322a70d1ea61732723c7d3e8cc0fa80755"},
- {file = "wrapt-1.13.3-cp35-cp35m-win32.whl", hash = "sha256:8aab36778fa9bba1a8f06a4919556f9f8c7b33102bd71b3ab307bb3fecb21851"},
- {file = "wrapt-1.13.3-cp35-cp35m-win_amd64.whl", hash = "sha256:944b180f61f5e36c0634d3202ba8509b986b5fbaf57db3e94df11abee244ba13"},
- {file = "wrapt-1.13.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2ebdde19cd3c8cdf8df3fc165bc7827334bc4e353465048b36f7deeae8ee0918"},
- {file = "wrapt-1.13.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:610f5f83dd1e0ad40254c306f4764fcdc846641f120c3cf424ff57a19d5f7ade"},
- {file = "wrapt-1.13.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5601f44a0f38fed36cc07db004f0eedeaadbdcec90e4e90509480e7e6060a5bc"},
- {file = "wrapt-1.13.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:e6906d6f48437dfd80464f7d7af1740eadc572b9f7a4301e7dd3d65db285cacf"},
- {file = "wrapt-1.13.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:766b32c762e07e26f50d8a3468e3b4228b3736c805018e4b0ec8cc01ecd88125"},
- {file = "wrapt-1.13.3-cp36-cp36m-win32.whl", hash = "sha256:5f223101f21cfd41deec8ce3889dc59f88a59b409db028c469c9b20cfeefbe36"},
- {file = "wrapt-1.13.3-cp36-cp36m-win_amd64.whl", hash = "sha256:f122ccd12fdc69628786d0c947bdd9cb2733be8f800d88b5a37c57f1f1d73c10"},
- {file = "wrapt-1.13.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:46f7f3af321a573fc0c3586612db4decb7eb37172af1bc6173d81f5b66c2e068"},
- {file = "wrapt-1.13.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:778fd096ee96890c10ce96187c76b3e99b2da44e08c9e24d5652f356873f6709"},
- {file = "wrapt-1.13.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0cb23d36ed03bf46b894cfec777eec754146d68429c30431c99ef28482b5c1df"},
- {file = "wrapt-1.13.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:96b81ae75591a795d8c90edc0bfaab44d3d41ffc1aae4d994c5aa21d9b8e19a2"},
- {file = "wrapt-1.13.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7dd215e4e8514004c8d810a73e342c536547038fb130205ec4bba9f5de35d45b"},
- {file = "wrapt-1.13.3-cp37-cp37m-win32.whl", hash = "sha256:47f0a183743e7f71f29e4e21574ad3fa95676136f45b91afcf83f6a050914829"},
- {file = "wrapt-1.13.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fd76c47f20984b43d93de9a82011bb6e5f8325df6c9ed4d8310029a55fa361ea"},
- {file = "wrapt-1.13.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b73d4b78807bd299b38e4598b8e7bd34ed55d480160d2e7fdaabd9931afa65f9"},
- {file = "wrapt-1.13.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ec9465dd69d5657b5d2fa6133b3e1e989ae27d29471a672416fd729b429eb554"},
- {file = "wrapt-1.13.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dd91006848eb55af2159375134d724032a2d1d13bcc6f81cd8d3ed9f2b8e846c"},
- {file = "wrapt-1.13.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ae9de71eb60940e58207f8e71fe113c639da42adb02fb2bcbcaccc1ccecd092b"},
- {file = "wrapt-1.13.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:51799ca950cfee9396a87f4a1240622ac38973b6df5ef7a41e7f0b98797099ce"},
- {file = "wrapt-1.13.3-cp38-cp38-win32.whl", hash = "sha256:4b9c458732450ec42578b5642ac53e312092acf8c0bfce140ada5ca1ac556f79"},
- {file = "wrapt-1.13.3-cp38-cp38-win_amd64.whl", hash = "sha256:7dde79d007cd6dfa65afe404766057c2409316135cb892be4b1c768e3f3a11cb"},
- {file = "wrapt-1.13.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:981da26722bebb9247a0601e2922cedf8bb7a600e89c852d063313102de6f2cb"},
- {file = "wrapt-1.13.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:705e2af1f7be4707e49ced9153f8d72131090e52be9278b5dbb1498c749a1e32"},
- {file = "wrapt-1.13.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:25b1b1d5df495d82be1c9d2fad408f7ce5ca8a38085e2da41bb63c914baadff7"},
- {file = "wrapt-1.13.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:77416e6b17926d953b5c666a3cb718d5945df63ecf922af0ee576206d7033b5e"},
- {file = "wrapt-1.13.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:865c0b50003616f05858b22174c40ffc27a38e67359fa1495605f96125f76640"},
- {file = "wrapt-1.13.3-cp39-cp39-win32.whl", hash = "sha256:0a017a667d1f7411816e4bf214646d0ad5b1da2c1ea13dec6c162736ff25a374"},
- {file = "wrapt-1.13.3-cp39-cp39-win_amd64.whl", hash = "sha256:81bd7c90d28a4b2e1df135bfbd7c23aee3050078ca6441bead44c42483f9ebfb"},
- {file = "wrapt-1.13.3.tar.gz", hash = "sha256:1fea9cd438686e6682271d36f3481a9f3636195578bab9ca3382e2f5f01fc185"},
-]
-yarl = [
- {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f2a8508f7350512434e41065684076f640ecce176d262a7d54f0da41d99c5a95"},
- {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da6df107b9ccfe52d3a48165e48d72db0eca3e3029b5b8cb4fe6ee3cb870ba8b"},
- {file = "yarl-1.7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1d0894f238763717bdcfea74558c94e3bc34aeacd3351d769460c1a586a8b05"},
- {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe4b95b7e00c6635a72e2d00b478e8a28bfb122dc76349a06e20792eb53a523"},
- {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c145ab54702334c42237a6c6c4cc08703b6aa9b94e2f227ceb3d477d20c36c63"},
- {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ca56f002eaf7998b5fcf73b2421790da9d2586331805f38acd9997743114e98"},
- {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1d3d5ad8ea96bd6d643d80c7b8d5977b4e2fb1bab6c9da7322616fd26203d125"},
- {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:167ab7f64e409e9bdd99333fe8c67b5574a1f0495dcfd905bc7454e766729b9e"},
- {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:95a1873b6c0dd1c437fb3bb4a4aaa699a48c218ac7ca1e74b0bee0ab16c7d60d"},
- {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6152224d0a1eb254f97df3997d79dadd8bb2c1a02ef283dbb34b97d4f8492d23"},
- {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bb7d54b8f61ba6eee541fba4b83d22b8a046b4ef4d8eb7f15a7e35db2e1e245"},
- {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:9c1f083e7e71b2dd01f7cd7434a5f88c15213194df38bc29b388ccdf1492b739"},
- {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f44477ae29025d8ea87ec308539f95963ffdc31a82f42ca9deecf2d505242e72"},
- {file = "yarl-1.7.2-cp310-cp310-win32.whl", hash = "sha256:cff3ba513db55cc6a35076f32c4cdc27032bd075c9faef31fec749e64b45d26c"},
- {file = "yarl-1.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:c9c6d927e098c2d360695f2e9d38870b2e92e0919be07dbe339aefa32a090265"},
- {file = "yarl-1.7.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9b4c77d92d56a4c5027572752aa35082e40c561eec776048330d2907aead891d"},
- {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c01a89a44bb672c38f42b49cdb0ad667b116d731b3f4c896f72302ff77d71656"},
- {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c19324a1c5399b602f3b6e7db9478e5b1adf5cf58901996fc973fe4fccd73eed"},
- {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3abddf0b8e41445426d29f955b24aeecc83fa1072be1be4e0d194134a7d9baee"},
- {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6a1a9fe17621af43e9b9fcea8bd088ba682c8192d744b386ee3c47b56eaabb2c"},
- {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8b0915ee85150963a9504c10de4e4729ae700af11df0dc5550e6587ed7891e92"},
- {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:29e0656d5497733dcddc21797da5a2ab990c0cb9719f1f969e58a4abac66234d"},
- {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:bf19725fec28452474d9887a128e98dd67eee7b7d52e932e6949c532d820dc3b"},
- {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d6f3d62e16c10e88d2168ba2d065aa374e3c538998ed04996cd373ff2036d64c"},
- {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ac10bbac36cd89eac19f4e51c032ba6b412b3892b685076f4acd2de18ca990aa"},
- {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aa32aaa97d8b2ed4e54dc65d241a0da1c627454950f7d7b1f95b13985afd6c5d"},
- {file = "yarl-1.7.2-cp36-cp36m-win32.whl", hash = "sha256:87f6e082bce21464857ba58b569370e7b547d239ca22248be68ea5d6b51464a1"},
- {file = "yarl-1.7.2-cp36-cp36m-win_amd64.whl", hash = "sha256:ac35ccde589ab6a1870a484ed136d49a26bcd06b6a1c6397b1967ca13ceb3913"},
- {file = "yarl-1.7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a467a431a0817a292121c13cbe637348b546e6ef47ca14a790aa2fa8cc93df63"},
- {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ab0c3274d0a846840bf6c27d2c60ba771a12e4d7586bf550eefc2df0b56b3b4"},
- {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d260d4dc495c05d6600264a197d9d6f7fc9347f21d2594926202fd08cf89a8ba"},
- {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc4dd8b01a8112809e6b636b00f487846956402834a7fd59d46d4f4267181c41"},
- {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c1164a2eac148d85bbdd23e07dfcc930f2e633220f3eb3c3e2a25f6148c2819e"},
- {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:67e94028817defe5e705079b10a8438b8cb56e7115fa01640e9c0bb3edf67332"},
- {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:89ccbf58e6a0ab89d487c92a490cb5660d06c3a47ca08872859672f9c511fc52"},
- {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8cce6f9fa3df25f55521fbb5c7e4a736683148bcc0c75b21863789e5185f9185"},
- {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:211fcd65c58bf250fb994b53bc45a442ddc9f441f6fec53e65de8cba48ded986"},
- {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c10ea1e80a697cf7d80d1ed414b5cb8f1eec07d618f54637067ae3c0334133c4"},
- {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:52690eb521d690ab041c3919666bea13ab9fbff80d615ec16fa81a297131276b"},
- {file = "yarl-1.7.2-cp37-cp37m-win32.whl", hash = "sha256:695ba021a9e04418507fa930d5f0704edbce47076bdcfeeaba1c83683e5649d1"},
- {file = "yarl-1.7.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c17965ff3706beedafd458c452bf15bac693ecd146a60a06a214614dc097a271"},
- {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fce78593346c014d0d986b7ebc80d782b7f5e19843ca798ed62f8e3ba8728576"},
- {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c2a1ac41a6aa980db03d098a5531f13985edcb451bcd9d00670b03129922cd0d"},
- {file = "yarl-1.7.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:39d5493c5ecd75c8093fa7700a2fb5c94fe28c839c8e40144b7ab7ccba6938c8"},
- {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eb6480ef366d75b54c68164094a6a560c247370a68c02dddb11f20c4c6d3c9d"},
- {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ba63585a89c9885f18331a55d25fe81dc2d82b71311ff8bd378fc8004202ff6"},
- {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e39378894ee6ae9f555ae2de332d513a5763276a9265f8e7cbaeb1b1ee74623a"},
- {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c0910c6b6c31359d2f6184828888c983d54d09d581a4a23547a35f1d0b9484b1"},
- {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6feca8b6bfb9eef6ee057628e71e1734caf520a907b6ec0d62839e8293e945c0"},
- {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8300401dc88cad23f5b4e4c1226f44a5aa696436a4026e456fe0e5d2f7f486e6"},
- {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:788713c2896f426a4e166b11f4ec538b5736294ebf7d5f654ae445fd44270832"},
- {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fd547ec596d90c8676e369dd8a581a21227fe9b4ad37d0dc7feb4ccf544c2d59"},
- {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:737e401cd0c493f7e3dd4db72aca11cfe069531c9761b8ea474926936b3c57c8"},
- {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baf81561f2972fb895e7844882898bda1eef4b07b5b385bcd308d2098f1a767b"},
- {file = "yarl-1.7.2-cp38-cp38-win32.whl", hash = "sha256:ede3b46cdb719c794427dcce9d8beb4abe8b9aa1e97526cc20de9bd6583ad1ef"},
- {file = "yarl-1.7.2-cp38-cp38-win_amd64.whl", hash = "sha256:cc8b7a7254c0fc3187d43d6cb54b5032d2365efd1df0cd1749c0c4df5f0ad45f"},
- {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:580c1f15500e137a8c37053e4cbf6058944d4c114701fa59944607505c2fe3a0"},
- {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ec1d9a0d7780416e657f1e405ba35ec1ba453a4f1511eb8b9fbab81cb8b3ce1"},
- {file = "yarl-1.7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3bf8cfe8856708ede6a73907bf0501f2dc4e104085e070a41f5d88e7faf237f3"},
- {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1be4bbb3d27a4e9aa5f3df2ab61e3701ce8fcbd3e9846dbce7c033a7e8136746"},
- {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:534b047277a9a19d858cde163aba93f3e1677d5acd92f7d10ace419d478540de"},
- {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6ddcd80d79c96eb19c354d9dca95291589c5954099836b7c8d29278a7ec0bda"},
- {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9bfcd43c65fbb339dc7086b5315750efa42a34eefad0256ba114cd8ad3896f4b"},
- {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f64394bd7ceef1237cc604b5a89bf748c95982a84bcd3c4bbeb40f685c810794"},
- {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044daf3012e43d4b3538562da94a88fb12a6490652dbc29fb19adfa02cf72eac"},
- {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:368bcf400247318382cc150aaa632582d0780b28ee6053cd80268c7e72796dec"},
- {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:bab827163113177aee910adb1f48ff7af31ee0289f434f7e22d10baf624a6dfe"},
- {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0cba38120db72123db7c58322fa69e3c0efa933040ffb586c3a87c063ec7cae8"},
- {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:59218fef177296451b23214c91ea3aba7858b4ae3306dde120224cfe0f7a6ee8"},
- {file = "yarl-1.7.2-cp39-cp39-win32.whl", hash = "sha256:1edc172dcca3f11b38a9d5c7505c83c1913c0addc99cd28e993efeaafdfaa18d"},
- {file = "yarl-1.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:797c2c412b04403d2da075fb93c123df35239cd7b4cc4e0cd9e5839b73f52c58"},
- {file = "yarl-1.7.2.tar.gz", hash = "sha256:45399b46d60c253327a460e99856752009fcee5f5d3c80b2f7c0cae1c38d56dd"},
-]
+certifi = []
+cffi = []
+cfgv = []
+charset-normalizer = []
+colorama = []
+coloredlogs = []
+coverage = []
+deepdiff = []
+deprecated = []
+"discord.py" = []
+distlib = []
+emoji = []
+execnet = []
+fakeredis = []
+feedparser = []
+filelock = []
+flake8 = []
+flake8-annotations = []
+flake8-bugbear = []
+flake8-docstrings = []
+flake8-isort = []
+flake8-string-format = []
+flake8-tidy-imports = []
+flake8-todo = []
+frozenlist = []
+humanfriendly = []
+identify = []
+idna = []
+iniconfig = []
+isort = []
+jarowinkler = []
+lupa = []
+lxml = []
+markdownify = []
+mccabe = []
+more-itertools = []
+mslex = []
+multidict = []
+nodeenv = []
+ordered-set = []
+packaging = []
+pep8-naming = []
+pip-licenses = []
+platformdirs = []
+pluggy = []
+pre-commit = []
+psutil = []
+ptable = []
+py = []
+pycares = []
+pycodestyle = []
+pycparser = []
+pydocstyle = []
+pyflakes = []
+pyparsing = []
+pyreadline3 = []
+pytest = []
+pytest-cov = []
+pytest-forked = []
+pytest-xdist = []
+python-dateutil = []
+python-dotenv = []
+python-frontmatter = []
+pyyaml = []
+rapidfuzz = []
+redis = []
+regex = []
+requests = []
+requests-file = []
+sentry-sdk = []
+sgmllib3k = []
+six = []
+snowballstemmer = []
+sortedcontainers = []
+soupsieve = []
+statsd = []
+taskipy = []
+tldextract = []
+toml = []
+tomli = []
+urllib3 = []
+virtualenv = []
+wrapt = []
+yarl = []
diff --git a/pyproject.toml b/pyproject.toml
index 06795fd0d..43eb799b6 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -6,52 +6,55 @@ authors = ["Python Discord <[email protected]>"]
license = "MIT"
[tool.poetry.dependencies]
-python = "3.9.*"
-disnake = "~=2.4"
+python = "3.10.*"
+
# See https://bot-core.pythondiscord.com/ for docs.
-bot-core = {url = "https://github.com/python-discord/bot-core/archive/refs/tags/v3.0.0.zip"}
-aio-pika = "~=6.1"
-aiodns = "~=2.0"
-aiohttp = "~=3.7"
-aioredis = "~=1.3.1"
-arrow = "~=1.0.3"
-async-rediscache = { version = "~=0.1.2", extras = ["fakeredis"] }
-beautifulsoup4 = "~=4.9"
-colorama = { version = "~=0.4.3", markers = "sys_platform == 'win32'" }
-coloredlogs = "~=14.0"
-deepdiff = "~=4.0"
-emoji = "~=0.6"
-feedparser = "~=6.0.2"
-rapidfuzz = "~=1.4"
-lxml = "~=4.6"
-markdownify = "==0.6.1"
-more_itertools = "~=8.2"
-python-dateutil = "~=2.8"
-python-frontmatter = "~=1.0.0"
-pyyaml = "~=5.1"
-regex = "==2021.4.4"
-sentry-sdk = "~=1.3"
-statsd = "~=3.3"
-tldextract = "^3.1.2"
+bot-core = { url = "https://github.com/python-discord/bot-core/archive/refs/tags/v8.0.0.zip", extras = ["async-rediscache"] }
+redis = "4.3.4"
+fakeredis = { version = "1.8.2", extras = ["lua"] }
+
+aiohttp = "3.8.1"
+arrow = "1.2.2"
+beautifulsoup4 = "4.11.1"
+colorama = { version = "0.4.5", markers = "sys_platform == 'win32'" }
+coloredlogs = "15.0.1"
+deepdiff = "5.8.1"
+emoji = "2.0.0"
+feedparser = "6.0.10"
+rapidfuzz = "2.3.0"
+lxml = "4.9.1"
+
+# Must be kept on this version unless doc command output is fixed
+# See https://github.com/python-discord/bot/pull/2156
+markdownify = "0.6.1"
+
+more_itertools = "8.13.0"
+python-dateutil = "2.8.2"
+python-frontmatter = "1.0.0"
+pyyaml = "6.0"
+regex = "2022.7.25"
+sentry-sdk = "1.8.0"
+statsd = "3.3.0"
+tldextract = "3.3.1"
[tool.poetry.dev-dependencies]
-coverage = "~=5.0"
-flake8 = "~=3.8"
-flake8-annotations = "~=2.0"
-flake8-bugbear = "~=20.1"
-flake8-docstrings = "~=1.4"
-flake8-string-format = "~=0.2"
-flake8-tidy-imports = "~=4.0"
-flake8-todo = "~=0.7"
-flake8-isort = "~=4.0"
-pep8-naming = "~=0.9"
-pre-commit = "~=2.1"
-taskipy = "~=1.7.0"
-pip-licenses = "~=3.5.3"
-python-dotenv = "~=0.17.1"
-pytest = "~=6.2.4"
-pytest-cov = "~=2.12.1"
-pytest-xdist = { version = "~=2.3.0", extras = ["psutil"] }
+coverage = "6.4.2"
+flake8 = "4.0.1"
+flake8-annotations = "2.9.0"
+flake8-bugbear = "22.7.1"
+flake8-docstrings = "1.6.0"
+flake8-string-format = "0.3.0"
+flake8-tidy-imports = "4.8.0"
+flake8-todo = "0.7"
+flake8-isort = "4.1.2.post0"
+pep8-naming = "0.13.1"
+pre-commit = "2.20.0"
+taskipy = "1.10.2"
+pip-licenses = "3.5.4"
+python-dotenv = "0.20.0"
+pytest = "7.1.2"
+pytest-cov = "3.0.0"
+pytest-xdist = "2.5.0"
[build-system]
requires = ["poetry-core>=1.0.0"]
diff --git a/tests/README.md b/tests/README.md
index fc03b3d43..b7fddfaa2 100644
--- a/tests/README.md
+++ b/tests/README.md
@@ -121,9 +121,9 @@ As we are trying to test our "units" of code independently, we want to make sure
However, the features that we are trying to test often depend on those objects generated by external pieces of code. It would be difficult to test a bot command without having access to a `Context` instance. Fortunately, there's a solution for that: we use fake objects that act like the true object. We call these fake objects "mocks".
-To create these mock object, we mainly use the [`unittest.mock`](https://docs.python.org/3/library/unittest.mock.html) module. In addition, we have also defined a couple of specialized mock objects that mock specific `disnake` types (see the section on the below.).
+To create these mock objects, we mainly use the [`unittest.mock`](https://docs.python.org/3/library/unittest.mock.html) module. In addition, we have defined a couple of specialized mock objects that mock specific `discord.py` types (see the section below).
-An example of mocking is when we provide a command with a mocked version of `disnake.ext.commands.Context` object instead of a real `Context` object. This makes sure we can then check (_assert_) if the `send` method of the mocked Context object was called with the correct message content (without having to send a real message to the Discord API!):
+An example of mocking is when we provide a command with a mocked version of a `discord.ext.commands.Context` object instead of a real `Context` object. This lets us check (_assert_) whether the `send` method of the mocked Context object was called with the correct message content (without having to send a real message to the Discord API!):
```py
import asyncio
@@ -152,15 +152,15 @@ class BotCogTests(unittest.TestCase):
By default, the `unittest.mock.Mock` and `unittest.mock.MagicMock` classes cannot mock coroutines, since the `__call__` method they provide is synchronous. The [`AsyncMock`](https://docs.python.org/3/library/unittest.mock.html#unittest.mock.AsyncMock) that has been [introduced in Python 3.8](https://docs.python.org/3.9/whatsnew/3.8.html#unittest) is an asynchronous version of `MagicMock` that can be used anywhere a coroutine is expected.
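
For reference, here is a minimal sketch of awaiting an `AsyncMock` inside an `IsolatedAsyncioTestCase`; the coroutine name and return value below are invented purely for illustration:

```py
import unittest
from unittest.mock import AsyncMock


class AsyncMockExampleTests(unittest.IsolatedAsyncioTestCase):
    """Minimal sketch: an AsyncMock can be awaited and asserted on like a coroutine."""

    async def test_async_mock_can_be_awaited(self):
        # A stand-in for any coroutine the code under test would normally await.
        fetch_user = AsyncMock(return_value={"name": "lemon"})

        result = await fetch_user(1234)

        self.assertEqual(result, {"name": "lemon"})
        fetch_user.assert_awaited_once_with(1234)
```
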
-### Special mocks for some `disnake` types
+### Special mocks for some `discord.py` types
To quote Ned Batchelder, Mock objects are "automatic chameleons". This means that they will happily allow access to any attribute or method and provide a mocked value in return. One downside to this is that if the code you are testing gets the name of the attribute wrong, your mock object will not complain and the test may still pass.
-In order to avoid that, we have defined a number of Mock types in [`helpers.py`](/tests/helpers.py) that follow the specifications of the actual disnake types they are mocking. This means that trying to access an attribute or method on a mocked object that does not exist on the equivalent `disnake` object will result in an `AttributeError`. In addition, these mocks have some sensible defaults and **pass `isinstance` checks for the types they are mocking**.
+In order to avoid that, we have defined a number of Mock types in [`helpers.py`](/tests/helpers.py) that follow the specifications of the actual Discord types they are mocking. This means that trying to access an attribute or method on a mocked object that does not exist on the equivalent `discord.py` object will result in an `AttributeError`. In addition, these mocks have some sensible defaults and **pass `isinstance` checks for the types they are mocking**.
These special mocks are added when they are needed, so if you think it would be sensible to add another one, feel free to propose one in your PR.
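
As a rough, self-contained illustration of that difference (the `Example` class below is made up for this sketch and is not one of the repository's helpers):

```py
from unittest.mock import MagicMock


class Example:
    """Stand-in class used only for this illustration."""

    def send(self, content: str) -> None:
        """Pretend to send some content somewhere."""


loose_mock = MagicMock()
loose_mock.sned("a typo that goes unnoticed")  # A plain MagicMock accepts any attribute.

strict_mock = MagicMock(spec_set=Example)
print(isinstance(strict_mock, Example))  # True: spec'd mocks pass isinstance checks.
strict_mock.send("ok")                   # Allowed, because Example defines send.
# strict_mock.sned("...")                # Would raise AttributeError: not on the spec.
```
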
-**Note:** These mock types only "know" the attributes that are set by default when these `disnake` types are first initialized. If you need to work with dynamically set attributes that are added after initialization, you can still explicitly mock them:
+**Note:** These mock types only "know" the attributes that are set by default when these `discord.py` types are first initialized. If you need to work with dynamically set attributes that are added after initialization, you can still explicitly mock them:
```py
import unittest.mock
@@ -245,7 +245,7 @@ All in all, it's not only important to consider if all statements or branches we
### Unit Testing vs Integration Testing
-Another restriction of unit testing is that it tests, well, in units. Even if we can guarantee that the units work as they should independently, we have no guarantee that they will actually work well together. Even more, while the mocking described above gives us a lot of flexibility in factoring out external code, we are work under the implicit assumption that we fully understand those external parts and utilize it correctly. What if our mocked `Context` object works with a `send` method, but `disnake` has changed it to a `send_message` method in a recent update? It could mean our tests are passing, but the code it's testing still doesn't work in production.
+Another restriction of unit testing is that it tests, well, in units. Even if we can guarantee that the units work as they should independently, we have no guarantee that they will actually work well together. What's more, while the mocking described above gives us a lot of flexibility in factoring out external code, we work under the implicit assumption that we fully understand those external parts and use them correctly. What if our mocked `Context` object works with a `send` method, but `discord.py` has changed it to a `send_message` method in a recent update? It could mean our tests are passing, but the code they're testing still doesn't work in production.
The answer to this is that we also need to make sure that the individual parts come together into a working application. In addition, we will also need to make sure that the application communicates correctly with external applications. Since we currently have no automated integration tests or functional tests, that means **it's still very important to fire up the bot and test the code you've written manually** in addition to the unit tests you've written.
diff --git a/tests/base.py b/tests/base.py
index dea7dd678..4863a1821 100644
--- a/tests/base.py
+++ b/tests/base.py
@@ -3,8 +3,9 @@ import unittest
from contextlib import contextmanager
from typing import Dict
-import disnake
-from disnake.ext import commands
+import discord
+from async_rediscache import RedisSession
+from discord.ext import commands
from bot.log import get_logger
from tests import helpers
@@ -80,7 +81,7 @@ class LoggingTestsMixin:
class CommandTestCase(unittest.IsolatedAsyncioTestCase):
- """TestCase with additional assertions that are useful for testing disnake commands."""
+ """TestCase with additional assertions that are useful for testing Discord commands."""
async def assertHasPermissionsCheck( # noqa: N802
self,
@@ -98,9 +99,32 @@ class CommandTestCase(unittest.IsolatedAsyncioTestCase):
permissions = {k: not v for k, v in permissions.items()}
ctx = helpers.MockContext()
- ctx.channel.permissions_for.return_value = disnake.Permissions(**permissions)
+ ctx.channel.permissions_for.return_value = discord.Permissions(**permissions)
with self.assertRaises(commands.MissingPermissions) as cm:
await cmd.can_run(ctx)
self.assertCountEqual(permissions.keys(), cm.exception.missing_permissions)
+
+
+class RedisTestCase(unittest.IsolatedAsyncioTestCase):
+ """
+ Use this as a base class for any test cases that require a redis session.
+
+ This will prepare a fresh redis instance for each test function, and will
+ not make any assertions on its own. Tests can mutate the instance as they wish.
+ """
+
+ session = None
+
+ async def flush(self):
+ """Flush everything from the redis database to prevent carry-overs between tests."""
+ await self.session.client.flushall()
+
+ async def asyncSetUp(self):
+ self.session = await RedisSession(use_fakeredis=True).connect()
+ await self.flush()
+
+ async def asyncTearDown(self):
+ if self.session:
+ await self.session.client.close()
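
For context, a hypothetical test case showing how the new `RedisTestCase` base class might be used; the key and value are made up for illustration, and the exact return type depends on how the underlying redis client is configured:

```py
from tests.base import RedisTestCase


class ExampleRedisUsageTests(RedisTestCase):
    """Minimal sketch: each test gets a freshly flushed fakeredis-backed session."""

    async def test_value_round_trips_through_redis(self):
        # asyncSetUp has already connected the session and flushed the database.
        await self.session.client.set("example:key", "example value")

        stored = await self.session.client.get("example:key")

        # The client typically returns bytes unless decode_responses is enabled.
        self.assertEqual(stored, b"example value")
```
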
diff --git a/tests/bot/exts/backend/sync/test_base.py b/tests/bot/exts/backend/sync/test_base.py
index 9dc46005b..a17c1fa10 100644
--- a/tests/bot/exts/backend/sync/test_base.py
+++ b/tests/bot/exts/backend/sync/test_base.py
@@ -1,7 +1,8 @@
import unittest
from unittest import mock
-from bot.api import ResponseCodeError
+from botcore.site_api import ResponseCodeError
+
from bot.exts.backend.sync._syncers import Syncer
from tests import helpers
diff --git a/tests/bot/exts/backend/sync/test_cog.py b/tests/bot/exts/backend/sync/test_cog.py
index 4ed7de64d..87b76c6b4 100644
--- a/tests/bot/exts/backend/sync/test_cog.py
+++ b/tests/bot/exts/backend/sync/test_cog.py
@@ -1,10 +1,10 @@
import unittest
from unittest import mock
-import disnake
+import discord
+from botcore.site_api import ResponseCodeError
from bot import constants
-from bot.api import ResponseCodeError
from bot.exts.backend import sync
from bot.exts.backend.sync._cog import Sync
from bot.exts.backend.sync._syncers import Syncer
@@ -16,11 +16,11 @@ class SyncExtensionTests(unittest.IsolatedAsyncioTestCase):
"""Tests for the sync extension."""
@staticmethod
- def test_extension_setup():
+ async def test_extension_setup():
"""The Sync cog should be added."""
bot = helpers.MockBot()
- sync.setup(bot)
- bot.add_cog.assert_called_once()
+ await sync.setup(bot)
+ bot.add_cog.assert_awaited_once()
class SyncCogTestCase(unittest.IsolatedAsyncioTestCase):
@@ -60,22 +60,18 @@ class SyncCogTestCase(unittest.IsolatedAsyncioTestCase):
class SyncCogTests(SyncCogTestCase):
"""Tests for the Sync cog."""
- @mock.patch("bot.utils.scheduling.create_task")
- @mock.patch.object(Sync, "sync_guild", new_callable=mock.MagicMock)
- def test_sync_cog_init(self, sync_guild, create_task):
- """Should instantiate syncers and run a sync for the guild."""
- # Reset because a Sync cog was already instantiated in setUp.
+ async def test_sync_cog_sync_on_load(self):
+ """Roles and users should be synced on cog load."""
+ guild = helpers.MockGuild()
+ self.bot.get_guild = mock.MagicMock(return_value=guild)
+
self.RoleSyncer.reset_mock()
self.UserSyncer.reset_mock()
- mock_sync_guild_coro = mock.MagicMock()
- sync_guild.return_value = mock_sync_guild_coro
-
- Sync(self.bot)
+ await self.cog.cog_load()
- sync_guild.assert_called_once_with()
- create_task.assert_called_once()
- self.assertEqual(create_task.call_args.args[0], mock_sync_guild_coro)
+ self.RoleSyncer.sync.assert_called_once_with(guild)
+ self.UserSyncer.sync.assert_called_once_with(guild)
async def test_sync_cog_sync_guild(self):
"""Roles and users should be synced only if a guild is successfully retrieved."""
@@ -87,7 +83,7 @@ class SyncCogTests(SyncCogTestCase):
self.bot.get_guild = mock.MagicMock(return_value=guild)
- await self.cog.sync_guild()
+ await self.cog.cog_load()
self.bot.wait_until_guild_available.assert_called_once()
self.bot.get_guild.assert_called_once_with(constants.Guild.id)
@@ -257,9 +253,9 @@ class SyncCogListenerTests(SyncCogTestCase):
self.assertTrue(self.cog.on_member_update.__cog_listener__)
subtests = (
- ("activities", disnake.Game("Pong"), disnake.Game("Frogger")),
+ ("activities", discord.Game("Pong"), discord.Game("Frogger")),
("nick", "old nick", "new nick"),
- ("status", disnake.Status.online, disnake.Status.offline),
+ ("status", discord.Status.online, discord.Status.offline),
)
for attribute, old_value, new_value in subtests:
diff --git a/tests/bot/exts/backend/sync/test_roles.py b/tests/bot/exts/backend/sync/test_roles.py
index 9ecb8fae0..541074336 100644
--- a/tests/bot/exts/backend/sync/test_roles.py
+++ b/tests/bot/exts/backend/sync/test_roles.py
@@ -1,7 +1,7 @@
import unittest
from unittest import mock
-import disnake
+import discord
from bot.exts.backend.sync._syncers import RoleSyncer, _Diff, _Role
from tests import helpers
@@ -34,8 +34,8 @@ class RoleSyncerDiffTests(unittest.IsolatedAsyncioTestCase):
for role in roles:
mock_role = helpers.MockRole(**role)
- mock_role.colour = disnake.Colour(role["colour"])
- mock_role.permissions = disnake.Permissions(role["permissions"])
+ mock_role.colour = discord.Colour(role["colour"])
+ mock_role.permissions = discord.Permissions(role["permissions"])
guild.roles.append(mock_role)
return guild
diff --git a/tests/bot/exts/backend/sync/test_users.py b/tests/bot/exts/backend/sync/test_users.py
index f55f5360f..2fc97af2d 100644
--- a/tests/bot/exts/backend/sync/test_users.py
+++ b/tests/bot/exts/backend/sync/test_users.py
@@ -1,7 +1,7 @@
import unittest
from unittest import mock
-from disnake.errors import NotFound
+from discord.errors import NotFound
from bot.exts.backend.sync._syncers import UserSyncer, _Diff
from tests import helpers
diff --git a/tests/bot/exts/backend/test_error_handler.py b/tests/bot/exts/backend/test_error_handler.py
index 83b5f2749..7562f6aa8 100644
--- a/tests/bot/exts/backend/test_error_handler.py
+++ b/tests/bot/exts/backend/test_error_handler.py
@@ -1,11 +1,11 @@
import unittest
from unittest.mock import AsyncMock, MagicMock, call, patch
-from disnake.ext.commands import errors
+from botcore.site_api import ResponseCodeError
+from discord.ext.commands import errors
-from bot.api import ResponseCodeError
from bot.errors import InvalidInfractedUserError, LockedResourceError
-from bot.exts.backend.error_handler import ErrorHandler, setup
+from bot.exts.backend import error_handler
from bot.exts.info.tags import Tags
from bot.exts.moderation.silence import Silence
from bot.utils.checks import InWhitelistCheckFailure
@@ -18,14 +18,14 @@ class ErrorHandlerTests(unittest.IsolatedAsyncioTestCase):
def setUp(self):
self.bot = MockBot()
self.ctx = MockContext(bot=self.bot)
+ self.cog = error_handler.ErrorHandler(self.bot)
async def test_error_handler_already_handled(self):
"""Should not do anything when error is already handled by local error handler."""
self.ctx.reset_mock()
- cog = ErrorHandler(self.bot)
error = errors.CommandError()
error.handled = "foo"
- self.assertIsNone(await cog.on_command_error(self.ctx, error))
+ self.assertIsNone(await self.cog.on_command_error(self.ctx, error))
self.ctx.send.assert_not_awaited()
async def test_error_handler_command_not_found_error_not_invoked_by_handler(self):
@@ -45,27 +45,27 @@ class ErrorHandlerTests(unittest.IsolatedAsyncioTestCase):
"called_try_get_tag": True
}
)
- cog = ErrorHandler(self.bot)
- cog.try_silence = AsyncMock()
- cog.try_get_tag = AsyncMock()
+ self.cog.try_silence = AsyncMock()
+ self.cog.try_get_tag = AsyncMock()
+ self.cog.try_run_eval = AsyncMock(return_value=False)
for case in test_cases:
with self.subTest(try_silence_return=case["try_silence_return"], try_get_tag=case["called_try_get_tag"]):
self.ctx.reset_mock()
- cog.try_silence.reset_mock(return_value=True)
- cog.try_get_tag.reset_mock()
+ self.cog.try_silence.reset_mock(return_value=True)
+ self.cog.try_get_tag.reset_mock()
- cog.try_silence.return_value = case["try_silence_return"]
+ self.cog.try_silence.return_value = case["try_silence_return"]
self.ctx.channel.id = 1234
- self.assertIsNone(await cog.on_command_error(self.ctx, error))
+ self.assertIsNone(await self.cog.on_command_error(self.ctx, error))
if case["try_silence_return"]:
- cog.try_get_tag.assert_not_awaited()
- cog.try_silence.assert_awaited_once()
+ self.cog.try_get_tag.assert_not_awaited()
+ self.cog.try_silence.assert_awaited_once()
else:
- cog.try_silence.assert_awaited_once()
- cog.try_get_tag.assert_awaited_once()
+ self.cog.try_silence.assert_awaited_once()
+ self.cog.try_get_tag.assert_awaited_once()
self.ctx.send.assert_not_awaited()
@@ -73,57 +73,54 @@ class ErrorHandlerTests(unittest.IsolatedAsyncioTestCase):
"""Should do nothing when error is `CommandNotFound` and have attribute `invoked_from_error_handler`."""
ctx = MockContext(bot=self.bot, invoked_from_error_handler=True)
- cog = ErrorHandler(self.bot)
- cog.try_silence = AsyncMock()
- cog.try_get_tag = AsyncMock()
+ self.cog.try_silence = AsyncMock()
+ self.cog.try_get_tag = AsyncMock()
+ self.cog.try_run_eval = AsyncMock()
error = errors.CommandNotFound()
- self.assertIsNone(await cog.on_command_error(ctx, error))
+ self.assertIsNone(await self.cog.on_command_error(ctx, error))
- cog.try_silence.assert_not_awaited()
- cog.try_get_tag.assert_not_awaited()
+ self.cog.try_silence.assert_not_awaited()
+ self.cog.try_get_tag.assert_not_awaited()
+ self.cog.try_run_eval.assert_not_awaited()
self.ctx.send.assert_not_awaited()
async def test_error_handler_user_input_error(self):
"""Should await `ErrorHandler.handle_user_input_error` when error is `UserInputError`."""
self.ctx.reset_mock()
- cog = ErrorHandler(self.bot)
- cog.handle_user_input_error = AsyncMock()
+ self.cog.handle_user_input_error = AsyncMock()
error = errors.UserInputError()
- self.assertIsNone(await cog.on_command_error(self.ctx, error))
- cog.handle_user_input_error.assert_awaited_once_with(self.ctx, error)
+ self.assertIsNone(await self.cog.on_command_error(self.ctx, error))
+ self.cog.handle_user_input_error.assert_awaited_once_with(self.ctx, error)
async def test_error_handler_check_failure(self):
"""Should await `ErrorHandler.handle_check_failure` when error is `CheckFailure`."""
self.ctx.reset_mock()
- cog = ErrorHandler(self.bot)
- cog.handle_check_failure = AsyncMock()
+ self.cog.handle_check_failure = AsyncMock()
error = errors.CheckFailure()
- self.assertIsNone(await cog.on_command_error(self.ctx, error))
- cog.handle_check_failure.assert_awaited_once_with(self.ctx, error)
+ self.assertIsNone(await self.cog.on_command_error(self.ctx, error))
+ self.cog.handle_check_failure.assert_awaited_once_with(self.ctx, error)
async def test_error_handler_command_on_cooldown(self):
"""Should send error with `ctx.send` when error is `CommandOnCooldown`."""
self.ctx.reset_mock()
- cog = ErrorHandler(self.bot)
error = errors.CommandOnCooldown(10, 9, type=None)
- self.assertIsNone(await cog.on_command_error(self.ctx, error))
+ self.assertIsNone(await self.cog.on_command_error(self.ctx, error))
self.ctx.send.assert_awaited_once_with(error)
async def test_error_handler_command_invoke_error(self):
"""Should call `handle_api_error` or `handle_unexpected_error` depending on original error."""
- cog = ErrorHandler(self.bot)
- cog.handle_api_error = AsyncMock()
- cog.handle_unexpected_error = AsyncMock()
+ self.cog.handle_api_error = AsyncMock()
+ self.cog.handle_unexpected_error = AsyncMock()
test_cases = (
{
"args": (self.ctx, errors.CommandInvokeError(ResponseCodeError(AsyncMock()))),
- "expect_mock_call": cog.handle_api_error
+ "expect_mock_call": self.cog.handle_api_error
},
{
"args": (self.ctx, errors.CommandInvokeError(TypeError)),
- "expect_mock_call": cog.handle_unexpected_error
+ "expect_mock_call": self.cog.handle_unexpected_error
},
{
"args": (self.ctx, errors.CommandInvokeError(LockedResourceError("abc", "test"))),
@@ -138,7 +135,7 @@ class ErrorHandlerTests(unittest.IsolatedAsyncioTestCase):
for case in test_cases:
with self.subTest(args=case["args"], expect_mock_call=case["expect_mock_call"]):
self.ctx.send.reset_mock()
- self.assertIsNone(await cog.on_command_error(*case["args"]))
+ self.assertIsNone(await self.cog.on_command_error(*case["args"]))
if case["expect_mock_call"] == "send":
self.ctx.send.assert_awaited_once()
else:
@@ -148,29 +145,27 @@ class ErrorHandlerTests(unittest.IsolatedAsyncioTestCase):
async def test_error_handler_conversion_error(self):
"""Should call `handle_api_error` or `handle_unexpected_error` depending on original error."""
- cog = ErrorHandler(self.bot)
- cog.handle_api_error = AsyncMock()
- cog.handle_unexpected_error = AsyncMock()
+ self.cog.handle_api_error = AsyncMock()
+ self.cog.handle_unexpected_error = AsyncMock()
cases = (
{
"error": errors.ConversionError(AsyncMock(), ResponseCodeError(AsyncMock())),
- "mock_function_to_call": cog.handle_api_error
+ "mock_function_to_call": self.cog.handle_api_error
},
{
"error": errors.ConversionError(AsyncMock(), TypeError),
- "mock_function_to_call": cog.handle_unexpected_error
+ "mock_function_to_call": self.cog.handle_unexpected_error
}
)
for case in cases:
with self.subTest(**case):
- self.assertIsNone(await cog.on_command_error(self.ctx, case["error"]))
+ self.assertIsNone(await self.cog.on_command_error(self.ctx, case["error"]))
case["mock_function_to_call"].assert_awaited_once_with(self.ctx, case["error"].original)
async def test_error_handler_two_other_errors(self):
"""Should call `handle_unexpected_error` if error is `MaxConcurrencyReached` or `ExtensionError`."""
- cog = ErrorHandler(self.bot)
- cog.handle_unexpected_error = AsyncMock()
+ self.cog.handle_unexpected_error = AsyncMock()
errs = (
errors.MaxConcurrencyReached(1, MagicMock()),
errors.ExtensionError(name="foo")
@@ -178,16 +173,15 @@ class ErrorHandlerTests(unittest.IsolatedAsyncioTestCase):
for err in errs:
with self.subTest(error=err):
- cog.handle_unexpected_error.reset_mock()
- self.assertIsNone(await cog.on_command_error(self.ctx, err))
- cog.handle_unexpected_error.assert_awaited_once_with(self.ctx, err)
+ self.cog.handle_unexpected_error.reset_mock()
+ self.assertIsNone(await self.cog.on_command_error(self.ctx, err))
+ self.cog.handle_unexpected_error.assert_awaited_once_with(self.ctx, err)
@patch("bot.exts.backend.error_handler.log")
async def test_error_handler_other_errors(self, log_mock):
"""Should `log.debug` other errors."""
- cog = ErrorHandler(self.bot)
error = errors.DisabledCommand() # Use this just as an arbitrary "other" error
- self.assertIsNone(await cog.on_command_error(self.ctx, error))
+ self.assertIsNone(await self.cog.on_command_error(self.ctx, error))
log_mock.debug.assert_called_once()
@@ -199,7 +193,7 @@ class TrySilenceTests(unittest.IsolatedAsyncioTestCase):
self.silence = Silence(self.bot)
self.bot.get_command.return_value = self.silence.silence
self.ctx = MockContext(bot=self.bot)
- self.cog = ErrorHandler(self.bot)
+ self.cog = error_handler.ErrorHandler(self.bot)
async def test_try_silence_context_invoked_from_error_handler(self):
"""Should set `Context.invoked_from_error_handler` to `True`."""
@@ -331,7 +325,7 @@ class TryGetTagTests(unittest.IsolatedAsyncioTestCase):
self.bot = MockBot()
self.ctx = MockContext()
self.tag = Tags(self.bot)
- self.cog = ErrorHandler(self.bot)
+ self.cog = error_handler.ErrorHandler(self.bot)
self.bot.get_command.return_value = self.tag.get_command
async def test_try_get_tag_get_command(self):
@@ -396,7 +390,7 @@ class IndividualErrorHandlerTests(unittest.IsolatedAsyncioTestCase):
def setUp(self):
self.bot = MockBot()
self.ctx = MockContext(bot=self.bot)
- self.cog = ErrorHandler(self.bot)
+ self.cog = error_handler.ErrorHandler(self.bot)
async def test_handle_input_error_handler_errors(self):
"""Should handle each error probably."""
@@ -477,11 +471,11 @@ class IndividualErrorHandlerTests(unittest.IsolatedAsyncioTestCase):
@patch("bot.exts.backend.error_handler.log")
async def test_handle_api_error(self, log_mock):
- """Should `ctx.send` on HTTP error codes, `log.debug|warning` depends on code."""
+ """Should `ctx.send` on HTTP error codes, and log at correct level."""
test_cases = (
{
"error": ResponseCodeError(AsyncMock(status=400)),
- "log_level": "debug"
+ "log_level": "error"
},
{
"error": ResponseCodeError(AsyncMock(status=404)),
@@ -505,6 +499,8 @@ class IndividualErrorHandlerTests(unittest.IsolatedAsyncioTestCase):
self.ctx.send.assert_awaited_once()
if case["log_level"] == "warning":
log_mock.warning.assert_called_once()
+ elif case["log_level"] == "error":
+ log_mock.error.assert_called_once()
else:
log_mock.debug.assert_called_once()
@@ -544,11 +540,11 @@ class IndividualErrorHandlerTests(unittest.IsolatedAsyncioTestCase):
push_scope_mock.set_extra.has_calls(set_extra_calls)
-class ErrorHandlerSetupTests(unittest.TestCase):
+class ErrorHandlerSetupTests(unittest.IsolatedAsyncioTestCase):
"""Tests for `ErrorHandler` `setup` function."""
- def test_setup(self):
+ async def test_setup(self):
"""Should call `bot.add_cog` with `ErrorHandler`."""
bot = MockBot()
- setup(bot)
- bot.add_cog.assert_called_once()
+ await error_handler.setup(bot)
+ bot.add_cog.assert_awaited_once()
diff --git a/tests/bot/exts/events/test_code_jams.py b/tests/bot/exts/events/test_code_jams.py
index fdff36b61..684f7abcd 100644
--- a/tests/bot/exts/events/test_code_jams.py
+++ b/tests/bot/exts/events/test_code_jams.py
@@ -1,8 +1,8 @@
import unittest
from unittest.mock import AsyncMock, MagicMock, create_autospec, patch
-from disnake import CategoryChannel
-from disnake.ext.commands import BadArgument
+from discord import CategoryChannel
+from discord.ext.commands import BadArgument
from bot.constants import Roles
from bot.exts.events import code_jams
@@ -160,11 +160,11 @@ class JamCodejamCreateTests(unittest.IsolatedAsyncioTestCase):
member.add_roles.assert_not_awaited()
-class CodeJamSetup(unittest.TestCase):
+class CodeJamSetup(unittest.IsolatedAsyncioTestCase):
"""Test for `setup` function of `CodeJam` cog."""
- def test_setup(self):
+ async def test_setup(self):
"""Should call `bot.add_cog`."""
bot = MockBot()
- code_jams.setup(bot)
- bot.add_cog.assert_called_once()
+ await code_jams.setup(bot)
+ bot.add_cog.assert_awaited_once()
diff --git a/tests/bot/exts/filters/test_antimalware.py b/tests/bot/exts/filters/test_antimalware.py
index 0cab405d0..7282334e2 100644
--- a/tests/bot/exts/filters/test_antimalware.py
+++ b/tests/bot/exts/filters/test_antimalware.py
@@ -1,7 +1,7 @@
import unittest
from unittest.mock import AsyncMock, Mock
-from disnake import NotFound
+from discord import NotFound
from bot.constants import Channels, STAFF_ROLES
from bot.exts.filters import antimalware
@@ -192,11 +192,11 @@ class AntiMalwareCogTests(unittest.IsolatedAsyncioTestCase):
self.assertCountEqual(disallowed_extensions, expected_disallowed_extensions)
-class AntiMalwareSetupTests(unittest.TestCase):
+class AntiMalwareSetupTests(unittest.IsolatedAsyncioTestCase):
"""Tests setup of the `AntiMalware` cog."""
- def test_setup(self):
+ async def test_setup(self):
"""Setup of the extension should call add_cog."""
bot = MockBot()
- antimalware.setup(bot)
- bot.add_cog.assert_called_once()
+ await antimalware.setup(bot)
+ bot.add_cog.assert_awaited_once()
diff --git a/tests/bot/exts/filters/test_filtering.py b/tests/bot/exts/filters/test_filtering.py
index 8ae59c1f1..bd26532f1 100644
--- a/tests/bot/exts/filters/test_filtering.py
+++ b/tests/bot/exts/filters/test_filtering.py
@@ -11,7 +11,7 @@ class FilteringCogTests(unittest.IsolatedAsyncioTestCase):
def setUp(self):
"""Instantiate the bot and cog."""
self.bot = MockBot()
- with patch("bot.utils.scheduling.create_task", new=lambda task, **_: task.close()):
+ with patch("botcore.utils.scheduling.create_task", new=lambda task, **_: task.close()):
self.cog = filtering.Filtering(self.bot)
@autospec(filtering.Filtering, "_get_filterlist_items", pass_mocks=False, return_value=["TOKEN"])
diff --git a/tests/bot/exts/filters/test_security.py b/tests/bot/exts/filters/test_security.py
index 46fa82fd7..007b7b1eb 100644
--- a/tests/bot/exts/filters/test_security.py
+++ b/tests/bot/exts/filters/test_security.py
@@ -1,7 +1,6 @@
import unittest
-from unittest.mock import MagicMock
-from disnake.ext.commands import NoPrivateMessage
+from discord.ext.commands import NoPrivateMessage
from bot.exts.filters import security
from tests.helpers import MockBot, MockContext
@@ -44,11 +43,11 @@ class SecurityCogTests(unittest.TestCase):
self.assertTrue(self.cog.check_on_guild(self.ctx))
-class SecurityCogLoadTests(unittest.TestCase):
+class SecurityCogLoadTests(unittest.IsolatedAsyncioTestCase):
"""Tests loading the `Security` cog."""
- def test_security_cog_load(self):
+ async def test_security_cog_load(self):
"""Setup of the extension should call add_cog."""
- bot = MagicMock()
- security.setup(bot)
- bot.add_cog.assert_called_once()
+ bot = MockBot()
+ await security.setup(bot)
+ bot.add_cog.assert_awaited_once()
diff --git a/tests/bot/exts/filters/test_token_remover.py b/tests/bot/exts/filters/test_token_remover.py
index dd56c10dd..c1f3762ac 100644
--- a/tests/bot/exts/filters/test_token_remover.py
+++ b/tests/bot/exts/filters/test_token_remover.py
@@ -3,7 +3,7 @@ from re import Match
from unittest import mock
from unittest.mock import MagicMock
-from disnake import Colour, NotFound
+from discord import Colour, NotFound
from bot import constants
from bot.exts.filters import token_remover
@@ -395,15 +395,15 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase):
self.msg.channel.send.assert_not_awaited()
-class TokenRemoverExtensionTests(unittest.TestCase):
+class TokenRemoverExtensionTests(unittest.IsolatedAsyncioTestCase):
"""Tests for the token_remover extension."""
@autospec("bot.exts.filters.token_remover", "TokenRemover")
- def test_extension_setup(self, cog):
+ async def test_extension_setup(self, cog):
"""The TokenRemover cog should be added."""
bot = MockBot()
- token_remover.setup(bot)
+ await token_remover.setup(bot)
cog.assert_called_once_with(bot)
- bot.add_cog.assert_called_once()
+ bot.add_cog.assert_awaited_once()
self.assertTrue(isinstance(bot.add_cog.call_args.args[0], TokenRemover))
diff --git a/tests/bot/exts/info/test_help.py b/tests/bot/exts/info/test_help.py
index 604c69671..2644ae40d 100644
--- a/tests/bot/exts/info/test_help.py
+++ b/tests/bot/exts/info/test_help.py
@@ -12,7 +12,6 @@ class HelpCogTests(unittest.IsolatedAsyncioTestCase):
self.bot = MockBot()
self.cog = help.Help(self.bot)
self.ctx = MockContext(bot=self.bot)
- self.bot.help_command.context = self.ctx
@autospec(help.CustomHelpCommand, "get_all_help_choices", return_value={"help"}, pass_mocks=False)
async def test_help_fuzzy_matching(self):
diff --git a/tests/bot/exts/info/test_information.py b/tests/bot/exts/info/test_information.py
index 9a35de7a9..d896b7652 100644
--- a/tests/bot/exts/info/test_information.py
+++ b/tests/bot/exts/info/test_information.py
@@ -3,7 +3,7 @@ import unittest
import unittest.mock
from datetime import datetime
-import disnake
+import discord
from bot import constants
from bot.exts.info import information
@@ -43,7 +43,7 @@ class InformationCogTests(unittest.IsolatedAsyncioTestCase):
embed = kwargs.pop('embed')
self.assertEqual(embed.title, "Role information (Total 1 role)")
- self.assertEqual(embed.colour, disnake.Colour.og_blurple())
+ self.assertEqual(embed.colour, discord.Colour.og_blurple())
self.assertEqual(embed.description, f"\n`{self.moderator_role.id}` - {self.moderator_role.mention}\n")
async def test_role_info_command(self):
@@ -51,19 +51,19 @@ class InformationCogTests(unittest.IsolatedAsyncioTestCase):
dummy_role = helpers.MockRole(
name="Dummy",
id=112233445566778899,
- colour=disnake.Colour.og_blurple(),
+ colour=discord.Colour.og_blurple(),
position=10,
members=[self.ctx.author],
- permissions=disnake.Permissions(0)
+ permissions=discord.Permissions(0)
)
admin_role = helpers.MockRole(
name="Admins",
id=998877665544332211,
- colour=disnake.Colour.red(),
+ colour=discord.Colour.red(),
position=3,
members=[self.ctx.author],
- permissions=disnake.Permissions(0),
+ permissions=discord.Permissions(0),
)
self.ctx.guild.roles.extend([dummy_role, admin_role])
@@ -81,7 +81,7 @@ class InformationCogTests(unittest.IsolatedAsyncioTestCase):
admin_embed = admin_kwargs["embed"]
self.assertEqual(dummy_embed.title, "Dummy info")
- self.assertEqual(dummy_embed.colour, disnake.Colour.og_blurple())
+ self.assertEqual(dummy_embed.colour, discord.Colour.og_blurple())
self.assertEqual(dummy_embed.fields[0].value, str(dummy_role.id))
self.assertEqual(dummy_embed.fields[1].value, f"#{dummy_role.colour.value:0>6x}")
@@ -91,7 +91,7 @@ class InformationCogTests(unittest.IsolatedAsyncioTestCase):
self.assertEqual(dummy_embed.fields[5].value, "0")
self.assertEqual(admin_embed.title, "Admins info")
- self.assertEqual(admin_embed.colour, disnake.Colour.red())
+ self.assertEqual(admin_embed.colour, discord.Colour.red())
class UserInfractionHelperMethodTests(unittest.IsolatedAsyncioTestCase):
@@ -449,7 +449,7 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
user.created_at = user.joined_at = datetime.utcnow()
embed = await self.cog.create_user_embed(ctx, user, False)
- self.assertEqual(embed.colour, disnake.Colour(100))
+ self.assertEqual(embed.colour, discord.Colour(100))
@unittest.mock.patch(
f"{COG_PATH}.basic_user_infraction_counts",
@@ -463,11 +463,11 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
"""The embed should be created with the og blurple colour if the user has no assigned roles."""
ctx = helpers.MockContext()
- user = helpers.MockMember(id=217, colour=disnake.Colour.default())
+ user = helpers.MockMember(id=217, colour=discord.Colour.default())
user.created_at = user.joined_at = datetime.utcnow()
embed = await self.cog.create_user_embed(ctx, user, False)
- self.assertEqual(embed.colour, disnake.Colour.og_blurple())
+ self.assertEqual(embed.colour, discord.Colour.og_blurple())
@unittest.mock.patch(
f"{COG_PATH}.basic_user_infraction_counts",
diff --git a/tests/bot/exts/moderation/infraction/test_infractions.py b/tests/bot/exts/moderation/infraction/test_infractions.py
index b85d086c9..052048053 100644
--- a/tests/bot/exts/moderation/infraction/test_infractions.py
+++ b/tests/bot/exts/moderation/infraction/test_infractions.py
@@ -3,7 +3,7 @@ import textwrap
import unittest
from unittest.mock import ANY, AsyncMock, DEFAULT, MagicMock, Mock, patch
-from disnake.errors import NotFound
+from discord.errors import NotFound
from bot.constants import Event
from bot.exts.moderation.clean import Clean
diff --git a/tests/bot/exts/moderation/infraction/test_utils.py b/tests/bot/exts/moderation/infraction/test_utils.py
index eaa0e701e..5cf02033d 100644
--- a/tests/bot/exts/moderation/infraction/test_utils.py
+++ b/tests/bot/exts/moderation/infraction/test_utils.py
@@ -3,9 +3,9 @@ from collections import namedtuple
from datetime import datetime
from unittest.mock import AsyncMock, MagicMock, call, patch
-from disnake import Embed, Forbidden, HTTPException, NotFound
+from botcore.site_api import ResponseCodeError
+from discord import Embed, Forbidden, HTTPException, NotFound
-from bot.api import ResponseCodeError
from bot.constants import Colours, Icons
from bot.exts.moderation.infraction import _utils as utils
from tests.helpers import MockBot, MockContext, MockMember, MockUser
diff --git a/tests/bot/exts/moderation/test_incidents.py b/tests/bot/exts/moderation/test_incidents.py
index 725455bbe..53d98360c 100644
--- a/tests/bot/exts/moderation/test_incidents.py
+++ b/tests/bot/exts/moderation/test_incidents.py
@@ -1,4 +1,5 @@
import asyncio
+import datetime
import enum
import logging
import typing as t
@@ -7,24 +8,27 @@ from unittest import mock
from unittest.mock import AsyncMock, MagicMock, Mock, call, patch
import aiohttp
-import disnake
-from async_rediscache import RedisSession
+import discord
from bot.constants import Colours
from bot.exts.moderation import incidents
from bot.utils.messages import format_user
+from bot.utils.time import TimestampFormats, discord_timestamp
+from tests.base import RedisTestCase
from tests.helpers import (
MockAsyncWebhook, MockAttachment, MockBot, MockMember, MockMessage, MockReaction, MockRole, MockTextChannel,
MockUser
)
+CURRENT_TIME = datetime.datetime(2022, 1, 1, tzinfo=datetime.timezone.utc)
+
class MockAsyncIterable:
"""
Helper for mocking asynchronous for loops.
It does not appear that the `unittest` library currently provides anything that would
- allow us to simply mock an async iterator, such as `disnake.TextChannel.history`.
+ allow us to simply mock an async iterator, such as `discord.TextChannel.history`.
We therefore write our own helper to wrap a regular synchronous iterable, and feed
its values via `__anext__` rather than `__next__`.
@@ -60,7 +64,7 @@ class MockSignal(enum.Enum):
B = "B"
-mock_404 = disnake.NotFound(
+mock_404 = discord.NotFound(
response=MagicMock(aiohttp.ClientResponse), # Mock the erroneous response
message="Not found",
)
@@ -70,8 +74,8 @@ class TestDownloadFile(unittest.IsolatedAsyncioTestCase):
"""Collection of tests for the `download_file` helper function."""
async def test_download_file_success(self):
- """If `to_file` succeeds, function returns the acquired `disnake.File`."""
- file = MagicMock(disnake.File, filename="bigbadlemon.jpg")
+ """If `to_file` succeeds, function returns the acquired `discord.File`."""
+ file = MagicMock(discord.File, filename="bigbadlemon.jpg")
attachment = MockAttachment(to_file=AsyncMock(return_value=file))
acquired_file = await incidents.download_file(attachment)
@@ -86,7 +90,7 @@ class TestDownloadFile(unittest.IsolatedAsyncioTestCase):
async def test_download_file_fail(self):
"""If `to_file` fails on a non-404 error, function logs the exception & returns None."""
- arbitrary_error = disnake.HTTPException(MagicMock(aiohttp.ClientResponse), "Arbitrary API error")
+ arbitrary_error = discord.HTTPException(MagicMock(aiohttp.ClientResponse), "Arbitrary API error")
attachment = MockAttachment(to_file=AsyncMock(side_effect=arbitrary_error))
with self.assertLogs(logger=incidents.log, level=logging.ERROR):
@@ -100,30 +104,45 @@ class TestMakeEmbed(unittest.IsolatedAsyncioTestCase):
async def test_make_embed_actioned(self):
"""Embed is coloured green and footer contains 'Actioned' when `outcome=Signal.ACTIONED`."""
- embed, file = await incidents.make_embed(MockMessage(), incidents.Signal.ACTIONED, MockMember())
+ embed, file = await incidents.make_embed(
+ incident=MockMessage(created_at=CURRENT_TIME),
+ outcome=incidents.Signal.ACTIONED,
+ actioned_by=MockMember()
+ )
self.assertEqual(embed.colour.value, Colours.soft_green)
self.assertIn("Actioned", embed.footer.text)
async def test_make_embed_not_actioned(self):
"""Embed is coloured red and footer contains 'Rejected' when `outcome=Signal.NOT_ACTIONED`."""
- embed, file = await incidents.make_embed(MockMessage(), incidents.Signal.NOT_ACTIONED, MockMember())
+ embed, file = await incidents.make_embed(
+ incident=MockMessage(created_at=CURRENT_TIME),
+ outcome=incidents.Signal.NOT_ACTIONED,
+ actioned_by=MockMember()
+ )
self.assertEqual(embed.colour.value, Colours.soft_red)
self.assertIn("Rejected", embed.footer.text)
async def test_make_embed_content(self):
"""Incident content appears as embed description."""
- incident = MockMessage(content="this is an incident")
+ incident = MockMessage(content="this is an incident", created_at=CURRENT_TIME)
+
+ reported_timestamp = discord_timestamp(CURRENT_TIME)
+ relative_timestamp = discord_timestamp(CURRENT_TIME, TimestampFormats.RELATIVE)
+
embed, file = await incidents.make_embed(incident, incidents.Signal.ACTIONED, MockMember())
- self.assertEqual(incident.content, embed.description)
+ self.assertEqual(
+ f"{incident.content}\n\n*Reported {reported_timestamp} ({relative_timestamp}).*",
+ embed.description
+ )
async def test_make_embed_with_attachment_succeeds(self):
"""Incident's attachment is downloaded and displayed in the embed's image field."""
- file = MagicMock(disnake.File, filename="bigbadjoe.jpg")
+ file = MagicMock(discord.File, filename="bigbadjoe.jpg")
attachment = MockAttachment(filename="bigbadjoe.jpg")
- incident = MockMessage(content="this is an incident", attachments=[attachment])
+ incident = MockMessage(content="this is an incident", attachments=[attachment], created_at=CURRENT_TIME)
# Patch `download_file` to return our `file`
with patch("bot.exts.moderation.incidents.download_file", AsyncMock(return_value=file)):
@@ -135,7 +154,7 @@ class TestMakeEmbed(unittest.IsolatedAsyncioTestCase):
async def test_make_embed_with_attachment_fails(self):
"""Incident's attachment fails to download, proxy url is linked instead."""
attachment = MockAttachment(proxy_url="discord.com/bigbadjoe.jpg")
- incident = MockMessage(content="this is an incident", attachments=[attachment])
+ incident = MockMessage(content="this is an incident", attachments=[attachment], created_at=CURRENT_TIME)
# Patch `download_file` to return None as if the download failed
with patch("bot.exts.moderation.incidents.download_file", AsyncMock(return_value=None)):
@@ -270,7 +289,7 @@ class TestAddSignals(unittest.IsolatedAsyncioTestCase):
self.incident.add_reaction.assert_not_called()
-class TestIncidents(unittest.IsolatedAsyncioTestCase):
+class TestIncidents(RedisTestCase):
"""
Tests for bound methods of the `Incidents` cog.
@@ -279,22 +298,6 @@ class TestIncidents(unittest.IsolatedAsyncioTestCase):
the instance as they wish.
"""
- session = None
-
- async def flush(self):
- """Flush everything from the database to prevent carry-overs between tests."""
- with await self.session.pool as connection:
- await connection.flushall()
-
- async def asyncSetUp(self): # noqa: N802
- self.session = RedisSession(use_fakeredis=True)
- await self.session.connect()
- await self.flush()
-
- async def asyncTearDown(self): # noqa: N802
- if self.session:
- await self.session.close()
-
def setUp(self):
"""
Prepare a fresh `Incidents` instance for each test.
@@ -365,7 +368,6 @@ class TestCrawlIncidents(TestIncidents):
class TestArchive(TestIncidents):
"""Tests for the `Incidents.archive` coroutine."""
-
async def test_archive_webhook_not_found(self):
"""
Method recovers and returns False when the webhook is not found.
@@ -375,7 +377,11 @@ class TestArchive(TestIncidents):
"""
self.cog_instance.bot.fetch_webhook = AsyncMock(side_effect=mock_404)
self.assertFalse(
- await self.cog_instance.archive(incident=MockMessage(), outcome=MagicMock(), actioned_by=MockMember())
+ await self.cog_instance.archive(
+ incident=MockMessage(created_at=CURRENT_TIME),
+ outcome=MagicMock(),
+ actioned_by=MockMember()
+ )
)
async def test_archive_relays_incident(self):
@@ -391,10 +397,10 @@ class TestArchive(TestIncidents):
# Define our own `incident` to be archived
incident = MockMessage(
content="this is an incident",
- author=MockUser(name="author_name", display_avatar=Mock(url="author_avatar")),
+ author=MockUser(display_name="author_name", display_avatar=Mock(url="author_avatar")),
id=123,
)
- built_embed = MagicMock(disnake.Embed, id=123) # We patch `make_embed` to return this
+ built_embed = MagicMock(discord.Embed, id=123) # We patch `make_embed` to return this
with patch("bot.exts.moderation.incidents.make_embed", AsyncMock(return_value=(built_embed, None))):
archive_return = await self.cog_instance.archive(incident, MagicMock(value="A"), MockMember())
@@ -422,7 +428,7 @@ class TestArchive(TestIncidents):
webhook = MockAsyncWebhook()
self.cog_instance.bot.fetch_webhook = AsyncMock(return_value=webhook)
- message_from_clyde = MockMessage(author=MockUser(name="clyde the great"))
+ message_from_clyde = MockMessage(author=MockUser(display_name="clyde the great"), created_at=CURRENT_TIME)
await self.cog_instance.archive(message_from_clyde, MagicMock(incidents.Signal), MockMember())
self.assertNotIn("clyde", webhook.send.call_args.kwargs["username"])
@@ -521,12 +527,13 @@ class TestProcessEvent(TestIncidents):
async def test_process_event_confirmation_task_is_awaited(self):
"""Task given by `Incidents.make_confirmation_task` is awaited before method exits."""
mock_task = AsyncMock()
+ mock_member = MockMember(display_name="Bobby Johnson", roles=[MockRole(id=1)])
with patch("bot.exts.moderation.incidents.Incidents.make_confirmation_task", mock_task):
await self.cog_instance.process_event(
reaction=incidents.Signal.ACTIONED.value,
- incident=MockMessage(id=123),
- member=MockMember(roles=[MockRole(id=1)])
+ incident=MockMessage(author=mock_member, id=123, created_at=CURRENT_TIME),
+ member=mock_member
)
mock_task.assert_awaited()
@@ -545,7 +552,7 @@ class TestProcessEvent(TestIncidents):
with patch("bot.exts.moderation.incidents.Incidents.make_confirmation_task", mock_task):
await self.cog_instance.process_event(
reaction=incidents.Signal.ACTIONED.value,
- incident=MockMessage(id=123),
+ incident=MockMessage(id=123, created_at=CURRENT_TIME),
member=MockMember(roles=[MockRole(id=1)])
)
except asyncio.TimeoutError:
@@ -616,7 +623,7 @@ class TestResolveMessage(TestIncidents):
"""
self.cog_instance.bot._connection._get_message = MagicMock(return_value=None) # Cache returns None
- arbitrary_error = disnake.HTTPException(
+ arbitrary_error = discord.HTTPException(
response=MagicMock(aiohttp.ClientResponse),
message="Arbitrary error",
)
@@ -649,14 +656,14 @@ class TestOnRawReactionAdd(TestIncidents):
super().setUp() # Ensure `cog_instance` is assigned
self.payload = MagicMock(
- disnake.RawReactionActionEvent,
+ discord.RawReactionActionEvent,
channel_id=123, # Patched at class level
message_id=456,
member=MockMember(bot=False),
emoji="reaction",
)
- async def asyncSetUp(self): # noqa: N802
+ async def asyncSetUp(self):
"""
Prepare an empty task and assign it as `crawl_task`.
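The fakeredis setup and teardown removed above are replaced by `tests.base.RedisTestCase`, whose definition is not part of this diff. Inferred from the deleted code, a plausible shape of that base class is sketched below; the real helper may differ in naming or detail:

    import unittest

    from async_rediscache import RedisSession

    class RedisTestCase(unittest.IsolatedAsyncioTestCase):
        """Assumed base class: one fakeredis session per test, flushed on setup."""

        async def asyncSetUp(self) -> None:
            self.redis_session = RedisSession(use_fakeredis=True)
            await self.redis_session.connect()
            # Flush everything to prevent carry-overs between tests, mirroring
            # the flush() helper that TestIncidents previously defined.
            with await self.redis_session.pool as connection:
                await connection.flushall()

        async def asyncTearDown(self) -> None:
            if self.redis_session:
                await self.redis_session.close()

Subclasses such as TestIncidents and the silence tests below call `await super().asyncSetUp()` before doing their own cog setup.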
diff --git a/tests/bot/exts/moderation/test_modlog.py b/tests/bot/exts/moderation/test_modlog.py
index 6c9ebed95..79e04837d 100644
--- a/tests/bot/exts/moderation/test_modlog.py
+++ b/tests/bot/exts/moderation/test_modlog.py
@@ -1,6 +1,6 @@
import unittest
-import disnake
+import discord
from bot.exts.moderation.modlog import ModLog
from tests.helpers import MockBot, MockTextChannel
@@ -19,7 +19,7 @@ class ModLogTests(unittest.IsolatedAsyncioTestCase):
self.bot.get_channel.return_value = self.channel
await self.cog.send_log_message(
icon_url="foo",
- colour=disnake.Colour.blue(),
+ colour=discord.Colour.blue(),
title="bar",
text="foo bar" * 3000
)
diff --git a/tests/bot/exts/moderation/test_silence.py b/tests/bot/exts/moderation/test_silence.py
index 539651d6c..2622f46a7 100644
--- a/tests/bot/exts/moderation/test_silence.py
+++ b/tests/bot/exts/moderation/test_silence.py
@@ -1,4 +1,3 @@
-import asyncio
import itertools
import unittest
from datetime import datetime, timezone
@@ -6,31 +5,15 @@ from typing import List, Tuple
from unittest import mock
from unittest.mock import AsyncMock, Mock
-from async_rediscache import RedisSession
-from disnake import PermissionOverwrite
+from discord import PermissionOverwrite
from bot.constants import Channels, Guild, MODERATION_ROLES, Roles
from bot.exts.moderation import silence
+from tests.base import RedisTestCase
from tests.helpers import (
MockBot, MockContext, MockGuild, MockMember, MockRole, MockTextChannel, MockVoiceChannel, autospec
)
-redis_session = None
-redis_loop = asyncio.get_event_loop()
-
-
-def setUpModule(): # noqa: N802
- """Create and connect to the fakeredis session."""
- global redis_session
- redis_session = RedisSession(use_fakeredis=True)
- redis_loop.run_until_complete(redis_session.connect())
-
-
-def tearDownModule(): # noqa: N802
- """Close the fakeredis session."""
- if redis_session:
- redis_loop.run_until_complete(redis_session.close())
-
# Have to subclass it because builtins can't be patched.
class PatchedDatetime(datetime):
@@ -39,8 +22,24 @@ class PatchedDatetime(datetime):
now = mock.create_autospec(datetime, "now")
-class SilenceNotifierTests(unittest.IsolatedAsyncioTestCase):
+class SilenceTest(RedisTestCase):
+ """A base class for Silence tests that correctly sets up the cog and redis."""
+
+ @autospec(silence, "Scheduler", pass_mocks=False)
+ @autospec(silence.Silence, "_reschedule", pass_mocks=False)
def setUp(self) -> None:
+ self.bot = MockBot(get_channel=lambda _id: MockTextChannel(id=_id))
+ self.cog = silence.Silence(self.bot)
+
+ @autospec(silence, "SilenceNotifier", pass_mocks=False)
+ async def asyncSetUp(self) -> None:
+ await super().asyncSetUp()
+ await self.cog.cog_load() # Populate instance attributes.
+
+
+class SilenceNotifierTests(SilenceTest):
+ def setUp(self) -> None:
+ super().setUp()
self.alert_channel = MockTextChannel()
self.notifier = silence.SilenceNotifier(self.alert_channel)
self.notifier.stop = self.notifier_stop_mock = Mock()
@@ -105,54 +104,36 @@ class SilenceNotifierTests(unittest.IsolatedAsyncioTestCase):
@autospec(silence.Silence, "previous_overwrites", "unsilence_timestamps", pass_mocks=False)
-class SilenceCogTests(unittest.IsolatedAsyncioTestCase):
+class SilenceCogTests(SilenceTest):
"""Tests for the general functionality of the Silence cog."""
- @autospec(silence, "Scheduler", pass_mocks=False)
- def setUp(self) -> None:
- self.bot = MockBot()
- self.cog = silence.Silence(self.bot)
-
@autospec(silence, "SilenceNotifier", pass_mocks=False)
- async def test_async_init_got_guild(self):
+ async def test_cog_load_got_guild(self):
"""Bot got guild after it became available."""
- await self.cog._async_init()
self.bot.wait_until_guild_available.assert_awaited_once()
self.bot.get_guild.assert_called_once_with(Guild.id)
@autospec(silence, "SilenceNotifier", pass_mocks=False)
- async def test_async_init_got_channels(self):
+ async def test_cog_load_got_channels(self):
"""Got channels from bot."""
- self.bot.get_channel.side_effect = lambda id_: MockTextChannel(id=id_)
-
- await self.cog._async_init()
+ await self.cog.cog_load()
self.assertEqual(self.cog._mod_alerts_channel.id, Channels.mod_alerts)
@autospec(silence, "SilenceNotifier")
- async def test_async_init_got_notifier(self, notifier):
+ async def test_cog_load_got_notifier(self, notifier):
"""Notifier was started with channel."""
- self.bot.get_channel.side_effect = lambda id_: MockTextChannel(id=id_)
-
- await self.cog._async_init()
+ await self.cog.cog_load()
notifier.assert_called_once_with(MockTextChannel(id=Channels.mod_log))
self.assertEqual(self.cog.notifier, notifier.return_value)
@autospec(silence, "SilenceNotifier", pass_mocks=False)
- async def test_async_init_rescheduled(self):
+ async def test_cog_load_rescheduled(self):
"""`_reschedule` coroutine was awaited."""
self.cog._reschedule = mock.create_autospec(self.cog._reschedule)
- await self.cog._async_init()
+ await self.cog.cog_load()
self.cog._reschedule.assert_awaited_once_with()
- def test_cog_unload_cancelled_tasks(self):
- """The init task was cancelled."""
- self.cog._init_task = asyncio.Future()
- self.cog.cog_unload()
-
- # It's too annoying to test cancel_all since it's a done callback and wrapped in a lambda.
- self.assertTrue(self.cog._init_task.cancelled())
-
- @autospec("disnake.ext.commands", "has_any_role")
+ @autospec("discord.ext.commands", "has_any_role")
@mock.patch.object(silence.constants, "MODERATION_ROLES", new=(1, 2, 3))
async def test_cog_check(self, role_check):
"""Role check was called with `MODERATION_ROLES`"""
@@ -165,7 +146,7 @@ class SilenceCogTests(unittest.IsolatedAsyncioTestCase):
async def test_force_voice_sync(self):
"""Tests the _force_voice_sync helper function."""
- await self.cog._async_init()
+ await self.cog.cog_load()
# Create a regular member, and one member for each of the moderation roles
moderation_members = [MockMember(roles=[MockRole(id=role)]) for role in MODERATION_ROLES]
@@ -187,7 +168,7 @@ class SilenceCogTests(unittest.IsolatedAsyncioTestCase):
async def test_force_voice_sync_no_channel(self):
"""Test to ensure _force_voice_sync can create its own voice channel if one is not available."""
- await self.cog._async_init()
+ await self.cog.cog_load()
channel = MockVoiceChannel(guild=MockGuild(afk_channel=None))
new_channel = MockVoiceChannel(delete=AsyncMock())
@@ -206,7 +187,7 @@ class SilenceCogTests(unittest.IsolatedAsyncioTestCase):
async def test_voice_kick(self):
"""Test to ensure kick function can remove all members from a voice channel."""
- await self.cog._async_init()
+ await self.cog.cog_load()
# Create a regular member, and one member for each of the moderation roles
moderation_members = [MockMember(roles=[MockRole(id=role)]) for role in MODERATION_ROLES]
@@ -236,7 +217,7 @@ class SilenceCogTests(unittest.IsolatedAsyncioTestCase):
async def test_kick_move_to_error(self):
"""Test to ensure move_to gets called on all members during kick, even if some fail."""
- await self.cog._async_init()
+ await self.cog.cog_load()
_, members = self.create_erroneous_members()
await self.cog._kick_voice_members(MockVoiceChannel(members=members))
@@ -245,7 +226,7 @@ class SilenceCogTests(unittest.IsolatedAsyncioTestCase):
async def test_sync_move_to_error(self):
"""Test to ensure move_to gets called on all members during sync, even if some fail."""
- await self.cog._async_init()
+ await self.cog.cog_load()
failing_member, members = self.create_erroneous_members()
await self.cog._force_voice_sync(MockVoiceChannel(members=members))
@@ -253,15 +234,9 @@ class SilenceCogTests(unittest.IsolatedAsyncioTestCase):
self.assertEqual(member.move_to.call_count, 1 if member == failing_member else 2)
-class SilenceArgumentParserTests(unittest.IsolatedAsyncioTestCase):
+class SilenceArgumentParserTests(SilenceTest):
"""Tests for the silence argument parser utility function."""
- def setUp(self):
- self.bot = MockBot()
- self.cog = silence.Silence(self.bot)
- self.cog._init_task = asyncio.Future()
- self.cog._init_task.set_result(None)
-
@autospec(silence.Silence, "send_message", pass_mocks=False)
@autospec(silence.Silence, "_set_silence_overwrites", return_value=False, pass_mocks=False)
@autospec(silence.Silence, "parse_silence_args")
@@ -329,17 +304,19 @@ class SilenceArgumentParserTests(unittest.IsolatedAsyncioTestCase):
@autospec(silence.Silence, "previous_overwrites", "unsilence_timestamps", pass_mocks=False)
-class RescheduleTests(unittest.IsolatedAsyncioTestCase):
+class RescheduleTests(RedisTestCase):
"""Tests for the rescheduling of cached unsilences."""
- @autospec(silence, "Scheduler", "SilenceNotifier", pass_mocks=False)
- def setUp(self):
+ @autospec(silence, "Scheduler", pass_mocks=False)
+ def setUp(self) -> None:
self.bot = MockBot()
self.cog = silence.Silence(self.bot)
self.cog._unsilence_wrapper = mock.create_autospec(self.cog._unsilence_wrapper)
- with mock.patch.object(self.cog, "_reschedule", autospec=True):
- asyncio.run(self.cog._async_init()) # Populate instance attributes.
+ @autospec(silence, "SilenceNotifier", pass_mocks=False)
+ async def asyncSetUp(self) -> None:
+ await super().asyncSetUp()
+ await self.cog.cog_load() # Populate instance attributes.
async def test_skipped_missing_channel(self):
"""Did nothing because the channel couldn't be retrieved."""
@@ -414,22 +391,14 @@ def voice_sync_helper(function):
@autospec(silence.Silence, "previous_overwrites", "unsilence_timestamps", pass_mocks=False)
-class SilenceTests(unittest.IsolatedAsyncioTestCase):
+class SilenceTests(SilenceTest):
"""Tests for the silence command and its related helper methods."""
- @autospec(silence.Silence, "_reschedule", pass_mocks=False)
- @autospec(silence, "Scheduler", "SilenceNotifier", pass_mocks=False)
def setUp(self) -> None:
- self.bot = MockBot(get_channel=lambda _: MockTextChannel())
- self.cog = silence.Silence(self.bot)
- self.cog._init_task = asyncio.Future()
- self.cog._init_task.set_result(None)
+ super().setUp()
# Avoid unawaited coroutine warnings.
self.cog.scheduler.schedule_later.side_effect = lambda delay, task_id, coro: coro.close()
-
- asyncio.run(self.cog._async_init()) # Populate instance attributes.
-
self.text_channel = MockTextChannel()
self.text_overwrite = PermissionOverwrite(
send_messages=True,
@@ -687,24 +656,13 @@ class SilenceTests(unittest.IsolatedAsyncioTestCase):
@autospec(silence.Silence, "unsilence_timestamps", pass_mocks=False)
-class UnsilenceTests(unittest.IsolatedAsyncioTestCase):
+class UnsilenceTests(SilenceTest):
"""Tests for the unsilence command and its related helper methods."""
- @autospec(silence.Silence, "_reschedule", pass_mocks=False)
- @autospec(silence, "Scheduler", "SilenceNotifier", pass_mocks=False)
def setUp(self) -> None:
- self.bot = MockBot(get_channel=lambda _: MockTextChannel())
- self.cog = silence.Silence(self.bot)
- self.cog._init_task = asyncio.Future()
- self.cog._init_task.set_result(None)
-
- overwrites_cache = mock.create_autospec(self.cog.previous_overwrites, spec_set=True)
- self.cog.previous_overwrites = overwrites_cache
-
- asyncio.run(self.cog._async_init()) # Populate instance attributes.
+ super().setUp()
self.cog.scheduler.__contains__.return_value = True
- overwrites_cache.get.return_value = '{"send_messages": true, "add_reactions": false}'
self.text_channel = MockTextChannel()
self.text_overwrite = PermissionOverwrite(send_messages=False, add_reactions=False)
self.text_channel.overwrites_for.return_value = self.text_overwrite
@@ -713,6 +671,13 @@ class UnsilenceTests(unittest.IsolatedAsyncioTestCase):
self.voice_overwrite = PermissionOverwrite(connect=True, speak=True)
self.voice_channel.overwrites_for.return_value = self.voice_overwrite
+ async def asyncSetUp(self) -> None:
+ await super().asyncSetUp()
+ overwrites_cache = mock.create_autospec(self.cog.previous_overwrites, spec_set=True)
+ self.cog.previous_overwrites = overwrites_cache
+
+ overwrites_cache.get.return_value = '{"send_messages": true, "add_reactions": false}'
+
async def test_sent_correct_message(self):
"""Appropriate failure/success message was sent by the command."""
unsilenced_overwrite = PermissionOverwrite(send_messages=True, add_reactions=True)
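UnsilenceTests now stubs `previous_overwrites.get` in `asyncSetUp` with a JSON string, reflecting that the cog caches channel overwrites as serialized JSON. Purely as an illustration of that payload (not the cog's actual restore code), the cached value maps back onto a `discord.PermissionOverwrite` like this:

    import json

    from discord import PermissionOverwrite

    cached = '{"send_messages": true, "add_reactions": false}'

    # Start from a neutral overwrite and apply the cached values;
    # PermissionOverwrite.update() accepts keyword arguments named after permissions.
    overwrite = PermissionOverwrite()
    overwrite.update(**json.loads(cached))
    assert overwrite.send_messages is True and overwrite.add_reactions is False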
diff --git a/tests/bot/exts/test_cogs.py b/tests/bot/exts/test_cogs.py
index 5cb071d58..f8e120262 100644
--- a/tests/bot/exts/test_cogs.py
+++ b/tests/bot/exts/test_cogs.py
@@ -8,7 +8,7 @@ from collections import defaultdict
from types import ModuleType
from unittest import mock
-from disnake.ext import commands
+from discord.ext import commands
from bot import exts
@@ -34,7 +34,7 @@ class CommandNameTests(unittest.TestCase):
raise ImportError(name=name) # pragma: no cover
# The mock prevents asyncio.get_event_loop() from being called.
- with mock.patch("disnake.ext.tasks.loop"):
+ with mock.patch("discord.ext.tasks.loop"):
prefix = f"{exts.__name__}."
for module in pkgutil.walk_packages(exts.__path__, prefix, onerror=on_error):
if not module.ispkg:
diff --git a/tests/bot/exts/utils/test_snekbox.py b/tests/bot/exts/utils/test_snekbox.py
index bec7574fb..b1f32c210 100644
--- a/tests/bot/exts/utils/test_snekbox.py
+++ b/tests/bot/exts/utils/test_snekbox.py
@@ -2,13 +2,14 @@ import asyncio
import unittest
from unittest.mock import AsyncMock, MagicMock, Mock, call, create_autospec, patch
-from disnake import AllowedMentions
-from disnake.ext import commands
+from discord import AllowedMentions
+from discord.ext import commands
from bot import constants
+from bot.errors import LockedResourceError
from bot.exts.utils import snekbox
from bot.exts.utils.snekbox import Snekbox
-from tests.helpers import MockBot, MockContext, MockMessage, MockReaction, MockUser
+from tests.helpers import MockBot, MockContext, MockMember, MockMessage, MockReaction, MockUser
class SnekboxTests(unittest.IsolatedAsyncioTestCase):
@@ -17,7 +18,7 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
self.bot = MockBot()
self.cog = Snekbox(bot=self.bot)
- async def test_post_eval(self):
+ async def test_post_job(self):
"""Post the eval code to the URLs.snekbox_eval_api endpoint."""
resp = MagicMock()
resp.json = AsyncMock(return_value="return")
@@ -26,7 +27,7 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
context_manager.__aenter__.return_value = resp
self.bot.http_session.post.return_value = context_manager
- self.assertEqual(await self.cog.post_eval("import random"), "return")
+ self.assertEqual(await self.cog.post_job("import random", "3.10"), "return")
self.bot.http_session.post.assert_called_with(
constants.URLs.snekbox_eval_api,
json={"input": "import random"},
@@ -35,17 +36,18 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
resp.json.assert_awaited_once()
async def test_upload_output_reject_too_long(self):
- """Reject output longer than MAX_PASTE_LEN."""
- result = await self.cog.upload_output("-" * (snekbox.MAX_PASTE_LEN + 1))
+ """Reject output longer than MAX_PASTE_LENGTH."""
+ result = await self.cog.upload_output("-" * (snekbox.MAX_PASTE_LENGTH + 1))
self.assertEqual(result, "too long to upload")
@patch("bot.exts.utils.snekbox.send_to_paste_service")
async def test_upload_output(self, mock_paste_util):
"""Upload the eval output to the URLs.paste_service.format(key="documents") endpoint."""
await self.cog.upload_output("Test output.")
- mock_paste_util.assert_called_once_with("Test output.", extension="txt")
+ mock_paste_util.assert_called_once_with("Test output.", extension="txt", max_length=snekbox.MAX_PASTE_LENGTH)
- def test_prepare_input(self):
+ async def test_codeblock_converter(self):
+ ctx = MockContext()
cases = (
('print("Hello world!")', 'print("Hello world!")', 'non-formatted'),
('`print("Hello world!")`', 'print("Hello world!")', 'one line code block'),
@@ -61,33 +63,50 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
)
for case, expected, testname in cases:
with self.subTest(msg=f'Extract code from {testname}.'):
- self.assertEqual(self.cog.prepare_input(case), expected)
+ self.assertEqual(
+ '\n'.join(await snekbox.CodeblockConverter.convert(ctx, case)), expected
+ )
+
+ def test_prepare_timeit_input(self):
+ """Test the prepare_timeit_input codeblock detection."""
+ base_args = ('-m', 'timeit', '-s')
+ cases = (
+ (['print("Hello World")'], '', 'single block of code'),
+ (['x = 1', 'print(x)'], 'x = 1', 'two blocks of code'),
+ (['x = 1', 'print(x)', 'print("Some other code.")'], 'x = 1', 'three blocks of code')
+ )
+
+ for case, setup_code, testname in cases:
+ setup = snekbox.TIMEIT_SETUP_WRAPPER.format(setup=setup_code)
+ expected = ('\n'.join(case[1:] if setup_code else case), [*base_args, setup])
+ with self.subTest(msg=f'Test with {testname} and expected return {expected}'):
+ self.assertEqual(self.cog.prepare_timeit_input(case), expected)
def test_get_results_message(self):
"""Return error and message according to the eval result."""
cases = (
- ('ERROR', None, ('Your eval job has failed', 'ERROR')),
- ('', 128 + snekbox.SIGKILL, ('Your eval job timed out or ran out of memory', '')),
- ('', 255, ('Your eval job has failed', 'A fatal NsJail error occurred'))
+ ('ERROR', None, ('Your 3.11 eval job has failed', 'ERROR')),
+ ('', 128 + snekbox.SIGKILL, ('Your 3.11 eval job timed out or ran out of memory', '')),
+ ('', 255, ('Your 3.11 eval job has failed', 'A fatal NsJail error occurred'))
)
for stdout, returncode, expected in cases:
with self.subTest(stdout=stdout, returncode=returncode, expected=expected):
- actual = self.cog.get_results_message({'stdout': stdout, 'returncode': returncode})
+ actual = self.cog.get_results_message({'stdout': stdout, 'returncode': returncode}, 'eval', '3.11')
self.assertEqual(actual, expected)
@patch('bot.exts.utils.snekbox.Signals', side_effect=ValueError)
def test_get_results_message_invalid_signal(self, mock_signals: Mock):
self.assertEqual(
- self.cog.get_results_message({'stdout': '', 'returncode': 127}),
- ('Your eval job has completed with return code 127', '')
+ self.cog.get_results_message({'stdout': '', 'returncode': 127}, 'eval', '3.11'),
+ ('Your 3.11 eval job has completed with return code 127', '')
)
@patch('bot.exts.utils.snekbox.Signals')
def test_get_results_message_valid_signal(self, mock_signals: Mock):
mock_signals.return_value.name = 'SIGTEST'
self.assertEqual(
- self.cog.get_results_message({'stdout': '', 'returncode': 127}),
- ('Your eval job has completed with return code 127 (SIGTEST)', '')
+ self.cog.get_results_message({'stdout': '', 'returncode': 127}, 'eval', '3.11'),
+ ('Your 3.11 eval job has completed with return code 127 (SIGTEST)', '')
)
def test_get_status_emoji(self):
@@ -156,64 +175,64 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
"""Test the eval command procedure."""
ctx = MockContext()
response = MockMessage()
- self.cog.prepare_input = MagicMock(return_value='MyAwesomeFormattedCode')
- self.cog.send_eval = AsyncMock(return_value=response)
- self.cog.continue_eval = AsyncMock(return_value=None)
+ ctx.command = MagicMock()
+
+ self.cog.send_job = AsyncMock(return_value=response)
+ self.cog.continue_job = AsyncMock(return_value=(None, None))
- await self.cog.eval_command(self.cog, ctx=ctx, code='MyAwesomeCode')
- self.cog.prepare_input.assert_called_once_with('MyAwesomeCode')
- self.cog.send_eval.assert_called_once_with(ctx, 'MyAwesomeFormattedCode')
- self.cog.continue_eval.assert_called_once_with(ctx, response)
+ await self.cog.eval_command(self.cog, ctx=ctx, python_version='3.11', code=['MyAwesomeCode'])
+ self.cog.send_job.assert_called_once_with(ctx, '3.11', 'MyAwesomeCode', args=None, job_name='eval')
+ self.cog.continue_job.assert_called_once_with(ctx, response, 'eval')
async def test_eval_command_evaluate_twice(self):
"""Test the eval and re-eval command procedure."""
ctx = MockContext()
response = MockMessage()
- self.cog.prepare_input = MagicMock(return_value='MyAwesomeFormattedCode')
- self.cog.send_eval = AsyncMock(return_value=response)
- self.cog.continue_eval = AsyncMock()
- self.cog.continue_eval.side_effect = ('MyAwesomeCode-2', None)
-
- await self.cog.eval_command(self.cog, ctx=ctx, code='MyAwesomeCode')
- self.cog.prepare_input.has_calls(call('MyAwesomeCode'), call('MyAwesomeCode-2'))
- self.cog.send_eval.assert_called_with(ctx, 'MyAwesomeFormattedCode')
- self.cog.continue_eval.assert_called_with(ctx, response)
+ ctx.command = MagicMock()
+ self.cog.send_job = AsyncMock(return_value=response)
+ self.cog.continue_job = AsyncMock()
+ self.cog.continue_job.side_effect = (('MyAwesomeFormattedCode', None), (None, None))
+
+ await self.cog.eval_command(self.cog, ctx=ctx, python_version='3.11', code=['MyAwesomeCode'])
+ self.cog.send_job.assert_called_with(
+ ctx, '3.11', 'MyAwesomeFormattedCode', args=None, job_name='eval'
+ )
+ self.cog.continue_job.assert_called_with(ctx, response, 'eval')
async def test_eval_command_reject_two_eval_at_the_same_time(self):
"""Test if the eval command rejects an eval if the author already have a running eval."""
ctx = MockContext()
ctx.author.id = 42
- ctx.author.mention = '@LemonLemonishBeard#0042'
- ctx.send = AsyncMock()
- self.cog.jobs = (42,)
- await self.cog.eval_command(self.cog, ctx=ctx, code='MyAwesomeCode')
- ctx.send.assert_called_once_with(
- "@LemonLemonishBeard#0042 You've already got a job running - please wait for it to finish!"
- )
- async def test_eval_command_call_help(self):
- """Test if the eval command call the help command if no code is provided."""
- ctx = MockContext(command="sentinel")
- await self.cog.eval_command(self.cog, ctx=ctx, code='')
- ctx.send_help.assert_called_once_with(ctx.command)
+ async def delay_with_side_effect(*args, **kwargs) -> dict:
+ """Delay the post_job call to ensure the job runs long enough to conflict."""
+ await asyncio.sleep(1)
+ return {'stdout': '', 'returncode': 0}
+
+ self.cog.post_job = AsyncMock(side_effect=delay_with_side_effect)
+ with self.assertRaises(LockedResourceError):
+ await asyncio.gather(
+ self.cog.send_job(ctx, '3.11', 'MyAwesomeCode', job_name='eval'),
+ self.cog.send_job(ctx, '3.11', 'MyAwesomeCode', job_name='eval'),
+ )
- async def test_send_eval(self):
- """Test the send_eval function."""
+ async def test_send_job(self):
+ """Test the send_job function."""
ctx = MockContext()
ctx.message = MockMessage()
ctx.send = AsyncMock()
ctx.author = MockUser(mention='@LemonLemonishBeard#0042')
- self.cog.post_eval = AsyncMock(return_value={'stdout': '', 'returncode': 0})
+ self.cog.post_job = AsyncMock(return_value={'stdout': '', 'returncode': 0})
self.cog.get_results_message = MagicMock(return_value=('Return code 0', ''))
self.cog.get_status_emoji = MagicMock(return_value=':yay!:')
self.cog.format_output = AsyncMock(return_value=('[No output]', None))
mocked_filter_cog = MagicMock()
- mocked_filter_cog.filter_eval = AsyncMock(return_value=False)
+ mocked_filter_cog.filter_snekbox_output = AsyncMock(return_value=False)
self.bot.get_cog.return_value = mocked_filter_cog
- await self.cog.send_eval(ctx, 'MyAwesomeCode')
+ await self.cog.send_job(ctx, '3.11', 'MyAwesomeCode', job_name='eval')
ctx.send.assert_called_once()
self.assertEqual(
@@ -224,28 +243,28 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
expected_allowed_mentions = AllowedMentions(everyone=False, roles=False, users=[ctx.author])
self.assertEqual(allowed_mentions.to_dict(), expected_allowed_mentions.to_dict())
- self.cog.post_eval.assert_called_once_with('MyAwesomeCode')
+ self.cog.post_job.assert_called_once_with('MyAwesomeCode', '3.11', args=None)
self.cog.get_status_emoji.assert_called_once_with({'stdout': '', 'returncode': 0})
- self.cog.get_results_message.assert_called_once_with({'stdout': '', 'returncode': 0})
+ self.cog.get_results_message.assert_called_once_with({'stdout': '', 'returncode': 0}, 'eval', '3.11')
self.cog.format_output.assert_called_once_with('')
- async def test_send_eval_with_paste_link(self):
- """Test the send_eval function with a too long output that generate a paste link."""
+ async def test_send_job_with_paste_link(self):
+ """Test the send_job function with a too long output that generate a paste link."""
ctx = MockContext()
ctx.message = MockMessage()
ctx.send = AsyncMock()
ctx.author.mention = '@LemonLemonishBeard#0042'
- self.cog.post_eval = AsyncMock(return_value={'stdout': 'Way too long beard', 'returncode': 0})
+ self.cog.post_job = AsyncMock(return_value={'stdout': 'Way too long beard', 'returncode': 0})
self.cog.get_results_message = MagicMock(return_value=('Return code 0', ''))
self.cog.get_status_emoji = MagicMock(return_value=':yay!:')
self.cog.format_output = AsyncMock(return_value=('Way too long beard', 'lookatmybeard.com'))
mocked_filter_cog = MagicMock()
- mocked_filter_cog.filter_eval = AsyncMock(return_value=False)
+ mocked_filter_cog.filter_snekbox_output = AsyncMock(return_value=False)
self.bot.get_cog.return_value = mocked_filter_cog
- await self.cog.send_eval(ctx, 'MyAwesomeCode')
+ await self.cog.send_job(ctx, '3.11', 'MyAwesomeCode', job_name='eval')
ctx.send.assert_called_once()
self.assertEqual(
@@ -254,27 +273,29 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
'\n\n```\nWay too long beard\n```\nFull output: lookatmybeard.com'
)
- self.cog.post_eval.assert_called_once_with('MyAwesomeCode')
+ self.cog.post_job.assert_called_once_with('MyAwesomeCode', '3.11', args=None)
self.cog.get_status_emoji.assert_called_once_with({'stdout': 'Way too long beard', 'returncode': 0})
- self.cog.get_results_message.assert_called_once_with({'stdout': 'Way too long beard', 'returncode': 0})
+ self.cog.get_results_message.assert_called_once_with(
+ {'stdout': 'Way too long beard', 'returncode': 0}, 'eval', '3.11'
+ )
self.cog.format_output.assert_called_once_with('Way too long beard')
- async def test_send_eval_with_non_zero_eval(self):
- """Test the send_eval function with a code returning a non-zero code."""
+ async def test_send_job_with_non_zero_eval(self):
+ """Test the send_job function with a code returning a non-zero code."""
ctx = MockContext()
ctx.message = MockMessage()
ctx.send = AsyncMock()
ctx.author.mention = '@LemonLemonishBeard#0042'
- self.cog.post_eval = AsyncMock(return_value={'stdout': 'ERROR', 'returncode': 127})
+ self.cog.post_job = AsyncMock(return_value={'stdout': 'ERROR', 'returncode': 127})
self.cog.get_results_message = MagicMock(return_value=('Return code 127', 'Beard got stuck in the eval'))
self.cog.get_status_emoji = MagicMock(return_value=':nope!:')
self.cog.format_output = AsyncMock() # This function isn't called
mocked_filter_cog = MagicMock()
- mocked_filter_cog.filter_eval = AsyncMock(return_value=False)
+ mocked_filter_cog.filter_snekbox_output = AsyncMock(return_value=False)
self.bot.get_cog.return_value = mocked_filter_cog
- await self.cog.send_eval(ctx, 'MyAwesomeCode')
+ await self.cog.send_job(ctx, '3.11', 'MyAwesomeCode', job_name='eval')
ctx.send.assert_called_once()
self.assertEqual(
@@ -282,45 +303,53 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
'@LemonLemonishBeard#0042 :nope!: Return code 127.\n\n```\nBeard got stuck in the eval\n```'
)
- self.cog.post_eval.assert_called_once_with('MyAwesomeCode')
+ self.cog.post_job.assert_called_once_with('MyAwesomeCode', '3.11', args=None)
self.cog.get_status_emoji.assert_called_once_with({'stdout': 'ERROR', 'returncode': 127})
- self.cog.get_results_message.assert_called_once_with({'stdout': 'ERROR', 'returncode': 127})
+ self.cog.get_results_message.assert_called_once_with({'stdout': 'ERROR', 'returncode': 127}, 'eval', '3.11')
self.cog.format_output.assert_not_called()
@patch("bot.exts.utils.snekbox.partial")
- async def test_continue_eval_does_continue(self, partial_mock):
- """Test that the continue_eval function does continue if required conditions are met."""
- ctx = MockContext(message=MockMessage(add_reaction=AsyncMock(), clear_reactions=AsyncMock()))
- response = MockMessage(delete=AsyncMock())
+ async def test_continue_job_does_continue(self, partial_mock):
+ """Test that the continue_job function does continue if required conditions are met."""
+ ctx = MockContext(
+ message=MockMessage(
+ id=4,
+ add_reaction=AsyncMock(),
+ clear_reactions=AsyncMock()
+ ),
+ author=MockMember(id=14)
+ )
+ response = MockMessage(id=42, delete=AsyncMock())
new_msg = MockMessage()
+ self.cog.jobs = {4: 42}
self.bot.wait_for.side_effect = ((None, new_msg), None)
expected = "NewCode"
self.cog.get_code = create_autospec(self.cog.get_code, spec_set=True, return_value=expected)
- actual = await self.cog.continue_eval(ctx, response)
- self.cog.get_code.assert_awaited_once_with(new_msg)
- self.assertEqual(actual, expected)
+ actual = await self.cog.continue_job(ctx, response, self.cog.eval_command)
+ self.cog.get_code.assert_awaited_once_with(new_msg, ctx.command)
+ self.assertEqual(actual, (expected, None))
self.bot.wait_for.assert_has_awaits(
(
call(
'message_edit',
- check=partial_mock(snekbox.predicate_eval_message_edit, ctx),
- timeout=snekbox.REEVAL_TIMEOUT,
+ check=partial_mock(snekbox.predicate_message_edit, ctx),
+ timeout=snekbox.REDO_TIMEOUT,
),
- call('reaction_add', check=partial_mock(snekbox.predicate_eval_emoji_reaction, ctx), timeout=10)
+ call('reaction_add', check=partial_mock(snekbox.predicate_emoji_reaction, ctx), timeout=10)
)
)
- ctx.message.add_reaction.assert_called_once_with(snekbox.REEVAL_EMOJI)
- ctx.message.clear_reaction.assert_called_once_with(snekbox.REEVAL_EMOJI)
+ ctx.message.add_reaction.assert_called_once_with(snekbox.REDO_EMOJI)
+ ctx.message.clear_reaction.assert_called_once_with(snekbox.REDO_EMOJI)
response.delete.assert_called_once()
- async def test_continue_eval_does_not_continue(self):
+ async def test_continue_job_does_not_continue(self):
ctx = MockContext(message=MockMessage(clear_reactions=AsyncMock()))
self.bot.wait_for.side_effect = asyncio.TimeoutError
- actual = await self.cog.continue_eval(ctx, MockMessage())
- self.assertEqual(actual, None)
- ctx.message.clear_reaction.assert_called_once_with(snekbox.REEVAL_EMOJI)
+ actual = await self.cog.continue_job(ctx, MockMessage(), self.cog.eval_command)
+ self.assertEqual(actual, (None, None))
+ ctx.message.clear_reaction.assert_called_once_with(snekbox.REDO_EMOJI)
async def test_get_code(self):
"""Should return 1st arg (or None) if eval cmd in message, otherwise return full content."""
@@ -343,13 +372,13 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
self.bot.get_context.return_value = MockContext(command=command)
message = MockMessage(content=content)
- actual_code = await self.cog.get_code(message)
+ actual_code = await self.cog.get_code(message, self.cog.eval_command)
self.bot.get_context.assert_awaited_once_with(message)
self.assertEqual(actual_code, expected_code)
- def test_predicate_eval_message_edit(self):
- """Test the predicate_eval_message_edit function."""
+ def test_predicate_message_edit(self):
+ """Test the predicate_message_edit function."""
msg0 = MockMessage(id=1, content='abc')
msg1 = MockMessage(id=2, content='abcdef')
msg2 = MockMessage(id=1, content='abcdef')
@@ -362,18 +391,18 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
for ctx_msg, new_msg, expected, testname in cases:
with self.subTest(msg=f'Messages with {testname} return {expected}'):
ctx = MockContext(message=ctx_msg)
- actual = snekbox.predicate_eval_message_edit(ctx, ctx_msg, new_msg)
+ actual = snekbox.predicate_message_edit(ctx, ctx_msg, new_msg)
self.assertEqual(actual, expected)
- def test_predicate_eval_emoji_reaction(self):
- """Test the predicate_eval_emoji_reaction function."""
+ def test_predicate_emoji_reaction(self):
+ """Test the predicate_emoji_reaction function."""
valid_reaction = MockReaction(message=MockMessage(id=1))
- valid_reaction.__str__.return_value = snekbox.REEVAL_EMOJI
+ valid_reaction.__str__.return_value = snekbox.REDO_EMOJI
valid_ctx = MockContext(message=MockMessage(id=1), author=MockUser(id=2))
valid_user = MockUser(id=2)
invalid_reaction_id = MockReaction(message=MockMessage(id=42))
- invalid_reaction_id.__str__.return_value = snekbox.REEVAL_EMOJI
+ invalid_reaction_id.__str__.return_value = snekbox.REDO_EMOJI
invalid_user_id = MockUser(id=42)
invalid_reaction_str = MockReaction(message=MockMessage(id=1))
invalid_reaction_str.__str__.return_value = ':longbeard:'
@@ -386,15 +415,15 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
)
for reaction, user, expected, testname in cases:
with self.subTest(msg=f'Test with {testname} and expected return {expected}'):
- actual = snekbox.predicate_eval_emoji_reaction(valid_ctx, reaction, user)
+ actual = snekbox.predicate_emoji_reaction(valid_ctx, reaction, user)
self.assertEqual(actual, expected)
-class SnekboxSetupTests(unittest.TestCase):
+class SnekboxSetupTests(unittest.IsolatedAsyncioTestCase):
"""Tests setup of the `Snekbox` cog."""
- def test_setup(self):
+ async def test_setup(self):
"""Setup of the extension should call add_cog."""
bot = MockBot()
- snekbox.setup(bot)
- bot.add_cog.assert_called_once()
+ await snekbox.setup(bot)
+ bot.add_cog.assert_awaited_once()
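The rewritten concurrency test above relies on send_job refusing to start a second job for the same author while one is still running. The sketch below is a minimal, self-contained illustration of that per-author lock pattern, under the assumption (taken from the test, not from the cog's source) that the guard raises a LockedResourceError rather than queueing the job; the JobRunner class and its attribute names are hypothetical.

import asyncio


class LockedResourceError(Exception):
    """Raised when the author already has a job in progress."""


class JobRunner:
    def __init__(self) -> None:
        # One lock per author id; a held lock means a job is still running.
        self._locks: dict[int, asyncio.Lock] = {}

    async def send_job(self, author_id: int, code: str) -> str:
        lock = self._locks.setdefault(author_id, asyncio.Lock())
        if lock.locked():
            # Reject outright instead of waiting, mirroring what the test expects.
            raise LockedResourceError(f"User {author_id} already has a running job.")
        async with lock:
            await asyncio.sleep(1)  # stand-in for the HTTP round trip to snekbox
            return f"ran: {code}"


async def main() -> None:
    runner = JobRunner()
    results = await asyncio.gather(
        runner.send_job(42, "print('a')"),
        runner.send_job(42, "print('b')"),
        return_exceptions=True,
    )
    print(results)  # one result plus one LockedResourceError


asyncio.run(main())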
diff --git a/tests/bot/rules/test_mentions.py b/tests/bot/rules/test_mentions.py
index f8805ac48..e1f904917 100644
--- a/tests/bot/rules/test_mentions.py
+++ b/tests/bot/rules/test_mentions.py
@@ -1,15 +1,32 @@
-from typing import Iterable
+from typing import Iterable, Optional
+
+import discord
from bot.rules import mentions
from tests.bot.rules import DisallowedCase, RuleTest
-from tests.helpers import MockMember, MockMessage
+from tests.helpers import MockMember, MockMessage, MockMessageReference
-def make_msg(author: str, total_user_mentions: int, total_bot_mentions: int = 0) -> MockMessage:
- """Makes a message with `total_mentions` mentions."""
+def make_msg(
+ author: str,
+ total_user_mentions: int,
+ total_bot_mentions: int = 0,
+ *,
+ reference: Optional[MockMessageReference] = None
+) -> MockMessage:
+ """Makes a message from `author` with `total_user_mentions` user mentions and `total_bot_mentions` bot mentions."""
user_mentions = [MockMember() for _ in range(total_user_mentions)]
bot_mentions = [MockMember(bot=True) for _ in range(total_bot_mentions)]
- return MockMessage(author=author, mentions=user_mentions+bot_mentions)
+
+ mentions = user_mentions + bot_mentions
+ if reference is not None:
+ # For the sake of these tests we assume that all references are mentions.
+ mentions.append(reference.resolved.author)
+ msg_type = discord.MessageType.reply
+ else:
+ msg_type = discord.MessageType.default
+
+ return MockMessage(author=author, mentions=mentions, reference=reference, type=msg_type)
class TestMentions(RuleTest):
@@ -56,6 +73,16 @@ class TestMentions(RuleTest):
("bob",),
3,
),
+ DisallowedCase(
+ [make_msg("bob", 3, reference=MockMessageReference())],
+ ("bob",),
+ 3,
+ ),
+ DisallowedCase(
+ [make_msg("bob", 3, reference=MockMessageReference(reference_author_is_bot=True))],
+ ("bob",),
+ 3
+ )
)
await self.run_disallowed(cases)
@@ -71,6 +98,27 @@ class TestMentions(RuleTest):
await self.run_allowed(cases)
+ async def test_ignore_reply_mentions(self):
+ """Messages with an allowed amount of mentions in the content, also containing reply mentions."""
+ cases = (
+ [
+ make_msg("bob", 2, reference=MockMessageReference())
+ ],
+ [
+ make_msg("bob", 2, reference=MockMessageReference(reference_author_is_bot=True))
+ ],
+ [
+ make_msg("bob", 2, reference=MockMessageReference()),
+ make_msg("bob", 0, reference=MockMessageReference())
+ ],
+ [
+ make_msg("bob", 2, reference=MockMessageReference(reference_author_is_bot=True)),
+ make_msg("bob", 0, reference=MockMessageReference(reference_author_is_bot=True))
+ ]
+ )
+
+ await self.run_allowed(cases)
+
def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]:
last_message = case.recent_messages[0]
return tuple(
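The new reply cases above assume the mentions rule no longer counts the implicit ping created by replying to someone. The function below is only a simplified illustration of that counting logic, not the actual bot.rules.mentions implementation; the message attributes it reads (mentions, type, reference.resolved.author) match the mocks built by make_msg.

import discord


def count_relevant_mentions(msg: discord.Message) -> int:
    """Count non-bot user mentions, ignoring the mention implied by a reply."""
    relevant = [user for user in msg.mentions if not user.bot]
    if msg.type == discord.MessageType.reply and msg.reference is not None:
        replied_author = msg.reference.resolved.author
        # A reply pings its target by default, so one occurrence is discounted.
        if replied_author in relevant:
            relevant.remove(replied_author)
    return len(relevant)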
diff --git a/tests/bot/test_api.py b/tests/bot/test_api.py
deleted file mode 100644
index 76bcb481d..000000000
--- a/tests/bot/test_api.py
+++ /dev/null
@@ -1,66 +0,0 @@
-import unittest
-from unittest.mock import MagicMock
-
-from bot import api
-
-
-class APIClientTests(unittest.IsolatedAsyncioTestCase):
- """Tests for the bot's API client."""
-
- @classmethod
- def setUpClass(cls):
- """Sets up the shared fixtures for the tests."""
- cls.error_api_response = MagicMock()
- cls.error_api_response.status = 999
-
- def test_response_code_error_default_initialization(self):
- """Test the default initialization of `ResponseCodeError` without `text` or `json`"""
- error = api.ResponseCodeError(response=self.error_api_response)
-
- self.assertIs(error.status, self.error_api_response.status)
- self.assertEqual(error.response_json, {})
- self.assertEqual(error.response_text, "")
- self.assertIs(error.response, self.error_api_response)
-
- def test_response_code_error_string_representation_default_initialization(self):
- """Test the string representation of `ResponseCodeError` initialized without text or json."""
- error = api.ResponseCodeError(response=self.error_api_response)
- self.assertEqual(str(error), f"Status: {self.error_api_response.status} Response: ")
-
- def test_response_code_error_initialization_with_json(self):
- """Test the initialization of `ResponseCodeError` with json."""
- json_data = {'hello': 'world'}
- error = api.ResponseCodeError(
- response=self.error_api_response,
- response_json=json_data,
- )
- self.assertEqual(error.response_json, json_data)
- self.assertEqual(error.response_text, "")
-
- def test_response_code_error_string_representation_with_nonempty_response_json(self):
- """Test the string representation of `ResponseCodeError` initialized with json."""
- json_data = {'hello': 'world'}
- error = api.ResponseCodeError(
- response=self.error_api_response,
- response_json=json_data
- )
- self.assertEqual(str(error), f"Status: {self.error_api_response.status} Response: {json_data}")
-
- def test_response_code_error_initialization_with_text(self):
- """Test the initialization of `ResponseCodeError` with text."""
- text_data = 'Lemon will eat your soul'
- error = api.ResponseCodeError(
- response=self.error_api_response,
- response_text=text_data,
- )
- self.assertEqual(error.response_text, text_data)
- self.assertEqual(error.response_json, {})
-
- def test_response_code_error_string_representation_with_nonempty_response_text(self):
- """Test the string representation of `ResponseCodeError` initialized with text."""
- text_data = 'Lemon will eat your soul'
- error = api.ResponseCodeError(
- response=self.error_api_response,
- response_text=text_data
- )
- self.assertEqual(str(error), f"Status: {self.error_api_response.status} Response: {text_data}")
diff --git a/tests/bot/test_converters.py b/tests/bot/test_converters.py
index afb8a973d..1bb678db2 100644
--- a/tests/bot/test_converters.py
+++ b/tests/bot/test_converters.py
@@ -4,7 +4,7 @@ from datetime import MAXYEAR, datetime, timezone
from unittest.mock import MagicMock, patch
from dateutil.relativedelta import relativedelta
-from disnake.ext.commands import BadArgument
+from discord.ext.commands import BadArgument
from bot.converters import Duration, HushDurationConverter, ISODateTime, PackageName
diff --git a/tests/bot/utils/test_checks.py b/tests/bot/utils/test_checks.py
index 5675e10ec..4ae11d5d3 100644
--- a/tests/bot/utils/test_checks.py
+++ b/tests/bot/utils/test_checks.py
@@ -1,7 +1,7 @@
import unittest
from unittest.mock import MagicMock
-from disnake import DMChannel
+from discord import DMChannel
from bot.utils import checks
from bot.utils.checks import InWhitelistCheckFailure
diff --git a/tests/bot/utils/test_services.py b/tests/bot/utils/test_services.py
index 3b71022db..d0e801299 100644
--- a/tests/bot/utils/test_services.py
+++ b/tests/bot/utils/test_services.py
@@ -4,7 +4,9 @@ from unittest.mock import AsyncMock, MagicMock, Mock, patch
from aiohttp import ClientConnectorError
-from bot.utils.services import FAILED_REQUEST_ATTEMPTS, send_to_paste_service
+from bot.utils.services import (
+ FAILED_REQUEST_ATTEMPTS, MAX_PASTE_LENGTH, PasteTooLongError, PasteUploadError, send_to_paste_service
+)
from tests.helpers import MockBot
@@ -55,23 +57,34 @@ class PasteTests(unittest.IsolatedAsyncioTestCase):
for error_json in test_cases:
with self.subTest(error_json=error_json):
response.json = AsyncMock(return_value=error_json)
- result = await send_to_paste_service("")
+ with self.assertRaises(PasteUploadError):
+ await send_to_paste_service("")
self.assertEqual(self.bot.http_session.post.call_count, FAILED_REQUEST_ATTEMPTS)
- self.assertIsNone(result)
self.bot.http_session.post.reset_mock()
async def test_request_repeated_on_connection_errors(self):
"""Requests are repeated in the case of connection errors."""
self.bot.http_session.post = MagicMock(side_effect=ClientConnectorError(Mock(), Mock()))
- result = await send_to_paste_service("")
+ with self.assertRaises(PasteUploadError):
+ await send_to_paste_service("")
self.assertEqual(self.bot.http_session.post.call_count, FAILED_REQUEST_ATTEMPTS)
- self.assertIsNone(result)
async def test_general_error_handled_and_request_repeated(self):
"""All `Exception`s are handled, logged and request repeated."""
self.bot.http_session.post = MagicMock(side_effect=Exception)
- result = await send_to_paste_service("")
+ with self.assertRaises(PasteUploadError):
+ await send_to_paste_service("")
self.assertEqual(self.bot.http_session.post.call_count, FAILED_REQUEST_ATTEMPTS)
self.assertLogs("bot.utils", logging.ERROR)
- self.assertIsNone(result)
+
+ async def test_raises_error_on_too_long_input(self):
+ """Ensure PasteTooLongError is raised if `contents` is longer than `MAX_PASTE_LENGTH`."""
+ contents = "a" * (MAX_PASTE_LENGTH + 1)
+ with self.assertRaises(PasteTooLongError):
+ await send_to_paste_service(contents)
+
+ async def test_raises_on_too_large_max_length(self):
+ """Ensure ValueError is raised if `max_length` passed is greater than `MAX_PASTE_LENGTH`."""
+ with self.assertRaises(ValueError):
+ await send_to_paste_service("Hello World!", max_length=MAX_PASTE_LENGTH + 1)
diff --git a/tests/helpers.py b/tests/helpers.py
index bd1418ab9..687e15b96 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -7,12 +7,12 @@ import unittest.mock
from asyncio import AbstractEventLoop
from typing import Iterable, Optional
-import disnake
+import discord
from aiohttp import ClientSession
-from disnake.ext.commands import Context
+from botcore.async_stats import AsyncStatsClient
+from botcore.site_api import APIClient
+from discord.ext.commands import Context
-from bot.api import APIClient
-from bot.async_stats import AsyncStatsClient
from bot.bot import Bot
from tests._autospec import autospec # noqa: F401 other modules import it via this module
@@ -26,11 +26,11 @@ for logger in logging.Logger.manager.loggerDict.values():
logger.setLevel(logging.CRITICAL)
-class HashableMixin(disnake.mixins.EqualityComparable):
+class HashableMixin(discord.mixins.EqualityComparable):
"""
- Mixin that provides similar hashing and equality functionality as disnake's `Hashable` mixin.
+ Mixin that provides hashing and equality functionality similar to discord.py's `Hashable` mixin.
- Note: disnake`s `Hashable` mixin bit-shifts `self.id` (`>> 22`); to prevent hash-collisions
+ Note: discord.py's `Hashable` mixin bit-shifts `self.id` (`>> 22`); to prevent hash-collisions
for the relative small `id` integers we generally use in tests, this bit-shift is omitted.
"""
@@ -39,22 +39,22 @@ class HashableMixin(disnake.mixins.EqualityComparable):
class ColourMixin:
- """A mixin for Mocks that provides the aliasing of (accent_)color->(accent_)colour like disnake does."""
+ """A mixin for Mocks that provides the aliasing of (accent_)color->(accent_)colour like discord.py does."""
@property
- def color(self) -> disnake.Colour:
+ def color(self) -> discord.Colour:
return self.colour
@color.setter
- def color(self, color: disnake.Colour) -> None:
+ def color(self, color: discord.Colour) -> None:
self.colour = color
@property
- def accent_color(self) -> disnake.Colour:
+ def accent_color(self) -> discord.Colour:
return self.accent_colour
@accent_color.setter
- def accent_color(self, color: disnake.Colour) -> None:
+ def accent_color(self, color: discord.Colour) -> None:
self.accent_colour = color
@@ -63,7 +63,7 @@ class CustomMockMixin:
Provides common functionality for our custom Mock types.
The `_get_child_mock` method automatically returns an AsyncMock for coroutine methods of the mock
- object. As disnake also uses synchronous methods that nonetheless return coroutine objects, the
+ object. As discord.py also uses synchronous methods that nonetheless return coroutine objects, the
class attribute `additional_spec_asyncs` can be overwritten with an iterable containing additional
attribute names that should also be mocked with an AsyncMock instead of a regular MagicMock/Mock. The
class method `spec_set` can be overwritten with the object that should be used as the specification
@@ -119,7 +119,7 @@ class CustomMockMixin:
return klass(**kw)
-# Create a guild instance to get a realistic Mock of `disnake.Guild`
+# Create a guild instance to get a realistic Mock of `discord.Guild`
guild_data = {
'id': 1,
'name': 'guild',
@@ -139,20 +139,20 @@ guild_data = {
'owner_id': 1,
'afk_channel_id': 464033278631084042,
}
-guild_instance = disnake.Guild(data=guild_data, state=unittest.mock.MagicMock())
+guild_instance = discord.Guild(data=guild_data, state=unittest.mock.MagicMock())
class MockGuild(CustomMockMixin, unittest.mock.Mock, HashableMixin):
"""
- A `Mock` subclass to mock `disnake.Guild` objects.
+ A `Mock` subclass to mock `discord.Guild` objects.
- A MockGuild instance will follow the specifications of a `disnake.Guild` instance. This means
+ A MockGuild instance will follow the specifications of a `discord.Guild` instance. This means
that if the code you're testing tries to access an attribute or method that normally does not
- exist for a `disnake.Guild` object this will raise an `AttributeError`. This is to make sure our
- tests fail if the code we're testing uses a `disnake.Guild` object in the wrong way.
+ exist for a `discord.Guild` object this will raise an `AttributeError`. This is to make sure our
+ tests fail if the code we're testing uses a `discord.Guild` object in the wrong way.
One restriction of that is that if the code tries to access an attribute that normally does not
- exist for `disnake.Guild` instance but was added dynamically, this will raise an exception with
+ exist for a `discord.Guild` instance but was added dynamically, this will raise an exception with
the mocked object. To get around that, you can set the non-standard attribute explicitly for the
instance of `MockGuild`:
@@ -160,10 +160,10 @@ class MockGuild(CustomMockMixin, unittest.mock.Mock, HashableMixin):
>>> guild.attribute_that_normally_does_not_exist = unittest.mock.MagicMock()
In addition to attribute simulation, mocked guild object will pass an `isinstance` check against
- `disnake.Guild`:
+ `discord.Guild`:
>>> guild = MockGuild()
- >>> isinstance(guild, disnake.Guild)
+ >>> isinstance(guild, discord.Guild)
True
For more info, see the `Mocking` section in `tests/README.md`.
@@ -171,7 +171,7 @@ class MockGuild(CustomMockMixin, unittest.mock.Mock, HashableMixin):
spec_set = guild_instance
def __init__(self, roles: Optional[Iterable[MockRole]] = None, **kwargs) -> None:
- default_kwargs = {'id': next(self.discord_id), 'members': []}
+ default_kwargs = {'id': next(self.discord_id), 'members': [], "chunked": True}
super().__init__(**collections.ChainMap(kwargs, default_kwargs))
self.roles = [MockRole(name="@everyone", position=1, id=0)]
@@ -179,16 +179,16 @@ class MockGuild(CustomMockMixin, unittest.mock.Mock, HashableMixin):
self.roles.extend(roles)
-# Create a Role instance to get a realistic Mock of `disnake.Role`
+# Create a Role instance to get a realistic Mock of `discord.Role`
role_data = {'name': 'role', 'id': 1}
-role_instance = disnake.Role(guild=guild_instance, state=unittest.mock.MagicMock(), data=role_data)
+role_instance = discord.Role(guild=guild_instance, state=unittest.mock.MagicMock(), data=role_data)
class MockRole(CustomMockMixin, unittest.mock.Mock, ColourMixin, HashableMixin):
"""
- A Mock subclass to mock `disnake.Role` objects.
+ A Mock subclass to mock `discord.Role` objects.
- Instances of this class will follow the specifications of `disnake.Role` instances. For more
+ Instances of this class will follow the specifications of `discord.Role` instances. For more
information, see the `MockGuild` docstring.
"""
spec_set = role_instance
@@ -198,40 +198,40 @@ class MockRole(CustomMockMixin, unittest.mock.Mock, ColourMixin, HashableMixin):
'id': next(self.discord_id),
'name': 'role',
'position': 1,
- 'colour': disnake.Colour(0xdeadbf),
- 'permissions': disnake.Permissions(),
+ 'colour': discord.Colour(0xdeadbf),
+ 'permissions': discord.Permissions(),
}
super().__init__(**collections.ChainMap(kwargs, default_kwargs))
if isinstance(self.colour, int):
- self.colour = disnake.Colour(self.colour)
+ self.colour = discord.Colour(self.colour)
if isinstance(self.permissions, int):
- self.permissions = disnake.Permissions(self.permissions)
+ self.permissions = discord.Permissions(self.permissions)
if 'mention' not in kwargs:
self.mention = f'&{self.name}'
def __lt__(self, other):
- """Simplified position-based comparisons similar to those of `disnake.Role`."""
+ """Simplified position-based comparisons similar to those of `discord.Role`."""
return self.position < other.position
def __ge__(self, other):
- """Simplified position-based comparisons similar to those of `disnake.Role`."""
+ """Simplified position-based comparisons similar to those of `discord.Role`."""
return self.position >= other.position
-# Create a Member instance to get a realistic Mock of `disnake.Member`
+# Create a Member instance to get a realistic Mock of `discord.Member`
member_data = {'user': 'lemon', 'roles': [1]}
state_mock = unittest.mock.MagicMock()
-member_instance = disnake.Member(data=member_data, guild=guild_instance, state=state_mock)
+member_instance = discord.Member(data=member_data, guild=guild_instance, state=state_mock)
class MockMember(CustomMockMixin, unittest.mock.Mock, ColourMixin, HashableMixin):
"""
A Mock subclass to mock Member objects.
- Instances of this class will follow the specifications of `disnake.Member` instances. For more
+ Instances of this class will follow the specifications of `discord.Member` instances. For more
information, see the `MockGuild` docstring.
"""
spec_set = member_instance
@@ -249,11 +249,11 @@ class MockMember(CustomMockMixin, unittest.mock.Mock, ColourMixin, HashableMixin
self.mention = f"@{self.name}"
-# Create a User instance to get a realistic Mock of `disnake.User`
+# Create a User instance to get a realistic Mock of `discord.User`
_user_data_mock = collections.defaultdict(unittest.mock.MagicMock, {
"accent_color": 0
})
-user_instance = disnake.User(
+user_instance = discord.User(
data=unittest.mock.MagicMock(get=unittest.mock.Mock(side_effect=_user_data_mock.get)),
state=unittest.mock.MagicMock()
)
@@ -263,7 +263,7 @@ class MockUser(CustomMockMixin, unittest.mock.Mock, ColourMixin, HashableMixin):
"""
A Mock subclass to mock User objects.
- Instances of this class will follow the specifications of `disnake.User` instances. For more
+ Instances of this class will follow the specifications of `discord.User` instances. For more
information, see the `MockGuild` docstring.
"""
spec_set = user_instance
@@ -305,13 +305,17 @@ class MockBot(CustomMockMixin, unittest.mock.MagicMock):
"""
A MagicMock subclass to mock Bot objects.
- Instances of this class will follow the specifications of `disnake.ext.commands.Bot` instances.
+ Instances of this class will follow the specifications of `discord.ext.commands.Bot` instances.
For more information, see the `MockGuild` docstring.
"""
spec_set = Bot(
command_prefix=unittest.mock.MagicMock(),
loop=_get_mock_loop(),
redis_session=unittest.mock.MagicMock(),
+ http_session=unittest.mock.MagicMock(),
+ allowed_roles=[1],
+ guild_id=1,
+ intents=discord.Intents.all(),
)
additional_spec_asyncs = ("wait_for", "redis_ready")
@@ -322,9 +326,10 @@ class MockBot(CustomMockMixin, unittest.mock.MagicMock):
self.api_client = MockAPIClient(loop=self.loop)
self.http_session = unittest.mock.create_autospec(spec=ClientSession, spec_set=True)
self.stats = unittest.mock.create_autospec(spec=AsyncStatsClient, spec_set=True)
+ self.add_cog = unittest.mock.AsyncMock()
-# Create a TextChannel instance to get a realistic MagicMock of `disnake.TextChannel`
+# Create a TextChannel instance to get a realistic MagicMock of `discord.TextChannel`
channel_data = {
'id': 1,
'type': 'TextChannel',
@@ -334,20 +339,22 @@ channel_data = {
'position': 1,
'nsfw': False,
'last_message_id': 1,
+ 'bitrate': 1337,
+ 'user_limit': 25,
}
state = unittest.mock.MagicMock()
guild = unittest.mock.MagicMock()
-text_channel_instance = disnake.TextChannel(state=state, guild=guild, data=channel_data)
+text_channel_instance = discord.TextChannel(state=state, guild=guild, data=channel_data)
channel_data["type"] = "VoiceChannel"
-voice_channel_instance = disnake.VoiceChannel(state=state, guild=guild, data=channel_data)
+voice_channel_instance = discord.VoiceChannel(state=state, guild=guild, data=channel_data)
class MockTextChannel(CustomMockMixin, unittest.mock.Mock, HashableMixin):
"""
A MagicMock subclass to mock TextChannel objects.
- Instances of this class will follow the specifications of `disnake.TextChannel` instances. For
+ Instances of this class will follow the specifications of `discord.TextChannel` instances. For
more information, see the `MockGuild` docstring.
"""
spec_set = text_channel_instance
@@ -364,7 +371,7 @@ class MockVoiceChannel(CustomMockMixin, unittest.mock.Mock, HashableMixin):
"""
A MagicMock subclass to mock VoiceChannel objects.
- Instances of this class will follow the specifications of `disnake.VoiceChannel` instances. For
+ Instances of this class will follow the specifications of `discord.VoiceChannel` instances. For
more information, see the `MockGuild` docstring.
"""
spec_set = voice_channel_instance
@@ -381,14 +388,14 @@ class MockVoiceChannel(CustomMockMixin, unittest.mock.Mock, HashableMixin):
state = unittest.mock.MagicMock()
me = unittest.mock.MagicMock()
dm_channel_data = {"id": 1, "recipients": [unittest.mock.MagicMock()]}
-dm_channel_instance = disnake.DMChannel(me=me, state=state, data=dm_channel_data)
+dm_channel_instance = discord.DMChannel(me=me, state=state, data=dm_channel_data)
class MockDMChannel(CustomMockMixin, unittest.mock.Mock, HashableMixin):
"""
A MagicMock subclass to mock TextChannel objects.
- Instances of this class will follow the specifications of `disnake.TextChannel` instances. For
+ Instances of this class will follow the specifications of `discord.TextChannel` instances. For
more information, see the `MockGuild` docstring.
"""
spec_set = dm_channel_instance
@@ -398,17 +405,17 @@ class MockDMChannel(CustomMockMixin, unittest.mock.Mock, HashableMixin):
super().__init__(**collections.ChainMap(kwargs, default_kwargs))
-# Create CategoryChannel instance to get a realistic MagicMock of `disnake.CategoryChannel`
+# Create CategoryChannel instance to get a realistic MagicMock of `discord.CategoryChannel`
category_channel_data = {
'id': 1,
- 'type': disnake.ChannelType.category,
+ 'type': discord.ChannelType.category,
'name': 'category',
'position': 1,
}
state = unittest.mock.MagicMock()
guild = unittest.mock.MagicMock()
-category_channel_instance = disnake.CategoryChannel(
+category_channel_instance = discord.CategoryChannel(
state=state, guild=guild, data=category_channel_data
)
@@ -419,13 +426,13 @@ class MockCategoryChannel(CustomMockMixin, unittest.mock.Mock, HashableMixin):
super().__init__(**collections.ChainMap(default_kwargs, kwargs))
-# Create a Message instance to get a realistic MagicMock of `disnake.Message`
+# Create a Message instance to get a realistic MagicMock of `discord.Message`
message_data = {
'id': 1,
'webhook_id': 431341013479718912,
'attachments': [],
'embeds': [],
- 'application': 'Python Discord',
+ 'application': {"id": 4, "description": "A Python Bot", "name": "Python Discord", "icon": None},
'activity': 'mocking',
'channel': unittest.mock.MagicMock(),
'edited_timestamp': '2019-10-14T15:33:48+00:00',
@@ -438,10 +445,11 @@ message_data = {
}
state = unittest.mock.MagicMock()
channel = unittest.mock.MagicMock()
-message_instance = disnake.Message(state=state, channel=channel, data=message_data)
+channel.type = discord.ChannelType.text
+message_instance = discord.Message(state=state, channel=channel, data=message_data)
-# Create a Context instance to get a realistic MagicMock of `disnake.ext.commands.Context`
+# Create a Context instance to get a realistic MagicMock of `discord.ext.commands.Context`
context_instance = Context(
message=unittest.mock.MagicMock(),
prefix="$",
@@ -455,7 +463,7 @@ class MockContext(CustomMockMixin, unittest.mock.MagicMock):
"""
A MagicMock subclass to mock Context objects.
- Instances of this class will follow the specifications of `disnake.ext.commands.Context`
+ Instances of this class will follow the specifications of `discord.ext.commands.Context`
instances. For more information, see the `MockGuild` docstring.
"""
spec_set = context_instance
@@ -471,24 +479,46 @@ class MockContext(CustomMockMixin, unittest.mock.MagicMock):
self.invoked_from_error_handler = kwargs.get('invoked_from_error_handler', False)
-attachment_instance = disnake.Attachment(data=unittest.mock.MagicMock(id=1), state=unittest.mock.MagicMock())
+attachment_instance = discord.Attachment(data=unittest.mock.MagicMock(id=1), state=unittest.mock.MagicMock())
class MockAttachment(CustomMockMixin, unittest.mock.MagicMock):
"""
A MagicMock subclass to mock Attachment objects.
- Instances of this class will follow the specifications of `disnake.Attachment` instances. For
+ Instances of this class will follow the specifications of `discord.Attachment` instances. For
more information, see the `MockGuild` docstring.
"""
spec_set = attachment_instance
+message_reference_instance = discord.MessageReference(
+ message_id=unittest.mock.MagicMock(id=1),
+ channel_id=unittest.mock.MagicMock(id=2),
+ guild_id=unittest.mock.MagicMock(id=3)
+)
+
+
+class MockMessageReference(CustomMockMixin, unittest.mock.MagicMock):
+ """
+ A MagicMock subclass to mock MessageReference objects.
+
+ Instances of this class will follow the specification of `discord.MessageReference` instances.
+ For more information, see the `MockGuild` docstring.
+ """
+ spec_set = message_reference_instance
+
+ def __init__(self, *, reference_author_is_bot: bool = False, **kwargs):
+ super().__init__(**kwargs)
+ referenced_msg_author = MockMember(name="bob", bot=reference_author_is_bot)
+ self.resolved = MockMessage(author=referenced_msg_author)
+
+
class MockMessage(CustomMockMixin, unittest.mock.MagicMock):
"""
A MagicMock subclass to mock Message objects.
- Instances of this class will follow the specifications of `disnake.Message` instances. For more
+ Instances of this class will follow the specifications of `discord.Message` instances. For more
information, see the `MockGuild` docstring.
"""
spec_set = message_instance
@@ -501,14 +531,14 @@ class MockMessage(CustomMockMixin, unittest.mock.MagicMock):
emoji_data = {'require_colons': True, 'managed': True, 'id': 1, 'name': 'hyperlemon'}
-emoji_instance = disnake.Emoji(guild=MockGuild(), state=unittest.mock.MagicMock(), data=emoji_data)
+emoji_instance = discord.Emoji(guild=MockGuild(), state=unittest.mock.MagicMock(), data=emoji_data)
class MockEmoji(CustomMockMixin, unittest.mock.MagicMock):
"""
A MagicMock subclass to mock Emoji objects.
- Instances of this class will follow the specifications of `disnake.Emoji` instances. For more
+ Instances of this class will follow the specifications of `discord.Emoji` instances. For more
information, see the `MockGuild` docstring.
"""
spec_set = emoji_instance
@@ -518,27 +548,27 @@ class MockEmoji(CustomMockMixin, unittest.mock.MagicMock):
self.guild = kwargs.get('guild', MockGuild())
-partial_emoji_instance = disnake.PartialEmoji(animated=False, name='guido')
+partial_emoji_instance = discord.PartialEmoji(animated=False, name='guido')
class MockPartialEmoji(CustomMockMixin, unittest.mock.MagicMock):
"""
A MagicMock subclass to mock PartialEmoji objects.
- Instances of this class will follow the specifications of `disnake.PartialEmoji` instances. For
+ Instances of this class will follow the specifications of `discord.PartialEmoji` instances. For
more information, see the `MockGuild` docstring.
"""
spec_set = partial_emoji_instance
-reaction_instance = disnake.Reaction(message=MockMessage(), data={'me': True}, emoji=MockEmoji())
+reaction_instance = discord.Reaction(message=MockMessage(), data={'me': True}, emoji=MockEmoji())
class MockReaction(CustomMockMixin, unittest.mock.MagicMock):
"""
A MagicMock subclass to mock Reaction objects.
- Instances of this class will follow the specifications of `disnake.Reaction` instances. For
+ Instances of this class will follow the specifications of `discord.Reaction` instances. For
more information, see the `MockGuild` docstring.
"""
spec_set = reaction_instance
@@ -556,14 +586,14 @@ class MockReaction(CustomMockMixin, unittest.mock.MagicMock):
self.__str__.return_value = str(self.emoji)
-webhook_instance = disnake.Webhook(data=unittest.mock.MagicMock(), session=unittest.mock.MagicMock())
+webhook_instance = discord.Webhook(data=unittest.mock.MagicMock(), session=unittest.mock.MagicMock())
class MockAsyncWebhook(CustomMockMixin, unittest.mock.MagicMock):
"""
A MagicMock subclass to mock Webhook objects using an AsyncWebhookAdapter.
- Instances of this class will follow the specifications of `disnake.Webhook` instances. For
+ Instances of this class will follow the specifications of `discord.Webhook` instances. For
more information, see the `MockGuild` docstring.
"""
spec_set = webhook_instance
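The new MockMessageReference helper added above can be combined with MockMessage whenever a test needs a reply, as the mentions tests do. A short usage sketch (the assertions only restate the defaults set in its __init__ above):

from tests.helpers import MockMessage, MockMessageReference

reference = MockMessageReference(reference_author_is_bot=True)
reply = MockMessage(reference=reference)

# The referenced message is exposed via `resolved`, authored here by a bot member.
assert reference.resolved.author.bot is True
assert reply.reference is reference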
diff --git a/tests/test_helpers.py b/tests/test_helpers.py
index c5e799a85..f3040b305 100644
--- a/tests/test_helpers.py
+++ b/tests/test_helpers.py
@@ -2,20 +2,20 @@ import asyncio
import unittest
import unittest.mock
-import disnake
+import discord
from tests import helpers
class DiscordMocksTests(unittest.TestCase):
- """Tests for our specialized disnake mocks."""
+ """Tests for our specialized discord.py mocks."""
def test_mock_role_default_initialization(self):
"""Test if the default initialization of MockRole results in the correct object."""
role = helpers.MockRole()
- # The `spec` argument makes sure `isistance` checks with `disnake.Role` pass
- self.assertIsInstance(role, disnake.Role)
+ # The `spec` argument makes sure `isinstance` checks with `discord.Role` pass
+ self.assertIsInstance(role, discord.Role)
self.assertEqual(role.name, "role")
self.assertEqual(role.position, 1)
@@ -61,8 +61,8 @@ class DiscordMocksTests(unittest.TestCase):
"""Test if the default initialization of Mockmember results in the correct object."""
member = helpers.MockMember()
- # The `spec` argument makes sure `isistance` checks with `disnake.Member` pass
- self.assertIsInstance(member, disnake.Member)
+ # The `spec` argument makes sure `isinstance` checks with `discord.Member` pass
+ self.assertIsInstance(member, discord.Member)
self.assertEqual(member.name, "member")
self.assertListEqual(member.roles, [helpers.MockRole(name="@everyone", position=1, id=0)])
@@ -86,18 +86,18 @@ class DiscordMocksTests(unittest.TestCase):
"""Test if MockMember accepts and sets abitrary keyword arguments."""
member = helpers.MockMember(
nick="Dino Man",
- colour=disnake.Colour.default(),
+ colour=discord.Colour.default(),
)
self.assertEqual(member.nick, "Dino Man")
- self.assertEqual(member.colour, disnake.Colour.default())
+ self.assertEqual(member.colour, discord.Colour.default())
def test_mock_guild_default_initialization(self):
"""Test if the default initialization of Mockguild results in the correct object."""
guild = helpers.MockGuild()
- # The `spec` argument makes sure `isistance` checks with `disnake.Guild` pass
- self.assertIsInstance(guild, disnake.Guild)
+ # The `spec` argument makes sure `isinstance` checks with `discord.Guild` pass
+ self.assertIsInstance(guild, discord.Guild)
self.assertListEqual(guild.roles, [helpers.MockRole(name="@everyone", position=1, id=0)])
self.assertListEqual(guild.members, [])
@@ -127,15 +127,15 @@ class DiscordMocksTests(unittest.TestCase):
"""Tests if MockBot initializes with the correct values."""
bot = helpers.MockBot()
- # The `spec` argument makes sure `isistance` checks with `disnake.ext.commands.Bot` pass
- self.assertIsInstance(bot, disnake.ext.commands.Bot)
+ # The `spec` argument makes sure `isinstance` checks with `discord.ext.commands.Bot` pass
+ self.assertIsInstance(bot, discord.ext.commands.Bot)
def test_mock_context_default_initialization(self):
"""Tests if MockContext initializes with the correct values."""
context = helpers.MockContext()
- # The `spec` argument makes sure `isistance` checks with `disnake.ext.commands.Context` pass
- self.assertIsInstance(context, disnake.ext.commands.Context)
+ # The `spec` argument makes sure `isinstance` checks with `discord.ext.commands.Context` pass
+ self.assertIsInstance(context, discord.ext.commands.Context)
self.assertIsInstance(context.bot, helpers.MockBot)
self.assertIsInstance(context.guild, helpers.MockGuild)
@@ -327,7 +327,7 @@ class MockObjectTests(unittest.TestCase):
def test_spec_propagation_of_mock_subclasses(self):
"""Test if the `spec` does not propagate to attributes of the mock object."""
test_values = (
- (helpers.MockGuild, "region"),
+ (helpers.MockGuild, "features"),
(helpers.MockRole, "mentionable"),
(helpers.MockMember, "display_name"),
(helpers.MockBot, "owner_id"),
diff --git a/tox.ini b/tox.ini
index 9472c32f9..987b7c790 100644
--- a/tox.ini
+++ b/tox.ini
@@ -4,7 +4,7 @@ docstring-convention=all
import-order-style=pycharm
application_import_names=bot,tests
exclude=.cache,.venv,.git,constants.py
-ignore=
+extend-ignore=
B311,W503,E226,S311,T000,E731
# Missing Docstrings
D100,D104,D105,D107,
@@ -15,5 +15,5 @@ ignore=
# Docstring Content
D400,D401,D402,D404,D405,D406,D407,D408,D409,D410,D411,D412,D413,D414,D416,D417
# Type Annotations
- ANN002,ANN003,ANN101,ANN102,ANN204,ANN206
+ ANN002,ANN003,ANN101,ANN102,ANN204,ANN206,ANN401
per-file-ignores=tests/*:D,ANN
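The switch from ignore= to extend-ignore= in the flake8 configuration matters because ignore= replaces flake8's built-in default ignore list, whereas extend-ignore= keeps those defaults and appends the listed codes. With this layout only the project-specific codes need to be spelled out, for example:

# `ignore=` would override flake8's default ignore list (codes such as E121, E123, W503 and W504).
# `extend-ignore=` keeps those defaults and adds the project-specific codes on top:
extend-ignore=
    B311,W503,E226,S311,T000,E731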