From edffd9b59dcf275848076291ea22aae5e71326dc Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Thu, 13 Feb 2020 09:03:44 -0800 Subject: API: accept additional session kwargs for recreate() These kwargs are merged with the kwargs given when the APIClient was created. This is useful for facilitating changing the session's connector with a new instance when the session needs to be recreated. * Rename _session_args attribute to _default_session_kwargs --- bot/api.py | 23 ++++++++++++++++------- 1 file changed, 16 insertions(+), 7 deletions(-) diff --git a/bot/api.py b/bot/api.py index 56db99828..c168a869d 100644 --- a/bot/api.py +++ b/bot/api.py @@ -47,7 +47,7 @@ class APIClient: self._ready = asyncio.Event(loop=loop) self._creation_task = None - self._session_args = kwargs + self._default_session_kwargs = kwargs self.recreate() @@ -55,9 +55,13 @@ class APIClient: def _url_for(endpoint: str) -> str: return f"{URLs.site_schema}{URLs.site_api}/{quote_url(endpoint)}" - async def _create_session(self) -> None: - """Create the aiohttp session and set the ready event.""" - self.session = aiohttp.ClientSession(**self._session_args) + async def _create_session(self, **session_kwargs) -> None: + """ + Create the aiohttp session with `session_kwargs` and set the ready event. + + `session_kwargs` is merged with `_default_session_kwargs` and overwrites its values. + """ + self.session = aiohttp.ClientSession(**{**self._default_session_kwargs, **session_kwargs}) self._ready.set() async def close(self) -> None: @@ -68,12 +72,17 @@ class APIClient: await self.session.close() self._ready.clear() - def recreate(self) -> None: - """Schedule the aiohttp session to be created if it's been closed.""" + def recreate(self, **session_kwargs) -> None: + """ + Schedule the aiohttp session to be created with `session_kwargs` if it's been closed. + + `session_kwargs` is merged with the kwargs given when the `APIClient` was created and + overwrites those default kwargs. + """ if self.session is None or self.session.closed: # Don't schedule a task if one is already in progress. if self._creation_task is None or self._creation_task.done(): - self._creation_task = self.loop.create_task(self._create_session()) + self._creation_task = self.loop.create_task(self._create_session(**session_kwargs)) async def maybe_raise_for_status(self, response: aiohttp.ClientResponse, should_raise: bool) -> None: """Raise ResponseCodeError for non-OK response if an exception should be raised.""" -- cgit v1.2.3 From b19e6aaabdbcfefc5d22d1f72e670421fe8a8d97 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Thu, 13 Feb 2020 10:18:09 -0800 Subject: Bot: avoid DeprecationWarning for aiohttp.AsyncResolver (fix #748) AsyncResolver has to be created inside a coroutine so it's moved inside start(). Consequently, the APIClient session is also recreated inside start() now. When using clear(), the default connector is used for the session it recreates because clear() is not a coroutine. This should only affect requests made to the Discord API via the Client when not using it to run a bot; starting the bot will re-create the session with the custom connector. 
* Close connector and resolver when bot closes --- bot/bot.py | 45 ++++++++++++++++++++++++++++++--------------- 1 file changed, 30 insertions(+), 15 deletions(-) diff --git a/bot/bot.py b/bot/bot.py index 8f808272f..95fbae17f 100644 --- a/bot/bot.py +++ b/bot/bot.py @@ -14,18 +14,13 @@ class Bot(commands.Bot): """A subclass of `discord.ext.commands.Bot` with an aiohttp session and an API client.""" def __init__(self, *args, **kwargs): - # Use asyncio for DNS resolution instead of threads so threads aren't spammed. - # Use AF_INET as its socket family to prevent HTTPS related problems both locally - # and in production. - self.connector = aiohttp.TCPConnector( - resolver=aiohttp.AsyncResolver(), - family=socket.AF_INET, - ) - - super().__init__(*args, connector=self.connector, **kwargs) + super().__init__(*args, **kwargs) self.http_session: Optional[aiohttp.ClientSession] = None - self.api_client = api.APIClient(loop=self.loop, connector=self.connector) + self.api_client = api.APIClient(loop=self.loop) + + self._connector = None + self._resolver = None log.addHandler(api.APILoggingHandler(self.api_client)) @@ -35,19 +30,39 @@ class Bot(commands.Bot): log.info(f"Cog loaded: {cog.qualified_name}") def clear(self) -> None: - """Clears the internal state of the bot and resets the API client.""" + """Clears the internal state of the bot and sets the HTTPClient connector to None.""" + self.http.connector = None # Use the default connector. super().clear() - self.api_client.recreate() async def close(self) -> None: - """Close the aiohttp session after closing the Discord connection.""" + """Close the Discord connection and the aiohttp session, connector, and resolver.""" await super().close() await self.http_session.close() await self.api_client.close() + if self._connector: + await self._connector.close() + + if self._resolver: + await self._resolver.close() + async def start(self, *args, **kwargs) -> None: - """Open an aiohttp session before logging in and connecting to Discord.""" - self.http_session = aiohttp.ClientSession(connector=self.connector) + """Set up aiohttp sessions before logging in and connecting to Discord.""" + # Use asyncio for DNS resolution instead of threads so threads aren't spammed. + # Use AF_INET as its socket family to prevent HTTPS related problems both locally + # and in production. + self._resolver = aiohttp.AsyncResolver() + self._connector = aiohttp.TCPConnector( + resolver=self._resolver, + family=socket.AF_INET, + ) + + # Client.login() will call HTTPClient.static_login() which will create a session using + # this connector attribute. 
+ self.http.connector = self._connector + + self.http_session = aiohttp.ClientSession(connector=self._connector) + self.api_client.recreate(connector=self._connector) await super().start(*args, **kwargs) -- cgit v1.2.3 From 253073ad059fc3a8eac890b4f3fe006454aae4b0 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Thu, 13 Feb 2020 10:20:45 -0800 Subject: Bot: add warning for when connector is a specified kwarg --- bot/bot.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/bot/bot.py b/bot/bot.py index 95fbae17f..762d316bf 100644 --- a/bot/bot.py +++ b/bot/bot.py @@ -1,5 +1,6 @@ import logging import socket +import warnings from typing import Optional import aiohttp @@ -14,6 +15,11 @@ class Bot(commands.Bot): """A subclass of `discord.ext.commands.Bot` with an aiohttp session and an API client.""" def __init__(self, *args, **kwargs): + if "connector" in kwargs: + warnings.warn( + "If the bot is started, the connector will be overwritten with an internal one" + ) + super().__init__(*args, **kwargs) self.http_session: Optional[aiohttp.ClientSession] = None -- cgit v1.2.3 From 9b6c9e8692313bd5ed70ce00c31e5b24e25635b1 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Fri, 14 Feb 2020 12:23:59 -0800 Subject: Bot: fix error trying to close a None session --- bot/bot.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bot/bot.py b/bot/bot.py index 762d316bf..67a15faba 100644 --- a/bot/bot.py +++ b/bot/bot.py @@ -44,9 +44,11 @@ class Bot(commands.Bot): """Close the Discord connection and the aiohttp session, connector, and resolver.""" await super().close() - await self.http_session.close() await self.api_client.close() + if self.http_session: + await self.http_session.close() + if self._connector: await self._connector.close() -- cgit v1.2.3 From a21f4e63680e55149c33ee0bdde938281a8eb020 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sat, 15 Feb 2020 11:05:42 -0800 Subject: Bot: override login() instead of start() The client can be used without running a bot so it makes more sense for the connector to be created when logging in, which is done in both cases, rather than in start(), which is only used when running a bot. --- bot/bot.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/bot/bot.py b/bot/bot.py index 67a15faba..1d187f031 100644 --- a/bot/bot.py +++ b/bot/bot.py @@ -17,7 +17,8 @@ class Bot(commands.Bot): def __init__(self, *args, **kwargs): if "connector" in kwargs: warnings.warn( - "If the bot is started, the connector will be overwritten with an internal one" + "If login() is called (or the bot is started), the connector will be overwritten " + "with an internal one" ) super().__init__(*args, **kwargs) @@ -55,8 +56,8 @@ class Bot(commands.Bot): if self._resolver: await self._resolver.close() - async def start(self, *args, **kwargs) -> None: - """Set up aiohttp sessions before logging in and connecting to Discord.""" + async def login(self, *args, **kwargs) -> None: + """Re-create the connector and set up sessions before logging into Discord.""" # Use asyncio for DNS resolution instead of threads so threads aren't spammed. # Use AF_INET as its socket family to prevent HTTPS related problems both locally # and in production. 
@@ -73,4 +74,4 @@ class Bot(commands.Bot): self.http_session = aiohttp.ClientSession(connector=self._connector) self.api_client.recreate(connector=self._connector) - await super().start(*args, **kwargs) + await super().login(*args, **kwargs) -- cgit v1.2.3 From 5cc4e360aee28832ace207d8df2fb17b487fbfe7 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sat, 15 Feb 2020 11:21:09 -0800 Subject: Bot: move connector/session recreation to a separate function The function itself doesn't need to be a coroutine. It just has to be called in a coroutine (or, more indirectly, in an async context?). --- bot/bot.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/bot/bot.py b/bot/bot.py index 1d187f031..e1b1d81dc 100644 --- a/bot/bot.py +++ b/bot/bot.py @@ -58,6 +58,11 @@ class Bot(commands.Bot): async def login(self, *args, **kwargs) -> None: """Re-create the connector and set up sessions before logging into Discord.""" + self._recreate() + await super().login(*args, **kwargs) + + def _recreate(self) -> None: + """Re-create the connector, aiohttp session, and the APIClient.""" # Use asyncio for DNS resolution instead of threads so threads aren't spammed. # Use AF_INET as its socket family to prevent HTTPS related problems both locally # and in production. @@ -73,5 +78,3 @@ class Bot(commands.Bot): self.http_session = aiohttp.ClientSession(connector=self._connector) self.api_client.recreate(connector=self._connector) - - await super().login(*args, **kwargs) -- cgit v1.2.3 From 6b689a15be69120a775789892f155c736926ef07 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sat, 15 Feb 2020 11:22:29 -0800 Subject: Bot: call _recreate() in clear() Because discord.py recreates the HTTPClient session, may as well follow suite and recreate our own stuff here too. --- bot/bot.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/bot/bot.py b/bot/bot.py index e1b1d81dc..9f48c980c 100644 --- a/bot/bot.py +++ b/bot/bot.py @@ -37,8 +37,14 @@ class Bot(commands.Bot): log.info(f"Cog loaded: {cog.qualified_name}") def clear(self) -> None: - """Clears the internal state of the bot and sets the HTTPClient connector to None.""" - self.http.connector = None # Use the default connector. + """ + Clears the internal state of the bot and recreates the connector and sessions. + + Will cause a DeprecationWarning if called outside a coroutine. + """ + # Because discord.py recreates the HTTPClient session, may as well follow suite and recreate + # our own stuff here too. + self._recreate() super().clear() async def close(self) -> None: -- cgit v1.2.3 From a417c318e6a0e57fa53b9b68572a524e0aa0f729 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sat, 15 Feb 2020 11:38:27 -0800 Subject: Bot: warn when connector/session not closed when recreating aiohttp does warn too, but these warnings will provide more immediate feedback. --- bot/bot.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/bot/bot.py b/bot/bot.py index 9f48c980c..0287ec925 100644 --- a/bot/bot.py +++ b/bot/bot.py @@ -72,7 +72,18 @@ class Bot(commands.Bot): # Use asyncio for DNS resolution instead of threads so threads aren't spammed. # Use AF_INET as its socket family to prevent HTTPS related problems both locally # and in production. + + # Doesn't seem to have any state with regards to being closed, so no need to worry? self._resolver = aiohttp.AsyncResolver() + + # Does have a closed state. Its __del__ will warn about this, but let's do it immediately. 
+ if self._connector and not self._connector._closed: + warnings.warn( + "The previous connector was not closed; it will remain open and be overwritten", + ResourceWarning, + stacklevel=2 + ) + self._connector = aiohttp.TCPConnector( resolver=self._resolver, family=socket.AF_INET, @@ -82,5 +93,12 @@ class Bot(commands.Bot): # this connector attribute. self.http.connector = self._connector + if self.http_session and not self.http_session.closed: + warnings.warn( + "The previous ClientSession was not closed; it will remain open and be overwritten", + ResourceWarning, + stacklevel=2 + ) + self.http_session = aiohttp.ClientSession(connector=self._connector) self.api_client.recreate(connector=self._connector) -- cgit v1.2.3 From f4e569b56d52f4e29024be4fdbae796e4d85aea6 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sat, 15 Feb 2020 11:51:29 -0800 Subject: Bot: send not-closed warnings as log messages "Real" warnings weren't showing up for some reason. --- bot/bot.py | 20 ++++++++------------ 1 file changed, 8 insertions(+), 12 deletions(-) diff --git a/bot/bot.py b/bot/bot.py index 0287ec925..088b94a1f 100644 --- a/bot/bot.py +++ b/bot/bot.py @@ -70,20 +70,17 @@ class Bot(commands.Bot): def _recreate(self) -> None: """Re-create the connector, aiohttp session, and the APIClient.""" # Use asyncio for DNS resolution instead of threads so threads aren't spammed. - # Use AF_INET as its socket family to prevent HTTPS related problems both locally - # and in production. - # Doesn't seem to have any state with regards to being closed, so no need to worry? self._resolver = aiohttp.AsyncResolver() - # Does have a closed state. Its __del__ will warn about this, but let's do it immediately. + # Its __del__ does send a warning but it doesn't always show up for some reason. if self._connector and not self._connector._closed: - warnings.warn( - "The previous connector was not closed; it will remain open and be overwritten", - ResourceWarning, - stacklevel=2 + log.warning( + "The previous connector was not closed; it will remain open and be overwritten" ) + # Use AF_INET as its socket family to prevent HTTPS related problems both locally + # and in production. self._connector = aiohttp.TCPConnector( resolver=self._resolver, family=socket.AF_INET, @@ -93,11 +90,10 @@ class Bot(commands.Bot): # this connector attribute. self.http.connector = self._connector + # Its __del__ does send a warning but it doesn't always show up for some reason. if self.http_session and not self.http_session.closed: - warnings.warn( - "The previous ClientSession was not closed; it will remain open and be overwritten", - ResourceWarning, - stacklevel=2 + log.warning( + "The previous session was not closed; it will remain open and be overwritten" ) self.http_session = aiohttp.ClientSession(connector=self._connector) -- cgit v1.2.3 From fd313d2b1dac8b627d3d731e0898fbdff6642616 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sat, 15 Feb 2020 12:47:42 -0800 Subject: API: add argument to force recreation of the session --- bot/api.py | 12 +++++++++--- bot/bot.py | 2 +- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/bot/api.py b/bot/api.py index c168a869d..37c1497fc 100644 --- a/bot/api.py +++ b/bot/api.py @@ -72,16 +72,22 @@ class APIClient: await self.session.close() self._ready.clear() - def recreate(self, **session_kwargs) -> None: + def recreate(self, force: bool = False, **session_kwargs) -> None: """ Schedule the aiohttp session to be created with `session_kwargs` if it's been closed. 
+ If `force` is True, the session will be recreated even if an open one exists. If a task to + create the session is pending, it will be cancelled. + `session_kwargs` is merged with the kwargs given when the `APIClient` was created and overwrites those default kwargs. """ - if self.session is None or self.session.closed: + if force or self.session is None or self.session.closed: + if force and self._creation_task: + self._creation_task.cancel() + # Don't schedule a task if one is already in progress. - if self._creation_task is None or self._creation_task.done(): + if force or self._creation_task is None or self._creation_task.done(): self._creation_task = self.loop.create_task(self._create_session(**session_kwargs)) async def maybe_raise_for_status(self, response: aiohttp.ClientResponse, should_raise: bool) -> None: diff --git a/bot/bot.py b/bot/bot.py index 088b94a1f..3094a27c5 100644 --- a/bot/bot.py +++ b/bot/bot.py @@ -97,4 +97,4 @@ class Bot(commands.Bot): ) self.http_session = aiohttp.ClientSession(connector=self._connector) - self.api_client.recreate(connector=self._connector) + self.api_client.recreate(force=True, connector=self._connector) -- cgit v1.2.3 From 02f5e2fe019f332cb6b9e79b63fee54e41b66732 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sat, 15 Feb 2020 12:51:37 -0800 Subject: API: close existing open session before recreating it --- bot/api.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/bot/api.py b/bot/api.py index 37c1497fc..8cb429cd5 100644 --- a/bot/api.py +++ b/bot/api.py @@ -60,16 +60,17 @@ class APIClient: Create the aiohttp session with `session_kwargs` and set the ready event. `session_kwargs` is merged with `_default_session_kwargs` and overwrites its values. + If an open session already exists, it will first be closed. 
""" + await self.close() self.session = aiohttp.ClientSession(**{**self._default_session_kwargs, **session_kwargs}) self._ready.set() async def close(self) -> None: """Close the aiohttp session and unset the ready event.""" - if not self._ready.is_set(): - return + if self.session: + await self.session.close() - await self.session.close() self._ready.clear() def recreate(self, force: bool = False, **session_kwargs) -> None: -- cgit v1.2.3 From 95fd3511f504adbe4ac806b4c706ff106466e4fb Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sat, 15 Feb 2020 19:57:58 -0800 Subject: Scheduler: fix #754 - only suppress CancelledError --- bot/utils/scheduling.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/bot/utils/scheduling.py b/bot/utils/scheduling.py index ee6c0a8e6..8d4721d70 100644 --- a/bot/utils/scheduling.py +++ b/bot/utils/scheduling.py @@ -63,12 +63,13 @@ def create_task(loop: asyncio.AbstractEventLoop, coro_or_future: Union[Coroutine """Creates an asyncio.Task object from a coroutine or future object.""" task: asyncio.Task = asyncio.ensure_future(coro_or_future, loop=loop) - # Silently ignore exceptions in a callback (handles the CancelledError nonsense) - task.add_done_callback(_silent_exception) + # Silently ignore CancelledError in a callback + task.add_done_callback(_suppress_cancelled_error) return task -def _silent_exception(future: asyncio.Future) -> None: - """Suppress future's exception.""" - with contextlib.suppress(Exception): - future.exception() +def _suppress_cancelled_error(future: asyncio.Future) -> None: + """Suppress future's CancelledError exception.""" + if future.cancelled(): + with contextlib.suppress(asyncio.CancelledError): + future.exception() -- cgit v1.2.3 From f905f73451730fb5b83b441f8d32748acef374e0 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sat, 15 Feb 2020 20:07:26 -0800 Subject: Scheduler: remove create_task function It's redundant because the done callback only takes a single line to add and can be added in schedule_task(). 
* Use Task as the type hint rather than Future for _suppress_cancelled_error() --- bot/utils/scheduling.py | 22 +++++++--------------- 1 file changed, 7 insertions(+), 15 deletions(-) diff --git a/bot/utils/scheduling.py b/bot/utils/scheduling.py index 8d4721d70..7b055f5e7 100644 --- a/bot/utils/scheduling.py +++ b/bot/utils/scheduling.py @@ -2,7 +2,7 @@ import asyncio import contextlib import logging from abc import abstractmethod -from typing import Coroutine, Dict, Union +from typing import Dict from bot.utils import CogABCMeta @@ -41,7 +41,8 @@ class Scheduler(metaclass=CogABCMeta): ) return - task: asyncio.Task = create_task(loop, self._scheduled_task(task_data)) + task = loop.create_task(self._scheduled_task(task_data)) + task.add_done_callback(_suppress_cancelled_error) self.scheduled_tasks[task_id] = task log.debug(f"{self.cog_name}: scheduled task #{task_id}.") @@ -59,17 +60,8 @@ class Scheduler(metaclass=CogABCMeta): del self.scheduled_tasks[task_id] -def create_task(loop: asyncio.AbstractEventLoop, coro_or_future: Union[Coroutine, asyncio.Future]) -> asyncio.Task: - """Creates an asyncio.Task object from a coroutine or future object.""" - task: asyncio.Task = asyncio.ensure_future(coro_or_future, loop=loop) - - # Silently ignore CancelledError in a callback - task.add_done_callback(_suppress_cancelled_error) - return task - - -def _suppress_cancelled_error(future: asyncio.Future) -> None: - """Suppress future's CancelledError exception.""" - if future.cancelled(): +def _suppress_cancelled_error(task: asyncio.Task) -> None: + """Suppress a task's CancelledError exception.""" + if task.cancelled(): with contextlib.suppress(asyncio.CancelledError): - future.exception() + task.exception() -- cgit v1.2.3 From f5cd7e357de26c81b462b8935ec4bdaa032429fc Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sat, 15 Feb 2020 20:10:42 -0800 Subject: Scheduler: correct schedule_task's docstring --- bot/utils/scheduling.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/utils/scheduling.py b/bot/utils/scheduling.py index 7b055f5e7..adf10d683 100644 --- a/bot/utils/scheduling.py +++ b/bot/utils/scheduling.py @@ -33,7 +33,7 @@ class Scheduler(metaclass=CogABCMeta): """ Schedules a task. - `task_data` is passed to `Scheduler._scheduled_expiration` + `task_data` is passed to the `Scheduler._scheduled_task()` coroutine. """ if task_id in self.scheduled_tasks: log.debug( -- cgit v1.2.3 From 0d05be37564b1ec8babd688fb348c2c13eeb9fa2 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sat, 15 Feb 2020 20:16:30 -0800 Subject: Scheduler: remove loop parameter from schedule_task asyncio.create_task() exists and will already use the running loop in the current thread. Because there is no intention of using a different loop in a different thread anywhere in the program for the foreseeable future, the loop parameter is redundant. 
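
For reference, asyncio.create_task() can only be called while an event loop is already running, and it always schedules the coroutine on that running loop, which is what makes an explicit loop argument unnecessary here. A minimal, self-contained sketch of that behaviour (the names expire and main are illustrative only, not taken from the bot):

    import asyncio

    async def expire(task_data: dict) -> None:
        # Stand-in for a scheduled coroutine such as Scheduler._scheduled_task().
        await asyncio.sleep(task_data["delay"])

    async def main() -> None:
        # create_task() picks up the loop that is running this coroutine, so
        # callers no longer need to fetch or pass an event loop themselves.
        task = asyncio.create_task(expire({"delay": 0.1}))
        await task

    asyncio.run(main())
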
--- bot/cogs/moderation/management.py | 4 +--- bot/cogs/moderation/scheduler.py | 4 ++-- bot/cogs/moderation/superstarify.py | 2 +- bot/cogs/reminders.py | 11 +++-------- bot/utils/scheduling.py | 4 ++-- 5 files changed, 9 insertions(+), 16 deletions(-) diff --git a/bot/cogs/moderation/management.py b/bot/cogs/moderation/management.py index f2964cd78..279c8b809 100644 --- a/bot/cogs/moderation/management.py +++ b/bot/cogs/moderation/management.py @@ -1,4 +1,3 @@ -import asyncio import logging import textwrap import typing as t @@ -133,8 +132,7 @@ class ModManagement(commands.Cog): # If the infraction was not marked as permanent, schedule a new expiration task if request_data['expires_at']: - loop = asyncio.get_event_loop() - self.infractions_cog.schedule_task(loop, new_infraction['id'], new_infraction) + self.infractions_cog.schedule_task(new_infraction['id'], new_infraction) log_text += f""" Previous expiry: {old_infraction['expires_at'] or "Permanent"} diff --git a/bot/cogs/moderation/scheduler.py b/bot/cogs/moderation/scheduler.py index e14c302cb..62b040d1f 100644 --- a/bot/cogs/moderation/scheduler.py +++ b/bot/cogs/moderation/scheduler.py @@ -48,7 +48,7 @@ class InfractionScheduler(Scheduler): ) for infraction in infractions: if infraction["expires_at"] is not None and infraction["type"] in supported_infractions: - self.schedule_task(self.bot.loop, infraction["id"], infraction) + self.schedule_task(infraction["id"], infraction) async def reapply_infraction( self, @@ -150,7 +150,7 @@ class InfractionScheduler(Scheduler): await action_coro if expiry: # Schedule the expiration of the infraction. - self.schedule_task(ctx.bot.loop, infraction["id"], infraction) + self.schedule_task(infraction["id"], infraction) except discord.HTTPException as e: # Accordingly display that applying the infraction failed. confirm_msg = f":x: failed to apply" diff --git a/bot/cogs/moderation/superstarify.py b/bot/cogs/moderation/superstarify.py index 050c847ac..d94ee6891 100644 --- a/bot/cogs/moderation/superstarify.py +++ b/bot/cogs/moderation/superstarify.py @@ -145,7 +145,7 @@ class Superstarify(InfractionScheduler, Cog): log.debug(f"Changing nickname of {member} to {forced_nick}.") self.mod_log.ignore(constants.Event.member_update, member.id) await member.edit(nick=forced_nick, reason=reason) - self.schedule_task(ctx.bot.loop, id_, infraction) + self.schedule_task(id_, infraction) # Send a DM to the user to notify them of their new infraction. 
await utils.notify_infraction( diff --git a/bot/cogs/reminders.py b/bot/cogs/reminders.py index 45bf9a8f4..d96dedd20 100644 --- a/bot/cogs/reminders.py +++ b/bot/cogs/reminders.py @@ -1,4 +1,3 @@ -import asyncio import logging import random import textwrap @@ -42,7 +41,6 @@ class Reminders(Scheduler, Cog): ) now = datetime.utcnow() - loop = asyncio.get_event_loop() for reminder in response: remind_at = datetime.fromisoformat(reminder['expiration'][:-1]) @@ -53,7 +51,7 @@ class Reminders(Scheduler, Cog): await self.send_reminder(reminder, late) else: - self.schedule_task(loop, reminder["id"], reminder) + self.schedule_task(reminder["id"], reminder) @staticmethod async def _send_confirmation(ctx: Context, on_success: str) -> None: @@ -88,10 +86,8 @@ class Reminders(Scheduler, Cog): async def _reschedule_reminder(self, reminder: dict) -> None: """Reschedule a reminder object.""" - loop = asyncio.get_event_loop() - self.cancel_task(reminder["id"]) - self.schedule_task(loop, reminder["id"], reminder) + self.schedule_task(reminder["id"], reminder) async def send_reminder(self, reminder: dict, late: relativedelta = None) -> None: """Send the reminder.""" @@ -185,8 +181,7 @@ class Reminders(Scheduler, Cog): on_success=f"Your reminder will arrive in {humanize_delta(relativedelta(expiration, now))}!" ) - loop = asyncio.get_event_loop() - self.schedule_task(loop, reminder["id"], reminder) + self.schedule_task(reminder["id"], reminder) @remind_group.command(name="list") async def list_reminders(self, ctx: Context) -> Optional[Message]: diff --git a/bot/utils/scheduling.py b/bot/utils/scheduling.py index adf10d683..a16900066 100644 --- a/bot/utils/scheduling.py +++ b/bot/utils/scheduling.py @@ -29,7 +29,7 @@ class Scheduler(metaclass=CogABCMeta): then make a site API request to delete the reminder from the database. """ - def schedule_task(self, loop: asyncio.AbstractEventLoop, task_id: str, task_data: dict) -> None: + def schedule_task(self, task_id: str, task_data: dict) -> None: """ Schedules a task. @@ -41,7 +41,7 @@ class Scheduler(metaclass=CogABCMeta): ) return - task = loop.create_task(self._scheduled_task(task_data)) + task = asyncio.create_task(self._scheduled_task(task_data)) task.add_done_callback(_suppress_cancelled_error) self.scheduled_tasks[task_id] = task -- cgit v1.2.3 From 6b7c0a7a74460ee96c5ce574bf042f3de38dd685 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sat, 15 Feb 2020 20:24:48 -0800 Subject: Scheduler: raise task exceptions besides CancelledError Explicitly retrieves the task's exception, which will raise the exception if one exists. * Rename _suppress_cancelled_error to _handle_task_exception --- bot/utils/scheduling.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/bot/utils/scheduling.py b/bot/utils/scheduling.py index a16900066..df46ccdd9 100644 --- a/bot/utils/scheduling.py +++ b/bot/utils/scheduling.py @@ -13,8 +13,9 @@ class Scheduler(metaclass=CogABCMeta): """Task scheduler.""" def __init__(self): + # Keep track of the child cog's name so the logs are clear. + self.cog_name = self.__class__.__name__ - self.cog_name = self.__class__.__name__ # keep track of the child cog's name so the logs are clear. 
self.scheduled_tasks: Dict[str, asyncio.Task] = {} @abstractmethod @@ -42,7 +43,7 @@ class Scheduler(metaclass=CogABCMeta): return task = asyncio.create_task(self._scheduled_task(task_data)) - task.add_done_callback(_suppress_cancelled_error) + task.add_done_callback(_handle_task_exception) self.scheduled_tasks[task_id] = task log.debug(f"{self.cog_name}: scheduled task #{task_id}.") @@ -60,8 +61,10 @@ class Scheduler(metaclass=CogABCMeta): del self.scheduled_tasks[task_id] -def _suppress_cancelled_error(task: asyncio.Task) -> None: - """Suppress a task's CancelledError exception.""" +def _handle_task_exception(task: asyncio.Task) -> None: + """Raise the task's exception, if any, unless the task is cancelled and has a CancelledError.""" if task.cancelled(): with contextlib.suppress(asyncio.CancelledError): task.exception() + else: + task.exception() -- cgit v1.2.3 From 687b6404e8976ffdc67ba9492a4355819f06a2f7 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sat, 15 Feb 2020 21:17:22 -0800 Subject: Scheduler: cancel the task in the callback This design makes more sense and is more convenient than requiring tasks to be responsible for cancelling themselves. * Rename _handle_task_exception to _task_done_callback * Add trace logging --- bot/cogs/reminders.py | 6 +++--- bot/utils/scheduling.py | 36 +++++++++++++++++++++++++++--------- 2 files changed, 30 insertions(+), 12 deletions(-) diff --git a/bot/cogs/reminders.py b/bot/cogs/reminders.py index d96dedd20..603b627fb 100644 --- a/bot/cogs/reminders.py +++ b/bot/cogs/reminders.py @@ -74,9 +74,6 @@ class Reminders(Scheduler, Cog): log.debug(f"Deleting reminder {reminder_id} (the user has been reminded).") await self._delete_reminder(reminder_id) - # Now we can begone with it from our schedule list. - self.cancel_task(reminder_id) - async def _delete_reminder(self, reminder_id: str) -> None: """Delete a reminder from the database, given its ID, and cancel the running task.""" await self.bot.api_client.delete('bot/reminders/' + str(reminder_id)) @@ -86,7 +83,10 @@ class Reminders(Scheduler, Cog): async def _reschedule_reminder(self, reminder: dict) -> None: """Reschedule a reminder object.""" + log.trace(f"Cancelling old task #{reminder['id']}") self.cancel_task(reminder["id"]) + + log.trace(f"Scheduling new task #{reminder['id']}") self.schedule_task(reminder["id"], reminder) async def send_reminder(self, reminder: dict, late: relativedelta = None) -> None: diff --git a/bot/utils/scheduling.py b/bot/utils/scheduling.py index df46ccdd9..40d26249f 100644 --- a/bot/utils/scheduling.py +++ b/bot/utils/scheduling.py @@ -2,6 +2,7 @@ import asyncio import contextlib import logging from abc import abstractmethod +from functools import partial from typing import Dict from bot.utils import CogABCMeta @@ -36,6 +37,8 @@ class Scheduler(metaclass=CogABCMeta): `task_data` is passed to the `Scheduler._scheduled_task()` coroutine. """ + log.trace(f"{self.cog_name}: scheduling task #{task_id}...") + if task_id in self.scheduled_tasks: log.debug( f"{self.cog_name}: did not schedule task #{task_id}; task was already scheduled." 
@@ -43,13 +46,15 @@ class Scheduler(metaclass=CogABCMeta): return task = asyncio.create_task(self._scheduled_task(task_data)) - task.add_done_callback(_handle_task_exception) + task.add_done_callback(partial(self._task_done_callback, task_id)) self.scheduled_tasks[task_id] = task - log.debug(f"{self.cog_name}: scheduled task #{task_id}.") + log.debug(f"{self.cog_name}: scheduled task #{task_id} {id(task)}.") def cancel_task(self, task_id: str) -> None: """Un-schedules a task.""" + log.trace(f"{self.cog_name}: cancelling task #{task_id}...") + task = self.scheduled_tasks.get(task_id) if task is None: @@ -57,14 +62,27 @@ class Scheduler(metaclass=CogABCMeta): return task.cancel() - log.debug(f"{self.cog_name}: unscheduled task #{task_id}.") + log.debug(f"{self.cog_name}: unscheduled task #{task_id} {id(task)}.") del self.scheduled_tasks[task_id] + def _task_done_callback(self, task_id: str, task: asyncio.Task) -> None: + """ + Unschedule the task and raise its exception if one exists. + + If the task was cancelled, the CancelledError is retrieved and suppressed. In this case, + the task is already assumed to have been unscheduled. + """ + log.trace(f"{self.cog_name}: performing done callback for task #{task_id} {id(task)}") + + if task.cancelled(): + with contextlib.suppress(asyncio.CancelledError): + task.exception() + else: + # Check if it exists to avoid logging a warning. + if task_id in self.scheduled_tasks: + # Only cancel if the task is not cancelled to avoid a race condition when a new + # task is scheduled using the same ID. Reminders do this when re-scheduling after + # editing. + self.cancel_task(task_id) -def _handle_task_exception(task: asyncio.Task) -> None: - """Raise the task's exception, if any, unless the task is cancelled and has a CancelledError.""" - if task.cancelled(): - with contextlib.suppress(asyncio.CancelledError): task.exception() - else: - task.exception() -- cgit v1.2.3 From 6edae6cda82add4d0bf538e916ce571432f83ab1 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sat, 15 Feb 2020 21:21:51 -0800 Subject: Moderation: avoid prematurely cancelling deactivation task Because deactivate_infraction() explicitly cancels the scheduled task, it now runs in a separate task to avoid prematurely cancelling itself. --- bot/cogs/moderation/scheduler.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bot/cogs/moderation/scheduler.py b/bot/cogs/moderation/scheduler.py index 62b040d1f..7d401c7ff 100644 --- a/bot/cogs/moderation/scheduler.py +++ b/bot/cogs/moderation/scheduler.py @@ -415,4 +415,6 @@ class InfractionScheduler(Scheduler): expiry = dateutil.parser.isoparse(infraction["expires_at"]).replace(tzinfo=None) await time.wait_until(expiry) - await self.deactivate_infraction(infraction) + # Because deactivate_infraction() explicitly cancels this scheduled task, it runs in + # a separate task to avoid prematurely cancelling itself. + self.bot.loop.create_task(self.deactivate_infraction(infraction)) -- cgit v1.2.3 From f09d6b0646edb62806b60b145986b7cc680fd77c Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sat, 15 Feb 2020 21:34:38 -0800 Subject: Scheduler: make _scheduled_tasks private Main concern is someone trying to cancel a task directly. The workaround for the race condition relies on the task only being cancelled via Scheduler.cancel_task(), particularly because it removes the task from the dictionary. The done callback will not remove from the dictionary if it sees the task has already been cancelled. So it's a bad idea to cancel tasks directly... 
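
To make that contract concrete, here is a stripped-down sketch of the idea; it is not the bot's Scheduler, and MiniScheduler with its method names is invented purely for illustration. Cancellation goes through the scheduler, which removes the mapping itself, so the done callback can treat a cancelled task as already unscheduled even if a new task has since been registered under the same ID.

    import asyncio
    from functools import partial
    from typing import Dict


    class MiniScheduler:
        """Illustrative sketch of the cancel-via-scheduler contract."""

        def __init__(self) -> None:
            self._tasks: Dict[str, asyncio.Task] = {}

        def schedule(self, task_id: str, coro) -> None:
            task = asyncio.create_task(coro)
            task.add_done_callback(partial(self._done, task_id))
            self._tasks[task_id] = task

        def cancel(self, task_id: str) -> None:
            # The only supported way to cancel: the mapping is removed here,
            # so the same ID can immediately be reused for a new task.
            task = self._tasks.pop(task_id, None)
            if task is not None:
                task.cancel()

        def _done(self, task_id: str, task: asyncio.Task) -> None:
            if task.cancelled():
                # cancel() already cleaned up; touching the mapping here could
                # remove a *new* task scheduled under the same ID.
                return
            self._tasks.pop(task_id, None)


    async def main() -> None:
        scheduler = MiniScheduler()
        scheduler.schedule("42", asyncio.sleep(10))
        scheduler.cancel("42")                      # goes through the scheduler
        scheduler.schedule("42", asyncio.sleep(0))  # the ID is reused safely
        await asyncio.sleep(0.1)


    asyncio.run(main())
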
--- bot/utils/scheduling.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/bot/utils/scheduling.py b/bot/utils/scheduling.py index 40d26249f..0d66952eb 100644 --- a/bot/utils/scheduling.py +++ b/bot/utils/scheduling.py @@ -17,7 +17,7 @@ class Scheduler(metaclass=CogABCMeta): # Keep track of the child cog's name so the logs are clear. self.cog_name = self.__class__.__name__ - self.scheduled_tasks: Dict[str, asyncio.Task] = {} + self._scheduled_tasks: Dict[str, asyncio.Task] = {} @abstractmethod async def _scheduled_task(self, task_object: dict) -> None: @@ -39,7 +39,7 @@ class Scheduler(metaclass=CogABCMeta): """ log.trace(f"{self.cog_name}: scheduling task #{task_id}...") - if task_id in self.scheduled_tasks: + if task_id in self._scheduled_tasks: log.debug( f"{self.cog_name}: did not schedule task #{task_id}; task was already scheduled." ) @@ -48,14 +48,14 @@ class Scheduler(metaclass=CogABCMeta): task = asyncio.create_task(self._scheduled_task(task_data)) task.add_done_callback(partial(self._task_done_callback, task_id)) - self.scheduled_tasks[task_id] = task + self._scheduled_tasks[task_id] = task log.debug(f"{self.cog_name}: scheduled task #{task_id} {id(task)}.") def cancel_task(self, task_id: str) -> None: """Un-schedules a task.""" log.trace(f"{self.cog_name}: cancelling task #{task_id}...") - task = self.scheduled_tasks.get(task_id) + task = self._scheduled_tasks.get(task_id) if task is None: log.warning(f"{self.cog_name}: Failed to unschedule {task_id} (no task found).") @@ -63,7 +63,7 @@ class Scheduler(metaclass=CogABCMeta): task.cancel() log.debug(f"{self.cog_name}: unscheduled task #{task_id} {id(task)}.") - del self.scheduled_tasks[task_id] + del self._scheduled_tasks[task_id] def _task_done_callback(self, task_id: str, task: asyncio.Task) -> None: """ @@ -79,7 +79,7 @@ class Scheduler(metaclass=CogABCMeta): task.exception() else: # Check if it exists to avoid logging a warning. - if task_id in self.scheduled_tasks: + if task_id in self._scheduled_tasks: # Only cancel if the task is not cancelled to avoid a race condition when a new # task is scheduled using the same ID. Reminders do this when re-scheduling after # editing. -- cgit v1.2.3 From 05de1f705a9dda3e6b15f0772ddbdc6876ffeb8d Mon Sep 17 00:00:00 2001 From: Sebastiaan Zeeff <33516116+SebastiaanZ@users.noreply.github.com> Date: Sun, 23 Feb 2020 12:31:01 +0100 Subject: Update to Python 3.8 and discord.py 1.3.2 I've changed the Python version in our Pipfile to Python 3.8. The main advantage of Python 3.8 is that it comes with significant upgrades to the unittest module which allow us to test asyncio-based code more easily. While our current test suite runs in P3.8 "out of the box", it currently still relies on many of the workarounds we had to use to test asynchronous code in Python 3.7. A future commit will replace these workarounds with the new tools available in Python 3.8. This commit also updates our discord.py version to 1.3.2. Versions of discord.py <= 1.3.1 contain a bug that causes errors in the new unittest tools that come with Python 3.8. For more specific details, see https://github.com/Rapptz/discord.py/pull/2570. 
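
As a rough illustration of the unittest upgrades referred to above, Python 3.8 adds unittest.IsolatedAsyncioTestCase (coroutine test methods plus asyncSetUp/asyncTearDown) and unittest.mock.AsyncMock. The example below is a generic sketch of those standard-library features, not a test from this repository:

    import unittest
    from unittest.mock import AsyncMock


    class ExampleAsyncTest(unittest.IsolatedAsyncioTestCase):
        """Each test method runs as a coroutine on its own event loop."""

        async def asyncSetUp(self):
            # AsyncMock (new in 3.8) returns an awaitable when called.
            self.api_get = AsyncMock(return_value={"id": 1})

        async def test_get(self):
            result = await self.api_get("bot/reminders/1")
            self.assertEqual(result, {"id": 1})
            self.api_get.assert_awaited_once_with("bot/reminders/1")


    if __name__ == "__main__":
        unittest.main()
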
--- Pipfile | 4 +-- Pipfile.lock | 112 +++++++++++++++++++---------------------------------------- 2 files changed, 37 insertions(+), 79 deletions(-) diff --git a/Pipfile b/Pipfile index 400e64c18..e08b5b41d 100644 --- a/Pipfile +++ b/Pipfile @@ -4,7 +4,7 @@ verify_ssl = true name = "pypi" [packages] -discord-py = "~=1.3.1" +discord-py = "~=1.3.2" aiodns = "~=2.0" aiohttp = "~=3.5" sphinx = "~=2.2" @@ -36,7 +36,7 @@ unittest-xml-reporting = "~=2.5" dodgy = "~=0.1" [requires] -python_version = "3.7" +python_version = "3.8" [scripts] start = "python -m bot" diff --git a/Pipfile.lock b/Pipfile.lock index fa29bf995..7c11f1860 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,11 +1,11 @@ { "_meta": { "hash": { - "sha256": "c7706a61eb96c06d073898018ea2dbcf5bd3b15d007496e2d60120a65647f31e" + "sha256": "513182efe8c06f5d8acb494ebdfb8670cd68f426fd87085778421872c2c3acc8" }, "pipfile-spec": 6, "requires": { - "python_version": "3.7" + "python_version": "3.8" }, "sources": [ { @@ -150,10 +150,10 @@ }, "discord-py": { "hashes": [ - "sha256:8bfe5628d31771744000f19135c386c74ac337479d7282c26cc1627b9d31f360" + "sha256:7424be26b07b37ecad4404d9383d685995a0e0b3df3f9c645bdd3a4d977b83b4" ], "index": "pypi", - "version": "==1.3.1" + "version": "==1.3.2" }, "docutils": { "hashes": [ @@ -279,25 +279,25 @@ }, "multidict": { "hashes": [ - "sha256:13f3ebdb5693944f52faa7b2065b751cb7e578b8dd0a5bb8e4ab05ad0188b85e", - "sha256:26502cefa86d79b86752e96639352c7247846515c864d7c2eb85d036752b643c", - "sha256:4fba5204d32d5c52439f88437d33ad14b5f228e25072a192453f658bddfe45a7", - "sha256:527124ef435f39a37b279653ad0238ff606b58328ca7989a6df372fd75d7fe26", - "sha256:5414f388ffd78c57e77bd253cf829373721f450613de53dc85a08e34d806e8eb", - "sha256:5eee66f882ab35674944dfa0d28b57fa51e160b4dce0ce19e47f495fdae70703", - "sha256:63810343ea07f5cd86ba66ab66706243a6f5af075eea50c01e39b4ad6bc3c57a", - "sha256:6bd10adf9f0d6a98ccc792ab6f83d18674775986ba9bacd376b643fe35633357", - "sha256:83c6ddf0add57c6b8a7de0bc7e2d656be3eefeff7c922af9a9aae7e49f225625", - "sha256:93166e0f5379cf6cd29746989f8a594fa7204dcae2e9335ddba39c870a287e1c", - "sha256:9a7b115ee0b9b92d10ebc246811d8f55d0c57e82dbb6a26b23c9a9a6ad40ce0c", - "sha256:a38baa3046cce174a07a59952c9f876ae8875ef3559709639c17fdf21f7b30dd", - "sha256:a6d219f49821f4b2c85c6d426346a5d84dab6daa6f85ca3da6c00ed05b54022d", - "sha256:a8ed33e8f9b67e3b592c56567135bb42e7e0e97417a4b6a771e60898dfd5182b", - "sha256:d7d428488c67b09b26928950a395e41cc72bb9c3d5abfe9f0521940ee4f796d4", - "sha256:dcfed56aa085b89d644af17442cdc2debaa73388feba4b8026446d168ca8dad7", - "sha256:f29b885e4903bd57a7789f09fe9d60b6475a6c1a4c0eca874d8558f00f9d4b51" - ], - "version": "==4.7.4" + "sha256:317f96bc0950d249e96d8d29ab556d01dd38888fbe68324f46fd834b430169f1", + "sha256:42f56542166040b4474c0c608ed051732033cd821126493cf25b6c276df7dd35", + "sha256:4b7df040fb5fe826d689204f9b544af469593fb3ff3a069a6ad3409f742f5928", + "sha256:544fae9261232a97102e27a926019100a9db75bec7b37feedd74b3aa82f29969", + "sha256:620b37c3fea181dab09267cd5a84b0f23fa043beb8bc50d8474dd9694de1fa6e", + "sha256:6e6fef114741c4d7ca46da8449038ec8b1e880bbe68674c01ceeb1ac8a648e78", + "sha256:7774e9f6c9af3f12f296131453f7b81dabb7ebdb948483362f5afcaac8a826f1", + "sha256:85cb26c38c96f76b7ff38b86c9d560dea10cf3459bb5f4caf72fc1bb932c7136", + "sha256:a326f4240123a2ac66bb163eeba99578e9d63a8654a59f4688a79198f9aa10f8", + "sha256:ae402f43604e3b2bc41e8ea8b8526c7fa7139ed76b0d64fc48e28125925275b2", + "sha256:aee283c49601fa4c13adc64c09c978838a7e812f85377ae130a24d7198c0331e", + 
"sha256:b51249fdd2923739cd3efc95a3d6c363b67bbf779208e9f37fd5e68540d1a4d4", + "sha256:bb519becc46275c594410c6c28a8a0adc66fe24fef154a9addea54c1adb006f5", + "sha256:c2c37185fb0af79d5c117b8d2764f4321eeb12ba8c141a95d0aa8c2c1d0a11dd", + "sha256:dc561313279f9d05a3d0ffa89cd15ae477528ea37aa9795c4654588a3287a9ab", + "sha256:e439c9a10a95cb32abd708bb8be83b2134fa93790a4fb0535ca36db3dda94d20", + "sha256:fc3b4adc2ee8474cb3cd2a155305d5f8eda0a9c91320f83e55748e1fcb68f8e3" + ], + "version": "==4.7.5" }, "ordered-set": { "hashes": [ @@ -437,18 +437,18 @@ }, "soupsieve": { "hashes": [ - "sha256:bdb0d917b03a1369ce964056fc195cfdff8819c40de04695a80bc813c3cfa1f5", - "sha256:e2c1c5dee4a1c36bcb790e0fabd5492d874b8ebd4617622c4f6a731701060dda" + "sha256:e914534802d7ffd233242b785229d5ba0766a7f487385e3f714446a07bf540ae", + "sha256:fcd71e08c0aee99aca1b73f45478549ee7e7fc006d51b37bec9e9def7dc22b69" ], - "version": "==1.9.5" + "version": "==2.0" }, "sphinx": { "hashes": [ - "sha256:525527074f2e0c2585f68f73c99b4dc257c34bbe308b27f5f8c7a6e20642742f", - "sha256:543d39db5f82d83a5c1aa0c10c88f2b6cff2da3e711aa849b2c627b4b403bbd9" + "sha256:776ff8333181138fae52df65be733127539623bb46cc692e7fa0fcfc80d7aa88", + "sha256:ca762da97c3b5107cbf0ab9e11d3ec7ab8d3c31377266fd613b962ed971df709" ], "index": "pypi", - "version": "==2.4.2" + "version": "==2.4.3" }, "sphinxcontrib-applehelp": { "hashes": [ @@ -466,10 +466,10 @@ }, "sphinxcontrib-htmlhelp": { "hashes": [ - "sha256:4670f99f8951bd78cd4ad2ab962f798f5618b17675c35c5ac3b2132a14ea8422", - "sha256:d4fd39a65a625c9df86d7fa8a2d9f3cd8299a3a4b15db63b50aac9e161d8eff7" + "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f", + "sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b" ], - "version": "==1.0.2" + "version": "==1.0.3" }, "sphinxcontrib-jsmath": { "hashes": [ @@ -750,14 +750,6 @@ ], "version": "==2.9" }, - "importlib-metadata": { - "hashes": [ - "sha256:06f5b3a99029c7134207dd882428a66992a9de2bef7c2b699b5641f9886c3302", - "sha256:b97607a1a18a5100839aec1dc26a1ea17ee0d93b20b0f008d80a5a050afb200b" - ], - "markers": "python_version < '3.8'", - "version": "==1.5.0" - }, "mccabe": { "hashes": [ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", @@ -868,33 +860,6 @@ ], "version": "==0.10.0" }, - "typed-ast": { - "hashes": [ - "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355", - "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919", - "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa", - "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652", - "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75", - "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01", - "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d", - "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1", - "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907", - "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c", - "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3", - "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b", - "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614", - "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb", - "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b", - "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41", - 
"sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6", - "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34", - "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe", - "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4", - "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7" - ], - "markers": "python_version < '3.8'", - "version": "==1.4.1" - }, "unittest-xml-reporting": { "hashes": [ "sha256:358bbdaf24a26d904cc1c26ef3078bca7fc81541e0a54c8961693cc96a6f35e0", @@ -913,17 +878,10 @@ }, "virtualenv": { "hashes": [ - "sha256:08f3623597ce73b85d6854fb26608a6f39ee9d055c81178dc6583803797f8994", - "sha256:de2cbdd5926c48d7b84e0300dea9e8f276f61d186e8e49223d71d91250fbaebd" + "sha256:531b142e300d405bb9faedad4adbeb82b4098b918e35209af2adef3129274aae", + "sha256:5dd42a9f56307542bddc446cfd10ef6576f11910366a07609fe8d0d88fa8fb7e" ], - "version": "==20.0.4" - }, - "zipp": { - "hashes": [ - "sha256:12248a63bbdf7548f89cb4c7cda4681e537031eda29c02ea29674bc6854460c2", - "sha256:7c0f8e91abc0dc07a5068f315c52cb30c66bfbc581e5b50704c8a2f6ebae794a" - ], - "version": "==3.0.0" + "version": "==20.0.5" } } } -- cgit v1.2.3 From d3f4673c1a1c3f5213840e756c5f35f7c70d46f6 Mon Sep 17 00:00:00 2001 From: Sebastiaan Zeeff <33516116+SebastiaanZ@users.noreply.github.com> Date: Sun, 23 Feb 2020 12:39:25 +0100 Subject: Use mixin-composition not inheritance for LoggingTestCase We used inheritence to add additional logging assertion methods to unittest's TestCase class. However, with the introduction of the new IsolatedAsyncioTestCase this extension strategy means we'd have to create multiple child classes to be able to use the extended functionality in all of the TestCase variants. Since that leads to undesirable code reuse and an inheritance relationship is not at all needed, I've switched to a mixin-composition based approach that allows the user to extend the functionality of any TestCase variant with a mixin where needed. --- tests/base.py | 10 +++++++--- tests/bot/cogs/test_duck_pond.py | 2 +- tests/test_base.py | 18 ++++++------------ 3 files changed, 14 insertions(+), 16 deletions(-) diff --git a/tests/base.py b/tests/base.py index 029a249ed..21a57716a 100644 --- a/tests/base.py +++ b/tests/base.py @@ -1,5 +1,4 @@ import logging -import unittest from contextlib import contextmanager @@ -16,8 +15,13 @@ class _CaptureLogHandler(logging.Handler): self.records.append(record) -class LoggingTestCase(unittest.TestCase): - """TestCase subclass that adds more logging assertion tools.""" +class LoggingTestsMixin: + """ + A mixin that defines additional test methods for logging behavior. + + This mixin relies on the availability of the `fail` attribute defined by the + test classes included in Python's unittest method to signal test failure. 
+ """ @contextmanager def assertNotLogs(self, logger=None, level=None, msg=None): diff --git a/tests/bot/cogs/test_duck_pond.py b/tests/bot/cogs/test_duck_pond.py index d07b2bce1..320cbd5c5 100644 --- a/tests/bot/cogs/test_duck_pond.py +++ b/tests/bot/cogs/test_duck_pond.py @@ -14,7 +14,7 @@ from tests import helpers MODULE_PATH = "bot.cogs.duck_pond" -class DuckPondTests(base.LoggingTestCase): +class DuckPondTests(base.LoggingTestsMixin, unittest.TestCase): """Tests for DuckPond functionality.""" @classmethod diff --git a/tests/test_base.py b/tests/test_base.py index a16e2af8f..23abb1dfd 100644 --- a/tests/test_base.py +++ b/tests/test_base.py @@ -3,7 +3,11 @@ import unittest import unittest.mock -from tests.base import LoggingTestCase, _CaptureLogHandler +from tests.base import LoggingTestsMixin, _CaptureLogHandler + + +class LoggingTestCase(LoggingTestsMixin): + pass class LoggingTestCaseTests(unittest.TestCase): @@ -18,19 +22,9 @@ class LoggingTestCaseTests(unittest.TestCase): try: with LoggingTestCase.assertNotLogs(self, level=logging.DEBUG): pass - except AssertionError: + except AssertionError: # pragma: no cover self.fail("`self.assertNotLogs` raised an AssertionError when it should not!") - @unittest.mock.patch("tests.base.LoggingTestCase.assertNotLogs") - def test_the_test_function_assert_not_logs_does_not_raise_with_no_logs(self, assertNotLogs): - """Test if test_assert_not_logs_does_not_raise_with_no_logs captures exception correctly.""" - assertNotLogs.return_value = iter([None]) - assertNotLogs.side_effect = AssertionError - - message = "`self.assertNotLogs` raised an AssertionError when it should not!" - with self.assertRaises(AssertionError, msg=message): - self.test_assert_not_logs_does_not_raise_with_no_logs() - def test_assert_not_logs_raises_correct_assertion_error_when_logs_are_emitted(self): """Test if LoggingTestCase.assertNotLogs raises AssertionError when logs were emitted.""" msg_regex = ( -- cgit v1.2.3 From 135d6daa4804574935cd788c5baec656765f484b Mon Sep 17 00:00:00 2001 From: Sebastiaan Zeeff <33516116+SebastiaanZ@users.noreply.github.com> Date: Sun, 23 Feb 2020 13:05:10 +0100 Subject: Use IsolatedAsyncioTestCase instead of async_test Since we upgraded to Python 3.8, we can now use the new IsolatedAsyncioTestCase test class to use coroutine-based test methods instead of our own, custom async_test decorator. I have changed the base class for all of our test classes that use coroutine-based test methods and removed the now obsolete decorator from our helpers. 
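
For orientation, the shape of the change looks roughly like the sketch below. The async_test body shown here is only a typical Python 3.7-era workaround written for illustration; the helper actually removed by this commit is not reproduced in these patches.

    import asyncio
    import unittest


    def async_test(coro):
        # Illustrative stand-in for the removed decorator: run the coroutine
        # test on a throwaway event loop so a plain TestCase can execute it.
        def wrapper(*args, **kwargs):
            return asyncio.run(coro(*args, **kwargs))
        return wrapper


    class OldStyle(unittest.TestCase):
        @async_test
        async def test_sleep(self):
            self.assertTrue(await asyncio.sleep(0, result=True))


    class NewStyle(unittest.IsolatedAsyncioTestCase):
        # Python 3.8 runs coroutine test methods natively; no decorator needed.
        async def test_sleep(self):
            self.assertTrue(await asyncio.sleep(0, result=True))


    if __name__ == "__main__":
        unittest.main()
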
--- tests/bot/cogs/test_duck_pond.py | 11 +---------- tests/bot/rules/__init__.py | 2 +- tests/bot/rules/test_attachments.py | 4 +--- tests/bot/rules/test_burst.py | 4 +--- tests/bot/rules/test_burst_shared.py | 4 +--- tests/bot/rules/test_chars.py | 4 +--- tests/bot/rules/test_discord_emojis.py | 4 +--- tests/bot/rules/test_duplicates.py | 4 +--- tests/bot/rules/test_links.py | 4 +--- tests/bot/rules/test_mentions.py | 4 +--- tests/bot/rules/test_newlines.py | 5 +---- tests/bot/rules/test_role_mentions.py | 4 +--- tests/bot/test_api.py | 4 +--- tests/helpers.py | 17 ----------------- tests/test_helpers.py | 8 -------- 15 files changed, 13 insertions(+), 70 deletions(-) diff --git a/tests/bot/cogs/test_duck_pond.py b/tests/bot/cogs/test_duck_pond.py index 320cbd5c5..6406f0737 100644 --- a/tests/bot/cogs/test_duck_pond.py +++ b/tests/bot/cogs/test_duck_pond.py @@ -14,7 +14,7 @@ from tests import helpers MODULE_PATH = "bot.cogs.duck_pond" -class DuckPondTests(base.LoggingTestsMixin, unittest.TestCase): +class DuckPondTests(base.LoggingTestsMixin, unittest.IsolatedAsyncioTestCase): """Tests for DuckPond functionality.""" @classmethod @@ -88,7 +88,6 @@ class DuckPondTests(base.LoggingTestsMixin, unittest.TestCase): with self.subTest(user_type=user.name, expected_return=expected_return, actual_return=actual_return): self.assertEqual(expected_return, actual_return) - @helpers.async_test async def test_has_green_checkmark_correctly_detects_presence_of_green_checkmark_emoji(self): """The `has_green_checkmark` method should only return `True` if one is present.""" test_cases = ( @@ -172,7 +171,6 @@ class DuckPondTests(base.LoggingTestsMixin, unittest.TestCase): nonstaffers = [helpers.MockMember() for _ in range(nonstaff)] return helpers.MockReaction(emoji=emoji, users=staffers + nonstaffers) - @helpers.async_test async def test_count_ducks_correctly_counts_the_number_of_eligible_duck_emojis(self): """The `count_ducks` method should return the number of unique staffers who gave a duck.""" test_cases = ( @@ -280,7 +278,6 @@ class DuckPondTests(base.LoggingTestsMixin, unittest.TestCase): with self.subTest(test_case=description, expected_count=expected_count, actual_count=actual_count): self.assertEqual(expected_count, actual_count) - @helpers.async_test async def test_relay_message_correctly_relays_content_and_attachments(self): """The `relay_message` method should correctly relay message content and attachments.""" send_webhook_path = f"{MODULE_PATH}.DuckPond.send_webhook" @@ -307,7 +304,6 @@ class DuckPondTests(base.LoggingTestsMixin, unittest.TestCase): message.add_reaction.assert_called_once_with(self.checkmark_emoji) @patch(f"{MODULE_PATH}.send_attachments", new_callable=helpers.AsyncMock) - @helpers.async_test async def test_relay_message_handles_irretrievable_attachment_exceptions(self, send_attachments): """The `relay_message` method should handle irretrievable attachments.""" message = helpers.MockMessage(clean_content="message", attachments=["attachment"]) @@ -327,7 +323,6 @@ class DuckPondTests(base.LoggingTestsMixin, unittest.TestCase): @patch(f"{MODULE_PATH}.DuckPond.send_webhook", new_callable=helpers.AsyncMock) @patch(f"{MODULE_PATH}.send_attachments", new_callable=helpers.AsyncMock) - @helpers.async_test async def test_relay_message_handles_attachment_http_error(self, send_attachments, send_webhook): """The `relay_message` method should handle irretrievable attachments.""" message = helpers.MockMessage(clean_content="message", attachments=["attachment"]) @@ -360,7 +355,6 @@ class 
DuckPondTests(base.LoggingTestsMixin, unittest.TestCase): payload.emoji.name = emoji_name return payload - @helpers.async_test async def test_payload_has_duckpond_emoji_correctly_detects_relevant_emojis(self): """The `on_raw_reaction_add` event handler should ignore irrelevant emojis.""" test_values = ( @@ -434,7 +428,6 @@ class DuckPondTests(base.LoggingTestsMixin, unittest.TestCase): return channel, message, member, payload - @helpers.async_test async def test_on_raw_reaction_add_returns_for_bot_and_non_staff_members(self): """The `on_raw_reaction_add` event handler should return for bot users or non-staff members.""" channel_id = 1234 @@ -485,7 +478,6 @@ class DuckPondTests(base.LoggingTestsMixin, unittest.TestCase): # Assert that we've made it past `self.is_staff` is_staff.assert_called_once() - @helpers.async_test async def test_on_raw_reaction_add_does_not_relay_below_duck_threshold(self): """The `on_raw_reaction_add` listener should not relay messages or attachments below the duck threshold.""" test_cases = ( @@ -515,7 +507,6 @@ class DuckPondTests(base.LoggingTestsMixin, unittest.TestCase): if should_relay: relay_message.assert_called_once_with(message) - @helpers.async_test async def test_on_raw_reaction_remove_prevents_removal_of_green_checkmark_depending_on_the_duck_count(self): """The `on_raw_reaction_remove` listener prevents removal of the check mark on messages with enough ducks.""" checkmark = helpers.MockPartialEmoji(name=self.checkmark_emoji) diff --git a/tests/bot/rules/__init__.py b/tests/bot/rules/__init__.py index 36c986fe1..0233e7939 100644 --- a/tests/bot/rules/__init__.py +++ b/tests/bot/rules/__init__.py @@ -12,7 +12,7 @@ class DisallowedCase(NamedTuple): n_violations: int -class RuleTest(unittest.TestCase, metaclass=ABCMeta): +class RuleTest(unittest.IsolatedAsyncioTestCase, metaclass=ABCMeta): """ Abstract class for antispam rule test cases. 
diff --git a/tests/bot/rules/test_attachments.py b/tests/bot/rules/test_attachments.py index e54b4b5b8..d7e779221 100644 --- a/tests/bot/rules/test_attachments.py +++ b/tests/bot/rules/test_attachments.py @@ -2,7 +2,7 @@ from typing import Iterable from bot.rules import attachments from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage, async_test +from tests.helpers import MockMessage def make_msg(author: str, total_attachments: int) -> MockMessage: @@ -17,7 +17,6 @@ class AttachmentRuleTests(RuleTest): self.apply = attachments.apply self.config = {"max": 5, "interval": 10} - @async_test async def test_allows_messages_without_too_many_attachments(self): """Messages without too many attachments are allowed as-is.""" cases = ( @@ -28,7 +27,6 @@ class AttachmentRuleTests(RuleTest): await self.run_allowed(cases) - @async_test async def test_disallows_messages_with_too_many_attachments(self): """Messages with too many attachments trigger the rule.""" cases = ( diff --git a/tests/bot/rules/test_burst.py b/tests/bot/rules/test_burst.py index 72f0be0c7..03682966b 100644 --- a/tests/bot/rules/test_burst.py +++ b/tests/bot/rules/test_burst.py @@ -2,7 +2,7 @@ from typing import Iterable from bot.rules import burst from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage, async_test +from tests.helpers import MockMessage def make_msg(author: str) -> MockMessage: @@ -21,7 +21,6 @@ class BurstRuleTests(RuleTest): self.apply = burst.apply self.config = {"max": 2, "interval": 10} - @async_test async def test_allows_messages_within_limit(self): """Cases which do not violate the rule.""" cases = ( @@ -31,7 +30,6 @@ class BurstRuleTests(RuleTest): await self.run_allowed(cases) - @async_test async def test_disallows_messages_beyond_limit(self): """Cases where the amount of messages exceeds the limit, triggering the rule.""" cases = ( diff --git a/tests/bot/rules/test_burst_shared.py b/tests/bot/rules/test_burst_shared.py index 47367a5f8..3275143d5 100644 --- a/tests/bot/rules/test_burst_shared.py +++ b/tests/bot/rules/test_burst_shared.py @@ -2,7 +2,7 @@ from typing import Iterable from bot.rules import burst_shared from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage, async_test +from tests.helpers import MockMessage def make_msg(author: str) -> MockMessage: @@ -21,7 +21,6 @@ class BurstSharedRuleTests(RuleTest): self.apply = burst_shared.apply self.config = {"max": 2, "interval": 10} - @async_test async def test_allows_messages_within_limit(self): """ Cases that do not violate the rule. 
@@ -34,7 +33,6 @@ class BurstSharedRuleTests(RuleTest): await self.run_allowed(cases) - @async_test async def test_disallows_messages_beyond_limit(self): """Cases where the amount of messages exceeds the limit, triggering the rule.""" cases = ( diff --git a/tests/bot/rules/test_chars.py b/tests/bot/rules/test_chars.py index 7cc36f49e..f1e3c76a7 100644 --- a/tests/bot/rules/test_chars.py +++ b/tests/bot/rules/test_chars.py @@ -2,7 +2,7 @@ from typing import Iterable from bot.rules import chars from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage, async_test +from tests.helpers import MockMessage def make_msg(author: str, n_chars: int) -> MockMessage: @@ -20,7 +20,6 @@ class CharsRuleTests(RuleTest): "interval": 10, } - @async_test async def test_allows_messages_within_limit(self): """Cases with a total amount of chars within limit.""" cases = ( @@ -31,7 +30,6 @@ class CharsRuleTests(RuleTest): await self.run_allowed(cases) - @async_test async def test_disallows_messages_beyond_limit(self): """Cases where the total amount of chars exceeds the limit, triggering the rule.""" cases = ( diff --git a/tests/bot/rules/test_discord_emojis.py b/tests/bot/rules/test_discord_emojis.py index 0239b0b00..9a72723e2 100644 --- a/tests/bot/rules/test_discord_emojis.py +++ b/tests/bot/rules/test_discord_emojis.py @@ -2,7 +2,7 @@ from typing import Iterable from bot.rules import discord_emojis from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage, async_test +from tests.helpers import MockMessage discord_emoji = "<:abcd:1234>" # Discord emojis follow the format <:name:id> @@ -19,7 +19,6 @@ class DiscordEmojisRuleTests(RuleTest): self.apply = discord_emojis.apply self.config = {"max": 2, "interval": 10} - @async_test async def test_allows_messages_within_limit(self): """Cases with a total amount of discord emojis within limit.""" cases = ( @@ -29,7 +28,6 @@ class DiscordEmojisRuleTests(RuleTest): await self.run_allowed(cases) - @async_test async def test_disallows_messages_beyond_limit(self): """Cases with more than the allowed amount of discord emojis.""" cases = ( diff --git a/tests/bot/rules/test_duplicates.py b/tests/bot/rules/test_duplicates.py index 59e0fb6ef..9bd886a77 100644 --- a/tests/bot/rules/test_duplicates.py +++ b/tests/bot/rules/test_duplicates.py @@ -2,7 +2,7 @@ from typing import Iterable from bot.rules import duplicates from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage, async_test +from tests.helpers import MockMessage def make_msg(author: str, content: str) -> MockMessage: @@ -17,7 +17,6 @@ class DuplicatesRuleTests(RuleTest): self.apply = duplicates.apply self.config = {"max": 2, "interval": 10} - @async_test async def test_allows_messages_within_limit(self): """Cases which do not violate the rule.""" cases = ( @@ -28,7 +27,6 @@ class DuplicatesRuleTests(RuleTest): await self.run_allowed(cases) - @async_test async def test_disallows_messages_beyond_limit(self): """Cases with too many duplicate messages from the same author.""" cases = ( diff --git a/tests/bot/rules/test_links.py b/tests/bot/rules/test_links.py index 3c3f90e5f..b091bd9d7 100644 --- a/tests/bot/rules/test_links.py +++ b/tests/bot/rules/test_links.py @@ -2,7 +2,7 @@ from typing import Iterable from bot.rules import links from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage, async_test +from tests.helpers import MockMessage def make_msg(author: str, total_links: int) -> 
MockMessage: @@ -21,7 +21,6 @@ class LinksTests(RuleTest): "interval": 10 } - @async_test async def test_links_within_limit(self): """Messages with an allowed amount of links.""" cases = ( @@ -34,7 +33,6 @@ class LinksTests(RuleTest): await self.run_allowed(cases) - @async_test async def test_links_exceeding_limit(self): """Messages with a a higher than allowed amount of links.""" cases = ( diff --git a/tests/bot/rules/test_mentions.py b/tests/bot/rules/test_mentions.py index ebcdabac6..6444532f2 100644 --- a/tests/bot/rules/test_mentions.py +++ b/tests/bot/rules/test_mentions.py @@ -2,7 +2,7 @@ from typing import Iterable from bot.rules import mentions from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage, async_test +from tests.helpers import MockMessage def make_msg(author: str, total_mentions: int) -> MockMessage: @@ -20,7 +20,6 @@ class TestMentions(RuleTest): "interval": 10, } - @async_test async def test_mentions_within_limit(self): """Messages with an allowed amount of mentions.""" cases = ( @@ -32,7 +31,6 @@ class TestMentions(RuleTest): await self.run_allowed(cases) - @async_test async def test_mentions_exceeding_limit(self): """Messages with a higher than allowed amount of mentions.""" cases = ( diff --git a/tests/bot/rules/test_newlines.py b/tests/bot/rules/test_newlines.py index d61c4609d..e35377773 100644 --- a/tests/bot/rules/test_newlines.py +++ b/tests/bot/rules/test_newlines.py @@ -2,7 +2,7 @@ from typing import Iterable, List from bot.rules import newlines from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage, async_test +from tests.helpers import MockMessage def make_msg(author: str, newline_groups: List[int]) -> MockMessage: @@ -29,7 +29,6 @@ class TotalNewlinesRuleTests(RuleTest): "interval": 10, } - @async_test async def test_allows_messages_within_limit(self): """Cases which do not violate the rule.""" cases = ( @@ -41,7 +40,6 @@ class TotalNewlinesRuleTests(RuleTest): await self.run_allowed(cases) - @async_test async def test_disallows_messages_total(self): """Cases which violate the rule by having too many newlines in total.""" cases = ( @@ -79,7 +77,6 @@ class GroupNewlinesRuleTests(RuleTest): self.apply = newlines.apply self.config = {"max": 5, "max_consecutive": 3, "interval": 10} - @async_test async def test_disallows_messages_consecutive(self): """Cases which violate the rule due to having too many consecutive newlines.""" cases = ( diff --git a/tests/bot/rules/test_role_mentions.py b/tests/bot/rules/test_role_mentions.py index b339cccf7..26c05d527 100644 --- a/tests/bot/rules/test_role_mentions.py +++ b/tests/bot/rules/test_role_mentions.py @@ -2,7 +2,7 @@ from typing import Iterable from bot.rules import role_mentions from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage, async_test +from tests.helpers import MockMessage def make_msg(author: str, n_mentions: int) -> MockMessage: @@ -17,7 +17,6 @@ class RoleMentionsRuleTests(RuleTest): self.apply = role_mentions.apply self.config = {"max": 2, "interval": 10} - @async_test async def test_allows_messages_within_limit(self): """Cases with a total amount of role mentions within limit.""" cases = ( @@ -27,7 +26,6 @@ class RoleMentionsRuleTests(RuleTest): await self.run_allowed(cases) - @async_test async def test_disallows_messages_beyond_limit(self): """Cases with more than the allowed amount of role mentions.""" cases = ( diff --git a/tests/bot/test_api.py b/tests/bot/test_api.py index 
bdfcc73e4..99e942813 100644
--- a/tests/bot/test_api.py
+++ b/tests/bot/test_api.py
@@ -2,10 +2,9 @@ import unittest
 from unittest.mock import MagicMock
 
 from bot import api
-from tests.helpers import async_test
 
 
-class APIClientTests(unittest.TestCase):
+class APIClientTests(unittest.IsolatedAsyncioTestCase):
     """Tests for the bot's API client."""
 
     @classmethod
@@ -18,7 +17,6 @@ class APIClientTests(unittest.TestCase):
         """The event loop should not be running by default."""
         self.assertFalse(api.loop_is_running())
 
-    @async_test
     async def test_loop_is_running_in_async_context(self):
         """The event loop should be running in an async context."""
         self.assertTrue(api.loop_is_running())
diff --git a/tests/helpers.py b/tests/helpers.py
index 5df796c23..01752a791 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -1,8 +1,6 @@
 from __future__ import annotations
 
-import asyncio
 import collections
-import functools
 import inspect
 import itertools
 import logging
@@ -25,21 +23,6 @@ for logger in logging.Logger.manager.loggerDict.values():
     logger.setLevel(logging.CRITICAL)
 
 
-def async_test(wrapped):
-    """
-    Run a test case via asyncio.
-    Example:
-    >>> @async_test
-    ... async def lemon_wins():
-    ...     assert True
-    """
-
-    @functools.wraps(wrapped)
-    def wrapper(*args, **kwargs):
-        return asyncio.run(wrapped(*args, **kwargs))
-    return wrapper
-
-
 class HashableMixin(discord.mixins.EqualityComparable):
     """
     Mixin that provides similar hashing and equality functionality as discord.py's `Hashable` mixin.
diff --git a/tests/test_helpers.py b/tests/test_helpers.py
index 7894e104a..fe39df308 100644
--- a/tests/test_helpers.py
+++ b/tests/test_helpers.py
@@ -395,11 +395,3 @@ class MockObjectTests(unittest.TestCase):
         coroutine = async_mock()
         self.assertTrue(inspect.iscoroutine(coroutine))
         self.assertIsNotNone(asyncio.run(coroutine))
-
-    def test_async_test_decorator_allows_synchronous_call_to_async_def(self):
-        """Test if the `async_test` decorator allows an `async def` to be called synchronously."""
-        @helpers.async_test
-        async def kosayoda():
-            return "return value"
-
-        self.assertEqual(kosayoda(), "return value")
-- cgit v1.2.3


From b6500eb967ae4856d4d65d7946b1e341c093eedd Mon Sep 17 00:00:00 2001
From: Sebastiaan Zeeff <33516116+SebastiaanZ@users.noreply.github.com>
Date: Sun, 23 Feb 2020 13:41:27 +0100
Subject: Remove lingering pytest test_time.py file

I forgot to remove one pytest test file during the migration from
pytest to unittest. Since we have since added a unittest version of the
same file, I've now removed the lingering pytest file.
--- tests/utils/test_time.py | 62 ------------------------------------------------ 1 file changed, 62 deletions(-) delete mode 100644 tests/utils/test_time.py diff --git a/tests/utils/test_time.py b/tests/utils/test_time.py deleted file mode 100644 index 4baa6395c..000000000 --- a/tests/utils/test_time.py +++ /dev/null @@ -1,62 +0,0 @@ -import asyncio -from datetime import datetime, timezone -from unittest.mock import patch - -import pytest -from dateutil.relativedelta import relativedelta - -from bot.utils import time -from tests.helpers import AsyncMock - - -@pytest.mark.parametrize( - ('delta', 'precision', 'max_units', 'expected'), - ( - (relativedelta(days=2), 'seconds', 1, '2 days'), - (relativedelta(days=2, hours=2), 'seconds', 2, '2 days and 2 hours'), - (relativedelta(days=2, hours=2), 'seconds', 1, '2 days'), - (relativedelta(days=2, hours=2), 'days', 2, '2 days'), - - # Does not abort for unknown units, as the unit name is checked - # against the attribute of the relativedelta instance. - (relativedelta(days=2, hours=2), 'elephants', 2, '2 days and 2 hours'), - - # Very high maximum units, but it only ever iterates over - # each value the relativedelta might have. - (relativedelta(days=2, hours=2), 'hours', 20, '2 days and 2 hours'), - ) -) -def test_humanize_delta( - delta: relativedelta, - precision: str, - max_units: int, - expected: str -): - assert time.humanize_delta(delta, precision, max_units) == expected - - -@pytest.mark.parametrize('max_units', (-1, 0)) -def test_humanize_delta_raises_for_invalid_max_units(max_units: int): - with pytest.raises(ValueError, match='max_units must be positive'): - time.humanize_delta(relativedelta(days=2, hours=2), 'hours', max_units) - - -@pytest.mark.parametrize( - ('stamp', 'expected'), - ( - ('Sun, 15 Sep 2019 12:00:00 GMT', datetime(2019, 9, 15, 12, 0, 0, tzinfo=timezone.utc)), - ) -) -def test_parse_rfc1123(stamp: str, expected: str): - assert time.parse_rfc1123(stamp) == expected - - -@patch('asyncio.sleep', new_callable=AsyncMock) -def test_wait_until(sleep_patch): - start = datetime(2019, 1, 1, 0, 0) - then = datetime(2019, 1, 1, 0, 10) - - # No return value - assert asyncio.run(time.wait_until(then, start)) is None - - sleep_patch.assert_called_once_with(10 * 60) -- cgit v1.2.3 From ea64d7cc6defa759fc1c7f1631a7ae9b8073cc29 Mon Sep 17 00:00:00 2001 From: Sebastiaan Zeeff <33516116+SebastiaanZ@users.noreply.github.com> Date: Sun, 23 Feb 2020 20:53:45 +0100 Subject: Use unittest's AsyncMock instead of our AsyncMock Python 3.8 introduced an `unittest.mock.AsyncMock` class that can be used to mock coroutines and other types of asynchronous operations like async iterators and async context managers. As we were using our custom, but limited, AsyncMock, I have replaced our mock with unittest's AsyncMock. Since Python 3.8 also introduces a different way of automatically detecting which attributes should be mocked with an AsyncMock, I've changed our CustomMockMixin to use this new method as well. Together with a couple other small changes, this means that our Custom Mocks now use a lazy method of detecting coroutine attributes, which significantly speeds up the test suite. 
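
[Editor's note] As a rough, standalone illustration of the behaviour described above (a sketch against the standard library only; Service, fetch, and describe are invented names rather than code from this repository), awaiting an unittest.mock.AsyncMock records the await, and a mock built from a spec lazily returns AsyncMock children for the spec's coroutine functions while regular methods remain ordinary MagicMock children:

    import asyncio
    import unittest.mock


    class Service:
        """Hypothetical spec object; only the shape of its methods matters here."""

        async def fetch(self, key: str) -> str:  # a coroutine function
            return key

        def describe(self) -> str:               # a regular function
            return "service"


    async def demo() -> None:
        # Awaiting an AsyncMock returns its configured return_value and records the await.
        relay = unittest.mock.AsyncMock(return_value="relayed")
        result = await relay("quack")
        relay.assert_awaited_once_with("quack")
        assert result == "relayed"

        # Coroutine functions found on the spec are detected lazily and mocked with
        # AsyncMock; regular functions still become MagicMock children.
        mocked = unittest.mock.MagicMock(spec_set=Service)
        assert isinstance(mocked.fetch, unittest.mock.AsyncMock)
        assert not isinstance(mocked.describe, unittest.mock.AsyncMock)
        await mocked.fetch("key")
        mocked.fetch.assert_awaited_once_with("key")


    asyncio.run(demo())

Because the coroutine attributes are only wrapped when they are first accessed, a spec'd mock no longer has to inspect every member of the spec up front, which is presumably where the speed-up mentioned above comes from.
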
--- tests/bot/cogs/test_duck_pond.py | 22 ++-- tests/bot/cogs/test_information.py | 34 +++---- tests/bot/cogs/test_token_remover.py | 4 +- tests/bot/utils/test_time.py | 3 +- tests/helpers.py | 190 ++++++++++++----------------------- tests/test_helpers.py | 63 ++---------- 6 files changed, 103 insertions(+), 213 deletions(-) diff --git a/tests/bot/cogs/test_duck_pond.py b/tests/bot/cogs/test_duck_pond.py index 6406f0737..e164f7544 100644 --- a/tests/bot/cogs/test_duck_pond.py +++ b/tests/bot/cogs/test_duck_pond.py @@ -2,7 +2,7 @@ import asyncio import logging import typing import unittest -from unittest.mock import MagicMock, patch +from unittest.mock import AsyncMock, MagicMock, patch import discord @@ -293,8 +293,8 @@ class DuckPondTests(base.LoggingTestsMixin, unittest.IsolatedAsyncioTestCase): ) for message, expect_webhook_call, expect_attachment_call in test_values: - with patch(send_webhook_path, new_callable=helpers.AsyncMock) as send_webhook: - with patch(send_attachments_path, new_callable=helpers.AsyncMock) as send_attachments: + with patch(send_webhook_path, new_callable=AsyncMock) as send_webhook: + with patch(send_attachments_path, new_callable=AsyncMock) as send_attachments: with self.subTest(clean_content=message.clean_content, attachments=message.attachments): await self.cog.relay_message(message) @@ -303,7 +303,7 @@ class DuckPondTests(base.LoggingTestsMixin, unittest.IsolatedAsyncioTestCase): message.add_reaction.assert_called_once_with(self.checkmark_emoji) - @patch(f"{MODULE_PATH}.send_attachments", new_callable=helpers.AsyncMock) + @patch(f"{MODULE_PATH}.send_attachments", new_callable=AsyncMock) async def test_relay_message_handles_irretrievable_attachment_exceptions(self, send_attachments): """The `relay_message` method should handle irretrievable attachments.""" message = helpers.MockMessage(clean_content="message", attachments=["attachment"]) @@ -314,15 +314,15 @@ class DuckPondTests(base.LoggingTestsMixin, unittest.IsolatedAsyncioTestCase): for side_effect in side_effects: send_attachments.side_effect = side_effect - with patch(f"{MODULE_PATH}.DuckPond.send_webhook", new_callable=helpers.AsyncMock) as send_webhook: + with patch(f"{MODULE_PATH}.DuckPond.send_webhook", new_callable=AsyncMock) as send_webhook: with self.subTest(side_effect=type(side_effect).__name__): with self.assertNotLogs(logger=log, level=logging.ERROR): await self.cog.relay_message(message) self.assertEqual(send_webhook.call_count, 2) - @patch(f"{MODULE_PATH}.DuckPond.send_webhook", new_callable=helpers.AsyncMock) - @patch(f"{MODULE_PATH}.send_attachments", new_callable=helpers.AsyncMock) + @patch(f"{MODULE_PATH}.DuckPond.send_webhook", new_callable=AsyncMock) + @patch(f"{MODULE_PATH}.send_attachments", new_callable=AsyncMock) async def test_relay_message_handles_attachment_http_error(self, send_attachments, send_webhook): """The `relay_message` method should handle irretrievable attachments.""" message = helpers.MockMessage(clean_content="message", attachments=["attachment"]) @@ -456,7 +456,7 @@ class DuckPondTests(base.LoggingTestsMixin, unittest.IsolatedAsyncioTestCase): channel.fetch_message.reset_mock() @patch(f"{MODULE_PATH}.DuckPond.is_staff") - @patch(f"{MODULE_PATH}.DuckPond.count_ducks", new_callable=helpers.AsyncMock) + @patch(f"{MODULE_PATH}.DuckPond.count_ducks", new_callable=AsyncMock) def test_on_raw_reaction_add_returns_on_message_with_green_checkmark_placed_by_bot(self, count_ducks, is_staff): """The `on_raw_reaction_add` event should return when the message has a green check 
mark placed by the bot.""" channel_id = 31415926535 @@ -491,8 +491,8 @@ class DuckPondTests(base.LoggingTestsMixin, unittest.IsolatedAsyncioTestCase): payload.emoji = self.duck_pond_emoji for duck_count, should_relay in test_cases: - with patch(f"{MODULE_PATH}.DuckPond.relay_message", new_callable=helpers.AsyncMock) as relay_message: - with patch(f"{MODULE_PATH}.DuckPond.count_ducks", new_callable=helpers.AsyncMock) as count_ducks: + with patch(f"{MODULE_PATH}.DuckPond.relay_message", new_callable=AsyncMock) as relay_message: + with patch(f"{MODULE_PATH}.DuckPond.count_ducks", new_callable=AsyncMock) as count_ducks: count_ducks.return_value = duck_count with self.subTest(duck_count=duck_count, should_relay=should_relay): await self.cog.on_raw_reaction_add(payload) @@ -526,7 +526,7 @@ class DuckPondTests(base.LoggingTestsMixin, unittest.IsolatedAsyncioTestCase): (constants.DuckPond.threshold + 1, True), ) for duck_count, should_re_add_checkmark in test_cases: - with patch(f"{MODULE_PATH}.DuckPond.count_ducks", new_callable=helpers.AsyncMock) as count_ducks: + with patch(f"{MODULE_PATH}.DuckPond.count_ducks", new_callable=AsyncMock) as count_ducks: count_ducks.return_value = duck_count with self.subTest(duck_count=duck_count, should_re_add_checkmark=should_re_add_checkmark): await self.cog.on_raw_reaction_remove(payload) diff --git a/tests/bot/cogs/test_information.py b/tests/bot/cogs/test_information.py index deae7ebad..f5e937356 100644 --- a/tests/bot/cogs/test_information.py +++ b/tests/bot/cogs/test_information.py @@ -34,7 +34,7 @@ class InformationCogTests(unittest.TestCase): """Test if the `role_info` command correctly returns the `moderator_role`.""" self.ctx.guild.roles.append(self.moderator_role) - self.cog.roles_info.can_run = helpers.AsyncMock() + self.cog.roles_info.can_run = unittest.mock.AsyncMock() self.cog.roles_info.can_run.return_value = True coroutine = self.cog.roles_info.callback(self.cog, self.ctx) @@ -72,7 +72,7 @@ class InformationCogTests(unittest.TestCase): self.ctx.guild.roles.append([dummy_role, admin_role]) - self.cog.role_info.can_run = helpers.AsyncMock() + self.cog.role_info.can_run = unittest.mock.AsyncMock() self.cog.role_info.can_run.return_value = True coroutine = self.cog.role_info.callback(self.cog, self.ctx, dummy_role, admin_role) @@ -174,7 +174,7 @@ class UserInfractionHelperMethodTests(unittest.TestCase): def setUp(self): """Common set-up steps done before for each test.""" self.bot = helpers.MockBot() - self.bot.api_client.get = helpers.AsyncMock() + self.bot.api_client.get = unittest.mock.AsyncMock() self.cog = information.Information(self.bot) self.member = helpers.MockMember(id=1234) @@ -345,10 +345,10 @@ class UserEmbedTests(unittest.TestCase): def setUp(self): """Common set-up steps done before for each test.""" self.bot = helpers.MockBot() - self.bot.api_client.get = helpers.AsyncMock() + self.bot.api_client.get = unittest.mock.AsyncMock() self.cog = information.Information(self.bot) - @unittest.mock.patch(f"{COG_PATH}.basic_user_infraction_counts", new=helpers.AsyncMock(return_value="")) + @unittest.mock.patch(f"{COG_PATH}.basic_user_infraction_counts", new=unittest.mock.AsyncMock(return_value="")) def test_create_user_embed_uses_string_representation_of_user_in_title_if_nick_is_not_available(self): """The embed should use the string representation of the user if they don't have a nick.""" ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=1)) @@ -360,7 +360,7 @@ class UserEmbedTests(unittest.TestCase): self.assertEqual(embed.title, 
"Mr. Hemlock") - @unittest.mock.patch(f"{COG_PATH}.basic_user_infraction_counts", new=helpers.AsyncMock(return_value="")) + @unittest.mock.patch(f"{COG_PATH}.basic_user_infraction_counts", new=unittest.mock.AsyncMock(return_value="")) def test_create_user_embed_uses_nick_in_title_if_available(self): """The embed should use the nick if it's available.""" ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=1)) @@ -372,7 +372,7 @@ class UserEmbedTests(unittest.TestCase): self.assertEqual(embed.title, "Cat lover (Mr. Hemlock)") - @unittest.mock.patch(f"{COG_PATH}.basic_user_infraction_counts", new=helpers.AsyncMock(return_value="")) + @unittest.mock.patch(f"{COG_PATH}.basic_user_infraction_counts", new=unittest.mock.AsyncMock(return_value="")) def test_create_user_embed_ignores_everyone_role(self): """Created `!user` embeds should not contain mention of the @everyone-role.""" ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=1)) @@ -387,8 +387,8 @@ class UserEmbedTests(unittest.TestCase): self.assertIn("&Admins", embed.description) self.assertNotIn("&Everyone", embed.description) - @unittest.mock.patch(f"{COG_PATH}.expanded_user_infraction_counts", new_callable=helpers.AsyncMock) - @unittest.mock.patch(f"{COG_PATH}.user_nomination_counts", new_callable=helpers.AsyncMock) + @unittest.mock.patch(f"{COG_PATH}.expanded_user_infraction_counts", new_callable=unittest.mock.AsyncMock) + @unittest.mock.patch(f"{COG_PATH}.user_nomination_counts", new_callable=unittest.mock.AsyncMock) def test_create_user_embed_expanded_information_in_moderation_channels(self, nomination_counts, infraction_counts): """The embed should contain expanded infractions and nomination info in mod channels.""" ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=50)) @@ -423,7 +423,7 @@ class UserEmbedTests(unittest.TestCase): embed.description ) - @unittest.mock.patch(f"{COG_PATH}.basic_user_infraction_counts", new_callable=helpers.AsyncMock) + @unittest.mock.patch(f"{COG_PATH}.basic_user_infraction_counts", new_callable=unittest.mock.AsyncMock) def test_create_user_embed_basic_information_outside_of_moderation_channels(self, infraction_counts): """The embed should contain only basic infraction data outside of mod channels.""" ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=100)) @@ -454,7 +454,7 @@ class UserEmbedTests(unittest.TestCase): embed.description ) - @unittest.mock.patch(f"{COG_PATH}.basic_user_infraction_counts", new=helpers.AsyncMock(return_value="")) + @unittest.mock.patch(f"{COG_PATH}.basic_user_infraction_counts", new=unittest.mock.AsyncMock(return_value="")) def test_create_user_embed_uses_top_role_colour_when_user_has_roles(self): """The embed should be created with the colour of the top role, if a top role is available.""" ctx = helpers.MockContext() @@ -467,7 +467,7 @@ class UserEmbedTests(unittest.TestCase): self.assertEqual(embed.colour, discord.Colour(moderators_role.colour)) - @unittest.mock.patch(f"{COG_PATH}.basic_user_infraction_counts", new=helpers.AsyncMock(return_value="")) + @unittest.mock.patch(f"{COG_PATH}.basic_user_infraction_counts", new=unittest.mock.AsyncMock(return_value="")) def test_create_user_embed_uses_blurple_colour_when_user_has_no_roles(self): """The embed should be created with a blurple colour if the user has no assigned roles.""" ctx = helpers.MockContext() @@ -477,7 +477,7 @@ class UserEmbedTests(unittest.TestCase): self.assertEqual(embed.colour, discord.Colour.blurple()) - @unittest.mock.patch(f"{COG_PATH}.basic_user_infraction_counts", 
new=helpers.AsyncMock(return_value="")) + @unittest.mock.patch(f"{COG_PATH}.basic_user_infraction_counts", new=unittest.mock.AsyncMock(return_value="")) def test_create_user_embed_uses_png_format_of_user_avatar_as_thumbnail(self): """The embed thumbnail should be set to the user's avatar in `png` format.""" ctx = helpers.MockContext() @@ -529,7 +529,7 @@ class UserCommandTests(unittest.TestCase): with self.assertRaises(InChannelCheckFailure, msg=msg): asyncio.run(self.cog.user_info.callback(self.cog, ctx)) - @unittest.mock.patch("bot.cogs.information.Information.create_user_embed", new_callable=helpers.AsyncMock) + @unittest.mock.patch("bot.cogs.information.Information.create_user_embed", new_callable=unittest.mock.AsyncMock) def test_regular_user_may_use_command_in_bot_commands_channel(self, create_embed, constants): """A regular user should be allowed to use `!user` targeting themselves in bot-commands.""" constants.STAFF_ROLES = [self.moderator_role.id] @@ -542,7 +542,7 @@ class UserCommandTests(unittest.TestCase): create_embed.assert_called_once_with(ctx, self.author) ctx.send.assert_called_once() - @unittest.mock.patch("bot.cogs.information.Information.create_user_embed", new_callable=helpers.AsyncMock) + @unittest.mock.patch("bot.cogs.information.Information.create_user_embed", new_callable=unittest.mock.AsyncMock) def test_regular_user_can_explicitly_target_themselves(self, create_embed, constants): """A user should target itself with `!user` when a `user` argument was not provided.""" constants.STAFF_ROLES = [self.moderator_role.id] @@ -555,7 +555,7 @@ class UserCommandTests(unittest.TestCase): create_embed.assert_called_once_with(ctx, self.author) ctx.send.assert_called_once() - @unittest.mock.patch("bot.cogs.information.Information.create_user_embed", new_callable=helpers.AsyncMock) + @unittest.mock.patch("bot.cogs.information.Information.create_user_embed", new_callable=unittest.mock.AsyncMock) def test_staff_members_can_bypass_channel_restriction(self, create_embed, constants): """Staff members should be able to bypass the bot-commands channel restriction.""" constants.STAFF_ROLES = [self.moderator_role.id] @@ -568,7 +568,7 @@ class UserCommandTests(unittest.TestCase): create_embed.assert_called_once_with(ctx, self.moderator) ctx.send.assert_called_once() - @unittest.mock.patch("bot.cogs.information.Information.create_user_embed", new_callable=helpers.AsyncMock) + @unittest.mock.patch("bot.cogs.information.Information.create_user_embed", new_callable=unittest.mock.AsyncMock) def test_moderators_can_target_another_member(self, create_embed, constants): """A moderator should be able to use `!user` targeting another user.""" constants.MODERATION_ROLES = [self.moderator_role.id] diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index a54b839d7..33d1ec170 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -1,7 +1,7 @@ import asyncio import logging import unittest -from unittest.mock import MagicMock +from unittest.mock import AsyncMock, MagicMock from discord import Colour @@ -11,7 +11,7 @@ from bot.cogs.token_remover import ( setup as setup_cog, ) from bot.constants import Channels, Colours, Event, Icons -from tests.helpers import AsyncMock, MockBot, MockMessage +from tests.helpers import MockBot, MockMessage class TokenRemoverTests(unittest.TestCase): diff --git a/tests/bot/utils/test_time.py b/tests/bot/utils/test_time.py index 69f35f2f5..de5724bca 100644 --- a/tests/bot/utils/test_time.py +++ 
b/tests/bot/utils/test_time.py @@ -1,12 +1,11 @@ import asyncio import unittest from datetime import datetime, timezone -from unittest.mock import patch +from unittest.mock import AsyncMock, patch from dateutil.relativedelta import relativedelta from bot.utils import time -from tests.helpers import AsyncMock class TimeTests(unittest.TestCase): diff --git a/tests/helpers.py b/tests/helpers.py index 01752a791..506fe9894 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -1,11 +1,10 @@ from __future__ import annotations import collections -import inspect import itertools import logging import unittest.mock -from typing import Any, Iterable, Optional +from typing import Iterable, Optional import discord from discord.ext.commands import Context @@ -51,24 +50,31 @@ class CustomMockMixin: """ Provides common functionality for our custom Mock types. - The cooperative `__init__` automatically creates `AsyncMock` attributes for every coroutine - function `inspect` detects in the `spec` instance we provide. In addition, this mixin takes care - of making sure child mocks are instantiated with the correct class. By default, the mock of the - children will be `unittest.mock.MagicMock`, but this can be overwritten by setting the attribute - `child_mock_type` on the custom mock inheriting from this mixin. + The `_get_child_mock` method automatically returns an AsyncMock for coroutine methods of the mock + object. As discord.py also uses synchronous methods that nonetheless return coroutine objects, the + class attribute `additional_spec_asyncs` can be overwritten with an iterable containing additional + attribute names that should also mocked with an AsyncMock instead of a regular MagicMock/Mock. The + class method `spec_set` can be overwritten with the object that should be uses as the specification + for the mock. + + Mock/MagicMock subclasses that use this mixin only need to define `__init__` method if they need to + implement custom behavior. """ child_mock_type = unittest.mock.MagicMock discord_id = itertools.count(0) + spec_set = None + additional_spec_asyncs = None - def __init__(self, spec_set: Any = None, **kwargs): + def __init__(self, **kwargs): name = kwargs.pop('name', None) # `name` has special meaning for Mock classes, so we need to set it manually. - super().__init__(spec_set=spec_set, **kwargs) + super().__init__(spec_set=self.spec_set, **kwargs) + + if self.additional_spec_asyncs: + self._spec_asyncs.extend(self.additional_spec_asyncs) if name: self.name = name - if spec_set: - self._extract_coroutine_methods_from_spec_instance(spec_set) def _get_child_mock(self, **kw): """ @@ -82,7 +88,16 @@ class CustomMockMixin: This override will look for an attribute called `child_mock_type` and use that as the type of the child mock. """ - klass = self.child_mock_type + _new_name = kw.get("_new_name") + if _new_name in self.__dict__['_spec_asyncs']: + return unittest.mock.AsyncMock(**kw) + + _type = type(self) + if issubclass(_type, unittest.mock.MagicMock) and _new_name in unittest.mock._async_method_magics: + # Any asynchronous magic becomes an AsyncMock + klass = unittest.mock.AsyncMock + else: + klass = self.child_mock_type if self._mock_sealed: attribute = "." 
+ kw["name"] if "name" in kw else "()" @@ -91,95 +106,6 @@ class CustomMockMixin: return klass(**kw) - def _extract_coroutine_methods_from_spec_instance(self, source: Any) -> None: - """Automatically detect coroutine functions in `source` and set them as AsyncMock attributes.""" - for name, _method in inspect.getmembers(source, inspect.iscoroutinefunction): - setattr(self, name, AsyncMock()) - - -# TODO: Remove me in Python 3.8 -class AsyncMock(CustomMockMixin, unittest.mock.MagicMock): - """ - A MagicMock subclass to mock async callables. - - Python 3.8 will introduce an AsyncMock class in the standard library that will have some more - features; this stand-in only overwrites the `__call__` method to an async version. - """ - - async def __call__(self, *args, **kwargs): - return super().__call__(*args, **kwargs) - - -class AsyncIteratorMock: - """ - A class to mock asynchronous iterators. - - This allows async for, which is used in certain Discord.py objects. For example, - an async iterator is returned by the Reaction.users() method. - """ - - def __init__(self, iterable: Iterable = None): - if iterable is None: - iterable = [] - - self.iter = iter(iterable) - self.iterable = iterable - - self.call_count = 0 - - def __aiter__(self): - return self - - async def __anext__(self): - try: - return next(self.iter) - except StopIteration: - raise StopAsyncIteration - - def __call__(self): - """ - Keeps track of the number of times an instance has been called. - - This is useful, since it typically shows that the iterator has actually been used somewhere after we have - instantiated the mock for an attribute that normally returns an iterator when called. - """ - self.call_count += 1 - return self - - @property - def return_value(self): - """Makes `self.iterable` accessible as self.return_value.""" - return self.iterable - - @return_value.setter - def return_value(self, iterable): - """Stores the `return_value` as `self.iterable` and its iterator as `self.iter`.""" - self.iter = iter(iterable) - self.iterable = iterable - - def assert_called(self): - """Asserts if the AsyncIteratorMock instance has been called at least once.""" - if self.call_count == 0: - raise AssertionError("Expected AsyncIteratorMock to have been called.") - - def assert_called_once(self): - """Asserts if the AsyncIteratorMock instance has been called exactly once.""" - if self.call_count != 1: - raise AssertionError( - f"Expected AsyncIteratorMock to have been called once. Called {self.call_count} times." - ) - - def assert_not_called(self): - """Asserts if the AsyncIteratorMock instance has not been called.""" - if self.call_count != 0: - raise AssertionError( - f"Expected AsyncIteratorMock to not have been called once. Called {self.call_count} times." - ) - - def reset_mock(self): - """Resets the call count, but not the return value or iterator.""" - self.call_count = 0 - # Create a guild instance to get a realistic Mock of `discord.Guild` guild_data = { @@ -230,9 +156,11 @@ class MockGuild(CustomMockMixin, unittest.mock.Mock, HashableMixin): For more info, see the `Mocking` section in `tests/README.md`. 
""" + spec_set = guild_instance + def __init__(self, roles: Optional[Iterable[MockRole]] = None, **kwargs) -> None: default_kwargs = {'id': next(self.discord_id), 'members': []} - super().__init__(spec_set=guild_instance, **collections.ChainMap(kwargs, default_kwargs)) + super().__init__(**collections.ChainMap(kwargs, default_kwargs)) self.roles = [MockRole(name="@everyone", position=1, id=0)] if roles: @@ -251,9 +179,11 @@ class MockRole(CustomMockMixin, unittest.mock.Mock, ColourMixin, HashableMixin): Instances of this class will follow the specifications of `discord.Role` instances. For more information, see the `MockGuild` docstring. """ + spec_set = role_instance + def __init__(self, **kwargs) -> None: default_kwargs = {'id': next(self.discord_id), 'name': 'role', 'position': 1} - super().__init__(spec_set=role_instance, **collections.ChainMap(kwargs, default_kwargs)) + super().__init__(**collections.ChainMap(kwargs, default_kwargs)) if 'mention' not in kwargs: self.mention = f'&{self.name}' @@ -276,9 +206,11 @@ class MockMember(CustomMockMixin, unittest.mock.Mock, ColourMixin, HashableMixin Instances of this class will follow the specifications of `discord.Member` instances. For more information, see the `MockGuild` docstring. """ + spec_set = member_instance + def __init__(self, roles: Optional[Iterable[MockRole]] = None, **kwargs) -> None: default_kwargs = {'name': 'member', 'id': next(self.discord_id), 'bot': False} - super().__init__(spec_set=member_instance, **collections.ChainMap(kwargs, default_kwargs)) + super().__init__(**collections.ChainMap(kwargs, default_kwargs)) self.roles = [MockRole(name="@everyone", position=1, id=0)] if roles: @@ -299,9 +231,11 @@ class MockUser(CustomMockMixin, unittest.mock.Mock, ColourMixin, HashableMixin): Instances of this class will follow the specifications of `discord.User` instances. For more information, see the `MockGuild` docstring. """ + spec_set = user_instance + def __init__(self, **kwargs) -> None: default_kwargs = {'name': 'user', 'id': next(self.discord_id), 'bot': False} - super().__init__(spec_set=user_instance, **collections.ChainMap(kwargs, default_kwargs)) + super().__init__(**collections.ChainMap(kwargs, default_kwargs)) if 'mention' not in kwargs: self.mention = f"@{self.name}" @@ -320,14 +254,16 @@ class MockBot(CustomMockMixin, unittest.mock.MagicMock): Instances of this class will follow the specifications of `discord.ext.commands.Bot` instances. For more information, see the `MockGuild` docstring. """ + spec_set = bot_instance + additional_spec_asyncs = ("wait_for",) def __init__(self, **kwargs) -> None: - super().__init__(spec_set=bot_instance, **kwargs) + super().__init__(**kwargs) # self.wait_for is *not* a coroutine function, but returns a coroutine nonetheless and # and should therefore be awaited. (The documentation calls it a coroutine as well, which # is technically incorrect, since it's a regular def.) - self.wait_for = AsyncMock() + # self.wait_for = unittest.mock.AsyncMock() # Since calling `create_task` on our MockBot does not actually schedule the coroutine object # as a task in the asyncio loop, this `side_effect` calls `close()` on the coroutine object @@ -358,10 +294,11 @@ class MockTextChannel(CustomMockMixin, unittest.mock.Mock, HashableMixin): Instances of this class will follow the specifications of `discord.TextChannel` instances. For more information, see the `MockGuild` docstring. 
""" + spec_set = channel_instance def __init__(self, name: str = 'channel', channel_id: int = 1, **kwargs) -> None: default_kwargs = {'id': next(self.discord_id), 'name': 'channel', 'guild': MockGuild()} - super().__init__(spec_set=channel_instance, **collections.ChainMap(kwargs, default_kwargs)) + super().__init__(**collections.ChainMap(kwargs, default_kwargs)) if 'mention' not in kwargs: self.mention = f"#{self.name}" @@ -400,9 +337,10 @@ class MockContext(CustomMockMixin, unittest.mock.MagicMock): Instances of this class will follow the specifications of `discord.ext.commands.Context` instances. For more information, see the `MockGuild` docstring. """ + spec_set = context_instance def __init__(self, **kwargs) -> None: - super().__init__(spec_set=context_instance, **kwargs) + super().__init__(**kwargs) self.bot = kwargs.get('bot', MockBot()) self.guild = kwargs.get('guild', MockGuild()) self.author = kwargs.get('author', MockMember()) @@ -419,8 +357,7 @@ class MockAttachment(CustomMockMixin, unittest.mock.MagicMock): Instances of this class will follow the specifications of `discord.Attachment` instances. For more information, see the `MockGuild` docstring. """ - def __init__(self, **kwargs) -> None: - super().__init__(spec_set=attachment_instance, **kwargs) + spec_set = attachment_instance class MockMessage(CustomMockMixin, unittest.mock.MagicMock): @@ -430,10 +367,11 @@ class MockMessage(CustomMockMixin, unittest.mock.MagicMock): Instances of this class will follow the specifications of `discord.Message` instances. For more information, see the `MockGuild` docstring. """ + spec_set = message_instance def __init__(self, **kwargs) -> None: default_kwargs = {'attachments': []} - super().__init__(spec_set=message_instance, **collections.ChainMap(kwargs, default_kwargs)) + super().__init__(**collections.ChainMap(kwargs, default_kwargs)) self.author = kwargs.get('author', MockMember()) self.channel = kwargs.get('channel', MockTextChannel()) @@ -449,9 +387,10 @@ class MockEmoji(CustomMockMixin, unittest.mock.MagicMock): Instances of this class will follow the specifications of `discord.Emoji` instances. For more information, see the `MockGuild` docstring. """ + spec_set = emoji_instance def __init__(self, **kwargs) -> None: - super().__init__(spec_set=emoji_instance, **kwargs) + super().__init__(**kwargs) self.guild = kwargs.get('guild', MockGuild()) @@ -465,9 +404,7 @@ class MockPartialEmoji(CustomMockMixin, unittest.mock.MagicMock): Instances of this class will follow the specifications of `discord.PartialEmoji` instances. For more information, see the `MockGuild` docstring. """ - - def __init__(self, **kwargs) -> None: - super().__init__(spec_set=partial_emoji_instance, **kwargs) + spec_set = partial_emoji_instance reaction_instance = discord.Reaction(message=MockMessage(), data={'me': True}, emoji=MockEmoji()) @@ -480,12 +417,17 @@ class MockReaction(CustomMockMixin, unittest.mock.MagicMock): Instances of this class will follow the specifications of `discord.Reaction` instances. For more information, see the `MockGuild` docstring. 
""" + spec_set = reaction_instance def __init__(self, **kwargs) -> None: - super().__init__(spec_set=reaction_instance, **kwargs) + _users = kwargs.pop("users", []) + super().__init__(**kwargs) self.emoji = kwargs.get('emoji', MockEmoji()) self.message = kwargs.get('message', MockMessage()) - self.users = AsyncIteratorMock(kwargs.get('users', [])) + + user_iterator = unittest.mock.AsyncMock() + user_iterator.__aiter__.return_value = _users + self.users.return_value = user_iterator webhook_instance = discord.Webhook(data=unittest.mock.MagicMock(), adapter=unittest.mock.MagicMock()) @@ -498,13 +440,5 @@ class MockAsyncWebhook(CustomMockMixin, unittest.mock.MagicMock): Instances of this class will follow the specifications of `discord.Webhook` instances. For more information, see the `MockGuild` docstring. """ - - def __init__(self, **kwargs) -> None: - super().__init__(spec_set=webhook_instance, **kwargs) - - # Because Webhooks can also use a synchronous "WebhookAdapter", the methods are not defined - # as coroutines. That's why we need to set the methods manually. - self.send = AsyncMock() - self.edit = AsyncMock() - self.delete = AsyncMock() - self.execute = AsyncMock() + spec_set = webhook_instance + additional_spec_asyncs = ("send", "edit", "delete", "execute") diff --git a/tests/test_helpers.py b/tests/test_helpers.py index fe39df308..81285e009 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -1,5 +1,4 @@ import asyncio -import inspect import unittest import unittest.mock @@ -214,6 +213,11 @@ class DiscordMocksTests(unittest.TestCase): with self.assertRaises(RuntimeError, msg="cannot reuse already awaited coroutine"): asyncio.run(coroutine_object) + def test_user_mock_uses_explicitly_passed_mention_attribute(self): + """MockUser should use an explicitly passed value for user.mention.""" + user = helpers.MockUser(mention="hello") + self.assertEqual(user.mention, "hello") + class MockObjectTests(unittest.TestCase): """Tests the mock objects and mixins we've defined.""" @@ -341,57 +345,10 @@ class MockObjectTests(unittest.TestCase): attribute = getattr(mock, valid_attribute) self.assertTrue(isinstance(attribute, mock_type.child_mock_type)) - def test_extract_coroutine_methods_from_spec_instance_should_extract_all_and_only_coroutines(self): - """Test if all coroutine functions are extracted, but not regular methods or attributes.""" - class CoroutineDonor: - def __init__(self): - self.some_attribute = 'alpha' - - async def first_coroutine(): - """This coroutine function should be extracted.""" - - async def second_coroutine(): - """This coroutine function should be extracted.""" - - def regular_method(): - """This regular function should not be extracted.""" - - class Receiver: + def test_custom_mock_mixin_mocks_async_magic_methods_with_async_mock(self): + """The CustomMockMixin should mock async magic methods with an AsyncMock.""" + class MyMock(helpers.CustomMockMixin, unittest.mock.MagicMock): pass - donor = CoroutineDonor() - receiver = Receiver() - - helpers.CustomMockMixin._extract_coroutine_methods_from_spec_instance(receiver, donor) - - self.assertIsInstance(receiver.first_coroutine, helpers.AsyncMock) - self.assertIsInstance(receiver.second_coroutine, helpers.AsyncMock) - self.assertFalse(hasattr(receiver, 'regular_method')) - self.assertFalse(hasattr(receiver, 'some_attribute')) - - @unittest.mock.patch("builtins.super", new=unittest.mock.MagicMock()) - @unittest.mock.patch("tests.helpers.CustomMockMixin._extract_coroutine_methods_from_spec_instance") - def 
test_custom_mock_mixin_init_with_spec(self, extract_method_mock):
-        """Test if CustomMockMixin correctly passes on spec/kwargs and calls the extraction method."""
-        spec_set = "pydis"
-
-        helpers.CustomMockMixin(spec_set=spec_set)
-
-        extract_method_mock.assert_called_once_with(spec_set)
-
-    @unittest.mock.patch("builtins.super", new=unittest.mock.MagicMock())
-    @unittest.mock.patch("tests.helpers.CustomMockMixin._extract_coroutine_methods_from_spec_instance")
-    def test_custom_mock_mixin_init_without_spec(self, extract_method_mock):
-        """Test if CustomMockMixin correctly passes on spec/kwargs and calls the extraction method."""
-        helpers.CustomMockMixin()
-
-        extract_method_mock.assert_not_called()
-
-    def test_async_mock_provides_coroutine_for_dunder_call(self):
-        """Test if AsyncMock objects have a coroutine for their __call__ method."""
-        async_mock = helpers.AsyncMock()
-        self.assertTrue(inspect.iscoroutinefunction(async_mock.__call__))
-
-        coroutine = async_mock()
-        self.assertTrue(inspect.iscoroutine(coroutine))
-        self.assertIsNotNone(asyncio.run(coroutine))
+        mock = MyMock()
+        self.assertIsInstance(mock.__aenter__, unittest.mock.AsyncMock)
-- cgit v1.2.3


From f67cb7ac61eee86419d10e23e3fd3c66f1f9312e Mon Sep 17 00:00:00 2001
From: Sebastiaan Zeeff <33516116+SebastiaanZ@users.noreply.github.com>
Date: Sun, 23 Feb 2020 20:58:20 +0100
Subject: Fix test_time test and ensure coverage

One of the test_time methods did not actually assert the exception
message it was trying to detect, as the assertion statement was
contained within the context manager handling the exception. I've moved
it out of the context manager so it actually runs.

I've also added a few `pragma: no cover` comments for parts that were
artificially lowering the coverage of the test suite.
---
 tests/bot/cogs/test_duck_pond.py | 2 +-
 tests/bot/rules/__init__.py      | 4 ++--
 tests/bot/utils/test_time.py     | 2 +-
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/tests/bot/cogs/test_duck_pond.py b/tests/bot/cogs/test_duck_pond.py
index e164f7544..7370b8471 100644
--- a/tests/bot/cogs/test_duck_pond.py
+++ b/tests/bot/cogs/test_duck_pond.py
@@ -312,7 +312,7 @@ class DuckPondTests(base.LoggingTestsMixin, unittest.IsolatedAsyncioTestCase):
         self.cog.webhook = helpers.MockAsyncWebhook()
         log = logging.getLogger("bot.cogs.duck_pond")
 
-        for side_effect in side_effects:
+        for side_effect in side_effects:  # pragma: no cover
             send_attachments.side_effect = side_effect
             with patch(f"{MODULE_PATH}.DuckPond.send_webhook", new_callable=AsyncMock) as send_webhook:
                 with self.subTest(side_effect=type(side_effect).__name__):
diff --git a/tests/bot/rules/__init__.py b/tests/bot/rules/__init__.py
index 0233e7939..0d570f5a3 100644
--- a/tests/bot/rules/__init__.py
+++ b/tests/bot/rules/__init__.py
@@ -68,9 +68,9 @@ class RuleTest(unittest.IsolatedAsyncioTestCase, metaclass=ABCMeta):
     @abstractmethod
     def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]:
         """Give expected relevant messages for `case`."""
-        raise NotImplementedError
+        raise NotImplementedError  # pragma: no cover
 
     @abstractmethod
     def get_report(self, case: DisallowedCase) -> str:
         """Give expected error report for `case`."""
-        raise NotImplementedError
+        raise NotImplementedError  # pragma: no cover
diff --git a/tests/bot/utils/test_time.py b/tests/bot/utils/test_time.py
index de5724bca..694d3a40f 100644
--- a/tests/bot/utils/test_time.py
+++ b/tests/bot/utils/test_time.py
@@ -43,7 +43,7 @@ class TimeTests(unittest.TestCase):
         for max_units in test_cases:
            with self.subTest(max_units=max_units), self.assertRaises(ValueError) as error:
                 time.humanize_delta(relativedelta(days=2, hours=2), 'hours', max_units)
-                self.assertEqual(str(error), 'max_units must be positive')
+            self.assertEqual(str(error.exception), 'max_units must be positive')
 
     def test_parse_rfc1123(self):
         """Testing parse_rfc1123."""
-- cgit v1.2.3


From 863f99eaece2612f3817780a084a3486d9cf4748 Mon Sep 17 00:00:00 2001
From: Sebastiaan Zeeff <33516116+SebastiaanZ@users.noreply.github.com>
Date: Mon, 24 Feb 2020 00:00:55 +0100
Subject: Make Azure CI use Python 3.8 and Ubuntu 18.04

Since the bot is now using Python 3.8 and some of our dependencies have
Python-version specific dependencies, it's important that the CI, which
installs from our Pipfile, uses the same version of Python.
---
 azure-pipelines.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 874364a6f..35dea089a 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -9,7 +9,7 @@ jobs:
 - job: test
   displayName: 'Lint & Test'
   pool:
-    vmImage: ubuntu-16.04
+    vmImage: ubuntu-18.04
 
   variables:
     PIP_CACHE_DIR: ".cache/pip"
@@ -18,7 +18,7 @@ jobs:
   - task: UsePythonVersion@0
     displayName: 'Set Python version'
     inputs:
-      versionSpec: '3.7.x'
+      versionSpec: '3.8.x'
      addToPath: true
 
  - script: pip install pipenv
-- cgit v1.2.3


From b9cafd1c4d6707467e5a2f336dd80eda97ba2f42 Mon Sep 17 00:00:00 2001
From: Sebastiaan Zeeff <33516116+SebastiaanZ@users.noreply.github.com>
Date: Mon, 24 Feb 2020 01:00:45 +0100
Subject: Update Dockerfile to use python:3.8-slim

---
 Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Dockerfile b/Dockerfile
index 271c25050..22ebcd667 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.7-slim
+FROM python:3.8-slim
 
 # Set pip to have cleaner logs and no saved cache
 ENV PIP_NO_CACHE_DIR=false \
-- cgit v1.2.3


From c7ffafeedc44fde40e3bd5dae6c95fbabc75a9d9 Mon Sep 17 00:00:00 2001
From: Sebastiaan Zeeff <33516116+SebastiaanZ@users.noreply.github.com>
Date: Mon, 24 Feb 2020 02:10:22 +0100
Subject: Use realistic mixin implementation

Instead of using the mixin class bare, I've now included it in a class
that subclasses unittest.TestCase, as that's how it's going to be used
"in the wild".
---
 tests/test_base.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/test_base.py b/tests/test_base.py
index 23abb1dfd..235a2ee6c 100644
--- a/tests/test_base.py
+++ b/tests/test_base.py
@@ -6,7 +6,7 @@ import unittest.mock
 
 from tests.base import LoggingTestsMixin, _CaptureLogHandler
 
 
-class LoggingTestCase(LoggingTestsMixin):
+class LoggingTestCase(LoggingTestsMixin, unittest.TestCase):
     pass
-- cgit v1.2.3


From b8bd18bd743608ddff47064d0b459edff3da65e3 Mon Sep 17 00:00:00 2001
From: Sebastiaan Zeeff <33516116+SebastiaanZ@users.noreply.github.com>
Date: Mon, 24 Feb 2020 02:12:02 +0100
Subject: Migrate syncers test suite to Python 3.8

The test suite for the new role/member syncers used the "old"-style test
suite with the helpers implemented for Python 3.7. I have migrated it to
use the new Python 3.8 asyncio test helpers.
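
[Editor's note] The migration below repeats the recipe used earlier in this series: test classes inherit from unittest.IsolatedAsyncioTestCase and the custom helpers.AsyncMock is replaced by unittest.mock.AsyncMock. As a hedged sketch of the patching pattern that recurs in these hunks (FakeSyncer and its _send_prompt method are stand-ins, not the real bot.cogs.sync.syncers code):

    import unittest
    from unittest.mock import AsyncMock, patch


    class FakeSyncer:
        """Stand-in for an object with a coroutine method that the tests patch."""

        async def _send_prompt(self, message: str) -> str:
            return message


    class PatchedCoroutineTests(unittest.IsolatedAsyncioTestCase):
        async def test_patched_coroutine_records_awaits(self):
            # `new_callable=AsyncMock` mirrors the `mock.AsyncMock` usage in the migrated tests.
            with patch.object(FakeSyncer, "_send_prompt", new_callable=AsyncMock) as send_prompt:
                send_prompt.return_value = "edited"

                result = await FakeSyncer()._send_prompt("confirm?")

                self.assertEqual(result, "edited")
                send_prompt.assert_awaited_once_with("confirm?")


    if __name__ == "__main__":
        unittest.main()
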
--- tests/base.py | 4 ++-- tests/bot/cogs/sync/test_base.py | 45 ++++++++++++++++----------------------- tests/bot/cogs/sync/test_cog.py | 31 ++++++++------------------- tests/bot/cogs/sync/test_roles.py | 12 ++--------- tests/bot/cogs/sync/test_users.py | 13 ++--------- tests/helpers.py | 4 +--- 6 files changed, 34 insertions(+), 75 deletions(-) diff --git a/tests/base.py b/tests/base.py index 21613110e..42174e911 100644 --- a/tests/base.py +++ b/tests/base.py @@ -1,4 +1,5 @@ import logging +import unittest from contextlib import contextmanager from typing import Dict @@ -77,10 +78,9 @@ class LoggingTestsMixin: self.fail(msg) -class CommandTestCase(unittest.TestCase): +class CommandTestCase(unittest.IsolatedAsyncioTestCase): """TestCase with additional assertions that are useful for testing Discord commands.""" - @helpers.async_test async def assertHasPermissionsCheck( self, cmd: commands.Command, diff --git a/tests/bot/cogs/sync/test_base.py b/tests/bot/cogs/sync/test_base.py index e6a6f9688..17aa4198b 100644 --- a/tests/bot/cogs/sync/test_base.py +++ b/tests/bot/cogs/sync/test_base.py @@ -13,8 +13,8 @@ class TestSyncer(Syncer): """Syncer subclass with mocks for abstract methods for testing purposes.""" name = "test" - _get_diff = helpers.AsyncMock() - _sync = helpers.AsyncMock() + _get_diff = mock.AsyncMock() + _sync = mock.AsyncMock() class SyncerBaseTests(unittest.TestCase): @@ -29,7 +29,7 @@ class SyncerBaseTests(unittest.TestCase): Syncer(self.bot) -class SyncerSendPromptTests(unittest.TestCase): +class SyncerSendPromptTests(unittest.IsolatedAsyncioTestCase): """Tests for sending the sync confirmation prompt.""" def setUp(self): @@ -61,7 +61,6 @@ class SyncerSendPromptTests(unittest.TestCase): return mock_channel, mock_message - @helpers.async_test async def test_send_prompt_edits_and_returns_message(self): """The given message should be edited to display the prompt and then should be returned.""" msg = helpers.MockMessage() @@ -71,7 +70,6 @@ class SyncerSendPromptTests(unittest.TestCase): self.assertIn("content", msg.edit.call_args[1]) self.assertEqual(ret_val, msg) - @helpers.async_test async def test_send_prompt_gets_dev_core_channel(self): """The dev-core channel should be retrieved if an extant message isn't given.""" subtests = ( @@ -86,7 +84,6 @@ class SyncerSendPromptTests(unittest.TestCase): method.assert_called_once_with(constants.Channels.devcore) - @helpers.async_test async def test_send_prompt_returns_None_if_channel_fetch_fails(self): """None should be returned if there's an HTTPException when fetching the channel.""" self.bot.get_channel.return_value = None @@ -96,7 +93,6 @@ class SyncerSendPromptTests(unittest.TestCase): self.assertIsNone(ret_val) - @helpers.async_test async def test_send_prompt_sends_and_returns_new_message_if_not_given(self): """A new message mentioning core devs should be sent and returned if message isn't given.""" for mock_ in (self.mock_get_channel, self.mock_fetch_channel): @@ -108,7 +104,6 @@ class SyncerSendPromptTests(unittest.TestCase): self.assertIn(self.syncer._CORE_DEV_MENTION, mock_channel.send.call_args[0][0]) self.assertEqual(ret_val, mock_message) - @helpers.async_test async def test_send_prompt_adds_reactions(self): """The message should have reactions for confirmation added.""" extant_message = helpers.MockMessage() @@ -129,7 +124,7 @@ class SyncerSendPromptTests(unittest.TestCase): mock_message.add_reaction.assert_has_calls(calls) -class SyncerConfirmationTests(unittest.TestCase): +class 
SyncerConfirmationTests(unittest.IsolatedAsyncioTestCase): """Tests for waiting for a sync confirmation reaction on the prompt.""" def setUp(self): @@ -211,7 +206,6 @@ class SyncerConfirmationTests(unittest.TestCase): ret_val = self.syncer._reaction_check(*args) self.assertFalse(ret_val) - @helpers.async_test async def test_wait_for_confirmation(self): """The message should always be edited and only return True if the emoji is a check mark.""" subtests = ( @@ -251,14 +245,13 @@ class SyncerConfirmationTests(unittest.TestCase): self.assertIs(actual_return, ret_val) -class SyncerSyncTests(unittest.TestCase): +class SyncerSyncTests(unittest.IsolatedAsyncioTestCase): """Tests for main function orchestrating the sync.""" def setUp(self): self.bot = helpers.MockBot(user=helpers.MockMember(bot=True)) self.syncer = TestSyncer(self.bot) - @helpers.async_test async def test_sync_respects_confirmation_result(self): """The sync should abort if confirmation fails and continue if confirmed.""" mock_message = helpers.MockMessage() @@ -274,7 +267,7 @@ class SyncerSyncTests(unittest.TestCase): diff = _Diff({1, 2, 3}, {4, 5}, None) self.syncer._get_diff.return_value = diff - self.syncer._get_confirmation_result = helpers.AsyncMock( + self.syncer._get_confirmation_result = mock.AsyncMock( return_value=(confirmed, message) ) @@ -289,7 +282,6 @@ class SyncerSyncTests(unittest.TestCase): else: self.syncer._sync.assert_not_called() - @helpers.async_test async def test_sync_diff_size(self): """The diff size should be correctly calculated.""" subtests = ( @@ -303,7 +295,7 @@ class SyncerSyncTests(unittest.TestCase): with self.subTest(size=size, diff=diff): self.syncer._get_diff.reset_mock() self.syncer._get_diff.return_value = diff - self.syncer._get_confirmation_result = helpers.AsyncMock(return_value=(False, None)) + self.syncer._get_confirmation_result = mock.AsyncMock(return_value=(False, None)) guild = helpers.MockGuild() await self.syncer.sync(guild) @@ -312,7 +304,6 @@ class SyncerSyncTests(unittest.TestCase): self.syncer._get_confirmation_result.assert_called_once() self.assertEqual(self.syncer._get_confirmation_result.call_args[0][0], size) - @helpers.async_test async def test_sync_message_edited(self): """The message should be edited if one was sent, even if the sync has an API error.""" subtests = ( @@ -324,7 +315,7 @@ class SyncerSyncTests(unittest.TestCase): for message, side_effect, should_edit in subtests: with self.subTest(message=message, side_effect=side_effect, should_edit=should_edit): self.syncer._sync.side_effect = side_effect - self.syncer._get_confirmation_result = helpers.AsyncMock( + self.syncer._get_confirmation_result = mock.AsyncMock( return_value=(True, message) ) @@ -335,7 +326,6 @@ class SyncerSyncTests(unittest.TestCase): message.edit.assert_called_once() self.assertIn("content", message.edit.call_args[1]) - @helpers.async_test async def test_sync_confirmation_context_redirect(self): """If ctx is given, a new message should be sent and author should be ctx's author.""" mock_member = helpers.MockMember() @@ -349,7 +339,10 @@ class SyncerSyncTests(unittest.TestCase): if ctx is not None: ctx.send.return_value = message - self.syncer._get_confirmation_result = helpers.AsyncMock(return_value=(False, None)) + diff = _Diff({1, 2, 3}, {4, 5}, None) + self.syncer._get_diff.return_value = diff + + self.syncer._get_confirmation_result = mock.AsyncMock(return_value=(False, None)) guild = helpers.MockGuild() await self.syncer.sync(guild, ctx) @@ -362,16 +355,15 @@ class 
SyncerSyncTests(unittest.TestCase): self.assertEqual(self.syncer._get_confirmation_result.call_args[0][2], message) @mock.patch.object(constants.Sync, "max_diff", new=3) - @helpers.async_test async def test_confirmation_result_small_diff(self): """Should always return True and the given message if the diff size is too small.""" author = helpers.MockMember() expected_message = helpers.MockMessage() - for size in (3, 2): + for size in (3, 2): # pragma: no cover with self.subTest(size=size): - self.syncer._send_prompt = helpers.AsyncMock() - self.syncer._wait_for_confirmation = helpers.AsyncMock() + self.syncer._send_prompt = mock.AsyncMock() + self.syncer._wait_for_confirmation = mock.AsyncMock() coro = self.syncer._get_confirmation_result(size, author, expected_message) result, actual_message = await coro @@ -382,7 +374,6 @@ class SyncerSyncTests(unittest.TestCase): self.syncer._wait_for_confirmation.assert_not_called() @mock.patch.object(constants.Sync, "max_diff", new=3) - @helpers.async_test async def test_confirmation_result_large_diff(self): """Should return True if confirmed and False if _send_prompt fails or aborted.""" author = helpers.MockMember() @@ -394,10 +385,10 @@ class SyncerSyncTests(unittest.TestCase): (False, mock_message, False, "aborted"), ) - for expected_result, expected_message, confirmed, msg in subtests: + for expected_result, expected_message, confirmed, msg in subtests: # pragma: no cover with self.subTest(msg=msg): - self.syncer._send_prompt = helpers.AsyncMock(return_value=expected_message) - self.syncer._wait_for_confirmation = helpers.AsyncMock(return_value=confirmed) + self.syncer._send_prompt = mock.AsyncMock(return_value=expected_message) + self.syncer._wait_for_confirmation = mock.AsyncMock(return_value=confirmed) coro = self.syncer._get_confirmation_result(4, author) actual_result, actual_message = await coro diff --git a/tests/bot/cogs/sync/test_cog.py b/tests/bot/cogs/sync/test_cog.py index 98c9afc0d..8c87c0d6b 100644 --- a/tests/bot/cogs/sync/test_cog.py +++ b/tests/bot/cogs/sync/test_cog.py @@ -18,12 +18,13 @@ class MockSyncer(helpers.CustomMockMixin, mock.MagicMock): Instances of this class will follow the specifications of `bot.cogs.sync.syncers.Syncer` instances. For more information, see the `MockGuild` docstring. """ + spec_set = Syncer def __init__(self, **kwargs) -> None: - super().__init__(spec_set=Syncer, **kwargs) + super().__init__(**kwargs) -class SyncExtensionTests(unittest.TestCase): +class SyncExtensionTests(unittest.IsolatedAsyncioTestCase): """Tests for the sync extension.""" @staticmethod @@ -34,7 +35,7 @@ class SyncExtensionTests(unittest.TestCase): bot.add_cog.assert_called_once() -class SyncCogTestCase(unittest.TestCase): +class SyncCogTestCase(unittest.IsolatedAsyncioTestCase): """Base class for Sync cog tests. Sets up patches for syncers.""" def setUp(self): @@ -72,13 +73,13 @@ class SyncCogTestCase(unittest.TestCase): class SyncCogTests(SyncCogTestCase): """Tests for the Sync cog.""" - @mock.patch.object(sync.Sync, "sync_guild") + @mock.patch.object(sync.Sync, "sync_guild", new_callable=mock.MagicMock) def test_sync_cog_init(self, sync_guild): """Should instantiate syncers and run a sync for the guild.""" # Reset because a Sync cog was already instantiated in setUp. 
self.RoleSyncer.reset_mock() self.UserSyncer.reset_mock() - self.bot.loop.create_task.reset_mock() + self.bot.loop.create_task = mock.MagicMock() mock_sync_guild_coro = mock.MagicMock() sync_guild.return_value = mock_sync_guild_coro @@ -90,7 +91,6 @@ class SyncCogTests(SyncCogTestCase): sync_guild.assert_called_once_with() self.bot.loop.create_task.assert_called_once_with(mock_sync_guild_coro) - @helpers.async_test async def test_sync_cog_sync_guild(self): """Roles and users should be synced only if a guild is successfully retrieved.""" for guild in (helpers.MockGuild(), None): @@ -126,14 +126,12 @@ class SyncCogTests(SyncCogTestCase): json=updated_information, ) - @helpers.async_test async def test_sync_cog_patch_user(self): """A PATCH request should be sent and 404 errors ignored.""" for side_effect in (None, self.response_error(404)): with self.subTest(side_effect=side_effect): await self.patch_user_helper(side_effect) - @helpers.async_test async def test_sync_cog_patch_user_non_404(self): """A PATCH request should be sent and the error raised if it's not a 404.""" with self.assertRaises(ResponseCodeError): @@ -145,9 +143,8 @@ class SyncCogListenerTests(SyncCogTestCase): def setUp(self): super().setUp() - self.cog.patch_user = helpers.AsyncMock(spec_set=self.cog.patch_user) + self.cog.patch_user = mock.AsyncMock(spec_set=self.cog.patch_user) - @helpers.async_test async def test_sync_cog_on_guild_role_create(self): """A POST request should be sent with the new role's data.""" self.assertTrue(self.cog.on_guild_role_create.__cog_listener__) @@ -164,7 +161,6 @@ class SyncCogListenerTests(SyncCogTestCase): self.bot.api_client.post.assert_called_once_with("bot/roles", json=role_data) - @helpers.async_test async def test_sync_cog_on_guild_role_delete(self): """A DELETE request should be sent.""" self.assertTrue(self.cog.on_guild_role_delete.__cog_listener__) @@ -174,7 +170,6 @@ class SyncCogListenerTests(SyncCogTestCase): self.bot.api_client.delete.assert_called_once_with("bot/roles/99") - @helpers.async_test async def test_sync_cog_on_guild_role_update(self): """A PUT request should be sent if the colour, name, permissions, or position changes.""" self.assertTrue(self.cog.on_guild_role_update.__cog_listener__) @@ -212,7 +207,6 @@ class SyncCogListenerTests(SyncCogTestCase): else: self.bot.api_client.put.assert_not_called() - @helpers.async_test async def test_sync_cog_on_member_remove(self): """Member should patched to set in_guild as False.""" self.assertTrue(self.cog.on_member_remove.__cog_listener__) @@ -225,7 +219,6 @@ class SyncCogListenerTests(SyncCogTestCase): updated_information={"in_guild": False} ) - @helpers.async_test async def test_sync_cog_on_member_update_roles(self): """Members should be patched if their roles have changed.""" self.assertTrue(self.cog.on_member_update.__cog_listener__) @@ -240,7 +233,6 @@ class SyncCogListenerTests(SyncCogTestCase): data = {"roles": sorted(role.id for role in after_member.roles)} self.cog.patch_user.assert_called_once_with(after_member.id, updated_information=data) - @helpers.async_test async def test_sync_cog_on_member_update_other(self): """Members should not be patched if other attributes have changed.""" self.assertTrue(self.cog.on_member_update.__cog_listener__) @@ -262,7 +254,6 @@ class SyncCogListenerTests(SyncCogTestCase): self.cog.patch_user.assert_not_called() - @helpers.async_test async def test_sync_cog_on_user_update(self): """A user should be patched only if the name, discriminator, or avatar changes.""" 
self.assertTrue(self.cog.on_user_update.__cog_listener__) @@ -341,7 +332,6 @@ class SyncCogListenerTests(SyncCogTestCase): return data - @helpers.async_test async def test_sync_cog_on_member_join(self): """Should PUT user's data or POST it if the user doesn't exist.""" for side_effect in (None, self.response_error(404)): @@ -354,7 +344,6 @@ class SyncCogListenerTests(SyncCogTestCase): else: self.bot.api_client.post.assert_not_called() - @helpers.async_test async def test_sync_cog_on_member_join_non_404(self): """ResponseCodeError should be re-raised if status code isn't a 404.""" with self.assertRaises(ResponseCodeError): @@ -366,7 +355,6 @@ class SyncCogListenerTests(SyncCogTestCase): class SyncCogCommandTests(SyncCogTestCase, CommandTestCase): """Tests for the commands in the Sync cog.""" - @helpers.async_test async def test_sync_roles_command(self): """sync() should be called on the RoleSyncer.""" ctx = helpers.MockContext() @@ -374,7 +362,6 @@ class SyncCogCommandTests(SyncCogTestCase, CommandTestCase): self.cog.role_syncer.sync.assert_called_once_with(ctx.guild, ctx) - @helpers.async_test async def test_sync_users_command(self): """sync() should be called on the UserSyncer.""" ctx = helpers.MockContext() @@ -382,7 +369,7 @@ class SyncCogCommandTests(SyncCogTestCase, CommandTestCase): self.cog.user_syncer.sync.assert_called_once_with(ctx.guild, ctx) - def test_commands_require_admin(self): + async def test_commands_require_admin(self): """The sync commands should only run if the author has the administrator permission.""" cmds = ( self.cog.sync_group, @@ -392,4 +379,4 @@ class SyncCogCommandTests(SyncCogTestCase, CommandTestCase): for cmd in cmds: with self.subTest(cmd=cmd): - self.assertHasPermissionsCheck(cmd, {"administrator": True}) + await self.assertHasPermissionsCheck(cmd, {"administrator": True}) diff --git a/tests/bot/cogs/sync/test_roles.py b/tests/bot/cogs/sync/test_roles.py index 14fb2577a..79eee98f4 100644 --- a/tests/bot/cogs/sync/test_roles.py +++ b/tests/bot/cogs/sync/test_roles.py @@ -18,7 +18,7 @@ def fake_role(**kwargs): return kwargs -class RoleSyncerDiffTests(unittest.TestCase): +class RoleSyncerDiffTests(unittest.IsolatedAsyncioTestCase): """Tests for determining differences between roles in the DB and roles in the Guild cache.""" def setUp(self): @@ -39,7 +39,6 @@ class RoleSyncerDiffTests(unittest.TestCase): return guild - @helpers.async_test async def test_empty_diff_for_identical_roles(self): """No differences should be found if the roles in the guild and DB are identical.""" self.bot.api_client.get.return_value = [fake_role()] @@ -50,7 +49,6 @@ class RoleSyncerDiffTests(unittest.TestCase): self.assertEqual(actual_diff, expected_diff) - @helpers.async_test async def test_diff_for_updated_roles(self): """Only updated roles should be added to the 'updated' set of the diff.""" updated_role = fake_role(id=41, name="new") @@ -63,7 +61,6 @@ class RoleSyncerDiffTests(unittest.TestCase): self.assertEqual(actual_diff, expected_diff) - @helpers.async_test async def test_diff_for_new_roles(self): """Only new roles should be added to the 'created' set of the diff.""" new_role = fake_role(id=41, name="new") @@ -76,7 +73,6 @@ class RoleSyncerDiffTests(unittest.TestCase): self.assertEqual(actual_diff, expected_diff) - @helpers.async_test async def test_diff_for_deleted_roles(self): """Only deleted roles should be added to the 'deleted' set of the diff.""" deleted_role = fake_role(id=61, name="deleted") @@ -89,7 +85,6 @@ class RoleSyncerDiffTests(unittest.TestCase): 
self.assertEqual(actual_diff, expected_diff) - @helpers.async_test async def test_diff_for_new_updated_and_deleted_roles(self): """When roles are added, updated, and removed, all of them are returned properly.""" new = fake_role(id=41, name="new") @@ -109,14 +104,13 @@ class RoleSyncerDiffTests(unittest.TestCase): self.assertEqual(actual_diff, expected_diff) -class RoleSyncerSyncTests(unittest.TestCase): +class RoleSyncerSyncTests(unittest.IsolatedAsyncioTestCase): """Tests for the API requests that sync roles.""" def setUp(self): self.bot = helpers.MockBot() self.syncer = RoleSyncer(self.bot) - @helpers.async_test async def test_sync_created_roles(self): """Only POST requests should be made with the correct payload.""" roles = [fake_role(id=111), fake_role(id=222)] @@ -132,7 +126,6 @@ class RoleSyncerSyncTests(unittest.TestCase): self.bot.api_client.put.assert_not_called() self.bot.api_client.delete.assert_not_called() - @helpers.async_test async def test_sync_updated_roles(self): """Only PUT requests should be made with the correct payload.""" roles = [fake_role(id=111), fake_role(id=222)] @@ -148,7 +141,6 @@ class RoleSyncerSyncTests(unittest.TestCase): self.bot.api_client.post.assert_not_called() self.bot.api_client.delete.assert_not_called() - @helpers.async_test async def test_sync_deleted_roles(self): """Only DELETE requests should be made with the correct payload.""" roles = [fake_role(id=111), fake_role(id=222)] diff --git a/tests/bot/cogs/sync/test_users.py b/tests/bot/cogs/sync/test_users.py index 421bf6bb6..818883012 100644 --- a/tests/bot/cogs/sync/test_users.py +++ b/tests/bot/cogs/sync/test_users.py @@ -17,7 +17,7 @@ def fake_user(**kwargs): return kwargs -class UserSyncerDiffTests(unittest.TestCase): +class UserSyncerDiffTests(unittest.IsolatedAsyncioTestCase): """Tests for determining differences between users in the DB and users in the Guild cache.""" def setUp(self): @@ -42,7 +42,6 @@ class UserSyncerDiffTests(unittest.TestCase): return guild - @helpers.async_test async def test_empty_diff_for_no_users(self): """When no users are given, an empty diff should be returned.""" guild = self.get_guild() @@ -52,7 +51,6 @@ class UserSyncerDiffTests(unittest.TestCase): self.assertEqual(actual_diff, expected_diff) - @helpers.async_test async def test_empty_diff_for_identical_users(self): """No differences should be found if the users in the guild and DB are identical.""" self.bot.api_client.get.return_value = [fake_user()] @@ -63,7 +61,6 @@ class UserSyncerDiffTests(unittest.TestCase): self.assertEqual(actual_diff, expected_diff) - @helpers.async_test async def test_diff_for_updated_users(self): """Only updated users should be added to the 'updated' set of the diff.""" updated_user = fake_user(id=99, name="new") @@ -76,7 +73,6 @@ class UserSyncerDiffTests(unittest.TestCase): self.assertEqual(actual_diff, expected_diff) - @helpers.async_test async def test_diff_for_new_users(self): """Only new users should be added to the 'created' set of the diff.""" new_user = fake_user(id=99, name="new") @@ -89,7 +85,6 @@ class UserSyncerDiffTests(unittest.TestCase): self.assertEqual(actual_diff, expected_diff) - @helpers.async_test async def test_diff_sets_in_guild_false_for_leaving_users(self): """When a user leaves the guild, the `in_guild` flag is updated to `False`.""" leaving_user = fake_user(id=63, in_guild=False) @@ -102,7 +97,6 @@ class UserSyncerDiffTests(unittest.TestCase): self.assertEqual(actual_diff, expected_diff) - @helpers.async_test async def 
test_diff_for_new_updated_and_leaving_users(self): """When users are added, updated, and removed, all of them are returned properly.""" new_user = fake_user(id=99, name="new") @@ -117,7 +111,6 @@ class UserSyncerDiffTests(unittest.TestCase): self.assertEqual(actual_diff, expected_diff) - @helpers.async_test async def test_empty_diff_for_db_users_not_in_guild(self): """When the DB knows a user the guild doesn't, no difference is found.""" self.bot.api_client.get.return_value = [fake_user(), fake_user(id=63, in_guild=False)] @@ -129,14 +122,13 @@ class UserSyncerDiffTests(unittest.TestCase): self.assertEqual(actual_diff, expected_diff) -class UserSyncerSyncTests(unittest.TestCase): +class UserSyncerSyncTests(unittest.IsolatedAsyncioTestCase): """Tests for the API requests that sync users.""" def setUp(self): self.bot = helpers.MockBot() self.syncer = UserSyncer(self.bot) - @helpers.async_test async def test_sync_created_users(self): """Only POST requests should be made with the correct payload.""" users = [fake_user(id=111), fake_user(id=222)] @@ -152,7 +144,6 @@ class UserSyncerSyncTests(unittest.TestCase): self.bot.api_client.put.assert_not_called() self.bot.api_client.delete.assert_not_called() - @helpers.async_test async def test_sync_updated_users(self): """Only PUT requests should be made with the correct payload.""" users = [fake_user(id=111), fake_user(id=222)] diff --git a/tests/helpers.py b/tests/helpers.py index 7ae7ed621..8e13f0f28 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -261,9 +261,7 @@ class MockAPIClient(CustomMockMixin, unittest.mock.MagicMock): Instances of this class will follow the specifications of `bot.api.APIClient` instances. For more information, see the `MockGuild` docstring. """ - - def __init__(self, **kwargs) -> None: - super().__init__(spec_set=APIClient, **kwargs) + spec_set = APIClient # Create a Bot instance to get a realistic MagicMock of `discord.ext.commands.Bot` -- cgit v1.2.3 From 0de8f42c122a4bf8f0ea84ea481d2f26d718a0c7 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sun, 23 Feb 2020 22:00:34 -0800 Subject: Sync tests: use autospec instead of MockSyncer Autospec supports using AsyncMocks in 3.8 so there's no need to rely on a subclass of CustomMockMixin for the async mocks. --- tests/bot/cogs/sync/test_cog.py | 22 ++++------------------ 1 file changed, 4 insertions(+), 18 deletions(-) diff --git a/tests/bot/cogs/sync/test_cog.py b/tests/bot/cogs/sync/test_cog.py index 8c87c0d6b..81398c61f 100644 --- a/tests/bot/cogs/sync/test_cog.py +++ b/tests/bot/cogs/sync/test_cog.py @@ -11,19 +11,6 @@ from tests import helpers from tests.base import CommandTestCase -class MockSyncer(helpers.CustomMockMixin, mock.MagicMock): - """ - A MagicMock subclass to mock Syncer objects. - - Instances of this class will follow the specifications of `bot.cogs.sync.syncers.Syncer` - instances. For more information, see the `MockGuild` docstring. - """ - spec_set = Syncer - - def __init__(self, **kwargs) -> None: - super().__init__(**kwargs) - - class SyncExtensionTests(unittest.IsolatedAsyncioTestCase): """Tests for the sync extension.""" @@ -41,16 +28,15 @@ class SyncCogTestCase(unittest.IsolatedAsyncioTestCase): def setUp(self): self.bot = helpers.MockBot() - # These patch the type. When the type is called, a MockSyncer instanced is returned. - # MockSyncer is needed so that our custom AsyncMock is used. - # TODO: Use autospec instead in 3.8, which will automatically use AsyncMock when needed. 
self.role_syncer_patcher = mock.patch( "bot.cogs.sync.syncers.RoleSyncer", - new=mock.MagicMock(return_value=MockSyncer()) + autospec=Syncer, + spec_set=True ) self.user_syncer_patcher = mock.patch( "bot.cogs.sync.syncers.UserSyncer", - new=mock.MagicMock(return_value=MockSyncer()) + autospec=Syncer, + spec_set=True ) self.RoleSyncer = self.role_syncer_patcher.start() self.UserSyncer = self.user_syncer_patcher.start() -- cgit v1.2.3 From 3574eaa0c903cd8ed862b8bff896ce0a73412321 Mon Sep 17 00:00:00 2001 From: Sebastiaan Zeeff <33516116+SebastiaanZ@users.noreply.github.com> Date: Mon, 24 Feb 2020 10:06:09 +0100 Subject: Use MagicMock as return value for _get_diff mock The `_get_diff` method of TestSyncer class is mocked using an AsyncMock object. By default, when an AsyncMock object is called **and awaited**, it returns a child mock of the same time (another AsyncMock) according to the "the child is a like the parent" principle. This means that the _get_diff method will return an AsyncMock unless a different return_value is explicitly provided. Because of that "child is like parent" behavior, this will happen in lines 194-196 of bot.cogs.sync.syncers (annotations added by me): ``` // `diff` will be a child AsyncMock as "child is like parent" diff = await self._get_diff(guild) // `diff._asdict` will be an AsyncMock as "child is like parent" and, // after being called, it will return an unawaited coroutine object // we assign the name `diff_dict`: diff_dict = diff._asdict() // `diff_dict` is still an unawaited coroutine object meaning that it // doesn't have an `items()` method: totals = {k: len(v) for k, v in diff_dict.items() if v is not None} ``` Original, unannotated: https://github.com/python-discord/bot/blob/c81a4d401ea434e98b0a1ece51d3d10f1a3ad226/bot/cogs/sync/syncers.py#L194-L196 This will lead to the following exception when running the tests: ```py ====================================================================== ERROR: test_sync_confirmation_context_redirect (tests.bot.cogs.sync.test_base.SyncerSyncTests) (ctx=None, author=, message=None) If ctx is given, a new message should be sent and author should be ctx's author. ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/sebastiaan/pydis/repositories/bot/tests/bot/cogs/sync/test_base.py", line 348, in test_sync_confirmation_context_redirect await self.syncer.sync(guild, ctx) File "/home/sebastiaan/pydis/repositories/bot/bot/cogs/sync/syncers.py", line 196, in sync totals = {k: len(v) for k, v in diff_dict.items() if v is not None} AttributeError: 'coroutine' object has no attribute 'items' ``` The solution is to assign an explicit return value so the parent mock doesn't "guess" and return an object of its own type. I previously did that by providing a specific `_Diff` object as the return value, but I should have gone with a `MagicMock` to signify that it's not an important return value; it's just something that needs to support/mimic the API we use on it. So that's what this commit adds. 
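For illustration, a minimal self-contained sketch of the two behaviours with Python 3.8's `unittest.mock` (toy names; not the code in the diff below):

```py
import asyncio
from unittest import mock


async def demo() -> None:
    get_diff = mock.AsyncMock()

    # Default behaviour: the awaited result is another AsyncMock ("the child is
    # like the parent"), so calling _asdict() on it yields an unawaited
    # coroutine object, which has no .items() method.
    diff = await get_diff()
    diff_dict = diff._asdict()
    print(type(diff_dict))  # <class 'coroutine'>
    diff_dict.close()       # silence the "never awaited" warning

    # With an explicit MagicMock return value, _asdict() returns a plain
    # MagicMock instead, so .items() can be used on it.
    get_diff.return_value = mock.MagicMock()
    diff = await get_diff()
    print(type(diff._asdict()))  # a MagicMock, not a coroutine


asyncio.run(demo())
```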
--- tests/bot/cogs/sync/test_base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/bot/cogs/sync/test_base.py b/tests/bot/cogs/sync/test_base.py index 17aa4198b..d17a27409 100644 --- a/tests/bot/cogs/sync/test_base.py +++ b/tests/bot/cogs/sync/test_base.py @@ -339,8 +339,8 @@ class SyncerSyncTests(unittest.IsolatedAsyncioTestCase): if ctx is not None: ctx.send.return_value = message - diff = _Diff({1, 2, 3}, {4, 5}, None) - self.syncer._get_diff.return_value = diff + # Make sure `_get_diff` returns a MagicMock, not an AsyncMock + self.syncer._get_diff.return_value = mock.MagicMock() self.syncer._get_confirmation_result = mock.AsyncMock(return_value=(False, None)) -- cgit v1.2.3 From df87aba432db50eb480ba8b2f42b1a64147909d9 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Tue, 25 Feb 2020 20:15:58 -0800 Subject: Moderation: use asyncio.shield to prevent self-cancellation The shield exists to be used for exactly this purpose so its a better fit than create_task. --- bot/cogs/moderation/scheduler.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/bot/cogs/moderation/scheduler.py b/bot/cogs/moderation/scheduler.py index 162159af8..93afd9f9f 100644 --- a/bot/cogs/moderation/scheduler.py +++ b/bot/cogs/moderation/scheduler.py @@ -1,3 +1,4 @@ +import asyncio import logging import textwrap import typing as t @@ -427,6 +428,6 @@ class InfractionScheduler(Scheduler): expiry = dateutil.parser.isoparse(infraction["expires_at"]).replace(tzinfo=None) await time.wait_until(expiry) - # Because deactivate_infraction() explicitly cancels this scheduled task, it runs in - # a separate task to avoid prematurely cancelling itself. - self.bot.loop.create_task(self.deactivate_infraction(infraction)) + # Because deactivate_infraction() explicitly cancels this scheduled task, it is shielded + # to avoid prematurely cancelling itself. + await asyncio.shield(self.deactivate_infraction(infraction)) -- cgit v1.2.3 From b1f8f4779738be35e1339d6c07e317ef08009467 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Tue, 25 Feb 2020 21:20:20 -0800 Subject: Scheduler: improve cancel_task's docstring * Use imperative mood for docstring * Explain the purpose of the parameter in the docstring * Make log message after cog name lowercase --- bot/utils/scheduling.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/bot/utils/scheduling.py b/bot/utils/scheduling.py index 0d66952eb..cb28648db 100644 --- a/bot/utils/scheduling.py +++ b/bot/utils/scheduling.py @@ -52,19 +52,19 @@ class Scheduler(metaclass=CogABCMeta): log.debug(f"{self.cog_name}: scheduled task #{task_id} {id(task)}.") def cancel_task(self, task_id: str) -> None: - """Un-schedules a task.""" + """Unschedule the task identified by `task_id`.""" log.trace(f"{self.cog_name}: cancelling task #{task_id}...") - task = self._scheduled_tasks.get(task_id) - if task is None: - log.warning(f"{self.cog_name}: Failed to unschedule {task_id} (no task found).") + if not task: + log.warning(f"{self.cog_name}: failed to unschedule {task_id} (no task found).") return task.cancel() - log.debug(f"{self.cog_name}: unscheduled task #{task_id} {id(task)}.") del self._scheduled_tasks[task_id] + log.debug(f"{self.cog_name}: unscheduled task #{task_id} {id(task)}.") + def _task_done_callback(self, task_id: str, task: asyncio.Task) -> None: """ Unschedule the task and raise its exception if one exists. 
-- cgit v1.2.3 From 4a11bf22cc9e894271b896eb9fca0c3cff085766 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Tue, 25 Feb 2020 22:22:01 -0800 Subject: Scheduler: only delete the task in the done callback if tasks are same To prevent a deletion of task rescheduled with the same ID, the callback checks that the stored task is the same as the done task being handled. * Only delete the task; it doesn't need to be cancelled because the it is already done * Revise the callback's docstring to explain the new behaviour * Rename `task` parameter to `done_task` --- bot/utils/scheduling.py | 41 +++++++++++++++++++++++++---------------- 1 file changed, 25 insertions(+), 16 deletions(-) diff --git a/bot/utils/scheduling.py b/bot/utils/scheduling.py index cb28648db..58bb32e5d 100644 --- a/bot/utils/scheduling.py +++ b/bot/utils/scheduling.py @@ -65,24 +65,33 @@ class Scheduler(metaclass=CogABCMeta): log.debug(f"{self.cog_name}: unscheduled task #{task_id} {id(task)}.") - def _task_done_callback(self, task_id: str, task: asyncio.Task) -> None: + def _task_done_callback(self, task_id: str, done_task: asyncio.Task) -> None: """ - Unschedule the task and raise its exception if one exists. + Delete the task and raise its exception if one exists. - If the task was cancelled, the CancelledError is retrieved and suppressed. In this case, - the task is already assumed to have been unscheduled. + If `done_task` and the task associated with `task_id` are different, then the latter + will not be deleted. In this case, a new task was likely rescheduled with the same ID. """ - log.trace(f"{self.cog_name}: performing done callback for task #{task_id} {id(task)}") + log.trace(f"{self.cog_name}: performing done callback for task #{task_id} {id(done_task)}.") - if task.cancelled(): - with contextlib.suppress(asyncio.CancelledError): - task.exception() + scheduled_task = self._scheduled_tasks.get(task_id) + + if scheduled_task and done_task is scheduled_task: + # A task for the ID exists and its the same as the done task. + # Since this is the done callback, the task is already done so no need to cancel it. + log.trace(f"{self.cog_name}: deleting task #{task_id} {id(done_task)}.") + del self._scheduled_tasks[task_id] + elif scheduled_task: + # A new task was likely rescheduled with the same ID. + log.debug( + f"{self.cog_name}: the scheduled task #{task_id} {id(scheduled_task)} " + f"and the done task {id(done_task)} differ." + ) else: - # Check if it exists to avoid logging a warning. - if task_id in self._scheduled_tasks: - # Only cancel if the task is not cancelled to avoid a race condition when a new - # task is scheduled using the same ID. Reminders do this when re-scheduling after - # editing. - self.cancel_task(task_id) - - task.exception() + log.warning( + f"{self.cog_name}: task #{task_id} not found while handling task {id(done_task)}! " + f"A task somehow got unscheduled improperly (i.e. deleted but not cancelled)." + ) + + with contextlib.suppress(asyncio.CancelledError): + done_task.exception() -- cgit v1.2.3 From 47b645a2cd2622709c57158d788554544579d870 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Tue, 25 Feb 2020 22:23:07 -0800 Subject: Scheduler: properly raise task's exception the done callback Task.exception() only returns the exception. It still needs to be explicitly raised. 
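A minimal standalone sketch of that behaviour (toy coroutine; not part of the diff below):

```py
import asyncio


async def boom() -> None:
    raise ValueError("oops")


async def main() -> None:
    task = asyncio.create_task(boom())
    await asyncio.wait({task})  # let the task run to completion (it fails)

    exc = task.exception()      # only *returns* ValueError("oops"); nothing propagates yet
    print(f"retrieved: {exc!r}")
    if exc:
        raise exc               # the error surfaces only once raised explicitly


try:
    asyncio.run(main())
except ValueError as e:
    print(f"propagated: {e!r}")
```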
--- bot/utils/scheduling.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/bot/utils/scheduling.py b/bot/utils/scheduling.py index 58bb32e5d..742133f02 100644 --- a/bot/utils/scheduling.py +++ b/bot/utils/scheduling.py @@ -94,4 +94,7 @@ class Scheduler(metaclass=CogABCMeta): ) with contextlib.suppress(asyncio.CancelledError): - done_task.exception() + exception = done_task.exception() + # Raise the exception if one exists. + if exception: + raise exception -- cgit v1.2.3 From e173cd2af20b546230ed467f26286ee167df55cd Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Tue, 25 Feb 2020 22:34:35 -0800 Subject: Scheduler: only send warning in callback if task isn't cancelled If a task is cancelled it is assumed it was done via cancel_task. That method deletes the task after cancelling so the warning isn't relevant. --- bot/utils/scheduling.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/utils/scheduling.py b/bot/utils/scheduling.py index 742133f02..9371dcdb7 100644 --- a/bot/utils/scheduling.py +++ b/bot/utils/scheduling.py @@ -87,7 +87,7 @@ class Scheduler(metaclass=CogABCMeta): f"{self.cog_name}: the scheduled task #{task_id} {id(scheduled_task)} " f"and the done task {id(done_task)} differ." ) - else: + elif not done_task.cancelled(): log.warning( f"{self.cog_name}: task #{task_id} not found while handling task {id(done_task)}! " f"A task somehow got unscheduled improperly (i.e. deleted but not cancelled)." -- cgit v1.2.3 From 4096ef526aba41ab3fd83be16ef3b5554419d524 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Thu, 27 Feb 2020 19:43:45 -0800 Subject: Scheduler: log the exception instead of raising Logging it ourselves has a cleaner traceback and gives more control over the output, such as including the task ID. --- bot/utils/scheduling.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/bot/utils/scheduling.py b/bot/utils/scheduling.py index 9371dcdb7..1eae817c1 100644 --- a/bot/utils/scheduling.py +++ b/bot/utils/scheduling.py @@ -95,6 +95,9 @@ class Scheduler(metaclass=CogABCMeta): with contextlib.suppress(asyncio.CancelledError): exception = done_task.exception() - # Raise the exception if one exists. + # Log the exception if one exists. if exception: - raise exception + log.error( + f"{self.cog_name}: error in task #{task_id} {id(scheduled_task)}!", + exc_info=exception + ) -- cgit v1.2.3 From 5daf1db8ea9e86568da4907d42507aa3286eb3c1 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Fri, 28 Feb 2020 09:59:57 -0800 Subject: Scheduler: correct type annotations --- bot/utils/scheduling.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/bot/utils/scheduling.py b/bot/utils/scheduling.py index 1eae817c1..5760ec2d4 100644 --- a/bot/utils/scheduling.py +++ b/bot/utils/scheduling.py @@ -1,9 +1,9 @@ import asyncio import contextlib import logging +import typing as t from abc import abstractmethod from functools import partial -from typing import Dict from bot.utils import CogABCMeta @@ -17,10 +17,10 @@ class Scheduler(metaclass=CogABCMeta): # Keep track of the child cog's name so the logs are clear. self.cog_name = self.__class__.__name__ - self._scheduled_tasks: Dict[str, asyncio.Task] = {} + self._scheduled_tasks: t.Dict[t.Hashable, asyncio.Task] = {} @abstractmethod - async def _scheduled_task(self, task_object: dict) -> None: + async def _scheduled_task(self, task_object: t.Any) -> None: """ A coroutine which handles the scheduling. 
@@ -31,7 +31,7 @@ class Scheduler(metaclass=CogABCMeta): then make a site API request to delete the reminder from the database. """ - def schedule_task(self, task_id: str, task_data: dict) -> None: + def schedule_task(self, task_id: t.Hashable, task_data: t.Any) -> None: """ Schedules a task. @@ -51,7 +51,7 @@ class Scheduler(metaclass=CogABCMeta): self._scheduled_tasks[task_id] = task log.debug(f"{self.cog_name}: scheduled task #{task_id} {id(task)}.") - def cancel_task(self, task_id: str) -> None: + def cancel_task(self, task_id: t.Hashable) -> None: """Unschedule the task identified by `task_id`.""" log.trace(f"{self.cog_name}: cancelling task #{task_id}...") task = self._scheduled_tasks.get(task_id) @@ -65,7 +65,7 @@ class Scheduler(metaclass=CogABCMeta): log.debug(f"{self.cog_name}: unscheduled task #{task_id} {id(task)}.") - def _task_done_callback(self, task_id: str, done_task: asyncio.Task) -> None: + def _task_done_callback(self, task_id: t.Hashable, done_task: asyncio.Task) -> None: """ Delete the task and raise its exception if one exists. -- cgit v1.2.3 From c2af442676011eb620593505789be4d34da76ea3 Mon Sep 17 00:00:00 2001 From: Sebastiaan Zeeff <33516116+SebastiaanZ@users.noreply.github.com> Date: Sat, 29 Feb 2020 17:06:51 +0100 Subject: Migrate snekbox tests to Python 3.8's unittest I've migrated the `tests/test_snekbox.py` file to use the new Python 3.8-style unittests instead of our old style using our custom Async mocks. In particular, I had to make a few changes: - Mocking the async post() context manager correctly Since `ClientSession.post` returns an async context manager when called, we need to make sure to assign the return value to the __aenter__ method of whatever `post()` returns, not of `post` itself (i.e.. when it's not called). - Use the new AsyncMock assert methods `assert_awaited_once` and `assert_awaited_once_with` Objects of the new `unittest.mock.AsyncMock` class have special methods to assert what they were called with that also assert that specific coroutine object was awaited. This means we test two things in one: Whether or not it was called with the right arguments and whether or not the returned coroutine object was then awaited. - Patch `functools.partial` as `partial` objects are compared by identity When you create two partial functions of the same function, you'll end up with two different `partial` objects. Since `partial` objects are compared by identity, you can't compare a `partial` created in a test method to that created in the callable you're trying to test. They will always compare as `False`. Since we're not interested in actually creating `partial` objects, I've just patched `functools.partial` in the namespace of the module we're testing to make sure we can compare them. 
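As a self-contained illustration of the first point (toy names and URL; not the test code itself):

```py
import asyncio
from unittest import mock


async def upload(session, data: str) -> dict:
    """Stand-in for the kind of code under test: POST data, return the JSON body."""
    async with session.post("https://paste.example/documents", data=data) as resp:
        return await resp.json()


# post() itself is a regular synchronous call that returns an async context
# manager, so the fake response goes on __aenter__ of post's *return value*.
session = mock.MagicMock()
response = mock.MagicMock()
response.json = mock.AsyncMock(return_value={"key": "abc123"})
session.post.return_value.__aenter__.return_value = response

print(asyncio.run(upload(session, "some output")))  # {'key': 'abc123'}
response.json.assert_awaited_once()                 # asserts it was called *and* awaited
```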
--- tests/bot/cogs/test_snekbox.py | 68 +++++++++++++++++------------------------- 1 file changed, 27 insertions(+), 41 deletions(-) diff --git a/tests/bot/cogs/test_snekbox.py b/tests/bot/cogs/test_snekbox.py index 985bc66a1..9cd7f0154 100644 --- a/tests/bot/cogs/test_snekbox.py +++ b/tests/bot/cogs/test_snekbox.py @@ -1,74 +1,68 @@ import asyncio import logging import unittest -from functools import partial -from unittest.mock import MagicMock, Mock, call, patch +from unittest.mock import AsyncMock, MagicMock, Mock, call, patch from bot.cogs import snekbox from bot.cogs.snekbox import Snekbox from bot.constants import URLs -from tests.helpers import ( - AsyncContextManagerMock, AsyncMock, MockBot, MockContext, MockMessage, MockReaction, MockUser, async_test -) +from tests.helpers import MockBot, MockContext, MockMessage, MockReaction, MockUser -class SnekboxTests(unittest.TestCase): +class SnekboxTests(unittest.IsolatedAsyncioTestCase): def setUp(self): """Add mocked bot and cog to the instance.""" self.bot = MockBot() - - self.mocked_post = MagicMock() - self.mocked_post.json = AsyncMock() - self.bot.http_session.post = MagicMock(return_value=AsyncContextManagerMock(self.mocked_post)) - self.cog = Snekbox(bot=self.bot) - @async_test async def test_post_eval(self): """Post the eval code to the URLs.snekbox_eval_api endpoint.""" - self.mocked_post.json.return_value = {'lemon': 'AI'} + resp = MagicMock() + resp.json = AsyncMock(return_value="return") + self.bot.http_session.post().__aenter__.return_value = resp - self.assertEqual(await self.cog.post_eval("import random"), {'lemon': 'AI'}) - self.bot.http_session.post.assert_called_once_with( + self.assertEqual(await self.cog.post_eval("import random"), "return") + self.bot.http_session.post.assert_called_with( URLs.snekbox_eval_api, json={"input": "import random"}, raise_for_status=True ) + resp.json.assert_awaited_once() - @async_test async def test_upload_output_reject_too_long(self): """Reject output longer than MAX_PASTE_LEN.""" result = await self.cog.upload_output("-" * (snekbox.MAX_PASTE_LEN + 1)) self.assertEqual(result, "too long to upload") - @async_test async def test_upload_output(self): """Upload the eval output to the URLs.paste_service.format(key="documents") endpoint.""" - key = "RainbowDash" - self.mocked_post.json.return_value = {"key": key} + key = "MarkDiamond" + resp = MagicMock() + resp.json = AsyncMock(return_value={"key": key}) + self.bot.http_session.post().__aenter__.return_value = resp self.assertEqual( await self.cog.upload_output("My awesome output"), URLs.paste_service.format(key=key) ) - self.bot.http_session.post.assert_called_once_with( + self.bot.http_session.post.assert_called_with( URLs.paste_service.format(key="documents"), data="My awesome output", raise_for_status=True ) - @async_test async def test_upload_output_gracefully_fallback_if_exception_during_request(self): """Output upload gracefully fallback if the upload fail.""" - self.mocked_post.json.side_effect = Exception + resp = MagicMock() + resp.json = AsyncMock(side_effect=Exception) + self.bot.http_session.post().__aenter__.return_value = resp + log = logging.getLogger("bot.cogs.snekbox") with self.assertLogs(logger=log, level='ERROR'): await self.cog.upload_output('My awesome output!') - @async_test async def test_upload_output_gracefully_fallback_if_no_key_in_response(self): """Output upload gracefully fallback if there is no key entry in the response body.""" - self.mocked_post.json.return_value = {} self.assertEqual((await 
self.cog.upload_output('My awesome output!')), None) def test_prepare_input(self): @@ -121,7 +115,6 @@ class SnekboxTests(unittest.TestCase): actual = self.cog.get_status_emoji({'stdout': stdout, 'returncode': returncode}) self.assertEqual(actual, expected) - @async_test async def test_format_output(self): """Test output formatting.""" self.cog.upload_output = AsyncMock(return_value='https://testificate.com/') @@ -172,7 +165,6 @@ class SnekboxTests(unittest.TestCase): with self.subTest(msg=testname, case=case, expected=expected): self.assertEqual(await self.cog.format_output(case), expected) - @async_test async def test_eval_command_evaluate_once(self): """Test the eval command procedure.""" ctx = MockContext() @@ -186,7 +178,6 @@ class SnekboxTests(unittest.TestCase): self.cog.send_eval.assert_called_once_with(ctx, 'MyAwesomeFormattedCode') self.cog.continue_eval.assert_called_once_with(ctx, response) - @async_test async def test_eval_command_evaluate_twice(self): """Test the eval and re-eval command procedure.""" ctx = MockContext() @@ -201,7 +192,6 @@ class SnekboxTests(unittest.TestCase): self.cog.send_eval.assert_called_with(ctx, 'MyAwesomeFormattedCode') self.cog.continue_eval.assert_called_with(ctx, response) - @async_test async def test_eval_command_reject_two_eval_at_the_same_time(self): """Test if the eval command rejects an eval if the author already have a running eval.""" ctx = MockContext() @@ -214,7 +204,6 @@ class SnekboxTests(unittest.TestCase): "@LemonLemonishBeard#0042 You've already got a job running - please wait for it to finish!" ) - @async_test async def test_eval_command_call_help(self): """Test if the eval command call the help command if no code is provided.""" ctx = MockContext() @@ -222,14 +211,13 @@ class SnekboxTests(unittest.TestCase): await self.cog.eval_command.callback(self.cog, ctx=ctx, code='') ctx.invoke.assert_called_once_with(self.bot.get_command("help"), "eval") - @async_test async def test_send_eval(self): """Test the send_eval function.""" ctx = MockContext() ctx.message = MockMessage() ctx.send = AsyncMock() ctx.author.mention = '@LemonLemonishBeard#0042' - ctx.typing = MagicMock(return_value=AsyncContextManagerMock(None)) + self.cog.post_eval = AsyncMock(return_value={'stdout': '', 'returncode': 0}) self.cog.get_results_message = MagicMock(return_value=('Return code 0', '')) self.cog.get_status_emoji = MagicMock(return_value=':yay!:') @@ -244,14 +232,13 @@ class SnekboxTests(unittest.TestCase): self.cog.get_results_message.assert_called_once_with({'stdout': '', 'returncode': 0}) self.cog.format_output.assert_called_once_with('') - @async_test async def test_send_eval_with_paste_link(self): """Test the send_eval function with a too long output that generate a paste link.""" ctx = MockContext() ctx.message = MockMessage() ctx.send = AsyncMock() ctx.author.mention = '@LemonLemonishBeard#0042' - ctx.typing = MagicMock(return_value=AsyncContextManagerMock(None)) + self.cog.post_eval = AsyncMock(return_value={'stdout': 'Way too long beard', 'returncode': 0}) self.cog.get_results_message = MagicMock(return_value=('Return code 0', '')) self.cog.get_status_emoji = MagicMock(return_value=':yay!:') @@ -267,14 +254,12 @@ class SnekboxTests(unittest.TestCase): self.cog.get_results_message.assert_called_once_with({'stdout': 'Way too long beard', 'returncode': 0}) self.cog.format_output.assert_called_once_with('Way too long beard') - @async_test async def test_send_eval_with_non_zero_eval(self): """Test the send_eval function with a code returning a non-zero 
code.""" ctx = MockContext() ctx.message = MockMessage() ctx.send = AsyncMock() ctx.author.mention = '@LemonLemonishBeard#0042' - ctx.typing = MagicMock(return_value=AsyncContextManagerMock(None)) self.cog.post_eval = AsyncMock(return_value={'stdout': 'ERROR', 'returncode': 127}) self.cog.get_results_message = MagicMock(return_value=('Return code 127', 'Beard got stuck in the eval')) self.cog.get_status_emoji = MagicMock(return_value=':nope!:') @@ -289,8 +274,8 @@ class SnekboxTests(unittest.TestCase): self.cog.get_results_message.assert_called_once_with({'stdout': 'ERROR', 'returncode': 127}) self.cog.format_output.assert_not_called() - @async_test - async def test_continue_eval_does_continue(self): + @patch("bot.cogs.snekbox.partial") + async def test_continue_eval_does_continue(self, partial_mock): """Test that the continue_eval function does continue if required conditions are met.""" ctx = MockContext(message=MockMessage(add_reaction=AsyncMock(), clear_reactions=AsyncMock())) response = MockMessage(delete=AsyncMock()) @@ -299,15 +284,16 @@ class SnekboxTests(unittest.TestCase): actual = await self.cog.continue_eval(ctx, response) self.assertEqual(actual, 'NewCode') - self.bot.wait_for.has_calls( - call('message_edit', partial(snekbox.predicate_eval_message_edit, ctx), timeout=10), - call('reaction_add', partial(snekbox.predicate_eval_emoji_reaction, ctx), timeout=10) + self.bot.wait_for.assert_has_awaits( + ( + call('message_edit', check=partial_mock(snekbox.predicate_eval_message_edit, ctx), timeout=10), + call('reaction_add', check=partial_mock(snekbox.predicate_eval_emoji_reaction, ctx), timeout=10) + ) ) ctx.message.add_reaction.assert_called_once_with(snekbox.REEVAL_EMOJI) ctx.message.clear_reactions.assert_called_once() response.delete.assert_called_once() - @async_test async def test_continue_eval_does_not_continue(self): ctx = MockContext(message=MockMessage(clear_reactions=AsyncMock())) self.bot.wait_for.side_effect = asyncio.TimeoutError -- cgit v1.2.3 From d2341a5fbf06dc2b541a88d3dfbd6a9deed1dc28 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Wed, 26 Feb 2020 15:30:07 -0800 Subject: Install the coloredlogs package This makes it easy to add colour to the logs. Colorama is also installed if on a Windows system. 
--- Pipfile | 2 ++ Pipfile.lock | 104 ++++++++++++++++++++++++++++++++++++----------------------- 2 files changed, 66 insertions(+), 40 deletions(-) diff --git a/Pipfile b/Pipfile index 400e64c18..88aacf6a8 100644 --- a/Pipfile +++ b/Pipfile @@ -19,6 +19,8 @@ requests = "~=2.22" more_itertools = "~=7.2" urllib3 = ">=1.24.2,<1.25" sentry-sdk = "~=0.14" +coloredlogs = "~=14.0" +colorama = {version = "~=0.4.3", sys_platform = "== 'win32'"} [dev-packages] coverage = "~=4.5" diff --git a/Pipfile.lock b/Pipfile.lock index fa29bf995..f645698f2 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "c7706a61eb96c06d073898018ea2dbcf5bd3b15d007496e2d60120a65647f31e" + "sha256": "f9dda521aa7816ca575b33e0f2e4e7e434682a0add9d74f0e89addae65453cd6" }, "pipfile-spec": 6, "requires": { @@ -140,6 +140,23 @@ ], "version": "==3.0.4" }, + "colorama": { + "hashes": [ + "sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff", + "sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1" + ], + "index": "pypi", + "markers": "sys_platform == 'win32'", + "version": "==0.4.3" + }, + "coloredlogs": { + "hashes": [ + "sha256:346f58aad6afd48444c2468618623638dadab76e4e70d5e10822676f2d32226a", + "sha256:a1fab193d2053aa6c0a97608c4342d031f1f93a3d1218432c59322441d31a505" + ], + "index": "pypi", + "version": "==14.0" + }, "deepdiff": { "hashes": [ "sha256:b3fa588d1eac7fa318ec1fb4f2004568e04cb120a1989feda8e5e7164bcbf07a", @@ -150,10 +167,10 @@ }, "discord-py": { "hashes": [ - "sha256:8bfe5628d31771744000f19135c386c74ac337479d7282c26cc1627b9d31f360" + "sha256:7424be26b07b37ecad4404d9383d685995a0e0b3df3f9c645bdd3a4d977b83b4" ], "index": "pypi", - "version": "==1.3.1" + "version": "==1.3.2" }, "docutils": { "hashes": [ @@ -170,6 +187,13 @@ "index": "pypi", "version": "==0.18.0" }, + "humanfriendly": { + "hashes": [ + "sha256:5e5c2b82fb58dcea413b48ab2a7381baa5e246d47fe94241d7d83724c11c0565", + "sha256:a9a41074c24dc5d6486e8784dc8f057fec8b963217e941c25fb7c7c383a4a1c1" + ], + "version": "==7.1.1" + }, "idna": { "hashes": [ "sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb", @@ -279,25 +303,25 @@ }, "multidict": { "hashes": [ - "sha256:13f3ebdb5693944f52faa7b2065b751cb7e578b8dd0a5bb8e4ab05ad0188b85e", - "sha256:26502cefa86d79b86752e96639352c7247846515c864d7c2eb85d036752b643c", - "sha256:4fba5204d32d5c52439f88437d33ad14b5f228e25072a192453f658bddfe45a7", - "sha256:527124ef435f39a37b279653ad0238ff606b58328ca7989a6df372fd75d7fe26", - "sha256:5414f388ffd78c57e77bd253cf829373721f450613de53dc85a08e34d806e8eb", - "sha256:5eee66f882ab35674944dfa0d28b57fa51e160b4dce0ce19e47f495fdae70703", - "sha256:63810343ea07f5cd86ba66ab66706243a6f5af075eea50c01e39b4ad6bc3c57a", - "sha256:6bd10adf9f0d6a98ccc792ab6f83d18674775986ba9bacd376b643fe35633357", - "sha256:83c6ddf0add57c6b8a7de0bc7e2d656be3eefeff7c922af9a9aae7e49f225625", - "sha256:93166e0f5379cf6cd29746989f8a594fa7204dcae2e9335ddba39c870a287e1c", - "sha256:9a7b115ee0b9b92d10ebc246811d8f55d0c57e82dbb6a26b23c9a9a6ad40ce0c", - "sha256:a38baa3046cce174a07a59952c9f876ae8875ef3559709639c17fdf21f7b30dd", - "sha256:a6d219f49821f4b2c85c6d426346a5d84dab6daa6f85ca3da6c00ed05b54022d", - "sha256:a8ed33e8f9b67e3b592c56567135bb42e7e0e97417a4b6a771e60898dfd5182b", - "sha256:d7d428488c67b09b26928950a395e41cc72bb9c3d5abfe9f0521940ee4f796d4", - "sha256:dcfed56aa085b89d644af17442cdc2debaa73388feba4b8026446d168ca8dad7", - "sha256:f29b885e4903bd57a7789f09fe9d60b6475a6c1a4c0eca874d8558f00f9d4b51" - ], - "version": "==4.7.4" + 
"sha256:317f96bc0950d249e96d8d29ab556d01dd38888fbe68324f46fd834b430169f1", + "sha256:42f56542166040b4474c0c608ed051732033cd821126493cf25b6c276df7dd35", + "sha256:4b7df040fb5fe826d689204f9b544af469593fb3ff3a069a6ad3409f742f5928", + "sha256:544fae9261232a97102e27a926019100a9db75bec7b37feedd74b3aa82f29969", + "sha256:620b37c3fea181dab09267cd5a84b0f23fa043beb8bc50d8474dd9694de1fa6e", + "sha256:6e6fef114741c4d7ca46da8449038ec8b1e880bbe68674c01ceeb1ac8a648e78", + "sha256:7774e9f6c9af3f12f296131453f7b81dabb7ebdb948483362f5afcaac8a826f1", + "sha256:85cb26c38c96f76b7ff38b86c9d560dea10cf3459bb5f4caf72fc1bb932c7136", + "sha256:a326f4240123a2ac66bb163eeba99578e9d63a8654a59f4688a79198f9aa10f8", + "sha256:ae402f43604e3b2bc41e8ea8b8526c7fa7139ed76b0d64fc48e28125925275b2", + "sha256:aee283c49601fa4c13adc64c09c978838a7e812f85377ae130a24d7198c0331e", + "sha256:b51249fdd2923739cd3efc95a3d6c363b67bbf779208e9f37fd5e68540d1a4d4", + "sha256:bb519becc46275c594410c6c28a8a0adc66fe24fef154a9addea54c1adb006f5", + "sha256:c2c37185fb0af79d5c117b8d2764f4321eeb12ba8c141a95d0aa8c2c1d0a11dd", + "sha256:dc561313279f9d05a3d0ffa89cd15ae477528ea37aa9795c4654588a3287a9ab", + "sha256:e439c9a10a95cb32abd708bb8be83b2134fa93790a4fb0535ca36db3dda94d20", + "sha256:fc3b4adc2ee8474cb3cd2a155305d5f8eda0a9c91320f83e55748e1fcb68f8e3" + ], + "version": "==4.7.5" }, "ordered-set": { "hashes": [ @@ -415,11 +439,11 @@ }, "sentry-sdk": { "hashes": [ - "sha256:b06dd27391fd11fb32f84fe054e6a64736c469514a718a99fb5ce1dff95d6b28", - "sha256:e023da07cfbead3868e1e2ba994160517885a32dfd994fc455b118e37989479b" + "sha256:480eee754e60bcae983787a9a13bc8f155a111aef199afaa4f289d6a76aa622a", + "sha256:a920387dc3ee252a66679d0afecd34479fb6fc52c2bc20763793ed69e5b0dcc0" ], "index": "pypi", - "version": "==0.14.1" + "version": "==0.14.2" }, "six": { "hashes": [ @@ -437,18 +461,18 @@ }, "soupsieve": { "hashes": [ - "sha256:bdb0d917b03a1369ce964056fc195cfdff8819c40de04695a80bc813c3cfa1f5", - "sha256:e2c1c5dee4a1c36bcb790e0fabd5492d874b8ebd4617622c4f6a731701060dda" + "sha256:e914534802d7ffd233242b785229d5ba0766a7f487385e3f714446a07bf540ae", + "sha256:fcd71e08c0aee99aca1b73f45478549ee7e7fc006d51b37bec9e9def7dc22b69" ], - "version": "==1.9.5" + "version": "==2.0" }, "sphinx": { "hashes": [ - "sha256:525527074f2e0c2585f68f73c99b4dc257c34bbe308b27f5f8c7a6e20642742f", - "sha256:543d39db5f82d83a5c1aa0c10c88f2b6cff2da3e711aa849b2c627b4b403bbd9" + "sha256:776ff8333181138fae52df65be733127539623bb46cc692e7fa0fcfc80d7aa88", + "sha256:ca762da97c3b5107cbf0ab9e11d3ec7ab8d3c31377266fd613b962ed971df709" ], "index": "pypi", - "version": "==2.4.2" + "version": "==2.4.3" }, "sphinxcontrib-applehelp": { "hashes": [ @@ -466,10 +490,10 @@ }, "sphinxcontrib-htmlhelp": { "hashes": [ - "sha256:4670f99f8951bd78cd4ad2ab962f798f5618b17675c35c5ac3b2132a14ea8422", - "sha256:d4fd39a65a625c9df86d7fa8a2d9f3cd8299a3a4b15db63b50aac9e161d8eff7" + "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f", + "sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b" ], - "version": "==1.0.2" + "version": "==1.0.3" }, "sphinxcontrib-jsmath": { "hashes": [ @@ -581,10 +605,10 @@ }, "cfgv": { "hashes": [ - "sha256:04b093b14ddf9fd4d17c53ebfd55582d27b76ed30050193c14e560770c5360eb", - "sha256:f22b426ed59cd2ab2b54ff96608d846c33dfb8766a67f0b4a6ce130ce244414f" + "sha256:1ccf53320421aeeb915275a196e23b3b8ae87dea8ac6698b1638001d4a486d53", + "sha256:c8e8f552ffcc6194f4e18dd4f68d9aef0c0d58ae7e7be8c82bee3c5e9edfa513" ], - "version": "==3.0.0" + "version": "==3.1.0" }, "chardet": { "hashes": 
[ @@ -913,10 +937,10 @@ }, "virtualenv": { "hashes": [ - "sha256:08f3623597ce73b85d6854fb26608a6f39ee9d055c81178dc6583803797f8994", - "sha256:de2cbdd5926c48d7b84e0300dea9e8f276f61d186e8e49223d71d91250fbaebd" + "sha256:30ea90b21dabd11da5f509710ad3be2ae47d40ccbc717dfdd2efe4367c10f598", + "sha256:4a36a96d785428278edd389d9c36d763c5755844beb7509279194647b1ef47f1" ], - "version": "==20.0.4" + "version": "==20.0.7" }, "zipp": { "hashes": [ -- cgit v1.2.3 From 69440ead8d0592bf129ae046cb3565c24816c69c Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Thu, 27 Feb 2020 08:00:37 -0800 Subject: Make logs coloured! --- bot/__init__.py | 30 +++++++++++++++++++++++------- 1 file changed, 23 insertions(+), 7 deletions(-) diff --git a/bot/__init__.py b/bot/__init__.py index f7a410706..c9dbc3f40 100644 --- a/bot/__init__.py +++ b/bot/__init__.py @@ -1,9 +1,11 @@ import logging import os import sys -from logging import Logger, StreamHandler, handlers +from logging import Logger, handlers from pathlib import Path +import coloredlogs + TRACE_LEVEL = logging.TRACE = 5 logging.addLevelName(TRACE_LEVEL, "TRACE") @@ -25,10 +27,9 @@ Logger.trace = monkeypatch_trace DEBUG_MODE = 'local' in os.environ.get("SITE_URL", "local") -log_format = logging.Formatter("%(asctime)s | %(name)s | %(levelname)s | %(message)s") - -stream_handler = StreamHandler(stream=sys.stdout) -stream_handler.setFormatter(log_format) +log_level = TRACE_LEVEL if DEBUG_MODE else logging.INFO +format_string = "%(asctime)s | %(name)s | %(levelname)s | %(message)s" +log_format = logging.Formatter(format_string) log_file = Path("logs", "bot.log") log_file.parent.mkdir(exist_ok=True) @@ -36,10 +37,25 @@ file_handler = handlers.RotatingFileHandler(log_file, maxBytes=5242880, backupCo file_handler.setFormatter(log_format) root_log = logging.getLogger() -root_log.setLevel(TRACE_LEVEL if DEBUG_MODE else logging.INFO) -root_log.addHandler(stream_handler) +root_log.setLevel(log_level) root_log.addHandler(file_handler) +if "COLOREDLOGS_LEVEL_STYLES" not in os.environ: + coloredlogs.DEFAULT_LEVEL_STYLES = { + **coloredlogs.DEFAULT_LEVEL_STYLES, + "trace": {"color": 246}, + "critical": {"background": "red"}, + "debug": coloredlogs.DEFAULT_LEVEL_STYLES["info"] + } + +if "COLOREDLOGS_LOG_FORMAT" not in os.environ: + coloredlogs.DEFAULT_LOG_FORMAT = format_string + +if "COLOREDLOGS_LOG_LEVEL" not in os.environ: + coloredlogs.DEFAULT_LOG_LEVEL = log_level + +coloredlogs.install(logger=root_log, stream=sys.stdout) + logging.getLogger("discord").setLevel(logging.WARNING) logging.getLogger("websockets").setLevel(logging.WARNING) logging.getLogger(__name__) -- cgit v1.2.3 From e602889dedb9bd5db42a55274c11a74f42b9b700 Mon Sep 17 00:00:00 2001 From: Spencer Young Date: Sun, 1 Mar 2020 06:44:16 -0800 Subject: Optimize Dockerfile --- Dockerfile | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 271c25050..2fba8cf68 100644 --- a/Dockerfile +++ b/Dockerfile @@ -9,12 +9,15 @@ ENV PIP_NO_CACHE_DIR=false \ # Install pipenv RUN pip install -U pipenv -# Copy project files into working directory +# Create the working directory WORKDIR /bot -COPY . . # Install project dependencies +COPY Pipfile* ./ RUN pipenv install --system --deploy +# Copy the source code in last to optimize rebuilding the image +COPY . . + ENTRYPOINT ["python3"] CMD ["-m", "bot"] -- cgit v1.2.3 From 515075fb6a67e7fa7c299ec03a1e601d80da9cdd Mon Sep 17 00:00:00 2001 From: "S. 
Co1" Date: Wed, 26 Feb 2020 19:35:31 -0500 Subject: Add logging to antimalware cog & expand user feedback * Add generic handling for multi-file uploads * Log user, id, and blocked extensions * Provide the full list of attachment filenames as a logging extra * Provide feedback on all blacklisted file types uploaded --- bot/cogs/antimalware.py | 41 ++++++++++++++++++++++++----------------- 1 file changed, 24 insertions(+), 17 deletions(-) diff --git a/bot/cogs/antimalware.py b/bot/cogs/antimalware.py index 9e9e81364..373619895 100644 --- a/bot/cogs/antimalware.py +++ b/bot/cogs/antimalware.py @@ -1,4 +1,5 @@ import logging +from os.path import splitext from discord import Embed, Message, NotFound from discord.ext.commands import Cog @@ -28,24 +29,30 @@ class AntiMalware(Cog): return embed = Embed() - for attachment in message.attachments: - filename = attachment.filename.lower() - if filename.endswith('.py'): - embed.description = ( - f"It looks like you tried to attach a Python file - please " - f"use a code-pasting service such as {URLs.site_schema}{URLs.site_paste}" - ) - break # Other detections irrelevant because we prioritize the .py message. - if not filename.endswith(tuple(AntiMalwareConfig.whitelist)): - whitelisted_types = ', '.join(AntiMalwareConfig.whitelist) - meta_channel = self.bot.get_channel(Channels.meta) - embed.description = ( - f"It looks like you tried to attach a file type that we " - f"do not allow. We currently allow the following file " - f"types: **{whitelisted_types}**. \n\n Feel free to ask " - f"in {meta_channel.mention} if you think this is a mistake." - ) + file_extensions = {splitext(message.filename.lower())[1] for message in message.attachments} + extensions_blocked = file_extensions - set(AntiMalwareConfig.whitelist) + if ".py" in extensions_blocked: + # Short-circuit on *.py files to provide a pastebin link + embed.description = ( + "It looks like you tried to attach a Python file - " + f"please use a code-pasting service such as {URLs.site_schema}{URLs.site_paste}" + ) + elif extensions_blocked: + blocked_extensions_str = ', '.join(extensions_blocked) + whitelisted_types = ', '.join(AntiMalwareConfig.whitelist) + meta_channel = self.bot.get_channel(Channels.meta) + embed.description = ( + f"It looks like you tried to attach file type(s) that we do not allow ({blocked_extensions_str}). " + f"We currently allow the following file types: **{whitelisted_types}**.\n\n" + f"Feel free to ask in {meta_channel.mention} if you think this is a mistake." + ) + if embed.description: + log.info( + f"User '{message.author}' ({message.author.id}) uploaded blacklisted file(s): {blocked_extensions_str}", + extra={"attachment_list": [attachment.filename for attachment in message.attachments]} + ) + await message.channel.send(f"Hey {message.author.mention}!", embed=embed) # Delete the offending message: -- cgit v1.2.3 From dfade671e0a04aacde5c7d6bca290fc4c69dcc58 Mon Sep 17 00:00:00 2001 From: "S. Co1" Date: Sun, 1 Mar 2020 13:33:22 -0500 Subject: Bump Dependencies & Relock * Remove explicit urllib3 pinning, CVE that caused its pinning has been resolved by 1.25+. This is a child dependency of requests. 
--- Pipfile | 13 +++--- Pipfile.lock | 141 +++++++++++++++++++++++++++++------------------------------ 2 files changed, 76 insertions(+), 78 deletions(-) diff --git a/Pipfile b/Pipfile index 9ac32886a..64760f9dd 100644 --- a/Pipfile +++ b/Pipfile @@ -16,25 +16,24 @@ aio-pika = "~=6.1" python-dateutil = "~=2.8" deepdiff = "~=4.0" requests = "~=2.22" -more_itertools = "~=7.2" -urllib3 = ">=1.24.2,<1.25" +more_itertools = "~=8.2" sentry-sdk = "~=0.14" coloredlogs = "~=14.0" colorama = {version = "~=0.4.3", sys_platform = "== 'win32'"} [dev-packages] -coverage = "~=4.5" +coverage = "~=5.0" flake8 = "~=3.7" flake8-annotations = "~=2.0" -flake8-bugbear = "~=19.8" +flake8-bugbear = "~=20.1" flake8-docstrings = "~=1.4" flake8-import-order = "~=0.18" flake8-string-format = "~=0.2" -flake8-tidy-imports = "~=2.0" +flake8-tidy-imports = "~=4.0" flake8-todo = "~=0.7" -pre-commit = "~=1.18" +pre-commit = "~=2.1" safety = "~=1.8" -unittest-xml-reporting = "~=2.5" +unittest-xml-reporting = "~=3.0" dodgy = "~=0.1" [requires] diff --git a/Pipfile.lock b/Pipfile.lock index 91d7d5430..9953aab40 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "1128d3fc064359337cba08ddf7236982c09f714ca148861a22c4ef623e728c49" + "sha256": "fae6dcdb6a5ebf27e8ea5044f4ca2ab854774d17affb5fd64ac85f8d0ae71187" }, "pipfile-spec": 6, "requires": { @@ -189,10 +189,10 @@ }, "humanfriendly": { "hashes": [ - "sha256:5e5c2b82fb58dcea413b48ab2a7381baa5e246d47fe94241d7d83724c11c0565", - "sha256:a9a41074c24dc5d6486e8784dc8f057fec8b963217e941c25fb7c7c383a4a1c1" + "sha256:cbe04ecf964ccb951a578f396091f258448ca4b4b4c6d4b6194f48ef458fe991", + "sha256:e8e2e4524409e55d5c5cbbb4c555a0c0a9599d5e8f74d0ce1ac504ba51ad1cd2" ], - "version": "==7.1.1" + "version": "==7.2" }, "idna": { "hashes": [ @@ -295,11 +295,11 @@ }, "more-itertools": { "hashes": [ - "sha256:409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832", - "sha256:92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4" + "sha256:5dd8bcf33e5f9513ffa06d5ad33d78f31e1931ac9a18f33d37e77a180d393a7c", + "sha256:b1ddb932186d8a6ac451e1d95844b382f55e12686d51ca0c68b6f61f2ab7a507" ], "index": "pypi", - "version": "==7.2.0" + "version": "==8.2.0" }, "multidict": { "hashes": [ @@ -379,7 +379,8 @@ }, "pycparser": { "hashes": [ - "sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3" + "sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3", + "sha256:fd64020e8a5e0369de455adf9f22795a90fdb74e6bb999e9a13fd26b54f533ef" ], "version": "==2.19" }, @@ -397,6 +398,13 @@ ], "version": "==2.4.6" }, + "pyreadline": { + "hashes": [ + "sha256:4530592fc2e85b25b1a9f79664433da09237c1a270e4d78ea5aa3a2c7229e2d1" + ], + "markers": "sys_platform == 'win32'", + "version": "==2.1" + }, "python-dateutil": { "hashes": [ "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c", @@ -518,11 +526,10 @@ }, "urllib3": { "hashes": [ - "sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4", - "sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb" + "sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc", + "sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc" ], - "index": "pypi", - "version": "==1.24.3" + "version": "==1.25.8" }, "websockets": { "hashes": [ @@ -582,13 +589,6 @@ ], "version": "==1.4.3" }, - "aspy.yaml": { - "hashes": [ - "sha256:463372c043f70160a9ec950c3f1e4c3a82db5fca01d334b6bc89c7164d744bdc", - 
"sha256:e7c742382eff2caed61f87a39d13f99109088e5e93f04d76eb8d4b28aa143f45" - ], - "version": "==1.3.0" - }, "attrs": { "hashes": [ "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", @@ -626,45 +626,45 @@ }, "coverage": { "hashes": [ - "sha256:08907593569fe59baca0bf152c43f3863201efb6113ecb38ce7e97ce339805a6", - "sha256:0be0f1ed45fc0c185cfd4ecc19a1d6532d72f86a2bac9de7e24541febad72650", - "sha256:141f08ed3c4b1847015e2cd62ec06d35e67a3ac185c26f7635f4406b90afa9c5", - "sha256:19e4df788a0581238e9390c85a7a09af39c7b539b29f25c89209e6c3e371270d", - "sha256:23cc09ed395b03424d1ae30dcc292615c1372bfba7141eb85e11e50efaa6b351", - "sha256:245388cda02af78276b479f299bbf3783ef0a6a6273037d7c60dc73b8d8d7755", - "sha256:331cb5115673a20fb131dadd22f5bcaf7677ef758741312bee4937d71a14b2ef", - "sha256:386e2e4090f0bc5df274e720105c342263423e77ee8826002dcffe0c9533dbca", - "sha256:3a794ce50daee01c74a494919d5ebdc23d58873747fa0e288318728533a3e1ca", - "sha256:60851187677b24c6085248f0a0b9b98d49cba7ecc7ec60ba6b9d2e5574ac1ee9", - "sha256:63a9a5fc43b58735f65ed63d2cf43508f462dc49857da70b8980ad78d41d52fc", - "sha256:6b62544bb68106e3f00b21c8930e83e584fdca005d4fffd29bb39fb3ffa03cb5", - "sha256:6ba744056423ef8d450cf627289166da65903885272055fb4b5e113137cfa14f", - "sha256:7494b0b0274c5072bddbfd5b4a6c6f18fbbe1ab1d22a41e99cd2d00c8f96ecfe", - "sha256:826f32b9547c8091679ff292a82aca9c7b9650f9fda3e2ca6bf2ac905b7ce888", - "sha256:93715dffbcd0678057f947f496484e906bf9509f5c1c38fc9ba3922893cda5f5", - "sha256:9a334d6c83dfeadae576b4d633a71620d40d1c379129d587faa42ee3e2a85cce", - "sha256:af7ed8a8aa6957aac47b4268631fa1df984643f07ef00acd374e456364b373f5", - "sha256:bf0a7aed7f5521c7ca67febd57db473af4762b9622254291fbcbb8cd0ba5e33e", - "sha256:bf1ef9eb901113a9805287e090452c05547578eaab1b62e4ad456fcc049a9b7e", - "sha256:c0afd27bc0e307a1ffc04ca5ec010a290e49e3afbe841c5cafc5c5a80ecd81c9", - "sha256:dd579709a87092c6dbee09d1b7cfa81831040705ffa12a1b248935274aee0437", - "sha256:df6712284b2e44a065097846488f66840445eb987eb81b3cc6e4149e7b6982e1", - "sha256:e07d9f1a23e9e93ab5c62902833bf3e4b1f65502927379148b6622686223125c", - "sha256:e2ede7c1d45e65e209d6093b762e98e8318ddeff95317d07a27a2140b80cfd24", - "sha256:e4ef9c164eb55123c62411f5936b5c2e521b12356037b6e1c2617cef45523d47", - "sha256:eca2b7343524e7ba246cab8ff00cab47a2d6d54ada3b02772e908a45675722e2", - "sha256:eee64c616adeff7db37cc37da4180a3a5b6177f5c46b187894e633f088fb5b28", - "sha256:ef824cad1f980d27f26166f86856efe11eff9912c4fed97d3804820d43fa550c", - "sha256:efc89291bd5a08855829a3c522df16d856455297cf35ae827a37edac45f466a7", - "sha256:fa964bae817babece5aa2e8c1af841bebb6d0b9add8e637548809d040443fee0", - "sha256:ff37757e068ae606659c28c3bd0d923f9d29a85de79bf25b2b34b148473b5025" - ], - "index": "pypi", - "version": "==4.5.4" + "sha256:15cf13a6896048d6d947bf7d222f36e4809ab926894beb748fc9caa14605d9c3", + "sha256:1daa3eceed220f9fdb80d5ff950dd95112cd27f70d004c7918ca6dfc6c47054c", + "sha256:1e44a022500d944d42f94df76727ba3fc0a5c0b672c358b61067abb88caee7a0", + "sha256:25dbf1110d70bab68a74b4b9d74f30e99b177cde3388e07cc7272f2168bd1477", + "sha256:3230d1003eec018ad4a472d254991e34241e0bbd513e97a29727c7c2f637bd2a", + "sha256:3dbb72eaeea5763676a1a1efd9b427a048c97c39ed92e13336e726117d0b72bf", + "sha256:5012d3b8d5a500834783689a5d2292fe06ec75dc86ee1ccdad04b6f5bf231691", + "sha256:51bc7710b13a2ae0c726f69756cf7ffd4362f4ac36546e243136187cfcc8aa73", + "sha256:527b4f316e6bf7755082a783726da20671a0cc388b786a64417780b90565b987", + "sha256:722e4557c8039aad9592c6a4213db75da08c2cd9945320220634f637251c3894", + 
"sha256:76e2057e8ffba5472fd28a3a010431fd9e928885ff480cb278877c6e9943cc2e", + "sha256:77afca04240c40450c331fa796b3eab6f1e15c5ecf8bf2b8bee9706cd5452fef", + "sha256:7afad9835e7a651d3551eab18cbc0fdb888f0a6136169fbef0662d9cdc9987cf", + "sha256:9bea19ac2f08672636350f203db89382121c9c2ade85d945953ef3c8cf9d2a68", + "sha256:a8b8ac7876bc3598e43e2603f772d2353d9931709345ad6c1149009fd1bc81b8", + "sha256:b0840b45187699affd4c6588286d429cd79a99d509fe3de0f209594669bb0954", + "sha256:b26aaf69713e5674efbde4d728fb7124e429c9466aeaf5f4a7e9e699b12c9fe2", + "sha256:b63dd43f455ba878e5e9f80ba4f748c0a2156dde6e0e6e690310e24d6e8caf40", + "sha256:be18f4ae5a9e46edae3f329de2191747966a34a3d93046dbdf897319923923bc", + "sha256:c312e57847db2526bc92b9bfa78266bfbaabac3fdcd751df4d062cd4c23e46dc", + "sha256:c60097190fe9dc2b329a0eb03393e2e0829156a589bd732e70794c0dd804258e", + "sha256:c62a2143e1313944bf4a5ab34fd3b4be15367a02e9478b0ce800cb510e3bbb9d", + "sha256:cc1109f54a14d940b8512ee9f1c3975c181bbb200306c6d8b87d93376538782f", + "sha256:cd60f507c125ac0ad83f05803063bed27e50fa903b9c2cfee3f8a6867ca600fc", + "sha256:d513cc3db248e566e07a0da99c230aca3556d9b09ed02f420664e2da97eac301", + "sha256:d649dc0bcace6fcdb446ae02b98798a856593b19b637c1b9af8edadf2b150bea", + "sha256:d7008a6796095a79544f4da1ee49418901961c97ca9e9d44904205ff7d6aa8cb", + "sha256:da93027835164b8223e8e5af2cf902a4c80ed93cb0909417234f4a9df3bcd9af", + "sha256:e69215621707119c6baf99bda014a45b999d37602cb7043d943c76a59b05bf52", + "sha256:ea9525e0fef2de9208250d6c5aeeee0138921057cd67fcef90fbed49c4d62d37", + "sha256:fca1669d464f0c9831fd10be2eef6b86f5ebd76c724d1e0706ebdff86bb4adf0" + ], + "index": "pypi", + "version": "==5.0.3" }, "distlib": { "hashes": [ - "sha256:2e166e231a26b36d6dfe35a48c4464346620f8645ed0ace01ee31822b288de21" + "sha256:2e166e231a26b36d6dfe35a48c4464346620f8645ed0ace01ee31822b288de21", + "sha256:9b183fb98f4870e02d315d5d17baef14be74c339d827346cae544f5597698555" ], "version": "==0.3.0" }, @@ -715,11 +715,11 @@ }, "flake8-bugbear": { "hashes": [ - "sha256:d8c466ea79d5020cb20bf9f11cf349026e09517a42264f313d3f6fddb83e0571", - "sha256:ded4d282778969b5ab5530ceba7aa1a9f1b86fa7618fc96a19a1d512331640f8" + "sha256:a3ddc03ec28ba2296fc6f89444d1c946a6b76460f859795b35b77d4920a51b63", + "sha256:bd02e4b009fb153fe6072c31c52aeab5b133d508095befb2ffcf3b41c4823162" ], "index": "pypi", - "version": "==19.8.0" + "version": "==20.1.4" }, "flake8-docstrings": { "hashes": [ @@ -747,11 +747,11 @@ }, "flake8-tidy-imports": { "hashes": [ - "sha256:1c476aabc6e8db26dc75278464a3a392dba0ea80562777c5f13fd5cdf2646154", - "sha256:b3f5b96affd0f57cacb6621ed28286ce67edaca807757b51227043ebf7b136a1" + "sha256:8aa34384b45137d4cf33f5818b8e7897dc903b1d1e10a503fa7dd193a9a710ba", + "sha256:b26461561bcc80e8012e46846630ecf0aaa59314f362a94cb7800dfdb32fa413" ], "index": "pypi", - "version": "==2.0.0" + "version": "==4.0.0" }, "flake8-todo": { "hashes": [ @@ -796,11 +796,11 @@ }, "pre-commit": { "hashes": [ - "sha256:8f48d8637bdae6fa70cc97db9c1dd5aa7c5c8bf71968932a380628c25978b850", - "sha256:f92a359477f3252452ae2e8d3029de77aec59415c16ae4189bcfba40b757e029" + "sha256:09ebe467f43ce24377f8c2f200fe3cd2570d328eb2ce0568c8e96ce19da45fa6", + "sha256:f8d555e31e2051892c7f7b3ad9f620bd2c09271d87e9eedb2ad831737d6211eb" ], "index": "pypi", - "version": "==1.21.0" + "version": "==2.1.1" }, "pycodestyle": { "hashes": [ @@ -886,19 +886,18 @@ }, "unittest-xml-reporting": { "hashes": [ - "sha256:358bbdaf24a26d904cc1c26ef3078bca7fc81541e0a54c8961693cc96a6f35e0", - 
"sha256:9d28ddf6524cf0ff9293f61bd12e792de298f8561a5c945acea63fb437789e0e" + "sha256:74eaf7739a7957a74f52b8187c5616f61157372189bef0a32ba5c30bbc00e58a", + "sha256:e09b8ae70cce9904cdd331f53bf929150962869a5324ab7ff3dd6c8b87e01f7d" ], "index": "pypi", - "version": "==2.5.2" + "version": "==3.0.2" }, "urllib3": { "hashes": [ - "sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4", - "sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb" + "sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc", + "sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc" ], - "index": "pypi", - "version": "==1.24.3" + "version": "==1.25.8" }, "virtualenv": { "hashes": [ -- cgit v1.2.3 From 9cca41b5d8943212b38eae3ae78e6472577ba6c1 Mon Sep 17 00:00:00 2001 From: "S. Co1" Date: Sun, 1 Mar 2020 13:59:17 -0500 Subject: Move syncer confirmation reaction check out of finally clause Returning directly out of a `finally` clause can cause any exceptions raised in the clause to be discarded, so we can remove the finally clause entirely and shift the control statements into the body of the function --- bot/cogs/sync/syncers.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/bot/cogs/sync/syncers.py b/bot/cogs/sync/syncers.py index d6891168f..c7ce54d65 100644 --- a/bot/cogs/sync/syncers.py +++ b/bot/cogs/sync/syncers.py @@ -125,17 +125,17 @@ class Syncer(abc.ABC): except TimeoutError: # reaction will remain none thus sync will be aborted in the finally block below. log.debug(f"The {self.name} syncer confirmation prompt timed out.") - finally: - if str(reaction) == constants.Emojis.check_mark: - log.trace(f"The {self.name} syncer was confirmed.") - await message.edit(content=f':ok_hand: {mention}{self.name} sync will proceed.') - return True - else: - log.warning(f"The {self.name} syncer was aborted or timed out!") - await message.edit( - content=f':warning: {mention}{self.name} sync aborted or timed out!' - ) - return False + + if str(reaction) == constants.Emojis.check_mark: + log.trace(f"The {self.name} syncer was confirmed.") + await message.edit(content=f':ok_hand: {mention}{self.name} sync will proceed.') + return True + else: + log.warning(f"The {self.name} syncer was aborted or timed out!") + await message.edit( + content=f':warning: {mention}{self.name} sync aborted or timed out!' + ) + return False @abc.abstractmethod async def _get_diff(self, guild: Guild) -> _Diff: -- cgit v1.2.3 From 91c6bcd0dfbaad201ee47af2ee7e36e4f372a115 Mon Sep 17 00:00:00 2001 From: "S. 
Co1" Date: Sun, 1 Mar 2020 14:27:14 -0500 Subject: Modify log test regex to be non-os-specific Previous regex utilized a `/`, which doesn't work for comparing against Windows paths, which use `\` --- tests/test_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_base.py b/tests/test_base.py index 235a2ee6c..a7db4bf3e 100644 --- a/tests/test_base.py +++ b/tests/test_base.py @@ -29,7 +29,7 @@ class LoggingTestCaseTests(unittest.TestCase): """Test if LoggingTestCase.assertNotLogs raises AssertionError when logs were emitted.""" msg_regex = ( r"1 logs of DEBUG or higher were triggered on root:\n" - r'' + r'' ) with self.assertRaisesRegex(AssertionError, msg_regex): with LoggingTestCase.assertNotLogs(self, level=logging.DEBUG): -- cgit v1.2.3 From db1f74654f1999eaa9b0ae9d8dc49b073222f70b Mon Sep 17 00:00:00 2001 From: Joseph Date: Sun, 1 Mar 2020 21:03:54 +0000 Subject: Add grabify (IP logger) domains to banned domains --- config-default.yml | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/config-default.yml b/config-default.yml index ab237423f..9beb610cc 100644 --- a/config-default.yml +++ b/config-default.yml @@ -284,6 +284,30 @@ filter: domain_blacklist: - pornhub.com - liveleak.com + - grabify.link + - bmwforum.co + - leancoding.co + - spottyfly.com + - stopify.co + - yoütu.be + - discörd.com + - minecräft.com + - freegiftcards.co + - disçordapp.com + - fortnight.space + - fortnitechat.site + - joinmy.site + - curiouscat.club + - catsnthings.fun + - yourtube.site + - youtubeshort.watch + - catsnthing.com + - youtubeshort.pro + - canadianlumberjacks.online + - poweredbydialup.club + - poweredbydialup.online + - poweredbysecurity.org + - poweredbysecurity.online word_watchlist: - goo+ks* -- cgit v1.2.3 From cec9d77a0f8a738f74d02848265a6af86a9cc2d4 Mon Sep 17 00:00:00 2001 From: Leon Sandøy Date: Mon, 2 Mar 2020 13:10:40 +0100 Subject: Adding helpers to the Filtering whitelist Resolves an issue mentioned in https://github.com/python-discord/bot/issues/767, giving Helpers access to post invites and other things caught by the Filtering cog. --- config-default.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/config-default.yml b/config-default.yml index 9beb610cc..5788d1e12 100644 --- a/config-default.yml +++ b/config-default.yml @@ -353,6 +353,7 @@ filter: - *ADMINS_ROLE - *MODS_ROLE - *OWNERS_ROLE + - *HELPERS_ROLE - *PY_COMMUNITY_ROLE -- cgit v1.2.3 From 28bcbf334eb08dfcd35b898b7cb803338664ee61 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Mon, 2 Mar 2020 09:42:00 -0800 Subject: Add more pre-commit hooks * Remove trailing whitespaces * Specify error code for a noqa in the free command --- .pre-commit-config.yaml | 23 ++++++++++++++++++++--- CONTRIBUTING.md | 2 +- bot/cogs/free.py | 2 +- tests/README.md | 10 +++++----- 4 files changed, 27 insertions(+), 10 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 860357868..4bb5e7e1c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,10 +1,27 @@ +exclude: ^\.cache/|\.venv/|\.git/|htmlcov/|logs/ repos: -- repo: local + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v2.5.0 hooks: - - id: flake8 + - id: check-merge-conflict + - id: check-toml + - id: check-yaml + args: [--unsafe] # Required due to custom constructors (e.g. 
!ENV) + - id: end-of-file-fixer + - id: mixed-line-ending + args: [--fix=lf] + - id: trailing-whitespace + args: [--markdown-linebreak-ext=md] + - repo: https://github.com/pre-commit/pygrep-hooks + rev: v1.5.1 + hooks: + - id: python-check-blanket-noqa + - repo: local + hooks: + - id: flake8 name: Flake8 description: This hook runs flake8 within our project's pipenv environment. entry: pipenv run lint language: python types: [python] - require_serial: true \ No newline at end of file + require_serial: true diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 39f76c7b4..61d11f844 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -43,7 +43,7 @@ To provide a standalone development environment for this project, docker compose When pulling down changes from GitHub, remember to sync your environment using `pipenv sync --dev` to ensure you're using the most up-to-date versions the project's dependencies. ### Type Hinting -[PEP 484](https://www.python.org/dev/peps/pep-0484/) formally specifies type hints for Python functions, added to the Python Standard Library in version 3.5. Type hints are recognized by most modern code editing tools and provide useful insight into both the input and output types of a function, preventing the user from having to go through the codebase to determine these types. +[PEP 484](https://www.python.org/dev/peps/pep-0484/) formally specifies type hints for Python functions, added to the Python Standard Library in version 3.5. Type hints are recognized by most modern code editing tools and provide useful insight into both the input and output types of a function, preventing the user from having to go through the codebase to determine these types. For example: diff --git a/bot/cogs/free.py b/bot/cogs/free.py index 02c02d067..33b55e79a 100644 --- a/bot/cogs/free.py +++ b/bot/cogs/free.py @@ -55,7 +55,7 @@ class Free(Cog): msg = messages[seek - 1] # Otherwise get last message else: - msg = await channel.history(limit=1).next() # noqa (False positive) + msg = await channel.history(limit=1).next() # noqa: B305 inactive = (datetime.utcnow() - msg.created_at).seconds if inactive > TIMEOUT: diff --git a/tests/README.md b/tests/README.md index be78821bf..4f62edd68 100644 --- a/tests/README.md +++ b/tests/README.md @@ -83,7 +83,7 @@ TagContentConverter should return correct values for valid input. As we are trying to test our "units" of code independently, we want to make sure that we do not rely objects and data generated by "external" code. If we we did, then we wouldn't know if the failure we're observing was caused by the code we are actually trying to test or something external to it. -However, the features that we are trying to test often depend on those objects generated by external pieces of code. It would be difficult to test a bot command without having access to a `Context` instance. Fortunately, there's a solution for that: we use fake objects that act like the true object. We call these fake objects "mocks". +However, the features that we are trying to test often depend on those objects generated by external pieces of code. It would be difficult to test a bot command without having access to a `Context` instance. Fortunately, there's a solution for that: we use fake objects that act like the true object. We call these fake objects "mocks". To create these mock object, we mainly use the [`unittest.mock`](https://docs.python.org/3/library/unittest.mock.html) module. 
In addition, we have also defined a couple of specialized mock objects that mock specific `discord.py` types (see the section on the below.). @@ -114,13 +114,13 @@ class BotCogTests(unittest.TestCase): ### Mocking coroutines -By default, the `unittest.mock.Mock` and `unittest.mock.MagicMock` classes cannot mock coroutines, since the `__call__` method they provide is synchronous. In anticipation of the `AsyncMock` that will be [introduced in Python 3.8](https://docs.python.org/3.9/whatsnew/3.8.html#unittest), we have added an `AsyncMock` helper to [`helpers.py`](/tests/helpers.py). Do note that this drop-in replacement only implements an asynchronous `__call__` method, not the additional assertions that will come with the new `AsyncMock` type in Python 3.8. +By default, the `unittest.mock.Mock` and `unittest.mock.MagicMock` classes cannot mock coroutines, since the `__call__` method they provide is synchronous. In anticipation of the `AsyncMock` that will be [introduced in Python 3.8](https://docs.python.org/3.9/whatsnew/3.8.html#unittest), we have added an `AsyncMock` helper to [`helpers.py`](/tests/helpers.py). Do note that this drop-in replacement only implements an asynchronous `__call__` method, not the additional assertions that will come with the new `AsyncMock` type in Python 3.8. ### Special mocks for some `discord.py` types To quote Ned Batchelder, Mock objects are "automatic chameleons". This means that they will happily allow the access to any attribute or method and provide a mocked value in return. One downside to this is that if the code you are testing gets the name of the attribute wrong, your mock object will not complain and the test may still pass. -In order to avoid that, we have defined a number of Mock types in [`helpers.py`](/tests/helpers.py) that follow the specifications of the actual Discord types they are mocking. This means that trying to access an attribute or method on a mocked object that does not exist on the equivalent `discord.py` object will result in an `AttributeError`. In addition, these mocks have some sensible defaults and **pass `isinstance` checks for the types they are mocking**. +In order to avoid that, we have defined a number of Mock types in [`helpers.py`](/tests/helpers.py) that follow the specifications of the actual Discord types they are mocking. This means that trying to access an attribute or method on a mocked object that does not exist on the equivalent `discord.py` object will result in an `AttributeError`. In addition, these mocks have some sensible defaults and **pass `isinstance` checks for the types they are mocking**. These special mocks are added when they are needed, so if you think it would be sensible to add another one, feel free to propose one in your PR. @@ -144,7 +144,7 @@ Finally, there are some considerations to make when writing tests, both for writ ### Test coverage is a starting point -Having test coverage is a good starting point for unit testing: If a part of your code was not covered by a test, we know that we have not tested it properly. The reverse is unfortunately not true: Even if the code we are testing has 100% branch coverage, it does not mean it's fully tested or guaranteed to work. +Having test coverage is a good starting point for unit testing: If a part of your code was not covered by a test, we know that we have not tested it properly. The reverse is unfortunately not true: Even if the code we are testing has 100% branch coverage, it does not mean it's fully tested or guaranteed to work. 
One problem is that 100% branch coverage may be misleading if we haven't tested our code against all the realistic input it may get in production. For instance, take a look at the following `member_information` function and the test we've written for it: @@ -169,7 +169,7 @@ class FunctionsTests(unittest.TestCase): If you were to run this test, not only would the function pass the test, `coverage.py` will also tell us that the test provides 100% branch coverage for the function. Can you spot the bug the test suite did not catch? -The problem here is that we have only tested our function with a member object that had `None` for the `member.joined` attribute. This means that `member.joined.stfptime("%d-%m-%Y")` was never executed during our test, leading to us missing the spelling mistake in `stfptime` (it should be `strftime`). +The problem here is that we have only tested our function with a member object that had `None` for the `member.joined` attribute. This means that `member.joined.stfptime("%d-%m-%Y")` was never executed during our test, leading to us missing the spelling mistake in `stfptime` (it should be `strftime`). Adding another test would not increase the test coverage we have, but it does ensure that we'll notice that this function can fail with realistic data: -- cgit v1.2.3 From cc322adb7486560481cee1e2e5bf511114631d8c Mon Sep 17 00:00:00 2001 From: Mark Date: Tue, 3 Mar 2020 08:50:21 -0800 Subject: Fix typo in comment MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-Authored-By: Leon Sandøy --- bot/bot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/bot.py b/bot/bot.py index 54b092193..950ac6751 100644 --- a/bot/bot.py +++ b/bot/bot.py @@ -44,7 +44,7 @@ class Bot(commands.Bot): Will cause a DeprecationWarning if called outside a coroutine. """ - # Because discord.py recreates the HTTPClient session, may as well follow suite and recreate + # Because discord.py recreates the HTTPClient session, may as well follow suit and recreate # our own stuff here too. self._recreate() super().clear() -- cgit v1.2.3 From eddd515d864666b45f243ddc516981f10cedcda3 Mon Sep 17 00:00:00 2001 From: "S. Co1" Date: Tue, 3 Mar 2020 21:18:40 -0500 Subject: Prevent exception if a watched user sends a DM to the bot The previous embed assumed that the messages would be sent on the server, where the channel would have a name and the message would have a jump URL. For a DM, neither of these are present and an exception will be raised when attempting to construct the embed for the webhook to send. 
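To make the failure mode concrete before the diff: a minimal sketch of the guard being introduced, assuming discord.py 1.x attribute names (the helper name is made up; the exception on the old code path is most likely an AttributeError, since `DMChannel` has no `name` attribute).

    import discord

    def describe_location(msg: discord.Message) -> str:
        # The old embed description assumed a guild channel; for a DM,
        # msg.channel is a DMChannel with no .name, so building the string
        # raised before the webhook was ever called.
        if isinstance(msg.channel, discord.DMChannel):
            return "via DM"
        return f"in [#{msg.channel.name}]({msg.jump_url})"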
--- bot/cogs/watchchannels/watchchannel.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/bot/cogs/watchchannels/watchchannel.py b/bot/cogs/watchchannels/watchchannel.py index 3667a80e8..479820444 100644 --- a/bot/cogs/watchchannels/watchchannel.py +++ b/bot/cogs/watchchannels/watchchannel.py @@ -9,7 +9,7 @@ from typing import Optional import dateutil.parser import discord -from discord import Color, Embed, HTTPException, Message, errors +from discord import Color, DMChannel, Embed, HTTPException, Message, errors from discord.ext.commands import Cog, Context from bot.api import ResponseCodeError @@ -273,7 +273,14 @@ class WatchChannel(metaclass=CogABCMeta): reason = self.watched_users[user_id]['reason'] - embed = Embed(description=f"{msg.author.mention} in [#{msg.channel.name}]({msg.jump_url})") + if isinstance(msg.channel, DMChannel): + # If a watched user DMs the bot there won't be a channel name or jump URL + # This could technically include a GroupChannel but bot's can't be in those + message_jump = "via DM" + else: + message_jump = f"in [#{msg.channel.name}]({msg.jump_url})" + + embed = Embed(description=f"{msg.author.mention} {message_jump}") embed.set_footer(text=f"Added {time_delta} by {actor} | Reason: {reason}") await self.webhook_send(embed=embed, username=msg.author.display_name, avatar_url=msg.author.avatar_url) -- cgit v1.2.3 From 3b65766cf8fe095b91556efa49fabffefce5d49e Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Mon, 2 Mar 2020 10:36:02 -0800 Subject: Use pre-commit in pipenv lint script --- .pre-commit-config.yaml | 2 +- Pipfile | 2 +- azure-pipelines.yml | 12 ++++++++++-- 3 files changed, 12 insertions(+), 4 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4bb5e7e1c..f369fb7d1 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -21,7 +21,7 @@ repos: - id: flake8 name: Flake8 description: This hook runs flake8 within our project's pipenv environment. - entry: pipenv run lint + entry: pipenv run flake8 language: python types: [python] require_serial: true diff --git a/Pipfile b/Pipfile index 64760f9dd..decc13e33 100644 --- a/Pipfile +++ b/Pipfile @@ -41,7 +41,7 @@ python_version = "3.8" [scripts] start = "python -m bot" -lint = "python -m flake8" +lint = "pre-commit run --all-files" precommit = "pre-commit install" build = "docker build -t pythondiscord/bot:latest -f Dockerfile ." 
push = "docker push pythondiscord/bot:latest" diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 35dea089a..902bfcd56 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -13,10 +13,12 @@ jobs: variables: PIP_CACHE_DIR: ".cache/pip" + PRE_COMMIT_HOME: $(Pipeline.Workspace)/pre-commit-cache steps: - task: UsePythonVersion@0 displayName: 'Set Python version' + name: PythonVersion inputs: versionSpec: '3.8.x' addToPath: true @@ -27,8 +29,14 @@ jobs: - script: pipenv install --dev --deploy --system displayName: 'Install project using pipenv' - - script: python -m flake8 - displayName: 'Run linter' + - task: Cache@2 + displayName: 'Restore pre-commit environment' + inputs: + key: pre-commit | .pre-commit-config.yaml | "$(PythonVersion.pythonLocation)" + path: $(PRE_COMMIT_HOME) + + - script: pre-commit run --all-files --show-diff-on-failure + displayName: 'Run pre-commit hooks' - script: BOT_API_KEY=foo BOT_SENTRY_DSN=blah BOT_TOKEN=bar WOLFRAM_API_KEY=baz REDDIT_CLIENT_ID=spam REDDIT_SECRET=ham coverage run -m xmlrunner displayName: Run tests -- cgit v1.2.3 From f5b5298bbe77c52ae0d8b01b3c44fc22cdce35c3 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Mon, 2 Mar 2020 19:33:13 -0800 Subject: CI: add a restore key for the pre-commit cache A cache for an outdated pre-commit environment may still be useful. It may be the case that only some hooks need to be updated rather than all. --- azure-pipelines.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 902bfcd56..fa85e6045 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -32,7 +32,9 @@ jobs: - task: Cache@2 displayName: 'Restore pre-commit environment' inputs: - key: pre-commit | .pre-commit-config.yaml | "$(PythonVersion.pythonLocation)" + key: pre-commit | "$(PythonVersion.pythonLocation)" | .pre-commit-config.yaml + restoreKeys: | + pre-commit | "$(PythonVersion.pythonLocation)" path: $(PRE_COMMIT_HOME) - script: pre-commit run --all-files --show-diff-on-failure -- cgit v1.2.3 From 93f29f8bfee77957770dab3dd9adc1dac62d0bb2 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Tue, 3 Mar 2020 19:45:46 -0800 Subject: CI: mock the pipenv binary The mock gets used by the flake8 pre-commit hook, which invokes flake8 via `pipenv run flake8`. It's normally useful to use pipenv here cause it ensures flake8 is invoked within the context of the venv. However, in CI, there is no venv - dependencies are installed directly to the system site-packages. `pipenv run` does not work in such case because it tries to create a new venv if one doesn't exist (it doesn't consider the system interpreter to be a venv). This workaround (okay, it's a hack) creates an executable shell script which replaces the original pipenv binary. The shell script simply ignores the first argument (i.e. ignores `run` in `pipenv run`) and executes the rest of the arguments as a command. It essentially makes `pipenv run flake8` equivalent to just having ran `flake8`. When pre-commit executes pipenv, the aforementioned script is what will run. --- azure-pipelines.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index fa85e6045..280f11a36 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -29,6 +29,16 @@ jobs: - script: pipenv install --dev --deploy --system displayName: 'Install project using pipenv' + # Create an executable shell script which replaces the original pipenv binary. 
+ # The shell script ignores the first argument and executes the rest of the args as a command. + # It makes the `pipenv run flake8` command in the pre-commit hook work by circumventing + # pipenv entirely, which is too dumb to know it should use the system interpreter rather than + # creating a new venv. + - script: | + printf '%s\n%s' '#!/bin/bash' '"${@:2}"' > $(PythonVersion.pythonLocation)/bin/pipenv \ + && chmod +x $(PythonVersion.pythonLocation)/bin/pipenv + displayName: 'Mock pipenv binary' + - task: Cache@2 displayName: 'Restore pre-commit environment' inputs: -- cgit v1.2.3 From da94ddbcdcd644a36c329fb7ec84d1c384b8ac58 Mon Sep 17 00:00:00 2001 From: "S. Co1" Date: Tue, 3 Mar 2020 22:51:28 -0500 Subject: Add pep8-naming & relock --- Pipfile | 1 + Pipfile.lock | 41 +++++++++++++++++++++++++++-------------- 2 files changed, 28 insertions(+), 14 deletions(-) diff --git a/Pipfile b/Pipfile index decc13e33..0dcee0e3d 100644 --- a/Pipfile +++ b/Pipfile @@ -31,6 +31,7 @@ flake8-import-order = "~=0.18" flake8-string-format = "~=0.2" flake8-tidy-imports = "~=4.0" flake8-todo = "~=0.7" +pep8-naming = "~=0.9" pre-commit = "~=2.1" safety = "~=1.8" unittest-xml-reporting = "~=3.0" diff --git a/Pipfile.lock b/Pipfile.lock index 9953aab40..348456f2c 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "fae6dcdb6a5ebf27e8ea5044f4ca2ab854774d17affb5fd64ac85f8d0ae71187" + "sha256": "b8b38e84230bdc37f8c8955e8dddc442183a2e23c4dfc6ed37c522644aecdeea" }, "pipfile-spec": 6, "requires": { @@ -18,11 +18,11 @@ "default": { "aio-pika": { "hashes": [ - "sha256:4199122a450dffd8303b7857a9d82657bf1487fe329e489520833b40fbe92406", - "sha256:fe85c7456e5c060bce4eb9cffab5b2c4d3c563cb72177977b3556c54c8e3aeb6" + "sha256:0332bc13abbd8923dac657b331716778c55ea0a32ac0951306ce85edafcc916c", + "sha256:39770d8bc7e9059e28622d599e2ac9ebc16a7198b33d1743c1a496ca3b0f8170" ], "index": "pypi", - "version": "==6.5.2" + "version": "==6.5.3" }, "aiodns": { "hashes": [ @@ -189,10 +189,10 @@ }, "humanfriendly": { "hashes": [ - "sha256:cbe04ecf964ccb951a578f396091f258448ca4b4b4c6d4b6194f48ef458fe991", - "sha256:e8e2e4524409e55d5c5cbbb4c555a0c0a9599d5e8f74d0ce1ac504ba51ad1cd2" + "sha256:2f79aaa2965c0fc3d79452e64ec2c7601d70d67e51ea2e99cb40afe3fe2824c5", + "sha256:6990c0af4b72f50ddf302900eb982edf199247e621e06d80d71b00b1a1574214" ], - "version": "==7.2" + "version": "==8.0" }, "idna": { "hashes": [ @@ -379,8 +379,7 @@ }, "pycparser": { "hashes": [ - "sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3", - "sha256:fd64020e8a5e0369de455adf9f22795a90fdb74e6bb999e9a13fd26b54f533ef" + "sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3" ], "version": "==2.19" }, @@ -663,8 +662,7 @@ }, "distlib": { "hashes": [ - "sha256:2e166e231a26b36d6dfe35a48c4464346620f8645ed0ace01ee31822b288de21", - "sha256:9b183fb98f4870e02d315d5d17baef14be74c339d827346cae544f5597698555" + "sha256:2e166e231a26b36d6dfe35a48c4464346620f8645ed0ace01ee31822b288de21" ], "version": "==0.3.0" }, @@ -707,11 +705,11 @@ }, "flake8-annotations": { "hashes": [ - "sha256:19a6637a5da1bb7ea7948483ca9e2b9e15b213e687e7bf5ff8c1bfc91c185006", - "sha256:bb033b72cdd3a2b0a530bbdf2081f12fbea7d70baeaaebb5899723a45f424b8e" + "sha256:a38b44d01abd480586a92a02a2b0a36231ec42dcc5e114de78fa5db016d8d3f9", + "sha256:d5b0e8704e4e7728b352fa1464e23539ff2341ba11cc153b536fa2cf921ee659" ], "index": "pypi", - "version": "==2.0.0" + "version": "==2.0.1" }, "flake8-bugbear": { "hashes": [ @@ -737,6 +735,13 @@ "index": "pypi", 
"version": "==0.18.1" }, + "flake8-polyfill": { + "hashes": [ + "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9", + "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda" + ], + "version": "==1.0.2" + }, "flake8-string-format": { "hashes": [ "sha256:65f3da786a1461ef77fca3780b314edb2853c377f2e35069723348c8917deaa2", @@ -794,6 +799,14 @@ ], "version": "==20.1" }, + "pep8-naming": { + "hashes": [ + "sha256:45f330db8fcfb0fba57458c77385e288e7a3be1d01e8ea4268263ef677ceea5f", + "sha256:a33d38177056321a167decd6ba70b890856ba5025f0a8eca6a3eda607da93caf" + ], + "index": "pypi", + "version": "==0.9.1" + }, "pre-commit": { "hashes": [ "sha256:09ebe467f43ce24377f8c2f200fe3cd2570d328eb2ce0568c8e96ce19da45fa6", -- cgit v1.2.3 From aae928ebc06e7e7a6ed5b5b848464ce95e4ea9d8 Mon Sep 17 00:00:00 2001 From: "S. Co1" Date: Tue, 3 Mar 2020 22:53:19 -0500 Subject: Remove CaseInsensitiveDict This was added by the now-removed Snake cog & is not used elsewhere on bot. --- bot/utils/__init__.py | 57 ------------------------------------------------- tests/bot/test_utils.py | 37 -------------------------------- 2 files changed, 94 deletions(-) delete mode 100644 tests/bot/test_utils.py diff --git a/bot/utils/__init__.py b/bot/utils/__init__.py index 3e4b15ce4..9b32e515d 100644 --- a/bot/utils/__init__.py +++ b/bot/utils/__init__.py @@ -1,5 +1,4 @@ from abc import ABCMeta -from typing import Any, Hashable from discord.ext.commands import CogMeta @@ -8,59 +7,3 @@ class CogABCMeta(CogMeta, ABCMeta): """Metaclass for ABCs meant to be implemented as Cogs.""" pass - - -class CaseInsensitiveDict(dict): - """ - We found this class on StackOverflow. Thanks to m000 for writing it! - - https://stackoverflow.com/a/32888599/4022104 - """ - - @classmethod - def _k(cls, key: Hashable) -> Hashable: - """Return lowered key if a string-like is passed, otherwise pass key straight through.""" - return key.lower() if isinstance(key, str) else key - - def __init__(self, *args, **kwargs): - super(CaseInsensitiveDict, self).__init__(*args, **kwargs) - self._convert_keys() - - def __getitem__(self, key: Hashable) -> Any: - """Case insensitive __setitem__.""" - return super(CaseInsensitiveDict, self).__getitem__(self.__class__._k(key)) - - def __setitem__(self, key: Hashable, value: Any): - """Case insensitive __setitem__.""" - super(CaseInsensitiveDict, self).__setitem__(self.__class__._k(key), value) - - def __delitem__(self, key: Hashable) -> Any: - """Case insensitive __delitem__.""" - return super(CaseInsensitiveDict, self).__delitem__(self.__class__._k(key)) - - def __contains__(self, key: Hashable) -> bool: - """Case insensitive __contains__.""" - return super(CaseInsensitiveDict, self).__contains__(self.__class__._k(key)) - - def pop(self, key: Hashable, *args, **kwargs) -> Any: - """Case insensitive pop.""" - return super(CaseInsensitiveDict, self).pop(self.__class__._k(key), *args, **kwargs) - - def get(self, key: Hashable, *args, **kwargs) -> Any: - """Case insensitive get.""" - return super(CaseInsensitiveDict, self).get(self.__class__._k(key), *args, **kwargs) - - def setdefault(self, key: Hashable, *args, **kwargs) -> Any: - """Case insensitive setdefault.""" - return super(CaseInsensitiveDict, self).setdefault(self.__class__._k(key), *args, **kwargs) - - def update(self, E: Any = None, **F) -> None: - """Case insensitive update.""" - super(CaseInsensitiveDict, self).update(self.__class__(E)) - super(CaseInsensitiveDict, self).update(self.__class__(**F)) - - def _convert_keys(self) -> 
None: - """Helper method to lowercase all existing string-like keys.""" - for k in list(self.keys()): - v = super(CaseInsensitiveDict, self).pop(k) - self.__setitem__(k, v) diff --git a/tests/bot/test_utils.py b/tests/bot/test_utils.py deleted file mode 100644 index d7bcc3ba6..000000000 --- a/tests/bot/test_utils.py +++ /dev/null @@ -1,37 +0,0 @@ -import unittest - -from bot import utils - - -class CaseInsensitiveDictTests(unittest.TestCase): - """Tests for the `CaseInsensitiveDict` container.""" - - def test_case_insensitive_key_access(self): - """Tests case insensitive key access and storage.""" - instance = utils.CaseInsensitiveDict() - - key = 'LEMON' - value = 'trees' - - instance[key] = value - self.assertIn(key, instance) - self.assertEqual(instance.get(key), value) - self.assertEqual(instance.get(key.casefold()), value) - self.assertEqual(instance.pop(key.casefold()), value) - self.assertNotIn(key, instance) - self.assertNotIn(key.casefold(), instance) - - instance.setdefault(key, value) - del instance[key] - self.assertNotIn(key, instance) - - def test_initialization_from_kwargs(self): - """Tests creating the dictionary from keyword arguments.""" - instance = utils.CaseInsensitiveDict({'FOO': 'bar'}) - self.assertEqual(instance['foo'], 'bar') - - def test_update_from_other_mapping(self): - """Tests updating the dictionary from another mapping.""" - instance = utils.CaseInsensitiveDict() - instance.update({'FOO': 'bar'}) - self.assertEqual(instance['foo'], 'bar') -- cgit v1.2.3 From 2c85b2241bd8a1e7ca8290cd385cded97c54f9bb Mon Sep 17 00:00:00 2001 From: "S. Co1" Date: Tue, 3 Mar 2020 22:59:07 -0500 Subject: Update code for pep8-naming compliance --- tests/base.py | 4 ++-- tests/bot/cogs/sync/test_base.py | 2 +- tests/bot/cogs/test_snekbox.py | 6 +++--- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/base.py b/tests/base.py index 42174e911..d99b9ac31 100644 --- a/tests/base.py +++ b/tests/base.py @@ -31,7 +31,7 @@ class LoggingTestsMixin: """ @contextmanager - def assertNotLogs(self, logger=None, level=None, msg=None): + def assertNotLogs(self, logger=None, level=None, msg=None): # noqa: N802 """ Asserts that no logs of `level` and higher were emitted by `logger`. 
@@ -81,7 +81,7 @@ class LoggingTestsMixin: class CommandTestCase(unittest.IsolatedAsyncioTestCase): """TestCase with additional assertions that are useful for testing Discord commands.""" - async def assertHasPermissionsCheck( + async def assertHasPermissionsCheck( # noqa: N802 self, cmd: commands.Command, permissions: Dict[str, bool], diff --git a/tests/bot/cogs/sync/test_base.py b/tests/bot/cogs/sync/test_base.py index fe0594efe..6ee9dfda6 100644 --- a/tests/bot/cogs/sync/test_base.py +++ b/tests/bot/cogs/sync/test_base.py @@ -84,7 +84,7 @@ class SyncerSendPromptTests(unittest.IsolatedAsyncioTestCase): method.assert_called_once_with(constants.Channels.dev_core) - async def test_send_prompt_returns_None_if_channel_fetch_fails(self): + async def test_send_prompt_returns_none_if_channel_fetch_fails(self): """None should be returned if there's an HTTPException when fetching the channel.""" self.bot.get_channel.return_value = None self.bot.fetch_channel.side_effect = discord.HTTPException(mock.MagicMock(), "test error!") diff --git a/tests/bot/cogs/test_snekbox.py b/tests/bot/cogs/test_snekbox.py index 9cd7f0154..fd9468829 100644 --- a/tests/bot/cogs/test_snekbox.py +++ b/tests/bot/cogs/test_snekbox.py @@ -89,15 +89,15 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase): self.assertEqual(actual, expected) @patch('bot.cogs.snekbox.Signals', side_effect=ValueError) - def test_get_results_message_invalid_signal(self, mock_Signals: Mock): + def test_get_results_message_invalid_signal(self, mock_signals: Mock): self.assertEqual( self.cog.get_results_message({'stdout': '', 'returncode': 127}), ('Your eval job has completed with return code 127', '') ) @patch('bot.cogs.snekbox.Signals') - def test_get_results_message_valid_signal(self, mock_Signals: Mock): - mock_Signals.return_value.name = 'SIGTEST' + def test_get_results_message_valid_signal(self, mock_signals: Mock): + mock_signals.return_value.name = 'SIGTEST' self.assertEqual( self.cog.get_results_message({'stdout': '', 'returncode': 127}), ('Your eval job has completed with return code 127 (SIGTEST)', '') -- cgit v1.2.3 From c12b8e8d84cf8d2ad373c334e5c517d717285e14 Mon Sep 17 00:00:00 2001 From: Sebastiaan Zeeff <33516116+SebastiaanZ@users.noreply.github.com> Date: Wed, 4 Mar 2020 08:37:35 +0100 Subject: Use raw strings for docstrings with backslashes A few docstrings in `bot.cogs.extensions` have backslashes in them to escape Markdown rendering when our help feature uses these docstrings in a Discord message. However, the use of backslashes that form an invalid escape sequence in docstrings now raises a DeprecationWarning in Python: /home/sebastiaan/pydis/repositories/bot/bot/cogs/extensions.py:72: DeprecationWarning: invalid escape sequence \* PEP 257 (Docstring Conventions, https://www.python.org/dev/peps/pep-0257/) states that raw strings should be used for docstrings that contain backslashes, so I've added the `r`-prefix to the docstrings that use backslashes. --- bot/cogs/extensions.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bot/cogs/extensions.py b/bot/cogs/extensions.py index b312e1a1d..fb6cd9aa3 100644 --- a/bot/cogs/extensions.py +++ b/bot/cogs/extensions.py @@ -69,7 +69,7 @@ class Extensions(commands.Cog): @extensions_group.command(name="load", aliases=("l",)) async def load_command(self, ctx: Context, *extensions: Extension) -> None: - """ + r""" Load extensions given their fully qualified or unqualified names.
If '\*' or '\*\*' is given as the name, all unloaded extensions will be loaded. @@ -86,7 +86,7 @@ class Extensions(commands.Cog): @extensions_group.command(name="unload", aliases=("ul",)) async def unload_command(self, ctx: Context, *extensions: Extension) -> None: - """ + r""" Unload currently loaded extensions given their fully qualified or unqualified names. If '\*' or '\*\*' is given as the name, all loaded extensions will be unloaded. @@ -109,7 +109,7 @@ class Extensions(commands.Cog): @extensions_group.command(name="reload", aliases=("r",)) async def reload_command(self, ctx: Context, *extensions: Extension) -> None: - """ + r""" Reload extensions given their fully qualified or unqualified names. If an extension fails to be reloaded, it will be rolled-back to the prior working state. -- cgit v1.2.3
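As a closing illustration of the warning that last patch silences: a standalone sketch, not code from the repository, whose docstring text simply mirrors the one in the diff. Importing or running a module containing the first definition (e.g. with `python -W default`) emits the DeprecationWarning quoted in the commit message; the raw-string version does not.

    def load_plain():
        """If '\*' is given as the name, all unloaded extensions will be loaded."""
        # DeprecationWarning: invalid escape sequence \*  (emitted at compile time)

    def load_raw():
        r"""If '\*' is given as the name, all unloaded extensions will be loaded."""
        # No warning: the r-prefix keeps the backslash literal, so Discord still
        # receives \* and the asterisk is not rendered as Markdown.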