-rw-r--r--  bot/cogs/token_remover.py            | 119
-rw-r--r--  bot/utils/__init__.py                |   5
-rw-r--r--  tests/bot/cogs/test_token_remover.py | 367
-rw-r--r--  tests/helpers.py                     |  20
4 files changed, 357 insertions, 154 deletions
| diff --git a/bot/cogs/token_remover.py b/bot/cogs/token_remover.py index 6721f0e02..d55e079e9 100644 --- a/bot/cogs/token_remover.py +++ b/bot/cogs/token_remover.py @@ -2,20 +2,22 @@ import base64  import binascii  import logging  import re -import struct  import typing as t -from datetime import datetime  from discord import Colour, Message  from discord.ext.commands import Cog -from discord.utils import snowflake_time +from bot import utils  from bot.bot import Bot  from bot.cogs.moderation import ModLog  from bot.constants import Channels, Colours, Event, Icons  log = logging.getLogger(__name__) +LOG_MESSAGE = ( +    "Censored a seemingly valid token sent by {author} (`{author_id}`) in {channel}, " +    "token was `{user_id}.{timestamp}.{hmac}`" +)  DELETION_MESSAGE_TEMPLATE = (      "Hey {mention}! I noticed you posted a seemingly valid Discord API "      "token in your message and have removed your message. " @@ -25,15 +27,22 @@ DELETION_MESSAGE_TEMPLATE = (      "Feel free to re-post it with the token removed. "      "If you believe this was a mistake, please let us know!"  ) -DISCORD_EPOCH_TIMESTAMP = datetime(2017, 1, 1) +DISCORD_EPOCH = 1_420_070_400  TOKEN_EPOCH = 1_293_840_000 -TOKEN_RE = re.compile( -    r"[^\s\.()\"']+"  # Matches token part 1: The user ID string, encoded as base64 -    r"\."             # Matches a literal dot between the token parts -    r"[^\s\.()\"']+"  # Matches token part 2: The creation timestamp, as an integer -    r"\."             # Matches a literal dot between the token parts -    r"[^\s\.()\"']+"  # Matches token part 3: The HMAC, unused by us, but check that it isn't empty -) + +# Three parts delimited by dots: user ID, creation timestamp, HMAC. +# The HMAC isn't parsed further, but it's in the regex to ensure it at least exists in the string. +# Each part only matches base64 URL-safe characters. +# Padding has never been observed, but the padding character '=' is matched just in case. 
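The rewritten pattern captures each dot-separated part in its own group, so a match can be unpacked straight into the new `Token` named tuple. A standalone sketch of the shape it matches (the token below is invented; `MTIz` is just base64 for `123`):

import base64

from bot.cogs.token_remover import TOKEN_RE, Token

# Invented token: "MTIz" is just base64 for "123"; the other two parts are filler.
match = TOKEN_RE.search("hello MTIz.DN9r_A.xxxxxxxxxxxxxxxxxxxxxxxxxxx world")
token = Token(*match.groups())

print(token.user_id)    # MTIz
print(token.timestamp)  # DN9r_A
# The user ID decodes to ASCII digits once padded out to a multiple of four characters.
print(base64.urlsafe_b64decode(token.user_id + "=" * (-len(token.user_id) % 4)))  # b'123'

Note that dotted code such as `message.channel.send` still matches the pattern itself, as the comment below points out; it is the user-ID and timestamp checks that reject it, since those parts are unlikely to decode to a numeric ID and a plausible timestamp.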
+TOKEN_RE = re.compile(r"([\w\-=]+)\.([\w\-=]+)\.([\w\-=]+)", re.ASCII) + + +class Token(t.NamedTuple): +    """A Discord Bot token.""" + +    user_id: str +    timestamp: str +    hmac: str  class TokenRemover(Cog): @@ -65,64 +74,58 @@ class TokenRemover(Cog):          See: https://discordapp.com/developers/docs/reference#snowflakes          """ -        found_token = self.find_token_in_message(after) -        if found_token: -            await self.take_action(after, found_token) +        await self.on_message(after) -    async def take_action(self, msg: Message, found_token: str) -> None: -        """Remove the `msg` containing a token an send a mod_log message.""" -        user_id, creation_timestamp, hmac = found_token.split('.') +    async def take_action(self, msg: Message, found_token: Token) -> None: +        """Remove the `msg` containing the `found_token` and send a mod log message."""          self.mod_log.ignore(Event.message_delete, msg.id)          await msg.delete()          await msg.channel.send(DELETION_MESSAGE_TEMPLATE.format(mention=msg.author.mention)) -        message = ( -            "Censored a seemingly valid token sent by " -            f"{msg.author} (`{msg.author.id}`) in {msg.channel.mention}, token was " -            f"`{user_id}.{creation_timestamp}.{'x' * len(hmac)}`" -        ) -        log.debug(message) +        log_message = self.format_log_message(msg, found_token) +        log.debug(log_message)          # Send pretty mod log embed to mod-alerts          await self.mod_log.send_log_message(              icon_url=Icons.token_removed,              colour=Colour(Colours.soft_red),              title="Token removed!", -            text=message, +            text=log_message,              thumbnail=msg.author.avatar_url_as(static_format="png"),              channel_id=Channels.mod_alerts,          )          self.bot.stats.incr("tokens.removed_tokens") +    @staticmethod +    def format_log_message(msg: Message, token: Token) -> str: +        """Return the log message to send for `token` being censored in `msg`.""" +        return LOG_MESSAGE.format( +            author=msg.author, +            author_id=msg.author.id, +            channel=msg.channel.mention, +            user_id=token.user_id, +            timestamp=token.timestamp, +            hmac='x' * len(token.hmac), +        ) +      @classmethod -    def find_token_in_message(cls, msg: Message) -> t.Optional[str]: +    def find_token_in_message(cls, msg: Message) -> t.Optional[Token]:          """Return a seemingly valid token found in `msg` or `None` if no token is found."""          if msg.author.bot:              return -        # Use findall rather than search to guard against method calls prematurely returning the +        # Use finditer rather than search to guard against method calls prematurely returning the          # token check (e.g. 
`message.channel.send` also matches our token pattern) -        maybe_matches = TOKEN_RE.findall(msg.content) -        for substr in maybe_matches: -            if cls.is_maybe_token(substr): +        for match in TOKEN_RE.finditer(msg.content): +            token = Token(*match.groups()) +            if cls.is_valid_user_id(token.user_id) and cls.is_valid_timestamp(token.timestamp):                  # Short-circuit on first match -                return substr +                return token          # No matching substring          return -    @classmethod -    def is_maybe_token(cls, test_str: str) -> bool: -        """Check the provided string to see if it is a seemingly valid token.""" -        try: -            user_id, creation_timestamp, hmac = test_str.split('.') -        except ValueError: -            return False - -        if cls.is_valid_user_id(user_id) and cls.is_valid_timestamp(creation_timestamp): -            return True -      @staticmethod      def is_valid_user_id(b64_content: str) -> bool:          """ @@ -130,29 +133,41 @@ class TokenRemover(Cog):          See: https://discordapp.com/developers/docs/reference#snowflakes          """ -        b64_content += '=' * (-len(b64_content) % 4) +        b64_content = utils.pad_base64(b64_content)          try: -            content: bytes = base64.b64decode(b64_content) -            return content.decode('utf-8').isnumeric() -        except (binascii.Error, UnicodeDecodeError): +            decoded_bytes = base64.urlsafe_b64decode(b64_content) +            string = decoded_bytes.decode('utf-8') + +            # isdigit on its own would match a lot of other Unicode characters, hence the isascii. +            return string.isascii() and string.isdigit() +        except (binascii.Error, ValueError):              return False      @staticmethod      def is_valid_timestamp(b64_content: str) -> bool:          """ -        Check potential token to see if it contains a valid timestamp. +        Return True if `b64_content` decodes to a valid timestamp. -        See: https://discordapp.com/developers/docs/reference#snowflakes +        If the timestamp is greater than the Discord epoch, it's probably valid. +        See: https://i.imgur.com/7WdehGn.png          """ -        b64_content += '=' * (-len(b64_content) % 4) +        b64_content = utils.pad_base64(b64_content)          try: -            content = base64.urlsafe_b64decode(b64_content) -            snowflake = struct.unpack('i', content)[0] -        except (binascii.Error, struct.error): +            decoded_bytes = base64.urlsafe_b64decode(b64_content) +            timestamp = int.from_bytes(decoded_bytes, byteorder="big") +        except (binascii.Error, ValueError) as e: +            log.debug(f"Failed to decode token timestamp '{b64_content}': {e}") +            return False + +        # Seems like newer tokens don't need the epoch added, but add anyway since an upper bound +        # is not checked. 
+        if timestamp + TOKEN_EPOCH >= DISCORD_EPOCH: +            return True +        else: +            log.debug(f"Invalid token timestamp '{b64_content}': smaller than Discord epoch")              return False -        return snowflake_time(snowflake + TOKEN_EPOCH) < DISCORD_EPOCH_TIMESTAMP  def setup(bot: Bot) -> None: diff --git a/bot/utils/__init__.py b/bot/utils/__init__.py index c5a12d5e3..5a6e1811b 100644 --- a/bot/utils/__init__.py +++ b/bot/utils/__init__.py @@ -11,3 +11,8 @@ class CogABCMeta(CogMeta, ABCMeta):      """Metaclass for ABCs meant to be implemented as Cogs."""      pass + + +def pad_base64(data: str) -> str: +    """Return base64 `data` with padding characters to ensure its length is a multiple of 4.""" +    return data + "=" * (-len(data) % 4) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index 33d1ec170..a10124d2d 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -1,56 +1,89 @@ -import asyncio -import logging  import unittest -from unittest.mock import AsyncMock, MagicMock +from re import Match +from unittest import mock +from unittest.mock import MagicMock  from discord import Colour -from bot.cogs.token_remover import ( -    DELETION_MESSAGE_TEMPLATE, -    TokenRemover, -    setup as setup_cog, -) -from bot.constants import Channels, Colours, Event, Icons -from tests.helpers import MockBot, MockMessage +from bot import constants +from bot.cogs import token_remover +from bot.cogs.moderation import ModLog +from bot.cogs.token_remover import Token, TokenRemover +from tests.helpers import MockBot, MockMessage, autospec -class TokenRemoverTests(unittest.TestCase): +class TokenRemoverTests(unittest.IsolatedAsyncioTestCase):      """Tests the `TokenRemover` cog."""      def setUp(self):          """Adds the cog, a bot, and a message to the instance for usage in tests."""          self.bot = MockBot() -        self.bot.get_cog.return_value = MagicMock() -        self.bot.get_cog.return_value.send_log_message = AsyncMock()          self.cog = TokenRemover(bot=self.bot) -        self.msg = MockMessage(id=555, content='') -        self.msg.author.__str__ = MagicMock() -        self.msg.author.__str__.return_value = 'lemon' -        self.msg.author.bot = False -        self.msg.author.avatar_url_as.return_value = 'picture-lemon.png' -        self.msg.author.id = 42 -        self.msg.author.mention = '@lemon' +        self.msg = MockMessage(id=555, content="hello world")          self.msg.channel.mention = "#lemonade-stand" +        self.msg.author.__str__ = MagicMock(return_value=self.msg.author.name) +        self.msg.author.avatar_url_as.return_value = "picture-lemon.png" -    def test_is_valid_user_id_is_true_for_numeric_content(self): -        """A string decoding to numeric characters is a valid user ID.""" -        # MTIz = base64(123) -        self.assertTrue(TokenRemover.is_valid_user_id('MTIz')) +    def test_is_valid_user_id_valid(self): +        """Should consider user IDs valid if they decode entirely to ASCII digits.""" +        ids = ( +            "NDcyMjY1OTQzMDYyNDEzMzMy", +            "NDc1MDczNjI5Mzk5NTQ3OTA0", +            "NDY3MjIzMjMwNjUwNzc3NjQx", +        ) + +        for user_id in ids: +            with self.subTest(user_id=user_id): +                result = TokenRemover.is_valid_user_id(user_id) +                self.assertTrue(result) -    def test_is_valid_user_id_is_false_for_alphabetic_content(self): -        """A string decoding to alphabetic 
characters is not a valid user ID.""" -        # YWJj = base64(abc) -        self.assertFalse(TokenRemover.is_valid_user_id('YWJj')) +    def test_is_valid_user_id_invalid(self): +        """Should consider non-digit and non-ASCII IDs invalid.""" +        ids = ( +            ("SGVsbG8gd29ybGQ", "non-digit ASCII"), +            ("0J_RgNC40LLQtdGCINC80LjRgA", "cyrillic text"), +            ("4pO14p6L4p6C4pG34p264pGl8J-EiOKSj-KCieKBsA", "Unicode digits"), +            ("4oaA4oaB4oWh4oWi4Lyz4Lyq4Lyr4LG9", "Unicode numerals"), +            ("8J2fjvCdn5nwnZ-k8J2fr_Cdn7rgravvvJngr6c", "Unicode decimals"), +            ("{hello}[world]&(bye!)", "ASCII invalid Base64"), +            ("Þíß-ï§-ňøẗ-våłìÐ", "Unicode invalid Base64"), +        ) -    def test_is_valid_timestamp_is_true_for_valid_timestamps(self): -        """A string decoding to a valid timestamp should be recognized as such.""" -        self.assertTrue(TokenRemover.is_valid_timestamp('DN9r_A')) +        for user_id, msg in ids: +            with self.subTest(msg=msg): +                result = TokenRemover.is_valid_user_id(user_id) +                self.assertFalse(result) -    def test_is_valid_timestamp_is_false_for_invalid_values(self): -        """A string not decoding to a valid timestamp should not be recognized as such.""" -        # MTIz = base64(123) -        self.assertFalse(TokenRemover.is_valid_timestamp('MTIz')) +    def test_is_valid_timestamp_valid(self): +        """Should consider timestamps valid if they're greater than the Discord epoch.""" +        timestamps = ( +            "XsyRkw", +            "Xrim9Q", +            "XsyR-w", +            "XsySD_", +            "Dn9r_A", +        ) + +        for timestamp in timestamps: +            with self.subTest(timestamp=timestamp): +                result = TokenRemover.is_valid_timestamp(timestamp) +                self.assertTrue(result) + +    def test_is_valid_timestamp_invalid(self): +        """Should consider timestamps invalid if they're before Discord epoch or can't be parsed.""" +        timestamps = ( +            ("B4Yffw", "DISCORD_EPOCH - TOKEN_EPOCH - 1"), +            ("ew", "123"), +            ("AoIKgA", "42076800"), +            ("{hello}[world]&(bye!)", "ASCII invalid Base64"), +            ("Þíß-ï§-ňøẗ-våłìÐ", "Unicode invalid Base64"), +        ) + +        for timestamp, msg in timestamps: +            with self.subTest(msg=msg): +                result = TokenRemover.is_valid_timestamp(timestamp) +                self.assertFalse(result)      def test_mod_log_property(self):          """The `mod_log` property should ask the bot to return the `ModLog` cog.""" @@ -58,74 +91,206 @@ class TokenRemoverTests(unittest.TestCase):          self.assertEqual(self.cog.mod_log, self.bot.get_cog.return_value)          self.bot.get_cog.assert_called_once_with('ModLog') -    def test_ignores_bot_messages(self): -        """When the message event handler is called with a bot message, nothing is done.""" +    async def test_on_message_edit_uses_on_message(self): +        """The edit listener should delegate handling of the message to the normal listener.""" +        self.cog.on_message = mock.create_autospec(self.cog.on_message, spec_set=True) + +        await self.cog.on_message_edit(MockMessage(), self.msg) +        self.cog.on_message.assert_awaited_once_with(self.msg) + +    @autospec(TokenRemover, "find_token_in_message", "take_action") +    async def test_on_message_takes_action(self, find_token_in_message, take_action): +        """Should take action if a 
valid token is found when a message is sent.""" +        cog = TokenRemover(self.bot) +        found_token = "foobar" +        find_token_in_message.return_value = found_token + +        await cog.on_message(self.msg) + +        find_token_in_message.assert_called_once_with(self.msg) +        take_action.assert_awaited_once_with(cog, self.msg, found_token) + +    @autospec(TokenRemover, "find_token_in_message", "take_action") +    async def test_on_message_skips_missing_token(self, find_token_in_message, take_action): +        """Shouldn't take action if a valid token isn't found when a message is sent.""" +        cog = TokenRemover(self.bot) +        find_token_in_message.return_value = False + +        await cog.on_message(self.msg) + +        find_token_in_message.assert_called_once_with(self.msg) +        take_action.assert_not_awaited() + +    @autospec("bot.cogs.token_remover", "TOKEN_RE") +    def test_find_token_ignores_bot_messages(self, token_re): +        """The token finder should ignore messages authored by bots."""          self.msg.author.bot = True -        coroutine = self.cog.on_message(self.msg) -        self.assertIsNone(asyncio.run(coroutine)) - -    def test_ignores_messages_without_tokens(self): -        """Messages without anything looking like a token are ignored.""" -        for content in ('', 'lemon wins'): -            with self.subTest(content=content): -                self.msg.content = content -                coroutine = self.cog.on_message(self.msg) -                self.assertIsNone(asyncio.run(coroutine)) - -    def test_ignores_messages_with_invalid_tokens(self): -        """Messages with values that are invalid tokens are ignored.""" -        for content in ('foo.bar.baz', 'x.y.'): -            with self.subTest(content=content): -                self.msg.content = content -                coroutine = self.cog.on_message(self.msg) -                self.assertIsNone(asyncio.run(coroutine)) - -    def test_censors_valid_tokens(self): -        """Valid tokens are censored.""" -        cases = ( -            # (content, censored_token) -            ('MTIz.DN9R_A.xyz', 'MTIz.DN9R_A.xxx'), + +        return_value = TokenRemover.find_token_in_message(self.msg) + +        self.assertIsNone(return_value) +        token_re.finditer.assert_not_called() + +    @autospec("bot.cogs.token_remover", "TOKEN_RE") +    def test_find_token_no_matches(self, token_re): +        """None should be returned if the regex matches no tokens in a message.""" +        token_re.finditer.return_value = () + +        return_value = TokenRemover.find_token_in_message(self.msg) + +        self.assertIsNone(return_value) +        token_re.finditer.assert_called_once_with(self.msg.content) + +    @autospec(TokenRemover, "is_valid_user_id", "is_valid_timestamp") +    @autospec("bot.cogs.token_remover", "Token") +    @autospec("bot.cogs.token_remover", "TOKEN_RE") +    def test_find_token_valid_match(self, token_re, token_cls, is_valid_id, is_valid_timestamp): +        """The first match with a valid user ID and timestamp should be returned as a `Token`.""" +        matches = [ +            mock.create_autospec(Match, spec_set=True, instance=True), +            mock.create_autospec(Match, spec_set=True, instance=True), +        ] +        tokens = [ +            mock.create_autospec(Token, spec_set=True, instance=True), +            mock.create_autospec(Token, spec_set=True, instance=True), +        ] + +        token_re.finditer.return_value = matches +        token_cls.side_effect = 
tokens +        is_valid_id.side_effect = (False, True)  # The 1st match will be invalid, 2nd one valid. +        is_valid_timestamp.return_value = True + +        return_value = TokenRemover.find_token_in_message(self.msg) + +        self.assertEqual(tokens[1], return_value) +        token_re.finditer.assert_called_once_with(self.msg.content) + +    @autospec(TokenRemover, "is_valid_user_id", "is_valid_timestamp") +    @autospec("bot.cogs.token_remover", "Token") +    @autospec("bot.cogs.token_remover", "TOKEN_RE") +    def test_find_token_invalid_matches(self, token_re, token_cls, is_valid_id, is_valid_timestamp): +        """None should be returned if no matches have valid user IDs or timestamps.""" +        token_re.finditer.return_value = [mock.create_autospec(Match, spec_set=True, instance=True)] +        token_cls.return_value = mock.create_autospec(Token, spec_set=True, instance=True) +        is_valid_id.return_value = False +        is_valid_timestamp.return_value = False + +        return_value = TokenRemover.find_token_in_message(self.msg) + +        self.assertIsNone(return_value) +        token_re.finditer.assert_called_once_with(self.msg.content) + +    def test_regex_invalid_tokens(self): +        """Messages without anything looking like a token are not matched.""" +        tokens = ( +            "", +            "lemon wins", +            "..", +            "x.y", +            "x.y.", +            ".y.z", +            ".y.", +            "..z", +            "x..z", +            " . . ", +            "\n.\n.\n", +            "hellö.world.bye", +            "base64.nötbåse64.morebase64", +            "19jd3J.dfkm3d.€víł§tüff", +        ) + +        for token in tokens: +            with self.subTest(token=token): +                results = token_remover.TOKEN_RE.findall(token) +                self.assertEqual(len(results), 0) + +    def test_regex_valid_tokens(self): +        """Messages that look like tokens should be matched.""" +        # Don't worry, these tokens have been invalidated. 
+        tokens = ( +            "NDcyMjY1OTQzMDYy_DEzMz-y.XsyRkw.VXmErH7j511turNpfURmb0rVNm8", +            "NDcyMjY1OTQzMDYyNDEzMzMy.Xrim9Q.Ysnu2wacjaKs7qnoo46S8Dm2us8", +            "NDc1MDczNjI5Mzk5NTQ3OTA0.XsyR-w.sJf6omBPORBPju3WJEIAcwW9Zds", +            "NDY3MjIzMjMwNjUwNzc3NjQx.XsySD_.s45jqDV_Iisn-symw0yDRrk_jf4",          ) -        for content, censored_token in cases: -            with self.subTest(content=content, censored_token=censored_token): -                self.msg.content = content -                coroutine = self.cog.on_message(self.msg) -                with self.assertLogs(logger='bot.cogs.token_remover', level=logging.DEBUG) as cm: -                    self.assertIsNone(asyncio.run(coroutine))  # no return value - -                [line] = cm.output -                log_message = ( -                    "Censored a seemingly valid token sent by " -                    "lemon (`42`) in #lemonade-stand, " -                    f"token was `{censored_token}`" -                ) -                self.assertIn(log_message, line) - -                self.msg.delete.assert_called_once_with() -                self.msg.channel.send.assert_called_once_with( -                    DELETION_MESSAGE_TEMPLATE.format(mention='@lemon') -                ) -                self.bot.get_cog.assert_called_with('ModLog') -                self.msg.author.avatar_url_as.assert_called_once_with(static_format='png') - -                mod_log = self.bot.get_cog.return_value -                mod_log.ignore.assert_called_once_with(Event.message_delete, self.msg.id) -                mod_log.send_log_message.assert_called_once_with( -                    icon_url=Icons.token_removed, -                    colour=Colour(Colours.soft_red), -                    title="Token removed!", -                    text=log_message, -                    thumbnail='picture-lemon.png', -                    channel_id=Channels.mod_alerts -                ) - - -class TokenRemoverSetupTests(unittest.TestCase): -    """Tests setup of the `TokenRemover` cog.""" - -    def test_setup(self): -        """Setup of the extension should call add_cog.""" +        for token in tokens: +            with self.subTest(token=token): +                results = token_remover.TOKEN_RE.fullmatch(token) +                self.assertIsNotNone(results, f"{token} was not matched by the regex") + +    def test_regex_matches_multiple_valid(self): +        """Should support multiple matches in the middle of a string.""" +        token_1 = "NDY3MjIzMjMwNjUwNzc3NjQx.XsyWGg.uFNEQPCc4ePwGh7egG8UicQssz8" +        token_2 = "NDcyMjY1OTQzMDYyNDEzMzMy.XsyWMw.l8XPnDqb0lp-EiQ2g_0xVFT1pyc" +        message = f"garbage {token_1} hello {token_2} world" + +        results = token_remover.TOKEN_RE.finditer(message) +        results = [match[0] for match in results] +        self.assertCountEqual((token_1, token_2), results) + +    @autospec("bot.cogs.token_remover", "LOG_MESSAGE") +    def test_format_log_message(self, log_message): +        """Should correctly format the log message with info from the message and token.""" +        token = Token("NDY3MjIzMjMwNjUwNzc3NjQx", "XsySD_", "s45jqDV_Iisn-symw0yDRrk_jf4") +        log_message.format.return_value = "Howdy" + +        return_value = TokenRemover.format_log_message(self.msg, token) + +        self.assertEqual(return_value, log_message.format.return_value) +        log_message.format.assert_called_once_with( +            author=self.msg.author, +            author_id=self.msg.author.id, +            
channel=self.msg.channel.mention, +            user_id=token.user_id, +            timestamp=token.timestamp, +            hmac="x" * len(token.hmac), +        ) + +    @mock.patch.object(TokenRemover, "mod_log", new_callable=mock.PropertyMock) +    @autospec("bot.cogs.token_remover", "log") +    @autospec(TokenRemover, "format_log_message") +    async def test_take_action(self, format_log_message, logger, mod_log_property): +        """Should delete the message and send a mod log.""" +        cog = TokenRemover(self.bot) +        mod_log = mock.create_autospec(ModLog, spec_set=True, instance=True) +        token = mock.create_autospec(Token, spec_set=True, instance=True) +        log_msg = "testing123" + +        mod_log_property.return_value = mod_log +        format_log_message.return_value = log_msg + +        await cog.take_action(self.msg, token) + +        self.msg.delete.assert_called_once_with() +        self.msg.channel.send.assert_called_once_with( +            token_remover.DELETION_MESSAGE_TEMPLATE.format(mention=self.msg.author.mention) +        ) + +        format_log_message.assert_called_once_with(self.msg, token) +        logger.debug.assert_called_with(log_msg) +        self.bot.stats.incr.assert_called_once_with("tokens.removed_tokens") + +        mod_log.ignore.assert_called_once_with(constants.Event.message_delete, self.msg.id) +        mod_log.send_log_message.assert_called_once_with( +            icon_url=constants.Icons.token_removed, +            colour=Colour(constants.Colours.soft_red), +            title="Token removed!", +            text=log_msg, +            thumbnail=self.msg.author.avatar_url_as.return_value, +            channel_id=constants.Channels.mod_alerts +        ) + + +class TokenRemoverExtensionTests(unittest.TestCase): +    """Tests for the token_remover extension.""" + +    @autospec("bot.cogs.token_remover", "TokenRemover") +    def test_extension_setup(self, cog): +        """The TokenRemover cog should be added."""          bot = MockBot() -        setup_cog(bot) +        token_remover.setup(bot) + +        cog.assert_called_once_with(bot)          bot.add_cog.assert_called_once() +        self.assertTrue(isinstance(bot.add_cog.call_args.args[0], TokenRemover)) diff --git a/tests/helpers.py b/tests/helpers.py index faa839370..facc4e1af 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -5,7 +5,7 @@ import itertools  import logging  import unittest.mock  from asyncio import AbstractEventLoop -from typing import Iterable, Optional +from typing import Callable, Iterable, Optional  import discord  from aiohttp import ClientSession @@ -26,6 +26,24 @@ for logger in logging.Logger.manager.loggerDict.values():      logger.setLevel(logging.CRITICAL) +def autospec(target, *attributes: str, **kwargs) -> Callable: +    """Patch multiple `attributes` of a `target` with autospecced mocks and `spec_set` as True.""" +    # Caller's kwargs should take priority and overwrite the defaults. +    kwargs = {'spec_set': True, 'autospec': True, **kwargs} + +    # Import the target if it's a string. +    # This is to support both object and string targets like patch.multiple. 
+    if type(target) is str:
+        target = unittest.mock._importer(target)
+
+    def decorator(func):
+        for attribute in attributes:
+            patcher = unittest.mock.patch.object(target, attribute, **kwargs)
+            func = patcher(func)
+        return func
+    return decorator
+
+
 class HashableMixin(discord.mixins.EqualityComparable):
     """
     Mixin that provides similar hashing and equality functionality as discord.py's `Hashable` mixin.
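The new `autospec` helper keeps the test decorators short: every named attribute is patched with `autospec=True` and `spec_set=True`, and the resulting mocks are passed to the test in the same order as the attribute names. A minimal usage sketch along the lines of the tests above (the test body itself is illustrative only):

import unittest

from bot.cogs.token_remover import TokenRemover
from tests.helpers import MockMessage, autospec


class AutospecUsageSketch(unittest.IsolatedAsyncioTestCase):
    """Illustrative only: shows how the autospec helper wires mocks into a test."""

    @autospec(TokenRemover, "find_token_in_message", "take_action")
    async def test_sketch(self, find_token_in_message, take_action):
        # Mocks arrive in the same order as the attribute names given to the decorator.
        # autospec keeps the real signatures; spec_set rejects attributes the spec lacks.
        find_token_in_message.return_value = None

        result = TokenRemover.find_token_in_message(MockMessage())

        self.assertIsNone(result)
        take_action.assert_not_awaited()

Because the caller's kwargs overwrite the defaults, an individual test can still opt out by passing, for example, spec_set=False to the decorator.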

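For reference, the two validity checks can be reproduced outside the cog: the user ID part must decode to ASCII digits, and the timestamp part must decode as a big-endian integer that reaches the Discord epoch once `TOKEN_EPOCH` is added. A rough walk-through using values from the new test data:

import base64

from bot.cogs.token_remover import DISCORD_EPOCH, TOKEN_EPOCH
from bot.utils import pad_base64

# User ID part: URL-safe base64 of an ASCII digit string (a Discord snowflake).
user_id = base64.urlsafe_b64decode(pad_base64("NDcyMjY1OTQzMDYyNDEzMzMy")).decode("utf-8")
print(user_id.isascii() and user_id.isdigit())   # True

# Timestamp part: a big-endian integer measured against the Discord epoch.
raw = base64.urlsafe_b64decode(pad_base64("XsyRkw"))
timestamp = int.from_bytes(raw, byteorder="big")
print(timestamp)                                 # 1590464915, i.e. late May 2020
print(timestamp + TOKEN_EPOCH >= DISCORD_EPOCH)  # True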