From b366d655af0e0f5a9ff3e053a693838d49884ea2 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sun, 10 May 2020 13:28:32 -0700 Subject: Token remover: catch ValueError when non-ASCII chars are present The token uses base64 and base64 only allows ASCII characters. Thus, if a match has non-ASCII characters, it's not a valid token. Catching the ValueError is simpler than trying to adjust the regex to only match valid base64. Fixes #928 Fixes BOT-3X --- bot/cogs/token_remover.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bot/cogs/token_remover.py b/bot/cogs/token_remover.py index 6721f0e02..860ae9f3a 100644 --- a/bot/cogs/token_remover.py +++ b/bot/cogs/token_remover.py @@ -135,7 +135,7 @@ class TokenRemover(Cog): try: content: bytes = base64.b64decode(b64_content) return content.decode('utf-8').isnumeric() - except (binascii.Error, UnicodeDecodeError): + except (binascii.Error, ValueError): return False @staticmethod @@ -150,7 +150,7 @@ class TokenRemover(Cog): try: content = base64.urlsafe_b64decode(b64_content) snowflake = struct.unpack('i', content)[0] - except (binascii.Error, struct.error): + except (binascii.Error, struct.error, ValueError): return False return snowflake_time(snowflake + TOKEN_EPOCH) < DISCORD_EPOCH_TIMESTAMP -- cgit v1.2.3 From f03ae8e49bb3d62776528e6339d6c713c93b7674 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sun, 10 May 2020 14:08:02 -0700 Subject: Token remover: reduce duplicated code in `on_message_edit` --- bot/cogs/token_remover.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/bot/cogs/token_remover.py b/bot/cogs/token_remover.py index 860ae9f3a..e90d5ab8b 100644 --- a/bot/cogs/token_remover.py +++ b/bot/cogs/token_remover.py @@ -65,9 +65,7 @@ class TokenRemover(Cog): See: https://discordapp.com/developers/docs/reference#snowflakes """ - found_token = self.find_token_in_message(after) - if found_token: - await self.take_action(after, found_token) + await self.on_message(after) async def take_action(self, msg: Message, found_token: str) -> None: """Remove the `msg` containing a token an send a mod_log message.""" -- cgit v1.2.3 From d193a93828582965eb361dc6f3185291fff649a7 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sun, 10 May 2020 14:11:39 -0700 Subject: Test on_message_edit of token remover uses on_message --- tests/bot/cogs/test_token_remover.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index 33d1ec170..e7b5a9bea 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -1,6 +1,7 @@ import asyncio import logging import unittest +from unittest import mock from unittest.mock import AsyncMock, MagicMock from discord import Colour @@ -14,7 +15,7 @@ from bot.constants import Channels, Colours, Event, Icons from tests.helpers import MockBot, MockMessage -class TokenRemoverTests(unittest.TestCase): +class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): """Tests the `TokenRemover` cog.""" def setUp(self): @@ -58,6 +59,13 @@ class TokenRemoverTests(unittest.TestCase): self.assertEqual(self.cog.mod_log, self.bot.get_cog.return_value) self.bot.get_cog.assert_called_once_with('ModLog') + async def test_on_message_edit_uses_on_message(self): + """The edit listener should delegate handling of the message to the normal listener.""" + self.cog.on_message = mock.create_autospec(self.cog.on_message, spec_set=True) + + await self.cog.on_message_edit(MockMessage(), self.msg) + 
self.cog.on_message.assert_awaited_once_with(self.msg) + def test_ignores_bot_messages(self): """When the message event handler is called with a bot message, nothing is done.""" self.msg.author.bot = True @@ -77,7 +85,7 @@ class TokenRemoverTests(unittest.TestCase): for content in ('foo.bar.baz', 'x.y.'): with self.subTest(content=content): self.msg.content = content - coroutine = self.cog.on_message(self.msg) + coroutine = self.cog.is_maybe_token(self.msg) self.assertIsNone(asyncio.run(coroutine)) def test_censors_valid_tokens(self): -- cgit v1.2.3 From 0bfd003dbfc5919220129f984dc043421e535f8c Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sun, 10 May 2020 14:38:12 -0700 Subject: Add a test helper function to patch multiple attributes with autospecs This helper reduces redundancy/boilerplate by setting default values. It also has the consequence of shortening the length of the invocation, which makes it faster to use and easier to read. --- tests/helpers.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/helpers.py b/tests/helpers.py index 2b79a6c2a..d444cc49d 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -23,6 +23,15 @@ for logger in logging.Logger.manager.loggerDict.values(): logger.setLevel(logging.CRITICAL) +def autospec(target, *attributes: str, **kwargs) -> unittest.mock._patch: + """Patch multiple `attributes` of a `target` with autospecced mocks and `spec_set` as True.""" + # Caller's kwargs should take priority and overwrite the defaults. + kwargs = {'spec_set': True, 'autospec': True, **kwargs} + attributes = {attribute: unittest.mock.DEFAULT for attribute in attributes} + + return unittest.mock.patch.multiple(target, **attributes, **kwargs) + + class HashableMixin(discord.mixins.EqualityComparable): """ Mixin that provides similar hashing and equality functionality as discord.py's `Hashable` mixin. 
-- cgit v1.2.3 From e8bd69a6c556d78eca1a1eb2adfa26248273a1cd Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sun, 10 May 2020 14:42:07 -0700 Subject: Test token remover takes action if a token is found --- tests/bot/cogs/test_token_remover.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index e7b5a9bea..e0ec67684 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -12,7 +12,7 @@ from bot.cogs.token_remover import ( setup as setup_cog, ) from bot.constants import Channels, Colours, Event, Icons -from tests.helpers import MockBot, MockMessage +from tests.helpers import MockBot, MockMessage, autospec class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): @@ -66,6 +66,18 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): await self.cog.on_message_edit(MockMessage(), self.msg) self.cog.on_message.assert_awaited_once_with(self.msg) + @autospec(TokenRemover, "find_token_in_message", "take_action") + async def test_on_message_takes_action(self, find_token_in_message, take_action): + """Should take action if a valid token is found when a message is sent.""" + cog = TokenRemover(self.bot) + found_token = "foobar" + find_token_in_message.return_value = found_token + + await cog.on_message(self.msg) + + find_token_in_message.assert_called_once_with(self.msg) + take_action.assert_awaited_once_with(cog, self.msg, found_token) + def test_ignores_bot_messages(self): """When the message event handler is called with a bot message, nothing is done.""" self.msg.author.bot = True -- cgit v1.2.3 From 4cf7996a1d4630ccb05f57569ca62b1798dc7a93 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sun, 10 May 2020 14:44:54 -0700 Subject: Test token remover skips messages without tokens --- tests/bot/cogs/test_token_remover.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index e0ec67684..2b377e221 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -78,6 +78,17 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): find_token_in_message.assert_called_once_with(self.msg) take_action.assert_awaited_once_with(cog, self.msg, found_token) + @autospec(TokenRemover, "find_token_in_message", "take_action") + async def test_on_message_skips_missing_token(self, find_token_in_message, take_action): + """Shouldn't take action if a valid token isn't found when a message is sent.""" + cog = TokenRemover(self.bot) + find_token_in_message.return_value = False + + await cog.on_message(self.msg) + + find_token_in_message.assert_called_once_with(self.msg) + take_action.assert_not_awaited() + def test_ignores_bot_messages(self): """When the message event handler is called with a bot message, nothing is done.""" self.msg.author.bot = True -- cgit v1.2.3 From 593e09299c6e4115d41bfd5b074785a5e304a8d0 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sun, 10 May 2020 15:41:14 -0700 Subject: Allow using arbitrary parameter names with the autospec decorator This gives the caller more flexibility. Sometimes attribute names are too long or they don't follow a naming scheme accepted by the linter. 
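(A minimal sketch of the motivation, using a toy class rather than repository code.) With plain `unittest.mock.patch.multiple`, the created mocks are passed to the decorated test as keyword arguments named after the patched attributes, so the test's parameters must repeat those names verbatim. The helper is meant to lift that restriction so shorter, caller-chosen names work, as the later tests in this series do (e.g. `valid_user` and `valid_time` standing in for `is_valid_user_id` and `is_valid_timestamp`).

    import unittest
    from unittest import mock


    class Target:
        def is_valid_user_id(self, value: str) -> bool:
            return value.isdigit()


    class Example(unittest.TestCase):
        # Plain patch.multiple: the parameter must be named exactly like the attribute.
        @mock.patch.multiple(Target, is_valid_user_id=mock.DEFAULT)
        def test_plain(self, is_valid_user_id):
            self.assertTrue(callable(is_valid_user_id))

        # The form this change is aiming for (hypothetical usage of the helper):
        # @autospec(Target, "is_valid_user_id")
        # def test_with_helper(self, valid_user): ...
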
--- tests/helpers.py | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/tests/helpers.py b/tests/helpers.py index d444cc49d..1ab8b455f 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -24,12 +24,25 @@ for logger in logging.Logger.manager.loggerDict.values(): def autospec(target, *attributes: str, **kwargs) -> unittest.mock._patch: - """Patch multiple `attributes` of a `target` with autospecced mocks and `spec_set` as True.""" + """ + Patch multiple `attributes` of a `target` with autospecced mocks and `spec_set` as True. + + To allow for arbitrary parameter names to be used by the decorated function, the patchers have + no attribute names associated with them. As a consequence, it will not be possible to retrieve + mocks by their attribute names when using this as a context manager, + """ # Caller's kwargs should take priority and overwrite the defaults. kwargs = {'spec_set': True, 'autospec': True, **kwargs} attributes = {attribute: unittest.mock.DEFAULT for attribute in attributes} - return unittest.mock.patch.multiple(target, **attributes, **kwargs) + patcher = unittest.mock.patch.multiple(target, **attributes, **kwargs) + + # Unset attribute names to allow arbitrary parameter names for the decorator function. + patcher.attribute_name = None + for additional_patcher in patcher.additional_patchers: + additional_patcher.attribute_name = None + + return patcher class HashableMixin(discord.mixins.EqualityComparable): -- cgit v1.2.3 From b0dd290710799c342240d066abaebbe9e6940b54 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sun, 10 May 2020 15:09:22 -0700 Subject: Fix test for token remover ignoring bot messages It's not possible to test this via asserting the return value of `on_message` since it never returns anything. Instead, the actual relevant unit, `find_token_in_message,` should be tested. 
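(Toy illustration, not repository code.) The old tests asserted `assertIsNone(asyncio.run(coroutine))`, but a listener that always falls through to `None` makes that assertion pass no matter what its body did, which is why the behaviour is now asserted on `find_token_in_message` instead.

    import asyncio


    async def on_message(msg) -> None:
        # Any amount of work — or none at all — still ends in an implicit None.
        ...


    # Passes whether or not the listener actually handled the message.
    assert asyncio.run(on_message("anything")) is None
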
--- tests/bot/cogs/test_token_remover.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index 2b377e221..e8b641101 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -89,11 +89,16 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): find_token_in_message.assert_called_once_with(self.msg) take_action.assert_not_awaited() - def test_ignores_bot_messages(self): - """When the message event handler is called with a bot message, nothing is done.""" + @autospec("bot.cogs.token_remover", "TOKEN_RE") + def test_find_token_ignores_bot_messages(self, token_re): + """The token finder should ignore messages authored by bots.""" + cog = TokenRemover(self.bot) self.msg.author.bot = True - coroutine = self.cog.on_message(self.msg) - self.assertIsNone(asyncio.run(coroutine)) + + return_value = cog.find_token_in_message(self.msg) + + self.assertIsNone(return_value) + token_re.findall.assert_not_called() def test_ignores_messages_without_tokens(self): """Messages without anything looking like a token are ignored.""" -- cgit v1.2.3 From 52f0f8a29d7f239c961beaa81881bf4b09da4749 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sun, 10 May 2020 15:53:06 -0700 Subject: Test `find_token_in_message` returns None if no matches found --- tests/bot/cogs/test_token_remover.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index e8b641101..5932cf4f0 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -100,6 +100,20 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): self.assertIsNone(return_value) token_re.findall.assert_not_called() + @autospec(TokenRemover, "is_maybe_token") + @autospec("bot.cogs.token_remover", "TOKEN_RE") + def test_find_token_no_matches_returns_none(self, token_re, is_maybe_token): + """None should be returned if the regex matches no tokens in a message.""" + cog = TokenRemover(self.bot) + token_re.findall.return_value = () + self.msg.content = "foobar" + + return_value = cog.find_token_in_message(self.msg) + + self.assertIsNone(return_value) + token_re.findall.assert_called_once_with(self.msg.content) + is_maybe_token.assert_not_called() + def test_ignores_messages_without_tokens(self): """Messages without anything looking like a token are ignored.""" for content in ('', 'lemon wins'): -- cgit v1.2.3 From cf658bd58559b2683527443f2908257f197ef0bb Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sun, 10 May 2020 16:06:47 -0700 Subject: Test `find_token_in_message` returns the found token --- tests/bot/cogs/test_token_remover.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index 5932cf4f0..2b946778b 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -114,6 +114,30 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): token_re.findall.assert_called_once_with(self.msg.content) is_maybe_token.assert_not_called() + @autospec(TokenRemover, "is_maybe_token") + @autospec("bot.cogs.token_remover", "TOKEN_RE") + def test_find_token_returns_found_token(self, token_re, is_maybe_token): + """The found token should be returned.""" + true_index = 1 + matches = ("foo", "bar", "baz") + side_effects = [False] * len(matches) + side_effects[true_index] = True + + 
cog = TokenRemover(self.bot) + self.msg.content = "foobar" + token_re.findall.return_value = matches + is_maybe_token.side_effect = side_effects + + return_value = cog.find_token_in_message(self.msg) + + self.assertEqual(return_value, matches[true_index]) + token_re.findall.assert_called_once_with(self.msg.content) + + # assert_has_calls isn't used cause it'd allow for extra calls before or after. + # The function should short-circuit, so nothing past true_index should have been used. + calls = [mock.call(match) for match in matches[:true_index + 1]] + self.assertEqual(is_maybe_token.mock_calls, calls) + def test_ignores_messages_without_tokens(self): """Messages without anything looking like a token are ignored.""" for content in ('', 'lemon wins'): -- cgit v1.2.3 From f92bc80d6bddb5c57c190187adaa528ae44536f6 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sun, 10 May 2020 16:25:14 -0700 Subject: Test token regex doesn't match invalid tokens --- tests/bot/cogs/test_token_remover.py | 32 +++++++++++++++++++++++++------- 1 file changed, 25 insertions(+), 7 deletions(-) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index 2b946778b..b67602eb9 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -8,6 +8,7 @@ from discord import Colour from bot.cogs.token_remover import ( DELETION_MESSAGE_TEMPLATE, + TOKEN_RE, TokenRemover, setup as setup_cog, ) @@ -138,13 +139,30 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): calls = [mock.call(match) for match in matches[:true_index + 1]] self.assertEqual(is_maybe_token.mock_calls, calls) - def test_ignores_messages_without_tokens(self): - """Messages without anything looking like a token are ignored.""" - for content in ('', 'lemon wins'): - with self.subTest(content=content): - self.msg.content = content - coroutine = self.cog.on_message(self.msg) - self.assertIsNone(asyncio.run(coroutine)) + def test_regex_invalid_tokens(self): + """Messages without anything looking like a token are not matched.""" + tokens = ( + "", + "lemon wins", + "..", + "x.y", + "x.y.", + ".y.z", + ".y.", + "..z", + "x..z", + " . . ", + "\n.\n.\n", + "'.'.'", + '"."."', + "(.(.(", + ").).)" + ) + + for token in tokens: + with self.subTest(token=token): + results = TOKEN_RE.findall(token) + self.assertEquals(len(results), 0) def test_ignores_messages_with_invalid_tokens(self): """Messages with values that are invalid tokens are ignored.""" -- cgit v1.2.3 From 34b836a8eba0f006c77a7b3f48f7ab14c37d31ee Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sun, 10 May 2020 17:47:09 -0700 Subject: Fix autospec decorator when used with multiple attributes The original approach of messing with the `attribute_name` didn't work for reasons I won't discuss here (would require knowledge of patcher internals). The new approach doesn't use patch.multiple but mimics it by applying multiple patch decorators to the function. As a consequence, this can no longer be used as a context manager. --- tests/helpers.py | 28 ++++++++++++---------------- 1 file changed, 12 insertions(+), 16 deletions(-) diff --git a/tests/helpers.py b/tests/helpers.py index 1ab8b455f..dfbe539ec 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -24,25 +24,21 @@ for logger in logging.Logger.manager.loggerDict.values(): def autospec(target, *attributes: str, **kwargs) -> unittest.mock._patch: - """ - Patch multiple `attributes` of a `target` with autospecced mocks and `spec_set` as True. 
- - To allow for arbitrary parameter names to be used by the decorated function, the patchers have - no attribute names associated with them. As a consequence, it will not be possible to retrieve - mocks by their attribute names when using this as a context manager, - """ + """Patch multiple `attributes` of a `target` with autospecced mocks and `spec_set` as True.""" # Caller's kwargs should take priority and overwrite the defaults. kwargs = {'spec_set': True, 'autospec': True, **kwargs} - attributes = {attribute: unittest.mock.DEFAULT for attribute in attributes} - - patcher = unittest.mock.patch.multiple(target, **attributes, **kwargs) - - # Unset attribute names to allow arbitrary parameter names for the decorator function. - patcher.attribute_name = None - for additional_patcher in patcher.additional_patchers: - additional_patcher.attribute_name = None - return patcher + # Import the target if it's a string. + # This is to support both object and string targets like patch.multiple. + if type(target) is str: + target = unittest.mock._importer(target) + + def decorator(func): + for attribute in attributes: + patcher = unittest.mock.patch.object(target, attribute, **kwargs) + func = patcher(func) + return func + return decorator class HashableMixin(discord.mixins.EqualityComparable): -- cgit v1.2.3 From 834bd543d1d301bb853e713560a7447dc75f1ab8 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sun, 10 May 2020 17:53:40 -0700 Subject: Test `is_maybe_token` returns False for missing parts In practice, this won't ever happen since the regex wouldn't match strings with missing parts. However, the function does check it so may as well test it. It's not necessarily bound to always use inputs from the regex either I suppose. --- tests/bot/cogs/test_token_remover.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index b67602eb9..9e1d96a37 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -164,6 +164,16 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): results = TOKEN_RE.findall(token) self.assertEquals(len(results), 0) + @autospec(TokenRemover, "is_valid_user_id", "is_valid_timestamp") + def test_is_maybe_token_missing_part_returns_false(self, valid_user, valid_time): + """False should be returned for tokens which do not have all 3 parts.""" + cog = TokenRemover(self.bot) + return_value = cog.is_maybe_token("x.y") + + self.assertFalse(return_value) + valid_user.assert_not_called() + valid_time.assert_not_called() + def test_ignores_messages_with_invalid_tokens(self): """Messages with values that are invalid tokens are ignored.""" for content in ('foo.bar.baz', 'x.y.'): -- cgit v1.2.3 From 4248f88a7407b6e9a5d80800a96f8707003634d3 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sun, 10 May 2020 18:07:17 -0700 Subject: Token remover: fix `is_maybe_token` returning None instead of False It's annotated as returning a bool and when the split fails it already returns False. To be consistent, it should always return a bool. 
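(Toy illustration, not repository code.) A function annotated `-> bool` that falls off the end returns `None`: still falsy, but not the `False` the annotation promises, and identity-based assertions such as `assertIs(actual, expected)` will see the difference.

    def check(value: str) -> bool:
        if value == "ok":
            return True
        # No explicit return here, so the call below yields None.


    result = check("nope")
    print(result)           # None
    print(result is False)  # False — falsy, yet not actually False
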
--- bot/cogs/token_remover.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/bot/cogs/token_remover.py b/bot/cogs/token_remover.py index e90d5ab8b..543f4c5a7 100644 --- a/bot/cogs/token_remover.py +++ b/bot/cogs/token_remover.py @@ -121,6 +121,8 @@ class TokenRemover(Cog): if cls.is_valid_user_id(user_id) and cls.is_valid_timestamp(creation_timestamp): return True + return False + @staticmethod def is_valid_user_id(b64_content: str) -> bool: """ -- cgit v1.2.3 From ab5d194b90a7e068c8ab7171939f471e252ee073 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sun, 10 May 2020 18:11:31 -0700 Subject: Test is_maybe_token --- tests/bot/cogs/test_token_remover.py | 31 ++++++++++++++++++++++++------- 1 file changed, 24 insertions(+), 7 deletions(-) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index 9e1d96a37..85bbbdf6b 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -174,13 +174,30 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): valid_user.assert_not_called() valid_time.assert_not_called() - def test_ignores_messages_with_invalid_tokens(self): - """Messages with values that are invalid tokens are ignored.""" - for content in ('foo.bar.baz', 'x.y.'): - with self.subTest(content=content): - self.msg.content = content - coroutine = self.cog.is_maybe_token(self.msg) - self.assertIsNone(asyncio.run(coroutine)) + @autospec(TokenRemover, "is_valid_user_id", "is_valid_timestamp") + def test_is_maybe_token(self, valid_user, valid_time): + """Should return True if the user ID and timestamp are valid or return False otherwise.""" + cog = TokenRemover(self.bot) + subtests = ( + (False, True, False), + (True, False, False), + (True, True, True), + ) + + for user_return, time_return, expected in subtests: + valid_user.reset_mock() + valid_time.reset_mock() + + with self.subTest(user_return=user_return, time_return=time_return, expected=expected): + valid_user.return_value = user_return + valid_time.return_value = time_return + + actual = cog.is_maybe_token("x.y.z") + self.assertIs(actual, expected) + + valid_user.assert_called_once_with("x") + if user_return: + valid_time.assert_called_once_with("y") def test_censors_valid_tokens(self): """Valid tokens are censored.""" -- cgit v1.2.3 From 4b6fde69a7e193382701dccf80a5471ea7ccea72 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sun, 10 May 2020 18:22:31 -0700 Subject: Test token regex matches valid tokens --- tests/bot/cogs/test_token_remover.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index 85bbbdf6b..7310b4637 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -164,6 +164,27 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): results = TOKEN_RE.findall(token) self.assertEquals(len(results), 0) + def test_regex_valid_tokens(self): + """Messages that look like tokens should be matched.""" + # Don't worry, the token's been invalidated. 
+ tokens = ( + "x1.y2.z_3", + "NDcyMjY1OTQzMDYyNDEzMzMy.Xrim9Q.Ysnu2wacjaKs7qnoo46S8Dm2us8" + ) + + for token in tokens: + with self.subTest(token=token): + results = TOKEN_RE.findall(token) + self.assertIn(token, results) + + def test_regex_matches_multiple_valid(self): + """Should support multiple matches in the middle of a string.""" + tokens = ["x.y.z", "a.b.c"] + message = f"garbage {tokens[0]} hello {tokens[1]} world" + + results = TOKEN_RE.findall(message) + self.assertEquals(tokens, results) + @autospec(TokenRemover, "is_valid_user_id", "is_valid_timestamp") def test_is_maybe_token_missing_part_returns_false(self, valid_user, valid_time): """False should be returned for tokens which do not have all 3 parts.""" -- cgit v1.2.3 From d8d8e144adfe4c2de15dbbf4346e2eec548a9f67 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sun, 10 May 2020 18:28:06 -0700 Subject: Correct the return type annotation for the autospec decorator --- tests/helpers.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/helpers.py b/tests/helpers.py index dfbe539ec..3cd8a63c0 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -4,7 +4,7 @@ import collections import itertools import logging import unittest.mock -from typing import Iterable, Optional +from typing import Callable, Iterable, Optional import discord from discord.ext.commands import Context @@ -23,7 +23,7 @@ for logger in logging.Logger.manager.loggerDict.values(): logger.setLevel(logging.CRITICAL) -def autospec(target, *attributes: str, **kwargs) -> unittest.mock._patch: +def autospec(target, *attributes: str, **kwargs) -> Callable: """Patch multiple `attributes` of a `target` with autospecced mocks and `spec_set` as True.""" # Caller's kwargs should take priority and overwrite the defaults. 
kwargs = {'spec_set': True, 'autospec': True, **kwargs} -- cgit v1.2.3 From ab860e23a7e6206e68cb350257b63083cfbe1a15 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sun, 10 May 2020 18:53:42 -0700 Subject: Token remover: split some of `take_action` into separate functions --- bot/cogs/token_remover.py | 32 +++++++++++++++++++++----------- 1 file changed, 21 insertions(+), 11 deletions(-) diff --git a/bot/cogs/token_remover.py b/bot/cogs/token_remover.py index 543f4c5a7..d6919839e 100644 --- a/bot/cogs/token_remover.py +++ b/bot/cogs/token_remover.py @@ -68,31 +68,41 @@ class TokenRemover(Cog): await self.on_message(after) async def take_action(self, msg: Message, found_token: str) -> None: - """Remove the `msg` containing a token an send a mod_log message.""" - user_id, creation_timestamp, hmac = found_token.split('.') + """Remove the `msg` containing the `found_token` and send a mod log message.""" self.mod_log.ignore(Event.message_delete, msg.id) - await msg.delete() - await msg.channel.send(DELETION_MESSAGE_TEMPLATE.format(mention=msg.author.mention)) + await self.delete_message(msg) - message = ( - "Censored a seemingly valid token sent by " - f"{msg.author} (`{msg.author.id}`) in {msg.channel.mention}, token was " - f"`{user_id}.{creation_timestamp}.{'x' * len(hmac)}`" - ) - log.debug(message) + log_message = self.format_log_message(msg, found_token) + log.debug(log_message) # Send pretty mod log embed to mod-alerts await self.mod_log.send_log_message( icon_url=Icons.token_removed, colour=Colour(Colours.soft_red), title="Token removed!", - text=message, + text=log_message, thumbnail=msg.author.avatar_url_as(static_format="png"), channel_id=Channels.mod_alerts, ) self.bot.stats.incr("tokens.removed_tokens") + @staticmethod + async def delete_message(msg: Message) -> None: + """Remove a `msg` containing a token and send an explanatory message in the same channel.""" + await msg.delete() + await msg.channel.send(DELETION_MESSAGE_TEMPLATE.format(mention=msg.author.mention)) + + @staticmethod + def format_log_message(msg: Message, found_token: str) -> str: + """Return the log message to send for `found_token` being censored in `msg`.""" + user_id, creation_timestamp, hmac = found_token.split('.') + return ( + "Censored a seemingly valid token sent by " + f"{msg.author} (`{msg.author.id}`) in {msg.channel.mention}, token was " + f"`{user_id}.{creation_timestamp}.{'x' * len(hmac)}`" + ) + @classmethod def find_token_in_message(cls, msg: Message) -> t.Optional[str]: """Return a seemingly valid token found in `msg` or `None` if no token is found.""" -- cgit v1.2.3 From 09a6c2e211c0f209b258a02d9677240282c4fab3 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sun, 10 May 2020 18:55:24 -0700 Subject: Token remover: use a string template for the log message --- bot/cogs/token_remover.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/bot/cogs/token_remover.py b/bot/cogs/token_remover.py index d6919839e..c576a67d0 100644 --- a/bot/cogs/token_remover.py +++ b/bot/cogs/token_remover.py @@ -16,6 +16,10 @@ from bot.constants import Channels, Colours, Event, Icons log = logging.getLogger(__name__) +LOG_MESSAGE = ( + "Censored a seemingly valid token sent by {author} (`{author_id}`) in {channel}," + "token was `{user_id}.{timestamp}.{hmac}`" +) DELETION_MESSAGE_TEMPLATE = ( "Hey {mention}! I noticed you posted a seemingly valid Discord API " "token in your message and have removed your message. 
" @@ -97,10 +101,13 @@ class TokenRemover(Cog): def format_log_message(msg: Message, found_token: str) -> str: """Return the log message to send for `found_token` being censored in `msg`.""" user_id, creation_timestamp, hmac = found_token.split('.') - return ( - "Censored a seemingly valid token sent by " - f"{msg.author} (`{msg.author.id}`) in {msg.channel.mention}, token was " - f"`{user_id}.{creation_timestamp}.{'x' * len(hmac)}`" + return LOG_MESSAGE.format( + author=msg.author, + author_id=msg.author.id, + channel=msg.channel.mention, + user_id=user_id, + timestamp=creation_timestamp, + hmac='x' * len(hmac), ) @classmethod -- cgit v1.2.3 From 5b9bf9aba686f570322cb9996dd35d3ab669a162 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Mon, 11 May 2020 10:26:16 -0700 Subject: Avoid instantiating the cog when testing static/class methods --- tests/bot/cogs/test_token_remover.py | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index 7310b4637..6a8247070 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -93,10 +93,9 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): @autospec("bot.cogs.token_remover", "TOKEN_RE") def test_find_token_ignores_bot_messages(self, token_re): """The token finder should ignore messages authored by bots.""" - cog = TokenRemover(self.bot) self.msg.author.bot = True - return_value = cog.find_token_in_message(self.msg) + return_value = TokenRemover.find_token_in_message(self.msg) self.assertIsNone(return_value) token_re.findall.assert_not_called() @@ -105,11 +104,10 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): @autospec("bot.cogs.token_remover", "TOKEN_RE") def test_find_token_no_matches_returns_none(self, token_re, is_maybe_token): """None should be returned if the regex matches no tokens in a message.""" - cog = TokenRemover(self.bot) token_re.findall.return_value = () self.msg.content = "foobar" - return_value = cog.find_token_in_message(self.msg) + return_value = TokenRemover.find_token_in_message(self.msg) self.assertIsNone(return_value) token_re.findall.assert_called_once_with(self.msg.content) @@ -124,12 +122,11 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): side_effects = [False] * len(matches) side_effects[true_index] = True - cog = TokenRemover(self.bot) self.msg.content = "foobar" token_re.findall.return_value = matches is_maybe_token.side_effect = side_effects - return_value = cog.find_token_in_message(self.msg) + return_value = TokenRemover.find_token_in_message(self.msg) self.assertEqual(return_value, matches[true_index]) token_re.findall.assert_called_once_with(self.msg.content) @@ -188,8 +185,7 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): @autospec(TokenRemover, "is_valid_user_id", "is_valid_timestamp") def test_is_maybe_token_missing_part_returns_false(self, valid_user, valid_time): """False should be returned for tokens which do not have all 3 parts.""" - cog = TokenRemover(self.bot) - return_value = cog.is_maybe_token("x.y") + return_value = TokenRemover.is_maybe_token("x.y") self.assertFalse(return_value) valid_user.assert_not_called() @@ -198,7 +194,6 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): @autospec(TokenRemover, "is_valid_user_id", "is_valid_timestamp") def test_is_maybe_token(self, valid_user, valid_time): """Should return True if the user ID and timestamp are valid or return False otherwise.""" - cog = 
TokenRemover(self.bot) subtests = ( (False, True, False), (True, False, False), @@ -213,7 +208,7 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): valid_user.return_value = user_return valid_time.return_value = time_return - actual = cog.is_maybe_token("x.y.z") + actual = TokenRemover.is_maybe_token("x.y.z") self.assertIs(actual, expected) valid_user.assert_called_once_with("x") -- cgit v1.2.3 From 2127239840085ba523d411899e0b7a188530df07 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Mon, 11 May 2020 10:33:05 -0700 Subject: Simplify token remover's message mock * Rely on default values for the author * Set the content to a non-empty string --- tests/bot/cogs/test_token_remover.py | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index 6a8247070..5ca863926 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -26,14 +26,10 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): self.bot.get_cog.return_value.send_log_message = AsyncMock() self.cog = TokenRemover(bot=self.bot) - self.msg = MockMessage(id=555, content='') - self.msg.author.__str__ = MagicMock() - self.msg.author.__str__.return_value = 'lemon' - self.msg.author.bot = False - self.msg.author.avatar_url_as.return_value = 'picture-lemon.png' - self.msg.author.id = 42 - self.msg.author.mention = '@lemon' + self.msg = MockMessage(id=555, content="hello world") self.msg.channel.mention = "#lemonade-stand" + self.msg.author.__str__ = MagicMock(return_value=self.msg.author.name) + self.msg.author.avatar_url_as.return_value = "picture-lemon.png" def test_is_valid_user_id_is_true_for_numeric_content(self): """A string decoding to numeric characters is a valid user ID.""" @@ -105,7 +101,6 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): def test_find_token_no_matches_returns_none(self, token_re, is_maybe_token): """None should be returned if the regex matches no tokens in a message.""" token_re.findall.return_value = () - self.msg.content = "foobar" return_value = TokenRemover.find_token_in_message(self.msg) @@ -122,7 +117,6 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): side_effects = [False] * len(matches) side_effects[true_index] = True - self.msg.content = "foobar" token_re.findall.return_value = matches is_maybe_token.side_effect = side_effects -- cgit v1.2.3 From e4790b330da1605573b5d23615bfe62b481e1e04 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Mon, 11 May 2020 10:37:59 -0700 Subject: Test token remover's message deletion --- tests/bot/cogs/test_token_remover.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index 5ca863926..d65ce2ce5 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -209,6 +209,15 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): if user_return: valid_time.assert_called_once_with("y") + async def test_delete_message(self): + """The message should be deleted, and a message should be sent to the same channel.""" + await TokenRemover.delete_message(self.msg) + + self.msg.delete.assert_called_once_with() + self.msg.channel.send.assert_called_once_with( + DELETION_MESSAGE_TEMPLATE.format(mention=self.msg.author.mention) + ) + def test_censors_valid_tokens(self): """Valid tokens are censored.""" cases = ( -- cgit v1.2.3 From 567a5f9242912d6a3340c088c0ae1a62977a141e Mon Sep 17 00:00:00 
2001 From: MarkKoz Date: Mon, 11 May 2020 10:46:02 -0700 Subject: Test TokenRemover.format_log_message --- tests/bot/cogs/test_token_remover.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index d65ce2ce5..f5412e692 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -218,6 +218,22 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): DELETION_MESSAGE_TEMPLATE.format(mention=self.msg.author.mention) ) + @autospec("bot.cogs.token_remover", "LOG_MESSAGE") + async def test_format_log_message(self, log_message): + """Should correctly format the log message with info from the message and token.""" + log_message.format.return_value = "Howdy" + return_value = TokenRemover.format_log_message(self.msg, "MTIz.DN9R_A.xyz") + + self.assertEqual(return_value, log_message.format.return_value) + log_message.format.assert_called_once_with( + author=self.msg.author, + author_id=self.msg.author.id, + channel=self.msg.channel.mention, + user_id="MTIz", + timestamp="DN9R_A", + hmac="xxx", + ) + def test_censors_valid_tokens(self): """Valid tokens are censored.""" cases = ( -- cgit v1.2.3 From f47cbef0b47ef11b8c1fd63076105e4cb7d73601 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Mon, 11 May 2020 11:29:28 -0700 Subject: Test TokenRemover.take_action * Remove `bot.get_cog` mocks in `setUp` * Mock the logger cause it's easier to assert logs * Remove subtests * Assert helper functions were called * Create an autospec for ModLog --- tests/bot/cogs/test_token_remover.py | 73 +++++++++++++++--------------------- 1 file changed, 30 insertions(+), 43 deletions(-) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index f5412e692..3546e7964 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -1,11 +1,10 @@ -import asyncio -import logging import unittest from unittest import mock -from unittest.mock import AsyncMock, MagicMock +from unittest.mock import MagicMock from discord import Colour +from bot.cogs.moderation import ModLog from bot.cogs.token_remover import ( DELETION_MESSAGE_TEMPLATE, TOKEN_RE, @@ -22,8 +21,6 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): def setUp(self): """Adds the cog, a bot, and a message to the instance for usage in tests.""" self.bot = MockBot() - self.bot.get_cog.return_value = MagicMock() - self.bot.get_cog.return_value.send_log_message = AsyncMock() self.cog = TokenRemover(bot=self.bot) self.msg = MockMessage(id=555, content="hello world") @@ -234,46 +231,36 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): hmac="xxx", ) - def test_censors_valid_tokens(self): - """Valid tokens are censored.""" - cases = ( - # (content, censored_token) - ('MTIz.DN9R_A.xyz', 'MTIz.DN9R_A.xxx'), + @mock.patch.object(TokenRemover, "mod_log", new_callable=mock.PropertyMock) + @autospec("bot.cogs.token_remover", "log") + @autospec(TokenRemover, "delete_message", "format_log_message") + async def test_take_action(self, delete_message, format_log_message, logger, mod_log_property): + """Should delete the message and send a mod log.""" + cog = TokenRemover(self.bot) + mod_log = mock.create_autospec(ModLog, spec_set=True, instance=True) + token = "MTIz.DN9R_A.xyz" + log_msg = "testing123" + + mod_log_property.return_value = mod_log + format_log_message.return_value = log_msg + + await cog.take_action(self.msg, token) + + 
delete_message.assert_awaited_once_with(self.msg) + format_log_message.assert_called_once_with(self.msg, token) + logger.debug.assert_called_with(log_msg) + self.bot.stats.incr.assert_called_once_with("tokens.removed_tokens") + + mod_log.ignore.assert_called_once_with(Event.message_delete, self.msg.id) + mod_log.send_log_message.assert_called_once_with( + icon_url=Icons.token_removed, + colour=Colour(Colours.soft_red), + title="Token removed!", + text=log_msg, + thumbnail=self.msg.author.avatar_url_as.return_value, + channel_id=Channels.mod_alerts ) - for content, censored_token in cases: - with self.subTest(content=content, censored_token=censored_token): - self.msg.content = content - coroutine = self.cog.on_message(self.msg) - with self.assertLogs(logger='bot.cogs.token_remover', level=logging.DEBUG) as cm: - self.assertIsNone(asyncio.run(coroutine)) # no return value - - [line] = cm.output - log_message = ( - "Censored a seemingly valid token sent by " - "lemon (`42`) in #lemonade-stand, " - f"token was `{censored_token}`" - ) - self.assertIn(log_message, line) - - self.msg.delete.assert_called_once_with() - self.msg.channel.send.assert_called_once_with( - DELETION_MESSAGE_TEMPLATE.format(mention='@lemon') - ) - self.bot.get_cog.assert_called_with('ModLog') - self.msg.author.avatar_url_as.assert_called_once_with(static_format='png') - - mod_log = self.bot.get_cog.return_value - mod_log.ignore.assert_called_once_with(Event.message_delete, self.msg.id) - mod_log.send_log_message.assert_called_once_with( - icon_url=Icons.token_removed, - colour=Colour(Colours.soft_red), - title="Token removed!", - text=log_message, - thumbnail='picture-lemon.png', - channel_id=Channels.mod_alerts - ) - class TokenRemoverSetupTests(unittest.TestCase): """Tests setup of the `TokenRemover` cog.""" -- cgit v1.2.3 From 5734a4d84922a9497014dfeb3eba31ad3c57536f Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Mon, 11 May 2020 11:44:08 -0700 Subject: Refactor `TokenRemoverSetupTests` and add a more thorough test The test now ensures the cog is instantiated and that the instance is passed as an argument to `add_cog`. 
--- tests/bot/cogs/test_token_remover.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index 3546e7964..c377de7b2 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -262,11 +262,15 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): ) -class TokenRemoverSetupTests(unittest.TestCase): - """Tests setup of the `TokenRemover` cog.""" +class TokenRemoverExtensionTests(unittest.TestCase): + """Tests for the token_remover extension.""" - def test_setup(self): - """Setup of the extension should call add_cog.""" + @autospec("bot.cogs.token_remover", "TokenRemover") + def test_extension_setup(self, cog): + """The TokenRemover cog should be added.""" bot = MockBot() setup_cog(bot) + + cog.assert_called_once_with(bot) bot.add_cog.assert_called_once() + self.assertTrue(isinstance(bot.add_cog.call_args.args[0], TokenRemover)) -- cgit v1.2.3 From d0303d715d485842a2d5c906099d767d74cf8bd8 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Mon, 11 May 2020 11:45:50 -0700 Subject: Replace deprecated assertion methods --- tests/bot/cogs/test_token_remover.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index c377de7b2..aecb51403 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -150,7 +150,7 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): for token in tokens: with self.subTest(token=token): results = TOKEN_RE.findall(token) - self.assertEquals(len(results), 0) + self.assertEqual(len(results), 0) def test_regex_valid_tokens(self): """Messages that look like tokens should be matched.""" @@ -171,7 +171,7 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): message = f"garbage {tokens[0]} hello {tokens[1]} world" results = TOKEN_RE.findall(message) - self.assertEquals(tokens, results) + self.assertEqual(tokens, results) @autospec(TokenRemover, "is_valid_user_id", "is_valid_timestamp") def test_is_maybe_token_missing_part_returns_false(self, valid_user, valid_time): -- cgit v1.2.3 From 862153f2e4ab5b1408719fb2c1abc5143cfb15ce Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Mon, 11 May 2020 11:47:40 -0700 Subject: Clean up token remover test imports --- tests/bot/cogs/test_token_remover.py | 28 ++++++++++++---------------- 1 file changed, 12 insertions(+), 16 deletions(-) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index aecb51403..5cc8c7ad1 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -4,14 +4,10 @@ from unittest.mock import MagicMock from discord import Colour +from bot import constants +from bot.cogs import token_remover from bot.cogs.moderation import ModLog -from bot.cogs.token_remover import ( - DELETION_MESSAGE_TEMPLATE, - TOKEN_RE, - TokenRemover, - setup as setup_cog, -) -from bot.constants import Channels, Colours, Event, Icons +from bot.cogs.token_remover import TokenRemover from tests.helpers import MockBot, MockMessage, autospec @@ -149,7 +145,7 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): for token in tokens: with self.subTest(token=token): - results = TOKEN_RE.findall(token) + results = token_remover.TOKEN_RE.findall(token) self.assertEqual(len(results), 0) def test_regex_valid_tokens(self): @@ -162,7 +158,7 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): 
for token in tokens: with self.subTest(token=token): - results = TOKEN_RE.findall(token) + results = token_remover.TOKEN_RE.findall(token) self.assertIn(token, results) def test_regex_matches_multiple_valid(self): @@ -170,7 +166,7 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): tokens = ["x.y.z", "a.b.c"] message = f"garbage {tokens[0]} hello {tokens[1]} world" - results = TOKEN_RE.findall(message) + results = token_remover.TOKEN_RE.findall(message) self.assertEqual(tokens, results) @autospec(TokenRemover, "is_valid_user_id", "is_valid_timestamp") @@ -212,7 +208,7 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): self.msg.delete.assert_called_once_with() self.msg.channel.send.assert_called_once_with( - DELETION_MESSAGE_TEMPLATE.format(mention=self.msg.author.mention) + token_remover.DELETION_MESSAGE_TEMPLATE.format(mention=self.msg.author.mention) ) @autospec("bot.cogs.token_remover", "LOG_MESSAGE") @@ -251,14 +247,14 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): logger.debug.assert_called_with(log_msg) self.bot.stats.incr.assert_called_once_with("tokens.removed_tokens") - mod_log.ignore.assert_called_once_with(Event.message_delete, self.msg.id) + mod_log.ignore.assert_called_once_with(constants.Event.message_delete, self.msg.id) mod_log.send_log_message.assert_called_once_with( - icon_url=Icons.token_removed, - colour=Colour(Colours.soft_red), + icon_url=constants.Icons.token_removed, + colour=Colour(constants.Colours.soft_red), title="Token removed!", text=log_msg, thumbnail=self.msg.author.avatar_url_as.return_value, - channel_id=Channels.mod_alerts + channel_id=constants.Channels.mod_alerts ) @@ -269,7 +265,7 @@ class TokenRemoverExtensionTests(unittest.TestCase): def test_extension_setup(self, cog): """The TokenRemover cog should be added.""" bot = MockBot() - setup_cog(bot) + token_remover.setup(bot) cog.assert_called_once_with(bot) bot.add_cog.assert_called_once() -- cgit v1.2.3 From 4701b0da36c7f42792c0af258b785076237fd661 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Mon, 11 May 2020 11:56:15 -0700 Subject: Use subtests for valid ID/timestamp tests and test non-ASCII inputs --- tests/bot/cogs/test_token_remover.py | 43 +++++++++++++++++++++--------------- 1 file changed, 25 insertions(+), 18 deletions(-) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index 5cc8c7ad1..f1a56c235 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -24,24 +24,31 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): self.msg.author.__str__ = MagicMock(return_value=self.msg.author.name) self.msg.author.avatar_url_as.return_value = "picture-lemon.png" - def test_is_valid_user_id_is_true_for_numeric_content(self): - """A string decoding to numeric characters is a valid user ID.""" - # MTIz = base64(123) - self.assertTrue(TokenRemover.is_valid_user_id('MTIz')) - - def test_is_valid_user_id_is_false_for_alphabetic_content(self): - """A string decoding to alphabetic characters is not a valid user ID.""" - # YWJj = base64(abc) - self.assertFalse(TokenRemover.is_valid_user_id('YWJj')) - - def test_is_valid_timestamp_is_true_for_valid_timestamps(self): - """A string decoding to a valid timestamp should be recognized as such.""" - self.assertTrue(TokenRemover.is_valid_timestamp('DN9r_A')) - - def test_is_valid_timestamp_is_false_for_invalid_values(self): - """A string not decoding to a valid timestamp should not be recognized as such.""" - # MTIz = base64(123) - 
self.assertFalse(TokenRemover.is_valid_timestamp('MTIz')) + def test_is_valid_user_id(self): + """Should correctly discern valid user IDs and ignore non-numeric and non-ASCII IDs.""" + subtests = ( + ("MTIz", True), # base64(123) + ("YWJj", False), # base64(abc) + ("λδµ", False), + ) + + for user_id, is_valid in subtests: + with self.subTest(user_id=user_id, is_valid=is_valid): + result = TokenRemover.is_valid_user_id(user_id) + self.assertIs(result, is_valid) + + def test_is_valid_timestamp(self): + """Should correctly discern valid timestamps.""" + subtests = ( + ("DN9r_A", True), + ("MTIz", False), # base64(123) + ("λδµ", False), + ) + + for timestamp, is_valid in subtests: + with self.subTest(timestamp=timestamp, is_valid=is_valid): + result = TokenRemover.is_valid_timestamp(timestamp) + self.assertIs(result, is_valid) def test_mod_log_property(self): """The `mod_log` property should ask the bot to return the `ModLog` cog.""" -- cgit v1.2.3 From 31aff51655d3783bc70f04628f189cf3c3591028 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Wed, 13 May 2020 18:58:43 -0700 Subject: Fix a test needlessly being a coroutine --- tests/bot/cogs/test_token_remover.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index f1a56c235..8e743a715 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -219,7 +219,7 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): ) @autospec("bot.cogs.token_remover", "LOG_MESSAGE") - async def test_format_log_message(self, log_message): + def test_format_log_message(self, log_message): """Should correctly format the log message with info from the message and token.""" log_message.format.return_value = "Howdy" return_value = TokenRemover.format_log_message(self.msg, "MTIz.DN9R_A.xyz") -- cgit v1.2.3 From ab44bb38d874dfdec9d7dc61bbf13b06144b9a0e Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Wed, 13 May 2020 19:18:50 -0700 Subject: Add missing comma to token remover log message --- bot/cogs/token_remover.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/cogs/token_remover.py b/bot/cogs/token_remover.py index c576a67d0..c57e7764e 100644 --- a/bot/cogs/token_remover.py +++ b/bot/cogs/token_remover.py @@ -17,7 +17,7 @@ from bot.constants import Channels, Colours, Event, Icons log = logging.getLogger(__name__) LOG_MESSAGE = ( - "Censored a seemingly valid token sent by {author} (`{author_id}`) in {channel}," + "Censored a seemingly valid token sent by {author} (`{author_id}`) in {channel}, " "token was `{user_id}.{timestamp}.{hmac}`" ) DELETION_MESSAGE_TEMPLATE = ( -- cgit v1.2.3 From 297089cde278ea09a27240f71f41006fab2b2ca4 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Wed, 13 May 2020 19:36:44 -0700 Subject: Token remover: add logs to clarify why token is invalid --- bot/cogs/token_remover.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/bot/cogs/token_remover.py b/bot/cogs/token_remover.py index c57e7764e..244d52edb 100644 --- a/bot/cogs/token_remover.py +++ b/bot/cogs/token_remover.py @@ -133,12 +133,14 @@ class TokenRemover(Cog): try: user_id, creation_timestamp, hmac = test_str.split('.') except ValueError: + log.debug(f"Invalid token format in '{test_str}': does not have all 3 parts.") return False if cls.is_valid_user_id(user_id) and cls.is_valid_timestamp(creation_timestamp): return True - - return False + else: + log.debug(f"Invalid user ID or timestamp in '{test_str}'.") + return 
False @staticmethod def is_valid_user_id(b64_content: str) -> bool: -- cgit v1.2.3 From 73bcb2b434a30761494bbedd914508964c6fbbad Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Thu, 14 May 2020 10:34:37 -0700 Subject: Token remover: fix timestamp check The timestamp calculation was incorrect. The bytes need to be interpreted as big-endian and the result is just a timestamp rather than a snowflake. --- bot/cogs/token_remover.py | 26 ++++++++++++++++---------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/bot/cogs/token_remover.py b/bot/cogs/token_remover.py index 244d52edb..957c8a690 100644 --- a/bot/cogs/token_remover.py +++ b/bot/cogs/token_remover.py @@ -2,13 +2,10 @@ import base64 import binascii import logging import re -import struct import typing as t -from datetime import datetime from discord import Colour, Message from discord.ext.commands import Cog -from discord.utils import snowflake_time from bot.bot import Bot from bot.cogs.moderation import ModLog @@ -29,7 +26,7 @@ DELETION_MESSAGE_TEMPLATE = ( "Feel free to re-post it with the token removed. " "If you believe this was a mistake, please let us know!" ) -DISCORD_EPOCH_TIMESTAMP = datetime(2017, 1, 1) +DISCORD_EPOCH = 1_420_070_400_000 TOKEN_EPOCH = 1_293_840_000 TOKEN_RE = re.compile( r"[^\s\.()\"']+" # Matches token part 1: The user ID string, encoded as base64 @@ -160,18 +157,27 @@ class TokenRemover(Cog): @staticmethod def is_valid_timestamp(b64_content: str) -> bool: """ - Check potential token to see if it contains a valid timestamp. + Return True if `b64_content` decodes to a valid timestamp. - See: https://discordapp.com/developers/docs/reference#snowflakes + If the timestamp is greater than the Discord epoch, it's probably valid. + See: https://i.imgur.com/7WdehGn.png """ b64_content += '=' * (-len(b64_content) % 4) try: - content = base64.urlsafe_b64decode(b64_content) - snowflake = struct.unpack('i', content)[0] - except (binascii.Error, struct.error, ValueError): + decoded_bytes = base64.urlsafe_b64decode(b64_content) + timestamp = int.from_bytes(decoded_bytes, byteorder="big") + except (binascii.Error, ValueError) as e: + log.debug(f"Failed to decode token timestamp '{b64_content}': {e}") + return False + + # Seems like newer tokens don't need the epoch added, but add anyway since an upper bound + # is not checked. + if timestamp + TOKEN_EPOCH >= DISCORD_EPOCH: + return True + else: + log.debug(f"Invalid token timestamp '{b64_content}': smaller than Discord epoch") return False - return snowflake_time(snowflake + TOKEN_EPOCH) < DISCORD_EPOCH_TIMESTAMP def setup(bot: Bot) -> None: -- cgit v1.2.3 From 4a73c24678d4a893304f0b2f3a5f1e326cae817a Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Fri, 15 May 2020 08:54:36 -0700 Subject: Token remover: use strict check for digits in token ID `isnumeric` would be true for a wide range of characters in Unicode, but the ID must only consist of the characters 0-9 (ASCII digits). In fact, `isdigit` on its own would also match other Unicode characters too. 
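(Toy illustration, not repository code.) A few concrete strings show how far `isnumeric` and `isdigit` reach beyond ASCII 0-9, and why the check below pairs `isdigit` with `isascii`:

    for s in ("123", "¹²³", "Ⅻ"):  # ASCII digits, superscript digits, Roman numeral twelve
        print(repr(s), s.isnumeric(), s.isdigit(), s.isascii())
    # '123' True True  True
    # '¹²³' True True  False  <- passes isdigit() alone despite not being ASCII
    # 'Ⅻ'   True False False  <- passes isnumeric() alone
    # Hence the combined test: string.isascii() and string.isdigit()
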
--- bot/cogs/token_remover.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/bot/cogs/token_remover.py b/bot/cogs/token_remover.py index 957c8a690..43c12c4f7 100644 --- a/bot/cogs/token_remover.py +++ b/bot/cogs/token_remover.py @@ -149,8 +149,11 @@ class TokenRemover(Cog): b64_content += '=' * (-len(b64_content) % 4) try: - content: bytes = base64.b64decode(b64_content) - return content.decode('utf-8').isnumeric() + decoded_bytes: bytes = base64.b64decode(b64_content) + string = decoded_bytes.decode('utf-8') + + # isdigit on its own would match a lot of other Unicode characters, hence the isascii. + return string.isascii() and string.isdigit() except (binascii.Error, ValueError): return False -- cgit v1.2.3 From ad154f7f0d7daa3f962433f77d1cdd11cc66bfe0 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sat, 16 May 2020 22:43:00 -0700 Subject: Add a utility function to pad base64 data --- bot/cogs/token_remover.py | 5 +++-- bot/utils/__init__.py | 5 +++++ 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/bot/cogs/token_remover.py b/bot/cogs/token_remover.py index 43c12c4f7..cae482e6e 100644 --- a/bot/cogs/token_remover.py +++ b/bot/cogs/token_remover.py @@ -7,6 +7,7 @@ import typing as t from discord import Colour, Message from discord.ext.commands import Cog +from bot import utils from bot.bot import Bot from bot.cogs.moderation import ModLog from bot.constants import Channels, Colours, Event, Icons @@ -146,7 +147,7 @@ class TokenRemover(Cog): See: https://discordapp.com/developers/docs/reference#snowflakes """ - b64_content += '=' * (-len(b64_content) % 4) + b64_content = utils.pad_base64(b64_content) try: decoded_bytes: bytes = base64.b64decode(b64_content) @@ -165,7 +166,7 @@ class TokenRemover(Cog): If the timestamp is greater than the Discord epoch, it's probably valid. See: https://i.imgur.com/7WdehGn.png """ - b64_content += '=' * (-len(b64_content) % 4) + b64_content = utils.pad_base64(b64_content) try: decoded_bytes = base64.urlsafe_b64decode(b64_content) diff --git a/bot/utils/__init__.py b/bot/utils/__init__.py index 9b32e515d..1dd0636df 100644 --- a/bot/utils/__init__.py +++ b/bot/utils/__init__.py @@ -7,3 +7,8 @@ class CogABCMeta(CogMeta, ABCMeta): """Metaclass for ABCs meant to be implemented as Cogs.""" pass + + +def pad_base64(data: str) -> str: + """Return base64 `data` with padding characters to ensure its length is a multiple of 4.""" + return data + "=" * (-len(data) % 4) -- cgit v1.2.3 From e23aa887959059e17fc21dcab9c83db20dc987f5 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Thu, 21 May 2020 20:29:44 -0700 Subject: Token remover: decode ID using URL-safe base64 Though I've not seen an ID with neither + and \ nor - and _, given that the timestamp uses URL-safe encoding, the ID probably does too. --- bot/cogs/token_remover.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/cogs/token_remover.py b/bot/cogs/token_remover.py index cae482e6e..5b4598959 100644 --- a/bot/cogs/token_remover.py +++ b/bot/cogs/token_remover.py @@ -150,7 +150,7 @@ class TokenRemover(Cog): b64_content = utils.pad_base64(b64_content) try: - decoded_bytes: bytes = base64.b64decode(b64_content) + decoded_bytes = base64.urlsafe_b64decode(b64_content) string = decoded_bytes.decode('utf-8') # isdigit on its own would match a lot of other Unicode characters, hence the isascii. 
-- cgit v1.2.3 From 95ef2dc01143902289c9aacde7969fb5c9e1a85c Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Thu, 21 May 2020 21:34:10 -0700 Subject: Token remover: match only base64 in regex Making the regex more accurate reduces false positives at an earlier stage. There's no benefit to matching non-base64 as that would just be weeded out as invalid at a later stage anyway when it tries to decode it. --- bot/cogs/token_remover.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/bot/cogs/token_remover.py b/bot/cogs/token_remover.py index 5b4598959..fa0647828 100644 --- a/bot/cogs/token_remover.py +++ b/bot/cogs/token_remover.py @@ -29,13 +29,12 @@ DELETION_MESSAGE_TEMPLATE = ( ) DISCORD_EPOCH = 1_420_070_400_000 TOKEN_EPOCH = 1_293_840_000 -TOKEN_RE = re.compile( - r"[^\s\.()\"']+" # Matches token part 1: The user ID string, encoded as base64 - r"\." # Matches a literal dot between the token parts - r"[^\s\.()\"']+" # Matches token part 2: The creation timestamp, as an integer - r"\." # Matches a literal dot between the token parts - r"[^\s\.()\"']+" # Matches token part 3: The HMAC, unused by us, but check that it isn't empty -) + +# Three parts delimited by dots: user ID, creation timestamp, HMAC. +# The HMAC isn't parsed further, but it's in the regex to ensure it at least exists in the string. +# Each part only matches base64 URL-safe characters. +# Padding has never been observed, but the padding character '=' is matched just in case. +TOKEN_RE = re.compile(r"[\w-=]+\.[\w-=]+\.[\w-=]+", re.ASCII) class TokenRemover(Cog): -- cgit v1.2.3 From 2c7ff94c956691dafa35c92dd0baa95a60aafacf Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Sat, 23 May 2020 18:02:39 -0700 Subject: Token remover: escape dashes in regex They need to be escaped when they're in a character set. By default, they are interpreted as part of the character range syntax. --- bot/cogs/token_remover.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/cogs/token_remover.py b/bot/cogs/token_remover.py index fa0647828..f23eba89b 100644 --- a/bot/cogs/token_remover.py +++ b/bot/cogs/token_remover.py @@ -34,7 +34,7 @@ TOKEN_EPOCH = 1_293_840_000 # The HMAC isn't parsed further, but it's in the regex to ensure it at least exists in the string. # Each part only matches base64 URL-safe characters. # Padding has never been observed, but the padding character '=' is matched just in case. -TOKEN_RE = re.compile(r"[\w-=]+\.[\w-=]+\.[\w-=]+", re.ASCII) +TOKEN_RE = re.compile(r"[\w\-=]+\.[\w\-=]+\.[\w\-=]+", re.ASCII) class TokenRemover(Cog): -- cgit v1.2.3 From 161bf818ed0f1690c63f4f54cc9549e298e3e45c Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Mon, 25 May 2020 19:45:04 -0700 Subject: Token remover: use regex groups and pass the token as a NamedTuple It felt redundant to be splitting the token in two different functions when regex could take care of this from the outset. A NamedTuple was created to house the token. This is nicer than passing an re.Match object, because it's clearer which attributes are available. Even if the regex used named groups, it wouldn't be as obvious which group names exist. Without the split, `is_maybe_token` dwindles down to a redundant function. Therefore, it's been removed.
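As a minimal sketch of the idea (the pattern below is the existing one with the capturing groups added, matching the diff that follows; the token pieces are dummy values):

    import re
    import typing as t

    TOKEN_RE = re.compile(r"([\w\-=]+)\.([\w\-=]+)\.([\w\-=]+)", re.ASCII)

    class Token(t.NamedTuple):
        user_id: str
        timestamp: str
        hmac: str

    match = TOKEN_RE.fullmatch("MTIz.DN9r_A.not-a-real-hmac")
    token = Token(*match.groups())
    print(token.user_id, token.timestamp)  # attribute access is self-documenting,
                                           # unlike match[1] and match[2]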
--- bot/cogs/token_remover.py | 47 ++++++++++++++++++++--------------------------- 1 file changed, 20 insertions(+), 27 deletions(-) diff --git a/bot/cogs/token_remover.py b/bot/cogs/token_remover.py index f23eba89b..e5d0ae838 100644 --- a/bot/cogs/token_remover.py +++ b/bot/cogs/token_remover.py @@ -34,7 +34,15 @@ TOKEN_EPOCH = 1_293_840_000 # The HMAC isn't parsed further, but it's in the regex to ensure it at least exists in the string. # Each part only matches base64 URL-safe characters. # Padding has never been observed, but the padding character '=' is matched just in case. -TOKEN_RE = re.compile(r"[\w\-=]+\.[\w\-=]+\.[\w\-=]+", re.ASCII) +TOKEN_RE = re.compile(r"([\w\-=]+)\.([\w\-=]+)\.([\w\-=]+)", re.ASCII) + + +class Token(t.NamedTuple): + """A Discord Bot token.""" + + user_id: str + timestamp: str + hmac: str class TokenRemover(Cog): @@ -68,7 +76,7 @@ class TokenRemover(Cog): """ await self.on_message(after) - async def take_action(self, msg: Message, found_token: str) -> None: + async def take_action(self, msg: Message, found_token: Token) -> None: """Remove the `msg` containing the `found_token` and send a mod log message.""" self.mod_log.ignore(Event.message_delete, msg.id) await self.delete_message(msg) @@ -95,20 +103,19 @@ class TokenRemover(Cog): await msg.channel.send(DELETION_MESSAGE_TEMPLATE.format(mention=msg.author.mention)) @staticmethod - def format_log_message(msg: Message, found_token: str) -> str: - """Return the log message to send for `found_token` being censored in `msg`.""" - user_id, creation_timestamp, hmac = found_token.split('.') + def format_log_message(msg: Message, token: Token) -> str: + """Return the log message to send for `token` being censored in `msg`.""" return LOG_MESSAGE.format( author=msg.author, author_id=msg.author.id, channel=msg.channel.mention, - user_id=user_id, - timestamp=creation_timestamp, - hmac='x' * len(hmac), + user_id=token.user_id, + timestamp=token.timestamp, + hmac='x' * len(token.hmac), ) @classmethod - def find_token_in_message(cls, msg: Message) -> t.Optional[str]: + def find_token_in_message(cls, msg: Message) -> t.Optional[Token]: """Return a seemingly valid token found in `msg` or `None` if no token is found.""" if msg.author.bot: return @@ -116,29 +123,15 @@ class TokenRemover(Cog): # Use findall rather than search to guard against method calls prematurely returning the # token check (e.g. 
`message.channel.send` also matches our token pattern) maybe_matches = TOKEN_RE.findall(msg.content) - for substr in maybe_matches: - if cls.is_maybe_token(substr): + for match_groups in maybe_matches: + token = Token(*match_groups) + if cls.is_valid_user_id(token.user_id) and cls.is_valid_timestamp(token.timestamp): # Short-circuit on first match - return substr + return token # No matching substring return - @classmethod - def is_maybe_token(cls, test_str: str) -> bool: - """Check the provided string to see if it is a seemingly valid token.""" - try: - user_id, creation_timestamp, hmac = test_str.split('.') - except ValueError: - log.debug(f"Invalid token format in '{test_str}': does not have all 3 parts.") - return False - - if cls.is_valid_user_id(user_id) and cls.is_valid_timestamp(creation_timestamp): - return True - else: - log.debug(f"Invalid user ID or timestamp in '{test_str}'.") - return False - @staticmethod def is_valid_user_id(b64_content: str) -> bool: """ -- cgit v1.2.3 From bfe79efdfe699bf7289cba9db95d5637a7fb965a Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Mon, 25 May 2020 19:46:51 -0700 Subject: Token remover: use finditer instead of findall It makes more sense to use the lazy function when the loop is already short-circuiting on the first valid token it finds. --- bot/cogs/token_remover.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/bot/cogs/token_remover.py b/bot/cogs/token_remover.py index e5d0ae838..8913ca64d 100644 --- a/bot/cogs/token_remover.py +++ b/bot/cogs/token_remover.py @@ -120,11 +120,10 @@ class TokenRemover(Cog): if msg.author.bot: return - # Use findall rather than search to guard against method calls prematurely returning the + # Use finditer rather than search to guard against method calls prematurely returning the # token check (e.g. `message.channel.send` also matches our token pattern) - maybe_matches = TOKEN_RE.findall(msg.content) - for match_groups in maybe_matches: - token = Token(*match_groups) + for match in TOKEN_RE.finditer(msg.content): + token = Token(*match.groups()) if cls.is_valid_user_id(token.user_id) and cls.is_valid_timestamp(token.timestamp): # Short-circuit on first match return token -- cgit v1.2.3 From 5386eda1731bb8eae287c20ed70a76399db2ae0e Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Mon, 25 May 2020 19:55:23 -0700 Subject: Token remover: specify Discord epoch in seconds The timestamp in the token is in seconds and is being compared against the epoch. To make life easier, they should use the same unit. Previously, the epoch was in milliseconds. --- bot/cogs/token_remover.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/cogs/token_remover.py b/bot/cogs/token_remover.py index 8913ca64d..46329e207 100644 --- a/bot/cogs/token_remover.py +++ b/bot/cogs/token_remover.py @@ -27,7 +27,7 @@ DELETION_MESSAGE_TEMPLATE = ( "Feel free to re-post it with the token removed. " "If you believe this was a mistake, please let us know!" ) -DISCORD_EPOCH = 1_420_070_400_000 +DISCORD_EPOCH = 1_420_070_400 TOKEN_EPOCH = 1_293_840_000 # Three parts delimited by dots: user ID, creation timestamp, HMAC. 
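With both values in seconds, the comparison boils down to roughly this standalone sketch (the timestamp segment is one of the invalidated ones used later in the test suite):

    import base64
    from datetime import datetime, timezone

    DISCORD_EPOCH = 1_420_070_400  # 2015-01-01 in seconds
    TOKEN_EPOCH = 1_293_840_000    # 2011-01-01 in seconds

    b64_timestamp = "XsyRkw"       # second part of an invalidated token
    padded = b64_timestamp + "=" * (-len(b64_timestamp) % 4)
    timestamp = int.from_bytes(base64.urlsafe_b64decode(padded), byteorder="big")

    print(timestamp)                                        # ~1.59e9
    print(datetime.fromtimestamp(timestamp, timezone.utc))  # a date in late May 2020
    print(timestamp + TOKEN_EPOCH >= DISCORD_EPOCH)         # True -> plausibly a token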
-- cgit v1.2.3 From 47886501fb7d030f1cb91c69413058e3ffcb76bf Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Mon, 25 May 2020 20:47:32 -0700 Subject: Test token regex won't match non-base64 characters --- tests/bot/cogs/test_token_remover.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index 8e743a715..dbea5ad1b 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -144,10 +144,9 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): "x..z", " . . ", "\n.\n.\n", - "'.'.'", - '"."."', - "(.(.(", - ").).)" + "hellö.world.bye", + "base64.nötbåse64.morebase64", + "19jd3J.dfkm3d.€víł§tüff", ) for token in tokens: -- cgit v1.2.3 From e76099d48b9a895c48e58c5f5489886f4191eeb6 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Mon, 25 May 2020 20:50:30 -0700 Subject: Add more valid tokens to test the regex with --- tests/bot/cogs/test_token_remover.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index dbea5ad1b..6a280f358 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -156,10 +156,12 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): def test_regex_valid_tokens(self): """Messages that look like tokens should be matched.""" - # Don't worry, the token's been invalidated. + # Don't worry, these tokens have been invalidated. tokens = ( - "x1.y2.z_3", - "NDcyMjY1OTQzMDYyNDEzMzMy.Xrim9Q.Ysnu2wacjaKs7qnoo46S8Dm2us8" + "NDcyMjY1OTQzMDYy_DEzMz-y.XsyRkw.VXmErH7j511turNpfURmb0rVNm8", + "NDcyMjY1OTQzMDYyNDEzMzMy.Xrim9Q.Ysnu2wacjaKs7qnoo46S8Dm2us8", + "NDc1MDczNjI5Mzk5NTQ3OTA0.XsyR-w.sJf6omBPORBPju3WJEIAcwW9Zds", + "NDY3MjIzMjMwNjUwNzc3NjQx.XsySD_.s45jqDV_Iisn-symw0yDRrk_jf4", ) for token in tokens: -- cgit v1.2.3 From a8a216d0803b67a330ae092a17bea563f5012275 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Mon, 25 May 2020 21:02:24 -0700 Subject: Fix valid token regex test It was broken due to the addition of groups. Rather than returning the full match, `findall` returns groups if any exist. The test was comparing a tuple of groups to the token string, which was of course failing. Now `fullmatch` is used cause it's simpler - just check for `None` and don't worry about iterating matches to search. --- tests/bot/cogs/test_token_remover.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index 6a280f358..518bf91ca 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -166,8 +166,8 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): for token in tokens: with self.subTest(token=token): - results = token_remover.TOKEN_RE.findall(token) - self.assertIn(token, results) + results = token_remover.TOKEN_RE.fullmatch(token) + self.assertIsNotNone(results, f"{token} was not matched by the regex") def test_regex_matches_multiple_valid(self): """Should support multiple matches in the middle of a string.""" -- cgit v1.2.3 From 19cc849d4c70bc3e792460ad712aa308fa500462 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Mon, 25 May 2020 21:07:21 -0700 Subject: Fix multiple match test for token regex It has to account for the addition of groups. It's easiest to compare the entire string so `finditer` is used to return re.Match objects; the tuples of `findall` would be cumbersome.
Also threw in a change to use `assertCountEqual` cause the order doesn't really matter. --- tests/bot/cogs/test_token_remover.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index 518bf91ca..2ecfae2bd 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -174,8 +174,9 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): tokens = ["x.y.z", "a.b.c"] message = f"garbage {tokens[0]} hello {tokens[1]} world" - results = token_remover.TOKEN_RE.findall(message) - self.assertEqual(tokens, results) + results = token_remover.TOKEN_RE.finditer(message) + results = [match[0] for match in results] + self.assertCountEqual(tokens, results) @autospec(TokenRemover, "is_valid_user_id", "is_valid_timestamp") def test_is_maybe_token_missing_part_returns_false(self, valid_user, valid_time): -- cgit v1.2.3 From 300f8c093edea03855d94be179c64c328ec842ac Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Mon, 25 May 2020 21:09:04 -0700 Subject: Use real token values for testing multiple matches in regex --- tests/bot/cogs/test_token_remover.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index 2ecfae2bd..971bc93fc 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -171,12 +171,13 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): def test_regex_matches_multiple_valid(self): """Should support multiple matches in the middle of a string.""" - tokens = ["x.y.z", "a.b.c"] - message = f"garbage {tokens[0]} hello {tokens[1]} world" + token_1 = "NDY3MjIzMjMwNjUwNzc3NjQx.XsyWGg.uFNEQPCc4ePwGh7egG8UicQssz8" + token_2 = "NDcyMjY1OTQzMDYyNDEzMzMy.XsyWMw.l8XPnDqb0lp-EiQ2g_0xVFT1pyc" + message = f"garbage {token_1} hello {token_2} world" results = token_remover.TOKEN_RE.finditer(message) results = [match[0] for match in results] - self.assertCountEqual(tokens, results) + self.assertCountEqual((token_1, token_2), results) @autospec(TokenRemover, "is_valid_user_id", "is_valid_timestamp") def test_is_maybe_token_missing_part_returns_false(self, valid_user, valid_time): -- cgit v1.2.3 From 96db6087254c957fcb8fb45aad7ffcddb46ee839 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Wed, 27 May 2020 17:08:18 -0700 Subject: Switch findall to finditer in assertions `find_token_in_message` now uses the latter so the tests should adjust accordingly. 
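In isolation, the findall/finditer difference looks like this (a standalone sketch using the grouped pattern from the cog and dummy token-shaped strings):

    import re

    TOKEN_RE = re.compile(r"([\w\-=]+)\.([\w\-=]+)\.([\w\-=]+)", re.ASCII)
    text = "garbage x1.y2.z_3 hello a1.b2.c_3 world"

    # With capturing groups in the pattern, findall returns only tuples of the groups:
    print(TOKEN_RE.findall(text))
    # [('x1', 'y2', 'z_3'), ('a1', 'b2', 'c_3')]

    # finditer yields re.Match objects, so the full matched string is still available:
    print([match[0] for match in TOKEN_RE.finditer(text)])
    # ['x1.y2.z_3', 'a1.b2.c_3']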
--- tests/bot/cogs/test_token_remover.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index 971bc93fc..4fff3ab33 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -94,18 +94,18 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): return_value = TokenRemover.find_token_in_message(self.msg) self.assertIsNone(return_value) - token_re.findall.assert_not_called() + token_re.finditer.assert_not_called() @autospec(TokenRemover, "is_maybe_token") @autospec("bot.cogs.token_remover", "TOKEN_RE") def test_find_token_no_matches_returns_none(self, token_re, is_maybe_token): """None should be returned if the regex matches no tokens in a message.""" - token_re.findall.return_value = () + token_re.finditer.return_value = () return_value = TokenRemover.find_token_in_message(self.msg) self.assertIsNone(return_value) - token_re.findall.assert_called_once_with(self.msg.content) + token_re.finditer.assert_called_once_with(self.msg.content) is_maybe_token.assert_not_called() @autospec(TokenRemover, "is_maybe_token") @@ -123,7 +123,7 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): return_value = TokenRemover.find_token_in_message(self.msg) self.assertEqual(return_value, matches[true_index]) - token_re.findall.assert_called_once_with(self.msg.content) + token_re.finditer.assert_called_once_with(self.msg.content) # assert_has_calls isn't used cause it'd allow for extra calls before or after. # The function should short-circuit, so nothing past true_index should have been used. -- cgit v1.2.3 From f937032466a4124bacf217d1bfd0af097fc3395d Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Wed, 27 May 2020 19:31:55 -0700 Subject: Adjust token remover tests to use the Token NamedTuple --- tests/bot/cogs/test_token_remover.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index 4fff3ab33..65bc1ee58 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -7,7 +7,7 @@ from discord import Colour from bot import constants from bot.cogs import token_remover from bot.cogs.moderation import ModLog -from bot.cogs.token_remover import TokenRemover +from bot.cogs.token_remover import Token, TokenRemover from tests.helpers import MockBot, MockMessage, autospec @@ -224,17 +224,19 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): @autospec("bot.cogs.token_remover", "LOG_MESSAGE") def test_format_log_message(self, log_message): """Should correctly format the log message with info from the message and token.""" + token = Token("NDY3MjIzMjMwNjUwNzc3NjQx", "XsySD_", "s45jqDV_Iisn-symw0yDRrk_jf4") log_message.format.return_value = "Howdy" - return_value = TokenRemover.format_log_message(self.msg, "MTIz.DN9R_A.xyz") + + return_value = TokenRemover.format_log_message(self.msg, token) self.assertEqual(return_value, log_message.format.return_value) log_message.format.assert_called_once_with( author=self.msg.author, author_id=self.msg.author.id, channel=self.msg.channel.mention, - user_id="MTIz", - timestamp="DN9R_A", - hmac="xxx", + user_id=token.user_id, + timestamp=token.timestamp, + hmac="x" * len(token.hmac), ) @mock.patch.object(TokenRemover, "mod_log", new_callable=mock.PropertyMock) @@ -244,7 +246,7 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): """Should delete the message and send a mod 
log.""" cog = TokenRemover(self.bot) mod_log = mock.create_autospec(ModLog, spec_set=True, instance=True) - token = "MTIz.DN9R_A.xyz" + token = mock.create_autospec(Token, spec_set=True, instance=True) log_msg = "testing123" mod_log_property.return_value = mod_log -- cgit v1.2.3 From 12b8f5002807144451a313180c639bf6b4925f2e Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Wed, 27 May 2020 20:00:33 -0700 Subject: Add more thorough and realistic inputs for token ID and timestamp tests The tests for valid inputs and invalid inputs were split to make them more readable. --- tests/bot/cogs/test_token_remover.py | 70 ++++++++++++++++++++++++++---------- 1 file changed, 52 insertions(+), 18 deletions(-) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index 65bc1ee58..ffe76865a 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -24,31 +24,65 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): self.msg.author.__str__ = MagicMock(return_value=self.msg.author.name) self.msg.author.avatar_url_as.return_value = "picture-lemon.png" - def test_is_valid_user_id(self): - """Should correctly discern valid user IDs and ignore non-numeric and non-ASCII IDs.""" - subtests = ( - ("MTIz", True), # base64(123) - ("YWJj", False), # base64(abc) - ("λδµ", False), + def test_is_valid_user_id_valid(self): + """Should consider user IDs valid if they decode entirely to ASCII digits.""" + ids = ( + "NDcyMjY1OTQzMDYyNDEzMzMy", + "NDc1MDczNjI5Mzk5NTQ3OTA0", + "NDY3MjIzMjMwNjUwNzc3NjQx", ) - for user_id, is_valid in subtests: - with self.subTest(user_id=user_id, is_valid=is_valid): + for user_id in ids: + with self.subTest(user_id=user_id): result = TokenRemover.is_valid_user_id(user_id) - self.assertIs(result, is_valid) + self.assertTrue(result) + + def test_is_valid_user_id_invalid(self): + """Should consider non-digit and non-ASCII IDs invalid.""" + ids = ( + ("SGVsbG8gd29ybGQ", "non-digit ASCII"), + ("0J_RgNC40LLQtdGCINC80LjRgA", "cyrillic text"), + ("4pO14p6L4p6C4pG34p264pGl8J-EiOKSj-KCieKBsA", "Unicode digits"), + ("4oaA4oaB4oWh4oWi4Lyz4Lyq4Lyr4LG9", "Unicode numerals"), + ("8J2fjvCdn5nwnZ-k8J2fr_Cdn7rgravvvJngr6c", "Unicode decimals"), + ("{hello}[world]&(bye!)", "ASCII invalid Base64"), + ("Þíß-ï§-ňøẗ-våłìÐ", "Unicode invalid Base64"), + ) - def test_is_valid_timestamp(self): - """Should correctly discern valid timestamps.""" - subtests = ( - ("DN9r_A", True), - ("MTIz", False), # base64(123) - ("λδµ", False), + for user_id, msg in ids: + with self.subTest(msg=msg): + result = TokenRemover.is_valid_user_id(user_id) + self.assertFalse(result) + + def test_is_valid_timestamp_valid(self): + """Should consider timestamps valid if they're greater than the Discord epoch.""" + timestamps = ( + "XsyRkw", + "Xrim9Q", + "XsyR-w", + "XsySD_", + "Dn9r_A", + ) + + for timestamp in timestamps: + with self.subTest(timestamp=timestamp): + result = TokenRemover.is_valid_timestamp(timestamp) + self.assertTrue(result) + + def test_is_valid_timestamp_invalid(self): + """Should consider timestamps invalid if they're before Discord epoch or can't be parsed.""" + timestamps = ( + ("B4Yffw", "DISCORD_EPOCH - TOKEN_EPOCH - 1"), + ("ew", "123"), + ("AoIKgA", "42076800"), + ("{hello}[world]&(bye!)", "ASCII invalid Base64"), + ("Þíß-ï§-ňøẗ-våłìÐ", "Unicode invalid Base64"), ) - for timestamp, is_valid in subtests: - with self.subTest(timestamp=timestamp, is_valid=is_valid): + for timestamp, msg in timestamps: + with self.subTest(msg=msg): result = 
TokenRemover.is_valid_timestamp(timestamp) - self.assertIs(result, is_valid) + self.assertFalse(result) def test_mod_log_property(self): """The `mod_log` property should ask the bot to return the `ModLog` cog.""" -- cgit v1.2.3 From 67472080fef5c38b21d74daa2178c3f35081b58f Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Thu, 28 May 2020 19:52:41 -0700 Subject: Remove is_maybe_token tests The function was removed due to redundancy. Therefore, its tests are obsolete. --- tests/bot/cogs/test_token_remover.py | 33 --------------------------------- 1 file changed, 33 deletions(-) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index ffe76865a..5dd12636c 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -213,39 +213,6 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): results = [match[0] for match in results] self.assertCountEqual((token_1, token_2), results) - @autospec(TokenRemover, "is_valid_user_id", "is_valid_timestamp") - def test_is_maybe_token_missing_part_returns_false(self, valid_user, valid_time): - """False should be returned for tokens which do not have all 3 parts.""" - return_value = TokenRemover.is_maybe_token("x.y") - - self.assertFalse(return_value) - valid_user.assert_not_called() - valid_time.assert_not_called() - - @autospec(TokenRemover, "is_valid_user_id", "is_valid_timestamp") - def test_is_maybe_token(self, valid_user, valid_time): - """Should return True if the user ID and timestamp are valid or return False otherwise.""" - subtests = ( - (False, True, False), - (True, False, False), - (True, True, True), - ) - - for user_return, time_return, expected in subtests: - valid_user.reset_mock() - valid_time.reset_mock() - - with self.subTest(user_return=user_return, time_return=time_return, expected=expected): - valid_user.return_value = user_return - valid_time.return_value = time_return - - actual = TokenRemover.is_maybe_token("x.y.z") - self.assertIs(actual, expected) - - valid_user.assert_called_once_with("x") - if user_return: - valid_time.assert_called_once_with("y") - async def test_delete_message(self): """The message should be deleted, and a message should be sent to the same channel.""" await TokenRemover.delete_message(self.msg) -- cgit v1.2.3 From 84cd8235863acc80b7f140309424c33180cc34ea Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Thu, 28 May 2020 20:32:48 -0700 Subject: Adjust find_token_in_message tests for the recent cog changes It now supports the changes that switched to finditer, added match groups, and added the Token NamedTuple. It also accounts for the is_maybe_token function being removed. For the sake of simplicity, call assertions on is_valid_user_id and is_valid_timestamp were not made. 
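For context, the mock objects these reworked tests lean on come straight from the standard library; roughly:

    from re import Match
    from unittest import mock

    # create_autospec constrains the mock to the real object's API; spec_set also
    # forbids setting attributes the spec doesn't have, and instance=True makes the
    # result behave like an instance of Match rather than the class itself.
    fake_match = mock.create_autospec(Match, spec_set=True, instance=True)
    fake_match.groups.return_value = ("user-id", "timestamp", "hmac")

    print(fake_match.groups())     # ('user-id', 'timestamp', 'hmac')
    # fake_match.no_such_attr = 1  # AttributeError, thanks to spec_set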
--- tests/bot/cogs/test_token_remover.py | 39 ++++++++++++++++++------------------ 1 file changed, 20 insertions(+), 19 deletions(-) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index 5dd12636c..8238e235a 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -1,4 +1,5 @@ import unittest +from re import Match from unittest import mock from unittest.mock import MagicMock @@ -130,9 +131,8 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): self.assertIsNone(return_value) token_re.finditer.assert_not_called() - @autospec(TokenRemover, "is_maybe_token") @autospec("bot.cogs.token_remover", "TOKEN_RE") - def test_find_token_no_matches_returns_none(self, token_re, is_maybe_token): + def test_find_token_no_matches(self, token_re): """None should be returned if the regex matches no tokens in a message.""" token_re.finditer.return_value = () @@ -140,30 +140,31 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): self.assertIsNone(return_value) token_re.finditer.assert_called_once_with(self.msg.content) - is_maybe_token.assert_not_called() - @autospec(TokenRemover, "is_maybe_token") + @autospec(TokenRemover, "is_valid_user_id", "is_valid_timestamp") + @autospec("bot.cogs.token_remover", "Token") @autospec("bot.cogs.token_remover", "TOKEN_RE") - def test_find_token_returns_found_token(self, token_re, is_maybe_token): - """The found token should be returned.""" - true_index = 1 - matches = ("foo", "bar", "baz") - side_effects = [False] * len(matches) - side_effects[true_index] = True - - token_re.findall.return_value = matches - is_maybe_token.side_effect = side_effects + def test_find_token_valid_match(self, token_re, token_cls, is_valid_id, is_valid_timestamp): + """The first match with a valid user ID and timestamp should be returned as a `Token`.""" + matches = [ + mock.create_autospec(Match, spec_set=True, instance=True), + mock.create_autospec(Match, spec_set=True, instance=True), + ] + tokens = [ + mock.create_autospec(Token, spec_set=True, instance=True), + mock.create_autospec(Token, spec_set=True, instance=True), + ] + + token_re.finditer.return_value = matches + token_cls.side_effect = tokens + is_valid_id.side_effect = (False, True) # The 1st match will be invalid, 2nd one valid. + is_valid_timestamp.return_value = True return_value = TokenRemover.find_token_in_message(self.msg) - self.assertEqual(return_value, matches[true_index]) + self.assertEqual(tokens[1], return_value) token_re.finditer.assert_called_once_with(self.msg.content) - # assert_has_calls isn't used cause it'd allow for extra calls before or after. - # The function should short-circuit, so nothing past true_index should have been used. - calls = [mock.call(match) for match in matches[:true_index + 1]] - self.assertEqual(is_maybe_token.mock_calls, calls) - def test_regex_invalid_tokens(self): """Messages without anything looking like a token are not matched.""" tokens = ( -- cgit v1.2.3 From 5930a044b8347019d474a809fc86f89263574ad0 Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Thu, 28 May 2020 20:33:34 -0700 Subject: Test find_token_in_message returns None for invalid matches This covers the case when a token is matched, but its user ID and timestamp turn out to be invalid. 
--- tests/bot/cogs/test_token_remover.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index 8238e235a..9b4b04ecd 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -165,6 +165,21 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): self.assertEqual(tokens[1], return_value) token_re.finditer.assert_called_once_with(self.msg.content) + @autospec(TokenRemover, "is_valid_user_id", "is_valid_timestamp") + @autospec("bot.cogs.token_remover", "Token") + @autospec("bot.cogs.token_remover", "TOKEN_RE") + def test_find_token_invalid_matches(self, token_re, token_cls, is_valid_id, is_valid_timestamp): + """None should be returned if no matches have valid user IDs or timestamps.""" + token_re.finditer.return_value = [mock.create_autospec(Match, spec_set=True, instance=True)] + token_cls.return_value = mock.create_autospec(Token, spec_set=True, instance=True) + is_valid_id.return_value = False + is_valid_timestamp.return_value = False + + return_value = TokenRemover.find_token_in_message(self.msg) + + self.assertIsNone(return_value) + token_re.finditer.assert_called_once_with(self.msg.content) + def test_regex_invalid_tokens(self): """Messages without anything looking like a token are not matched.""" tokens = ( -- cgit v1.2.3 From 9b3ab7df5ae1ecf95705f2fab7d99fdb36eb98ea Mon Sep 17 00:00:00 2001 From: MarkKoz Date: Tue, 2 Jun 2020 19:22:49 -0700 Subject: Token remover: remove the `delete_message` function It's redundant; there's no benefit here in abstracting two lines of code into a function. --- bot/cogs/token_remover.py | 9 ++------- tests/bot/cogs/test_token_remover.py | 19 +++++++------------ 2 files changed, 9 insertions(+), 19 deletions(-) diff --git a/bot/cogs/token_remover.py b/bot/cogs/token_remover.py index 46329e207..d55e079e9 100644 --- a/bot/cogs/token_remover.py +++ b/bot/cogs/token_remover.py @@ -79,7 +79,8 @@ class TokenRemover(Cog): async def take_action(self, msg: Message, found_token: Token) -> None: """Remove the `msg` containing the `found_token` and send a mod log message.""" self.mod_log.ignore(Event.message_delete, msg.id) - await self.delete_message(msg) + await msg.delete() + await msg.channel.send(DELETION_MESSAGE_TEMPLATE.format(mention=msg.author.mention)) log_message = self.format_log_message(msg, found_token) log.debug(log_message) @@ -96,12 +97,6 @@ class TokenRemover(Cog): self.bot.stats.incr("tokens.removed_tokens") - @staticmethod - async def delete_message(msg: Message) -> None: - """Remove a `msg` containing a token and send an explanatory message in the same channel.""" - await msg.delete() - await msg.channel.send(DELETION_MESSAGE_TEMPLATE.format(mention=msg.author.mention)) - @staticmethod def format_log_message(msg: Message, token: Token) -> str: """Return the log message to send for `token` being censored in `msg`.""" diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index 9b4b04ecd..a10124d2d 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -229,15 +229,6 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): results = [match[0] for match in results] self.assertCountEqual((token_1, token_2), results) - async def test_delete_message(self): - """The message should be deleted, and a message should be sent to the same channel.""" - await TokenRemover.delete_message(self.msg) - - self.msg.delete.assert_called_once_with() - 
self.msg.channel.send.assert_called_once_with( - token_remover.DELETION_MESSAGE_TEMPLATE.format(mention=self.msg.author.mention) - ) - @autospec("bot.cogs.token_remover", "LOG_MESSAGE") def test_format_log_message(self, log_message): """Should correctly format the log message with info from the message and token.""" @@ -258,8 +249,8 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): @mock.patch.object(TokenRemover, "mod_log", new_callable=mock.PropertyMock) @autospec("bot.cogs.token_remover", "log") - @autospec(TokenRemover, "delete_message", "format_log_message") - async def test_take_action(self, delete_message, format_log_message, logger, mod_log_property): + @autospec(TokenRemover, "format_log_message") + async def test_take_action(self, format_log_message, logger, mod_log_property): """Should delete the message and send a mod log.""" cog = TokenRemover(self.bot) mod_log = mock.create_autospec(ModLog, spec_set=True, instance=True) @@ -271,7 +262,11 @@ class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): await cog.take_action(self.msg, token) - delete_message.assert_awaited_once_with(self.msg) + self.msg.delete.assert_called_once_with() + self.msg.channel.send.assert_called_once_with( + token_remover.DELETION_MESSAGE_TEMPLATE.format(mention=self.msg.author.mention) + ) + format_log_message.assert_called_once_with(self.msg, token) logger.debug.assert_called_with(log_msg) self.bot.stats.incr.assert_called_once_with("tokens.removed_tokens") -- cgit v1.2.3