diff options
Diffstat (limited to 'tests')
-rw-r--r-- | tests/bot/exts/moderation/infraction/test_infractions.py | 89 | ||||
-rw-r--r-- | tests/bot/exts/moderation/infraction/test_utils.py | 36 | ||||
-rw-r--r-- | tests/bot/exts/moderation/test_clean.py | 104 | ||||
-rw-r--r-- | tests/bot/exts/utils/test_snekbox.py | 159 |
4 files changed, 294 insertions, 94 deletions
diff --git a/tests/bot/exts/moderation/infraction/test_infractions.py b/tests/bot/exts/moderation/infraction/test_infractions.py index f89465f84..052048053 100644 --- a/tests/bot/exts/moderation/infraction/test_infractions.py +++ b/tests/bot/exts/moderation/infraction/test_infractions.py @@ -1,13 +1,15 @@ import inspect import textwrap import unittest -from unittest.mock import ANY, AsyncMock, MagicMock, Mock, patch +from unittest.mock import ANY, AsyncMock, DEFAULT, MagicMock, Mock, patch from discord.errors import NotFound from bot.constants import Event +from bot.exts.moderation.clean import Clean from bot.exts.moderation.infraction import _utils from bot.exts.moderation.infraction.infractions import Infractions +from bot.exts.moderation.infraction.management import ModManagement from tests.helpers import MockBot, MockContext, MockGuild, MockMember, MockRole, MockUser, autospec @@ -231,3 +233,88 @@ class VoiceMuteTests(unittest.IsolatedAsyncioTestCase): "DM": "**Failed**" }) notify_pardon_mock.assert_awaited_once() + + +class CleanBanTests(unittest.IsolatedAsyncioTestCase): + """Tests for cleanban functionality.""" + + def setUp(self): + self.bot = MockBot() + self.mod = MockMember(roles=[MockRole(id=7890123, position=10)]) + self.user = MockMember(roles=[MockRole(id=123456, position=1)]) + self.guild = MockGuild() + self.ctx = MockContext(bot=self.bot, author=self.mod) + self.cog = Infractions(self.bot) + self.clean_cog = Clean(self.bot) + self.management_cog = ModManagement(self.bot) + + self.cog.apply_ban = AsyncMock(return_value={"id": 42}) + self.log_url = "https://www.youtube.com/watch?v=dQw4w9WgXcQ" + self.clean_cog._clean_messages = AsyncMock(return_value=self.log_url) + + def mock_get_cog(self, enable_clean, enable_manage): + """Mock get cog factory that allows the user to specify whether clean and manage cogs are enabled.""" + def inner(name): + if name == "ModManagement": + return self.management_cog if enable_manage else None + elif name == "Clean": + return self.clean_cog if enable_clean else None + else: + return DEFAULT + return inner + + async def test_cleanban_falls_back_to_native_purge_without_clean_cog(self): + """Should fallback to native purge if the Clean cog is not available.""" + self.bot.get_cog.side_effect = self.mock_get_cog(False, False) + + self.assertIsNone(await self.cog.cleanban(self.cog, self.ctx, self.user, None, reason="FooBar")) + self.cog.apply_ban.assert_awaited_once_with( + self.ctx, + self.user, + "FooBar", + purge_days=1, + expires_at=None, + ) + + async def test_cleanban_doesnt_purge_messages_if_clean_cog_available(self): + """Cleanban command should use the native purge messages if the clean cog is available.""" + self.bot.get_cog.side_effect = self.mock_get_cog(True, False) + + self.assertIsNone(await self.cog.cleanban(self.cog, self.ctx, self.user, None, reason="FooBar")) + self.cog.apply_ban.assert_awaited_once_with( + self.ctx, + self.user, + "FooBar", + expires_at=None, + ) + + @patch("bot.exts.moderation.infraction.infractions.Age") + async def test_cleanban_uses_clean_cog_when_available(self, mocked_age_converter): + """Test cleanban uses the clean cog to clean messages if it's available.""" + self.bot.api_client.patch = AsyncMock() + self.bot.get_cog.side_effect = self.mock_get_cog(True, False) + + mocked_age_converter.return_value.convert = AsyncMock(return_value="81M") + self.assertIsNone(await self.cog.cleanban(self.cog, self.ctx, self.user, None, reason="FooBar")) + + self.clean_cog._clean_messages.assert_awaited_once_with( + self.ctx, + users=[self.user], + channels="*", + first_limit="81M", + attempt_delete_invocation=False, + ) + + async def test_cleanban_edits_infraction_reason(self): + """Ensure cleanban edits the ban reason with a link to the clean log.""" + self.bot.get_cog.side_effect = self.mock_get_cog(True, True) + + self.management_cog.infraction_append = AsyncMock() + self.assertIsNone(await self.cog.cleanban(self.cog, self.ctx, self.user, None, reason="FooBar")) + + self.management_cog.infraction_append.assert_awaited_once_with( + self.ctx, + {"id": 42}, + None, + reason=f"[Clean log]({self.log_url})" + ) diff --git a/tests/bot/exts/moderation/infraction/test_utils.py b/tests/bot/exts/moderation/infraction/test_utils.py index 350274ecd..ff81ddd65 100644 --- a/tests/bot/exts/moderation/infraction/test_utils.py +++ b/tests/bot/exts/moderation/infraction/test_utils.py @@ -15,7 +15,10 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): """Tests Moderation utils.""" def setUp(self): - self.bot = MockBot() + patcher = patch("bot.instance", new=MockBot()) + self.bot = patcher.start() + self.addCleanup(patcher.stop) + self.member = MockMember(id=1234) self.user = MockUser(id=1234) self.ctx = MockContext(bot=self.bot, author=self.member) @@ -123,8 +126,9 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): else: self.ctx.send.assert_not_awaited() + @unittest.skip("Current time needs to be patched so infraction duration is correct.") @patch("bot.exts.moderation.infraction._utils.send_private_embed") - async def test_notify_infraction(self, send_private_embed_mock): + async def test_send_infraction_embed(self, send_private_embed_mock): """ Should send an embed of a certain format as a DM and return `True` if DM successful. @@ -132,7 +136,7 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): """ test_cases = [ { - "args": (self.bot, self.user, 0, "ban", "2020-02-26 09:20 (23 hours and 59 minutes)"), + "args": (dict(id=0, type="ban", reason=None, expires_at=datetime(2020, 2, 26, 9, 20)), self.user), "expected_output": Embed( title=utils.INFRACTION_TITLE, description=utils.INFRACTION_DESCRIPTION_TEMPLATE.format( @@ -145,12 +149,12 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): ).set_author( name=utils.INFRACTION_AUTHOR_NAME, url=utils.RULES_URL, - icon_url=Icons.token_removed + icon_url=Icons.user_ban ), "send_result": True }, { - "args": (self.bot, self.user, 0, "warning", None, "Test reason."), + "args": (dict(id=0, type="warning", reason="Test reason.", expires_at=None), self.user), "expected_output": Embed( title=utils.INFRACTION_TITLE, description=utils.INFRACTION_DESCRIPTION_TEMPLATE.format( @@ -163,14 +167,14 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): ).set_author( name=utils.INFRACTION_AUTHOR_NAME, url=utils.RULES_URL, - icon_url=Icons.token_removed + icon_url=Icons.user_warn ), "send_result": False }, # Note that this test case asserts that the DM that *would* get sent to the user is formatted # correctly, even though that message is deliberately never sent. { - "args": (self.bot, self.user, 0, "note", None, None, Icons.defcon_denied), + "args": (dict(id=0, type="note", reason=None, expires_at=None), self.user), "expected_output": Embed( title=utils.INFRACTION_TITLE, description=utils.INFRACTION_DESCRIPTION_TEMPLATE.format( @@ -183,20 +187,12 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): ).set_author( name=utils.INFRACTION_AUTHOR_NAME, url=utils.RULES_URL, - icon_url=Icons.defcon_denied + icon_url=Icons.user_warn ), "send_result": False }, { - "args": ( - self.bot, - self.user, - 0, - "mute", - "2020-02-26 09:20 (23 hours and 59 minutes)", - "Test", - Icons.defcon_denied - ), + "args": (dict(id=0, type="mute", reason="Test", expires_at=datetime(2020, 2, 26, 9, 20)), self.user), "expected_output": Embed( title=utils.INFRACTION_TITLE, description=utils.INFRACTION_DESCRIPTION_TEMPLATE.format( @@ -209,12 +205,12 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): ).set_author( name=utils.INFRACTION_AUTHOR_NAME, url=utils.RULES_URL, - icon_url=Icons.defcon_denied + icon_url=Icons.user_mute ), "send_result": False }, { - "args": (self.bot, self.user, 0, "mute", None, "foo bar" * 4000, Icons.defcon_denied), + "args": (dict(id=0, type="mute", reason="foo bar" * 4000, expires_at=None), self.user), "expected_output": Embed( title=utils.INFRACTION_TITLE, description=utils.INFRACTION_DESCRIPTION_TEMPLATE.format( @@ -227,7 +223,7 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): ).set_author( name=utils.INFRACTION_AUTHOR_NAME, url=utils.RULES_URL, - icon_url=Icons.defcon_denied + icon_url=Icons.user_mute ), "send_result": True } diff --git a/tests/bot/exts/moderation/test_clean.py b/tests/bot/exts/moderation/test_clean.py new file mode 100644 index 000000000..d7647fa48 --- /dev/null +++ b/tests/bot/exts/moderation/test_clean.py @@ -0,0 +1,104 @@ +import unittest +from unittest.mock import AsyncMock, MagicMock, patch + +from bot.exts.moderation.clean import Clean +from tests.helpers import MockBot, MockContext, MockGuild, MockMember, MockMessage, MockRole, MockTextChannel + + +class CleanTests(unittest.IsolatedAsyncioTestCase): + """Tests for clean cog functionality.""" + + def setUp(self): + self.bot = MockBot() + self.mod = MockMember(roles=[MockRole(id=7890123, position=10)]) + self.user = MockMember(roles=[MockRole(id=123456, position=1)]) + self.guild = MockGuild() + self.ctx = MockContext(bot=self.bot, author=self.mod) + self.cog = Clean(self.bot) + + self.log_url = "https://www.youtube.com/watch?v=dQw4w9WgXcQ" + self.cog._modlog_cleaned_messages = AsyncMock(return_value=self.log_url) + + self.cog._use_cache = MagicMock(return_value=True) + self.cog._delete_found = AsyncMock(return_value=[42, 84]) + + @patch("bot.exts.moderation.clean.is_mod_channel") + async def test_clean_deletes_invocation_in_non_mod_channel(self, mod_channel_check): + """Clean command should delete the invocation message if ran in a non mod channel.""" + mod_channel_check.return_value = False + self.ctx.message.delete = AsyncMock() + + self.assertIsNone(await self.cog._delete_invocation(self.ctx)) + + self.ctx.message.delete.assert_awaited_once() + + @patch("bot.exts.moderation.clean.is_mod_channel") + async def test_clean_doesnt_delete_invocation_in_mod_channel(self, mod_channel_check): + """Clean command should not delete the invocation message if ran in a mod channel.""" + mod_channel_check.return_value = True + self.ctx.message.delete = AsyncMock() + + self.assertIsNone(await self.cog._delete_invocation(self.ctx)) + + self.ctx.message.delete.assert_not_awaited() + + async def test_clean_doesnt_attempt_deletion_when_attempt_delete_invocation_is_false(self): + """Clean command should not attempt to delete the invocation message if attempt_delete_invocation is false.""" + self.cog._delete_invocation = AsyncMock() + self.bot.get_channel = MagicMock(return_value=False) + + self.assertEqual( + await self.cog._clean_messages( + self.ctx, + None, + first_limit=MockMessage(), + attempt_delete_invocation=False, + ), + self.log_url, + ) + + self.cog._delete_invocation.assert_not_awaited() + + @patch("bot.exts.moderation.clean.is_mod_channel") + async def test_clean_replies_with_success_message_when_ran_in_mod_channel(self, mod_channel_check): + """Clean command should reply to the message with a confirmation message if invoked in a mod channel.""" + mod_channel_check.return_value = True + self.ctx.reply = AsyncMock() + + self.assertEqual( + await self.cog._clean_messages( + self.ctx, + None, + first_limit=MockMessage(), + attempt_delete_invocation=False, + ), + self.log_url, + ) + + self.ctx.reply.assert_awaited_once() + sent_message = self.ctx.reply.await_args[0][0] + self.assertIn(self.log_url, sent_message) + self.assertIn("2 messages", sent_message) + + @patch("bot.exts.moderation.clean.is_mod_channel") + async def test_clean_send_success_message_to_mods_when_ran_in_non_mod_channel(self, mod_channel_check): + """Clean command should send a confirmation message to #mods if invoked in a non-mod channel.""" + mod_channel_check.return_value = False + mocked_mods = MockTextChannel(id=1234567) + mocked_mods.send = AsyncMock() + self.bot.get_channel = MagicMock(return_value=mocked_mods) + + self.assertEqual( + await self.cog._clean_messages( + self.ctx, + None, + first_limit=MockMessage(), + attempt_delete_invocation=False, + ), + self.log_url, + ) + + mocked_mods.send.assert_awaited_once() + sent_message = mocked_mods.send.await_args[0][0] + self.assertIn(self.log_url, sent_message) + self.assertIn("2 messages", sent_message) diff --git a/tests/bot/exts/utils/test_snekbox.py b/tests/bot/exts/utils/test_snekbox.py index 8bdeedd27..f68a20089 100644 --- a/tests/bot/exts/utils/test_snekbox.py +++ b/tests/bot/exts/utils/test_snekbox.py @@ -17,7 +17,7 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase): self.bot = MockBot() self.cog = Snekbox(bot=self.bot) - async def test_post_eval(self): + async def test_post_job(self): """Post the eval code to the URLs.snekbox_eval_api endpoint.""" resp = MagicMock() resp.json = AsyncMock(return_value="return") @@ -26,7 +26,7 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase): context_manager.__aenter__.return_value = resp self.bot.http_session.post.return_value = context_manager - self.assertEqual(await self.cog.post_eval("import random"), "return") + self.assertEqual(await self.cog.post_job("import random"), "return") self.bot.http_session.post.assert_called_with( constants.URLs.snekbox_eval_api, json={"input": "import random"}, @@ -45,7 +45,8 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase): await self.cog.upload_output("Test output.") mock_paste_util.assert_called_once_with("Test output.", extension="txt") - def test_prepare_input(self): + async def test_codeblock_converter(self): + ctx = MockContext() cases = ( ('print("Hello world!")', 'print("Hello world!")', 'non-formatted'), ('`print("Hello world!")`', 'print("Hello world!")', 'one line code block'), @@ -61,7 +62,24 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase): ) for case, expected, testname in cases: with self.subTest(msg=f'Extract code from {testname}.'): - self.assertEqual(self.cog.prepare_input(case), expected) + self.assertEqual( + '\n'.join(await snekbox.CodeblockConverter.convert(ctx, case)), expected + ) + + def test_prepare_timeit_input(self): + """Test the prepare_timeit_input codeblock detection.""" + base_args = ('-m', 'timeit', '-s') + cases = ( + (['print("Hello World")'], '', 'single block of code'), + (['x = 1', 'print(x)'], 'x = 1', 'two blocks of code'), + (['x = 1', 'print(x)', 'print("Some other code.")'], 'x = 1', 'three blocks of code') + ) + + for case, setup_code, testname in cases: + setup = snekbox.TIMEIT_SETUP_WRAPPER.format(setup=setup_code) + expected = ('\n'.join(case[1:] if setup_code else case), [*base_args, setup]) + with self.subTest(msg=f'Test with {testname} and expected return {expected}'): + self.assertEqual(self.cog.prepare_timeit_input(case), expected) def test_get_results_message(self): """Return error and message according to the eval result.""" @@ -72,13 +90,13 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase): ) for stdout, returncode, expected in cases: with self.subTest(stdout=stdout, returncode=returncode, expected=expected): - actual = self.cog.get_results_message({'stdout': stdout, 'returncode': returncode}) + actual = self.cog.get_results_message({'stdout': stdout, 'returncode': returncode}, 'eval') self.assertEqual(actual, expected) @patch('bot.exts.utils.snekbox.Signals', side_effect=ValueError) def test_get_results_message_invalid_signal(self, mock_signals: Mock): self.assertEqual( - self.cog.get_results_message({'stdout': '', 'returncode': 127}), + self.cog.get_results_message({'stdout': '', 'returncode': 127}, 'eval'), ('Your eval job has completed with return code 127', '') ) @@ -86,7 +104,7 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase): def test_get_results_message_valid_signal(self, mock_signals: Mock): mock_signals.return_value.name = 'SIGTEST' self.assertEqual( - self.cog.get_results_message({'stdout': '', 'returncode': 127}), + self.cog.get_results_message({'stdout': '', 'returncode': 127}, 'eval'), ('Your eval job has completed with return code 127 (SIGTEST)', '') ) @@ -156,28 +174,29 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase): """Test the eval command procedure.""" ctx = MockContext() response = MockMessage() - self.cog.prepare_input = MagicMock(return_value='MyAwesomeFormattedCode') - self.cog.send_eval = AsyncMock(return_value=response) - self.cog.continue_eval = AsyncMock(return_value=None) + ctx.command = MagicMock() - await self.cog.eval_command(self.cog, ctx=ctx, code='MyAwesomeCode') - self.cog.prepare_input.assert_called_once_with('MyAwesomeCode') - self.cog.send_eval.assert_called_once_with(ctx, 'MyAwesomeFormattedCode') - self.cog.continue_eval.assert_called_once_with(ctx, response) + self.cog.send_job = AsyncMock(return_value=response) + self.cog.continue_job = AsyncMock(return_value=(None, None)) + + await self.cog.eval_command(self.cog, ctx=ctx, code=['MyAwesomeCode']) + self.cog.send_job.assert_called_once_with(ctx, 'MyAwesomeCode', args=None, job_name='eval') + self.cog.continue_job.assert_called_once_with(ctx, response, ctx.command) async def test_eval_command_evaluate_twice(self): """Test the eval and re-eval command procedure.""" ctx = MockContext() response = MockMessage() - self.cog.prepare_input = MagicMock(return_value='MyAwesomeFormattedCode') - self.cog.send_eval = AsyncMock(return_value=response) - self.cog.continue_eval = AsyncMock() - self.cog.continue_eval.side_effect = ('MyAwesomeCode-2', None) - - await self.cog.eval_command(self.cog, ctx=ctx, code='MyAwesomeCode') - self.cog.prepare_input.has_calls(call('MyAwesomeCode'), call('MyAwesomeCode-2')) - self.cog.send_eval.assert_called_with(ctx, 'MyAwesomeFormattedCode') - self.cog.continue_eval.assert_called_with(ctx, response) + ctx.command = MagicMock() + self.cog.send_job = AsyncMock(return_value=response) + self.cog.continue_job = AsyncMock() + self.cog.continue_job.side_effect = (('MyAwesomeFormattedCode', None), (None, None)) + + await self.cog.eval_command(self.cog, ctx=ctx, code=['MyAwesomeCode']) + self.cog.send_job.assert_called_with( + ctx, 'MyAwesomeFormattedCode', args=None, job_name='eval' + ) + self.cog.continue_job.assert_called_with(ctx, response, ctx.command) async def test_eval_command_reject_two_eval_at_the_same_time(self): """Test if the eval command rejects an eval if the author already have a running eval.""" @@ -191,29 +210,23 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase): "@LemonLemonishBeard#0042 You've already got a job running - please wait for it to finish!" ) - async def test_eval_command_call_help(self): - """Test if the eval command call the help command if no code is provided.""" - ctx = MockContext(command="sentinel") - await self.cog.eval_command(self.cog, ctx=ctx, code='') - ctx.send_help.assert_called_once_with(ctx.command) - - async def test_send_eval(self): - """Test the send_eval function.""" + async def test_send_job(self): + """Test the send_job function.""" ctx = MockContext() ctx.message = MockMessage() ctx.send = AsyncMock() ctx.author = MockUser(mention='@LemonLemonishBeard#0042') - self.cog.post_eval = AsyncMock(return_value={'stdout': '', 'returncode': 0}) + self.cog.post_job = AsyncMock(return_value={'stdout': '', 'returncode': 0}) self.cog.get_results_message = MagicMock(return_value=('Return code 0', '')) self.cog.get_status_emoji = MagicMock(return_value=':yay!:') self.cog.format_output = AsyncMock(return_value=('[No output]', None)) mocked_filter_cog = MagicMock() - mocked_filter_cog.filter_eval = AsyncMock(return_value=False) + mocked_filter_cog.filter_snekbox_output = AsyncMock(return_value=False) self.bot.get_cog.return_value = mocked_filter_cog - await self.cog.send_eval(ctx, 'MyAwesomeCode') + await self.cog.send_job(ctx, 'MyAwesomeCode', job_name='eval') ctx.send.assert_called_once() self.assertEqual( @@ -224,28 +237,28 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase): expected_allowed_mentions = AllowedMentions(everyone=False, roles=False, users=[ctx.author]) self.assertEqual(allowed_mentions.to_dict(), expected_allowed_mentions.to_dict()) - self.cog.post_eval.assert_called_once_with('MyAwesomeCode') + self.cog.post_job.assert_called_once_with('MyAwesomeCode', args=None) self.cog.get_status_emoji.assert_called_once_with({'stdout': '', 'returncode': 0}) - self.cog.get_results_message.assert_called_once_with({'stdout': '', 'returncode': 0}) + self.cog.get_results_message.assert_called_once_with({'stdout': '', 'returncode': 0}, 'eval') self.cog.format_output.assert_called_once_with('') - async def test_send_eval_with_paste_link(self): - """Test the send_eval function with a too long output that generate a paste link.""" + async def test_send_job_with_paste_link(self): + """Test the send_job function with a too long output that generate a paste link.""" ctx = MockContext() ctx.message = MockMessage() ctx.send = AsyncMock() ctx.author.mention = '@LemonLemonishBeard#0042' - self.cog.post_eval = AsyncMock(return_value={'stdout': 'Way too long beard', 'returncode': 0}) + self.cog.post_job = AsyncMock(return_value={'stdout': 'Way too long beard', 'returncode': 0}) self.cog.get_results_message = MagicMock(return_value=('Return code 0', '')) self.cog.get_status_emoji = MagicMock(return_value=':yay!:') self.cog.format_output = AsyncMock(return_value=('Way too long beard', 'lookatmybeard.com')) mocked_filter_cog = MagicMock() - mocked_filter_cog.filter_eval = AsyncMock(return_value=False) + mocked_filter_cog.filter_snekbox_output = AsyncMock(return_value=False) self.bot.get_cog.return_value = mocked_filter_cog - await self.cog.send_eval(ctx, 'MyAwesomeCode') + await self.cog.send_job(ctx, 'MyAwesomeCode', job_name='eval') ctx.send.assert_called_once() self.assertEqual( @@ -254,27 +267,27 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase): '\n\n```\nWay too long beard\n```\nFull output: lookatmybeard.com' ) - self.cog.post_eval.assert_called_once_with('MyAwesomeCode') + self.cog.post_job.assert_called_once_with('MyAwesomeCode', args=None) self.cog.get_status_emoji.assert_called_once_with({'stdout': 'Way too long beard', 'returncode': 0}) - self.cog.get_results_message.assert_called_once_with({'stdout': 'Way too long beard', 'returncode': 0}) + self.cog.get_results_message.assert_called_once_with({'stdout': 'Way too long beard', 'returncode': 0}, 'eval') self.cog.format_output.assert_called_once_with('Way too long beard') - async def test_send_eval_with_non_zero_eval(self): - """Test the send_eval function with a code returning a non-zero code.""" + async def test_send_job_with_non_zero_eval(self): + """Test the send_job function with a code returning a non-zero code.""" ctx = MockContext() ctx.message = MockMessage() ctx.send = AsyncMock() ctx.author.mention = '@LemonLemonishBeard#0042' - self.cog.post_eval = AsyncMock(return_value={'stdout': 'ERROR', 'returncode': 127}) + self.cog.post_job = AsyncMock(return_value={'stdout': 'ERROR', 'returncode': 127}) self.cog.get_results_message = MagicMock(return_value=('Return code 127', 'Beard got stuck in the eval')) self.cog.get_status_emoji = MagicMock(return_value=':nope!:') self.cog.format_output = AsyncMock() # This function isn't called mocked_filter_cog = MagicMock() - mocked_filter_cog.filter_eval = AsyncMock(return_value=False) + mocked_filter_cog.filter_snekbox_output = AsyncMock(return_value=False) self.bot.get_cog.return_value = mocked_filter_cog - await self.cog.send_eval(ctx, 'MyAwesomeCode') + await self.cog.send_job(ctx, 'MyAwesomeCode', job_name='eval') ctx.send.assert_called_once() self.assertEqual( @@ -282,14 +295,14 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase): '@LemonLemonishBeard#0042 :nope!: Return code 127.\n\n```\nBeard got stuck in the eval\n```' ) - self.cog.post_eval.assert_called_once_with('MyAwesomeCode') + self.cog.post_job.assert_called_once_with('MyAwesomeCode', args=None) self.cog.get_status_emoji.assert_called_once_with({'stdout': 'ERROR', 'returncode': 127}) - self.cog.get_results_message.assert_called_once_with({'stdout': 'ERROR', 'returncode': 127}) + self.cog.get_results_message.assert_called_once_with({'stdout': 'ERROR', 'returncode': 127}, 'eval') self.cog.format_output.assert_not_called() @patch("bot.exts.utils.snekbox.partial") - async def test_continue_eval_does_continue(self, partial_mock): - """Test that the continue_eval function does continue if required conditions are met.""" + async def test_continue_job_does_continue(self, partial_mock): + """Test that the continue_job function does continue if required conditions are met.""" ctx = MockContext(message=MockMessage(add_reaction=AsyncMock(), clear_reactions=AsyncMock())) response = MockMessage(delete=AsyncMock()) new_msg = MockMessage() @@ -297,30 +310,30 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase): expected = "NewCode" self.cog.get_code = create_autospec(self.cog.get_code, spec_set=True, return_value=expected) - actual = await self.cog.continue_eval(ctx, response) - self.cog.get_code.assert_awaited_once_with(new_msg) - self.assertEqual(actual, expected) + actual = await self.cog.continue_job(ctx, response, self.cog.eval_command) + self.cog.get_code.assert_awaited_once_with(new_msg, ctx.command) + self.assertEqual(actual, (expected, None)) self.bot.wait_for.assert_has_awaits( ( call( 'message_edit', - check=partial_mock(snekbox.predicate_eval_message_edit, ctx), - timeout=snekbox.REEVAL_TIMEOUT, + check=partial_mock(snekbox.predicate_message_edit, ctx), + timeout=snekbox.REDO_TIMEOUT, ), - call('reaction_add', check=partial_mock(snekbox.predicate_eval_emoji_reaction, ctx), timeout=10) + call('reaction_add', check=partial_mock(snekbox.predicate_emoji_reaction, ctx), timeout=10) ) ) - ctx.message.add_reaction.assert_called_once_with(snekbox.REEVAL_EMOJI) - ctx.message.clear_reaction.assert_called_once_with(snekbox.REEVAL_EMOJI) + ctx.message.add_reaction.assert_called_once_with(snekbox.REDO_EMOJI) + ctx.message.clear_reaction.assert_called_once_with(snekbox.REDO_EMOJI) response.delete.assert_called_once() - async def test_continue_eval_does_not_continue(self): + async def test_continue_job_does_not_continue(self): ctx = MockContext(message=MockMessage(clear_reactions=AsyncMock())) self.bot.wait_for.side_effect = asyncio.TimeoutError - actual = await self.cog.continue_eval(ctx, MockMessage()) - self.assertEqual(actual, None) - ctx.message.clear_reaction.assert_called_once_with(snekbox.REEVAL_EMOJI) + actual = await self.cog.continue_job(ctx, MockMessage(), self.cog.eval_command) + self.assertEqual(actual, (None, None)) + ctx.message.clear_reaction.assert_called_once_with(snekbox.REDO_EMOJI) async def test_get_code(self): """Should return 1st arg (or None) if eval cmd in message, otherwise return full content.""" @@ -343,13 +356,13 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase): self.bot.get_context.return_value = MockContext(command=command) message = MockMessage(content=content) - actual_code = await self.cog.get_code(message) + actual_code = await self.cog.get_code(message, self.cog.eval_command) self.bot.get_context.assert_awaited_once_with(message) self.assertEqual(actual_code, expected_code) - def test_predicate_eval_message_edit(self): - """Test the predicate_eval_message_edit function.""" + def test_predicate_message_edit(self): + """Test the predicate_message_edit function.""" msg0 = MockMessage(id=1, content='abc') msg1 = MockMessage(id=2, content='abcdef') msg2 = MockMessage(id=1, content='abcdef') @@ -362,18 +375,18 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase): for ctx_msg, new_msg, expected, testname in cases: with self.subTest(msg=f'Messages with {testname} return {expected}'): ctx = MockContext(message=ctx_msg) - actual = snekbox.predicate_eval_message_edit(ctx, ctx_msg, new_msg) + actual = snekbox.predicate_message_edit(ctx, ctx_msg, new_msg) self.assertEqual(actual, expected) - def test_predicate_eval_emoji_reaction(self): - """Test the predicate_eval_emoji_reaction function.""" + def test_predicate_emoji_reaction(self): + """Test the predicate_emoji_reaction function.""" valid_reaction = MockReaction(message=MockMessage(id=1)) - valid_reaction.__str__.return_value = snekbox.REEVAL_EMOJI + valid_reaction.__str__.return_value = snekbox.REDO_EMOJI valid_ctx = MockContext(message=MockMessage(id=1), author=MockUser(id=2)) valid_user = MockUser(id=2) invalid_reaction_id = MockReaction(message=MockMessage(id=42)) - invalid_reaction_id.__str__.return_value = snekbox.REEVAL_EMOJI + invalid_reaction_id.__str__.return_value = snekbox.REDO_EMOJI invalid_user_id = MockUser(id=42) invalid_reaction_str = MockReaction(message=MockMessage(id=1)) invalid_reaction_str.__str__.return_value = ':longbeard:' @@ -386,7 +399,7 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase): ) for reaction, user, expected, testname in cases: with self.subTest(msg=f'Test with {testname} and expected return {expected}'): - actual = snekbox.predicate_eval_emoji_reaction(valid_ctx, reaction, user) + actual = snekbox.predicate_emoji_reaction(valid_ctx, reaction, user) self.assertEqual(actual, expected) |