author     Jeremiah Boby <[email protected]>  2020-02-27 22:16:05 +0000
committer  GitHub <[email protected]>  2020-02-27 22:16:05 +0000
commit     afeb92010e58569e4910c708fa63f0e808278e26
tree       1e4f9baa3be386499d52b5373a901b3f2ddf24a8
parent     Merge branch 'master' into spoiler-check
parent     Merge pull request #798 from python-discord/bug/mod/bot-1v/infr-edit-task-cancel
Merge branch 'master' into spoiler-check
-rw-r--r--  .github/CODEOWNERS | 1
-rw-r--r--  Pipfile | 6
-rw-r--r--  Pipfile.lock | 704
-rw-r--r--  README.md | 2
-rw-r--r--  azure-pipelines.yml | 2
-rw-r--r--  bot/__init__.py | 86
-rw-r--r--  bot/__main__.py | 35
-rw-r--r--  bot/api.py | 146
-rw-r--r--  bot/bot.py | 97
-rw-r--r--  bot/cogs/alias.py | 25
-rw-r--r--  bot/cogs/antimalware.py | 56
-rw-r--r--  bot/cogs/antispam.py | 41
-rw-r--r--  bot/cogs/bot.py | 27
-rw-r--r--  bot/cogs/clean.py | 68
-rw-r--r--  bot/cogs/config_verifier.py | 40
-rw-r--r--  bot/cogs/defcon.py | 12
-rw-r--r--  bot/cogs/doc.py | 67
-rw-r--r--  bot/cogs/duck_pond.py | 182
-rw-r--r--  bot/cogs/error_handler.py | 41
-rw-r--r--  bot/cogs/eval.py | 6
-rw-r--r--  bot/cogs/extensions.py | 4
-rw-r--r--  bot/cogs/filtering.py | 6
-rw-r--r--  bot/cogs/free.py | 6
-rw-r--r--  bot/cogs/help.py | 10
-rw-r--r--  bot/cogs/information.py | 119
-rw-r--r--  bot/cogs/jams.py | 8
-rw-r--r--  bot/cogs/logging.py | 8
-rw-r--r--  bot/cogs/moderation/__init__.py | 16
-rw-r--r--  bot/cogs/moderation/infractions.py | 45
-rw-r--r--  bot/cogs/moderation/management.py | 76
-rw-r--r--  bot/cogs/moderation/modlog.py | 272
-rw-r--r--  bot/cogs/moderation/scheduler.py | 69
-rw-r--r--  bot/cogs/moderation/superstarify.py | 11
-rw-r--r--  bot/cogs/moderation/utils.py | 89
-rw-r--r--  bot/cogs/off_topic_names.py | 8
-rw-r--r--  bot/cogs/reddit.py | 104
-rw-r--r--  bot/cogs/reminders.py | 125
-rw-r--r--  bot/cogs/security.py | 7
-rw-r--r--  bot/cogs/site.py | 10
-rw-r--r--  bot/cogs/snekbox.py | 8
-rw-r--r--  bot/cogs/sync/__init__.py | 10
-rw-r--r--  bot/cogs/sync/cog.py | 146
-rw-r--r--  bot/cogs/sync/syncers.py | 554
-rw-r--r--  bot/cogs/tags.py | 103
-rw-r--r--  bot/cogs/token_remover.py | 77
-rw-r--r--  bot/cogs/utils.py | 20
-rw-r--r--  bot/cogs/verification.py | 72
-rw-r--r--  bot/cogs/watchchannels/__init__.py | 13
-rw-r--r--  bot/cogs/watchchannels/bigbrother.py | 16
-rw-r--r--  bot/cogs/watchchannels/talentpool.py | 17
-rw-r--r--  bot/cogs/watchchannels/watchchannel.py | 23
-rw-r--r--  bot/cogs/wolfram.py | 10
-rw-r--r--  bot/constants.py | 109
-rw-r--r--  bot/converters.py | 97
-rw-r--r--  bot/decorators.py | 18
-rw-r--r--  bot/interpreter.py | 4
-rw-r--r--  bot/pagination.py | 62
-rw-r--r--  bot/rules/attachments.py | 6
-rw-r--r--  bot/utils/__init__.py | 12
-rw-r--r--  bot/utils/messages.py | 55
-rw-r--r--  bot/utils/time.py | 62
-rw-r--r--  config-default.yml | 72
-rw-r--r--  docker-compose.yml | 4
-rw-r--r--  tests/README.md | 10
-rw-r--r--  tests/base.py | 34
-rw-r--r--  tests/bot/cogs/sync/test_base.py | 412
-rw-r--r--  tests/bot/cogs/sync/test_cog.py | 395
-rw-r--r--  tests/bot/cogs/sync/test_roles.py | 287
-rw-r--r--  tests/bot/cogs/sync/test_users.py | 241
-rw-r--r--  tests/bot/cogs/test_duck_pond.py | 584
-rw-r--r--  tests/bot/cogs/test_information.py | 14
-rw-r--r--  tests/bot/cogs/test_security.py | 11
-rw-r--r--  tests/bot/cogs/test_token_remover.py | 8
-rw-r--r--  tests/bot/rules/__init__.py | 76
-rw-r--r--  tests/bot/rules/test_attachments.py | 91
-rw-r--r--  tests/bot/rules/test_burst.py | 56
-rw-r--r--  tests/bot/rules/test_burst_shared.py | 59
-rw-r--r--  tests/bot/rules/test_chars.py | 66
-rw-r--r--  tests/bot/rules/test_discord_emojis.py | 54
-rw-r--r--  tests/bot/rules/test_duplicates.py | 66
-rw-r--r--  tests/bot/rules/test_links.py | 94
-rw-r--r--  tests/bot/rules/test_mentions.py | 67
-rw-r--r--  tests/bot/rules/test_newlines.py | 105
-rw-r--r--  tests/bot/rules/test_role_mentions.py | 57
-rw-r--r--  tests/bot/test_api.py | 64
-rw-r--r--  tests/bot/test_utils.py | 15
-rw-r--r--  tests/bot/utils/test_time.py | 162
-rw-r--r--  tests/helpers.py | 175
-rw-r--r--  tox.ini | 6
89 files changed, 5374 insertions, 2042 deletions
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 000000000..cf5f1590d
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1 @@
+* @python-discord/core-developers
diff --git a/Pipfile b/Pipfile
index 48d839fc3..400e64c18 100644
--- a/Pipfile
+++ b/Pipfile
@@ -4,9 +4,8 @@ verify_ssl = true
name = "pypi"
[packages]
-discord-py = "~=1.2"
+discord-py = "~=1.3.1"
aiodns = "~=2.0"
-logmatic-python = "~=0.1"
aiohttp = "~=3.5"
sphinx = "~=2.2"
markdownify = "~=0.4"
@@ -19,11 +18,12 @@ deepdiff = "~=4.0"
requests = "~=2.22"
more_itertools = "~=7.2"
urllib3 = ">=1.24.2,<1.25"
+sentry-sdk = "~=0.14"
[dev-packages]
coverage = "~=4.5"
flake8 = "~=3.7"
-flake8-annotations = "~=1.1"
+flake8-annotations = "~=2.0"
flake8-bugbear = "~=19.8"
flake8-docstrings = "~=1.4"
flake8-import-order = "~=0.18"
diff --git a/Pipfile.lock b/Pipfile.lock
index 69caf4646..fa29bf995 100644
--- a/Pipfile.lock
+++ b/Pipfile.lock
@@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
- "sha256": "c27d699b4aeeed204dee41f924f682ae2a670add8549a8826e58776594370582"
+ "sha256": "c7706a61eb96c06d073898018ea2dbcf5bd3b15d007496e2d60120a65647f31e"
},
"pipfile-spec": 6,
"requires": {
@@ -18,11 +18,11 @@
"default": {
"aio-pika": {
"hashes": [
- "sha256:1da038b3d2c1b49e0e816d87424e702912bb77f9b5197f2bf279217915b4f7ed",
- "sha256:29fe851374b86c997a22174c04352b5941bc1c2e36bbf542918ac18a76cfc9d3"
+ "sha256:4199122a450dffd8303b7857a9d82657bf1487fe329e489520833b40fbe92406",
+ "sha256:fe85c7456e5c060bce4eb9cffab5b2c4d3c563cb72177977b3556c54c8e3aeb6"
],
"index": "pypi",
- "version": "==6.3.0"
+ "version": "==6.5.2"
},
"aiodns": {
"hashes": [
@@ -34,38 +34,28 @@
},
"aiohttp": {
"hashes": [
- "sha256:00d198585474299c9c3b4f1d5de1a576cc230d562abc5e4a0e81d71a20a6ca55",
- "sha256:0155af66de8c21b8dba4992aaeeabf55503caefae00067a3b1139f86d0ec50ed",
- "sha256:09654a9eca62d1bd6d64aa44db2498f60a5c1e0ac4750953fdd79d5c88955e10",
- "sha256:199f1d106e2b44b6dacdf6f9245493c7d716b01d0b7fbe1959318ba4dc64d1f5",
- "sha256:296f30dedc9f4b9e7a301e5cc963012264112d78a1d3094cd83ef148fdf33ca1",
- "sha256:368ed312550bd663ce84dc4b032a962fcb3c7cae099dbbd48663afc305e3b939",
- "sha256:40d7ea570b88db017c51392349cf99b7aefaaddd19d2c78368aeb0bddde9d390",
- "sha256:629102a193162e37102c50713e2e31dc9a2fe7ac5e481da83e5bb3c0cee700aa",
- "sha256:6d5ec9b8948c3d957e75ea14d41e9330e1ac3fed24ec53766c780f82805140dc",
- "sha256:87331d1d6810214085a50749160196391a712a13336cd02ce1c3ea3d05bcf8d5",
- "sha256:9a02a04bbe581c8605ac423ba3a74999ec9d8bce7ae37977a3d38680f5780b6d",
- "sha256:9c4c83f4fa1938377da32bc2d59379025ceeee8e24b89f72fcbccd8ca22dc9bf",
- "sha256:9cddaff94c0135ee627213ac6ca6d05724bfe6e7a356e5e09ec57bd3249510f6",
- "sha256:a25237abf327530d9561ef751eef9511ab56fd9431023ca6f4803f1994104d72",
- "sha256:a5cbd7157b0e383738b8e29d6e556fde8726823dae0e348952a61742b21aeb12",
- "sha256:a97a516e02b726e089cffcde2eea0d3258450389bbac48cbe89e0f0b6e7b0366",
- "sha256:acc89b29b5f4e2332d65cd1b7d10c609a75b88ef8925d487a611ca788432dfa4",
- "sha256:b05bd85cc99b06740aad3629c2585bda7b83bd86e080b44ba47faf905fdf1300",
- "sha256:c2bec436a2b5dafe5eaeb297c03711074d46b6eb236d002c13c42f25c4a8ce9d",
- "sha256:cc619d974c8c11fe84527e4b5e1c07238799a8c29ea1c1285149170524ba9303",
- "sha256:d4392defd4648badaa42b3e101080ae3313e8f4787cb517efd3f5b8157eaefd6",
- "sha256:e1c3c582ee11af7f63a34a46f0448fca58e59889396ffdae1f482085061a2889"
+ "sha256:1e984191d1ec186881ffaed4581092ba04f7c61582a177b187d3a2f07ed9719e",
+ "sha256:259ab809ff0727d0e834ac5e8a283dc5e3e0ecc30c4d80b3cd17a4139ce1f326",
+ "sha256:2f4d1a4fdce595c947162333353d4a44952a724fba9ca3205a3df99a33d1307a",
+ "sha256:32e5f3b7e511aa850829fbe5aa32eb455e5534eaa4b1ce93231d00e2f76e5654",
+ "sha256:344c780466b73095a72c616fac5ea9c4665add7fc129f285fbdbca3cccf4612a",
+ "sha256:460bd4237d2dbecc3b5ed57e122992f60188afe46e7319116da5eb8a9dfedba4",
+ "sha256:4c6efd824d44ae697814a2a85604d8e992b875462c6655da161ff18fd4f29f17",
+ "sha256:50aaad128e6ac62e7bf7bd1f0c0a24bc968a0c0590a726d5a955af193544bcec",
+ "sha256:6206a135d072f88da3e71cc501c59d5abffa9d0bb43269a6dcd28d66bfafdbdd",
+ "sha256:65f31b622af739a802ca6fd1a3076fd0ae523f8485c52924a89561ba10c49b48",
+ "sha256:ae55bac364c405caa23a4f2d6cfecc6a0daada500274ffca4a9230e7129eac59",
+ "sha256:b778ce0c909a2653741cb4b1ac7015b5c130ab9c897611df43ae6a58523cb965"
],
"index": "pypi",
- "version": "==3.5.4"
+ "version": "==3.6.2"
},
"aiormq": {
"hashes": [
- "sha256:afc0d46837b121585e4faec0a7646706429b4e2f5110ae8d0b5cdc3708b4b0e5",
- "sha256:dc0fbbc7f8ad5af6a2cc18e00ccc5f925984cde3db6e8fe952c07b7ef157b5f2"
+ "sha256:286e0b0772075580466e45f98f051b9728a9316b9c36f0c14c7bc1409be375b0",
+ "sha256:7ed7d6df6b57af7f8bce7d1ebcbdfc32b676192e46703e81e9e217316e56b5bd"
],
- "version": "==2.9.1"
+ "version": "==3.2.1"
},
"alabaster": {
"hashes": [
@@ -90,63 +80,58 @@
},
"babel": {
"hashes": [
- "sha256:af92e6106cb7c55286b25b38ad7695f8b4efb36a90ba483d7f7a6628c46158ab",
- "sha256:e86135ae101e31e2c8ec20a4e0c5220f4eed12487d5cf3f78be7e98d3a57fc28"
+ "sha256:1aac2ae2d0d8ea368fa90906567f5c08463d98ade155c0c4bfedd6a0f7160e38",
+ "sha256:d670ea0b10f8b723672d3a6abeb87b565b244da220d76b4dba1b66269ec152d4"
],
- "version": "==2.7.0"
+ "version": "==2.8.0"
},
"beautifulsoup4": {
"hashes": [
- "sha256:5279c36b4b2ec2cb4298d723791467e3000e5384a43ea0cdf5d45207c7e97169",
- "sha256:6135db2ba678168c07950f9a16c4031822c6f4aec75a65e0a97bc5ca09789931",
- "sha256:dcdef580e18a76d54002088602eba453eec38ebbcafafeaabd8cab12b6155d57"
+ "sha256:05fd825eb01c290877657a56df4c6e4c311b3965bda790c613a3d6fb01a5462a",
+ "sha256:9fbb4d6e48ecd30bcacc5b63b94088192dcda178513b2ae3c394229f8911b887",
+ "sha256:e1505eeed31b0f4ce2dbb3bc8eb256c04cc2b3b72af7d551a4ab6efd5cbe5dae"
],
- "version": "==4.8.1"
+ "version": "==4.8.2"
},
"certifi": {
"hashes": [
- "sha256:e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50",
- "sha256:fd7c7c74727ddcf00e9acd26bba8da604ffec95bf1c2144e67aff7a8b50e6cef"
+ "sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3",
+ "sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f"
],
- "version": "==2019.9.11"
+ "version": "==2019.11.28"
},
"cffi": {
"hashes": [
- "sha256:0b49274afc941c626b605fb59b59c3485c17dc776dc3cc7cc14aca74cc19cc42",
- "sha256:0e3ea92942cb1168e38c05c1d56b0527ce31f1a370f6117f1d490b8dcd6b3a04",
- "sha256:135f69aecbf4517d5b3d6429207b2dff49c876be724ac0c8bf8e1ea99df3d7e5",
- "sha256:19db0cdd6e516f13329cba4903368bff9bb5a9331d3410b1b448daaadc495e54",
- "sha256:2781e9ad0e9d47173c0093321bb5435a9dfae0ed6a762aabafa13108f5f7b2ba",
- "sha256:291f7c42e21d72144bb1c1b2e825ec60f46d0a7468f5346841860454c7aa8f57",
- "sha256:2c5e309ec482556397cb21ede0350c5e82f0eb2621de04b2633588d118da4396",
- "sha256:2e9c80a8c3344a92cb04661115898a9129c074f7ab82011ef4b612f645939f12",
- "sha256:32a262e2b90ffcfdd97c7a5e24a6012a43c61f1f5a57789ad80af1d26c6acd97",
- "sha256:3c9fff570f13480b201e9ab69453108f6d98244a7f495e91b6c654a47486ba43",
- "sha256:415bdc7ca8c1c634a6d7163d43fb0ea885a07e9618a64bda407e04b04333b7db",
- "sha256:42194f54c11abc8583417a7cf4eaff544ce0de8187abaf5d29029c91b1725ad3",
- "sha256:4424e42199e86b21fc4db83bd76909a6fc2a2aefb352cb5414833c030f6ed71b",
- "sha256:4a43c91840bda5f55249413037b7a9b79c90b1184ed504883b72c4df70778579",
- "sha256:599a1e8ff057ac530c9ad1778293c665cb81a791421f46922d80a86473c13346",
- "sha256:5c4fae4e9cdd18c82ba3a134be256e98dc0596af1e7285a3d2602c97dcfa5159",
- "sha256:5ecfa867dea6fabe2a58f03ac9186ea64da1386af2159196da51c4904e11d652",
- "sha256:62f2578358d3a92e4ab2d830cd1c2049c9c0d0e6d3c58322993cc341bdeac22e",
- "sha256:6471a82d5abea994e38d2c2abc77164b4f7fbaaf80261cb98394d5793f11b12a",
- "sha256:6d4f18483d040e18546108eb13b1dfa1000a089bcf8529e30346116ea6240506",
- "sha256:71a608532ab3bd26223c8d841dde43f3516aa5d2bf37b50ac410bb5e99053e8f",
- "sha256:74a1d8c85fb6ff0b30fbfa8ad0ac23cd601a138f7509dc617ebc65ef305bb98d",
- "sha256:7b93a885bb13073afb0aa73ad82059a4c41f4b7d8eb8368980448b52d4c7dc2c",
- "sha256:7d4751da932caaec419d514eaa4215eaf14b612cff66398dd51129ac22680b20",
- "sha256:7f627141a26b551bdebbc4855c1157feeef18241b4b8366ed22a5c7d672ef858",
- "sha256:8169cf44dd8f9071b2b9248c35fc35e8677451c52f795daa2bb4643f32a540bc",
- "sha256:aa00d66c0fab27373ae44ae26a66a9e43ff2a678bf63a9c7c1a9a4d61172827a",
- "sha256:ccb032fda0873254380aa2bfad2582aedc2959186cce61e3a17abc1a55ff89c3",
- "sha256:d754f39e0d1603b5b24a7f8484b22d2904fa551fe865fd0d4c3332f078d20d4e",
- "sha256:d75c461e20e29afc0aee7172a0950157c704ff0dd51613506bd7d82b718e7410",
- "sha256:dcd65317dd15bc0451f3e01c80da2216a31916bdcffd6221ca1202d96584aa25",
- "sha256:e570d3ab32e2c2861c4ebe6ffcad6a8abf9347432a37608fe1fbd157b3f0036b",
- "sha256:fd43a88e045cf992ed09fa724b5315b790525f2676883a6ea64e3263bae6549d"
- ],
- "version": "==1.13.2"
+ "sha256:001bf3242a1bb04d985d63e138230802c6c8d4db3668fb545fb5005ddf5bb5ff",
+ "sha256:00789914be39dffba161cfc5be31b55775de5ba2235fe49aa28c148236c4e06b",
+ "sha256:028a579fc9aed3af38f4892bdcc7390508adabc30c6af4a6e4f611b0c680e6ac",
+ "sha256:14491a910663bf9f13ddf2bc8f60562d6bc5315c1f09c704937ef17293fb85b0",
+ "sha256:1cae98a7054b5c9391eb3249b86e0e99ab1e02bb0cc0575da191aedadbdf4384",
+ "sha256:2089ed025da3919d2e75a4d963d008330c96751127dd6f73c8dc0c65041b4c26",
+ "sha256:2d384f4a127a15ba701207f7639d94106693b6cd64173d6c8988e2c25f3ac2b6",
+ "sha256:337d448e5a725bba2d8293c48d9353fc68d0e9e4088d62a9571def317797522b",
+ "sha256:399aed636c7d3749bbed55bc907c3288cb43c65c4389964ad5ff849b6370603e",
+ "sha256:3b911c2dbd4f423b4c4fcca138cadde747abdb20d196c4a48708b8a2d32b16dd",
+ "sha256:3d311bcc4a41408cf5854f06ef2c5cab88f9fded37a3b95936c9879c1640d4c2",
+ "sha256:62ae9af2d069ea2698bf536dcfe1e4eed9090211dbaafeeedf5cb6c41b352f66",
+ "sha256:66e41db66b47d0d8672d8ed2708ba91b2f2524ece3dee48b5dfb36be8c2f21dc",
+ "sha256:675686925a9fb403edba0114db74e741d8181683dcf216be697d208857e04ca8",
+ "sha256:7e63cbcf2429a8dbfe48dcc2322d5f2220b77b2e17b7ba023d6166d84655da55",
+ "sha256:8a6c688fefb4e1cd56feb6c511984a6c4f7ec7d2a1ff31a10254f3c817054ae4",
+ "sha256:8c0ffc886aea5df6a1762d0019e9cb05f825d0eec1f520c51be9d198701daee5",
+ "sha256:95cd16d3dee553f882540c1ffe331d085c9e629499ceadfbda4d4fde635f4b7d",
+ "sha256:99f748a7e71ff382613b4e1acc0ac83bf7ad167fb3802e35e90d9763daba4d78",
+ "sha256:b8c78301cefcf5fd914aad35d3c04c2b21ce8629b5e4f4e45ae6812e461910fa",
+ "sha256:c420917b188a5582a56d8b93bdd8e0f6eca08c84ff623a4c16e809152cd35793",
+ "sha256:c43866529f2f06fe0edc6246eb4faa34f03fe88b64a0a9a942561c8e22f4b71f",
+ "sha256:cab50b8c2250b46fe738c77dbd25ce017d5e6fb35d3407606e7a4180656a5a6a",
+ "sha256:cef128cb4d5e0b3493f058f10ce32365972c554572ff821e175dbc6f8ff6924f",
+ "sha256:cf16e3cf6c0a5fdd9bc10c21687e19d29ad1fe863372b5543deaec1039581a30",
+ "sha256:e56c744aa6ff427a607763346e4170629caf7e48ead6921745986db3692f987f",
+ "sha256:e577934fc5f8779c554639376beeaa5657d54349096ef24abe8c74c5d9c117c3",
+ "sha256:f2b0fa0c01d8a0c7483afd9f31d7ecf2d71760ca24499c8697aeb5ca37dc090c"
+ ],
+ "version": "==1.14.0"
},
"chardet": {
"hashes": [
@@ -157,101 +142,87 @@
},
"deepdiff": {
"hashes": [
- "sha256:3457ea7cecd51ba48015d89edbb569358af4d9b9e65e28bdb3209608420627f9",
- "sha256:5e2343398e90538edaa59c0c99207e996a3a834fdc878c666376f632a760c35a"
+ "sha256:b3fa588d1eac7fa318ec1fb4f2004568e04cb120a1989feda8e5e7164bcbf07a",
+ "sha256:ed7342d3ed3c0c2058a3fb05b477c943c9959ef62223dca9baa3375718a25d87"
],
"index": "pypi",
- "version": "==4.0.9"
+ "version": "==4.2.0"
},
"discord-py": {
"hashes": [
- "sha256:7c843b523bb011062b453864e75c7b675a03faf573c58d14c9f096e85984329d"
+ "sha256:8bfe5628d31771744000f19135c386c74ac337479d7282c26cc1627b9d31f360"
],
"index": "pypi",
- "version": "==1.2.5"
+ "version": "==1.3.1"
},
"docutils": {
"hashes": [
- "sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0",
- "sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827",
- "sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99"
+ "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af",
+ "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"
],
- "version": "==0.15.2"
+ "version": "==0.16"
},
"fuzzywuzzy": {
"hashes": [
- "sha256:5ac7c0b3f4658d2743aa17da53a55598144edbc5bee3c6863840636e6926f254",
- "sha256:6f49de47db00e1c71d40ad16da42284ac357936fa9b66bea1df63fed07122d62"
+ "sha256:45016e92264780e58972dca1b3d939ac864b78437422beecebb3095f8efd00e8",
+ "sha256:928244b28db720d1e0ee7587acf660ea49d7e4c632569cad4f1cd7e68a5f0993"
],
"index": "pypi",
- "version": "==0.17.0"
+ "version": "==0.18.0"
},
"idna": {
"hashes": [
- "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
- "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ "sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb",
+ "sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa"
],
- "version": "==2.8"
+ "version": "==2.9"
},
"imagesize": {
"hashes": [
- "sha256:3f349de3eb99145973fefb7dbe38554414e5c30abd0c8e4b970a7c9d09f3a1d8",
- "sha256:f3832918bc3c66617f92e35f5d70729187676313caa60c187eb0f28b8fe5e3b5"
+ "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1",
+ "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"
],
- "version": "==1.1.0"
+ "version": "==1.2.0"
},
"jinja2": {
"hashes": [
- "sha256:74320bb91f31270f9551d46522e33af46a80c3d619f4a4bf42b3164d30b5911f",
- "sha256:9fe95f19286cfefaa917656583d020be14e7859c6b0252588391e47db34527de"
+ "sha256:93187ffbc7808079673ef52771baa950426fd664d3aad1d0fa3e95644360e250",
+ "sha256:b0eaf100007721b5c16c1fc1eecb87409464edc10469ddc9a22a27a99123be49"
],
- "version": "==2.10.3"
- },
- "jsonpickle": {
- "hashes": [
- "sha256:d0c5a4e6cb4e58f6d5406bdded44365c2bcf9c836c4f52910cc9ba7245a59dc2",
- "sha256:d3e922d781b1d0096df2dad89a2e1f47177d7969b596aea806a9d91b4626b29b"
- ],
- "version": "==1.2"
- },
- "logmatic-python": {
- "hashes": [
- "sha256:0c15ac9f5faa6a60059b28910db642c3dc7722948c3cc940923f8c9039604342"
- ],
- "index": "pypi",
- "version": "==0.1.7"
+ "version": "==2.11.1"
},
"lxml": {
"hashes": [
- "sha256:02ca7bf899da57084041bb0f6095333e4d239948ad3169443f454add9f4e9cb4",
- "sha256:096b82c5e0ea27ce9138bcbb205313343ee66a6e132f25c5ed67e2c8d960a1bc",
- "sha256:0a920ff98cf1aac310470c644bc23b326402d3ef667ddafecb024e1713d485f1",
- "sha256:1409b14bf83a7d729f92e2a7fbfe7ec929d4883ca071b06e95c539ceedb6497c",
- "sha256:17cae1730a782858a6e2758fd20dd0ef7567916c47757b694a06ffafdec20046",
- "sha256:17e3950add54c882e032527795c625929613adbd2ce5162b94667334458b5a36",
- "sha256:1f4f214337f6ee5825bf90a65d04d70aab05526c08191ab888cb5149501923c5",
- "sha256:2e8f77db25b0a96af679e64ff9bf9dddb27d379c9900c3272f3041c4d1327c9d",
- "sha256:4dffd405390a45ecb95ab5ab1c1b847553c18b0ef8ed01e10c1c8b1a76452916",
- "sha256:6b899931a5648862c7b88c795eddff7588fb585e81cecce20f8d9da16eff96e0",
- "sha256:726c17f3e0d7a7200718c9a890ccfeab391c9133e363a577a44717c85c71db27",
- "sha256:760c12276fee05c36f95f8040180abc7fbebb9e5011447a97cdc289b5d6ab6fc",
- "sha256:796685d3969815a633827c818863ee199440696b0961e200b011d79b9394bbe7",
- "sha256:891fe897b49abb7db470c55664b198b1095e4943b9f82b7dcab317a19116cd38",
- "sha256:9277562f175d2334744ad297568677056861070399cec56ff06abbe2564d1232",
- "sha256:a471628e20f03dcdfde00770eeaf9c77811f0c331c8805219ca7b87ac17576c5",
- "sha256:a63b4fd3e2cabdcc9d918ed280bdde3e8e9641e04f3c59a2a3109644a07b9832",
- "sha256:ae88588d687bd476be588010cbbe551e9c2872b816f2da8f01f6f1fda74e1ef0",
- "sha256:b0b84408d4eabc6de9dd1e1e0bc63e7731e890c0b378a62443e5741cfd0ae90a",
- "sha256:be78485e5d5f3684e875dab60f40cddace2f5b2a8f7fede412358ab3214c3a6f",
- "sha256:c27eaed872185f047bb7f7da2d21a7d8913457678c9a100a50db6da890bc28b9",
- "sha256:c7fccd08b14aa437fe096c71c645c0f9be0655a9b1a4b7cffc77bcb23b3d61d2",
- "sha256:c81cb40bff373ab7a7446d6bbca0190bccc5be3448b47b51d729e37799bb5692",
- "sha256:d11874b3c33ee441059464711cd365b89fa1a9cf19ae75b0c189b01fbf735b84",
- "sha256:e9c028b5897901361d81a4718d1db217b716424a0283afe9d6735fe0caf70f79",
- "sha256:fe489d486cd00b739be826e8c1be188ddb74c7a1ca784d93d06fda882a6a1681"
- ],
- "index": "pypi",
- "version": "==4.4.1"
+ "sha256:06d4e0bbb1d62e38ae6118406d7cdb4693a3fa34ee3762238bcb96c9e36a93cd",
+ "sha256:0701f7965903a1c3f6f09328c1278ac0eee8f56f244e66af79cb224b7ef3801c",
+ "sha256:1f2c4ec372bf1c4a2c7e4bb20845e8bcf8050365189d86806bad1e3ae473d081",
+ "sha256:4235bc124fdcf611d02047d7034164897ade13046bda967768836629bc62784f",
+ "sha256:5828c7f3e615f3975d48f40d4fe66e8a7b25f16b5e5705ffe1d22e43fb1f6261",
+ "sha256:585c0869f75577ac7a8ff38d08f7aac9033da2c41c11352ebf86a04652758b7a",
+ "sha256:5d467ce9c5d35b3bcc7172c06320dddb275fea6ac2037f72f0a4d7472035cea9",
+ "sha256:63dbc21efd7e822c11d5ddbedbbb08cd11a41e0032e382a0fd59b0b08e405a3a",
+ "sha256:7bc1b221e7867f2e7ff1933165c0cec7153dce93d0cdba6554b42a8beb687bdb",
+ "sha256:8620ce80f50d023d414183bf90cc2576c2837b88e00bea3f33ad2630133bbb60",
+ "sha256:8a0ebda56ebca1a83eb2d1ac266649b80af8dd4b4a3502b2c1e09ac2f88fe128",
+ "sha256:90ed0e36455a81b25b7034038e40880189169c308a3df360861ad74da7b68c1a",
+ "sha256:95e67224815ef86924fbc2b71a9dbd1f7262384bca4bc4793645794ac4200717",
+ "sha256:afdb34b715daf814d1abea0317b6d672476b498472f1e5aacbadc34ebbc26e89",
+ "sha256:b4b2c63cc7963aedd08a5f5a454c9f67251b1ac9e22fd9d72836206c42dc2a72",
+ "sha256:d068f55bda3c2c3fcaec24bd083d9e2eede32c583faf084d6e4b9daaea77dde8",
+ "sha256:d5b3c4b7edd2e770375a01139be11307f04341ec709cf724e0f26ebb1eef12c3",
+ "sha256:deadf4df349d1dcd7b2853a2c8796593cc346600726eff680ed8ed11812382a7",
+ "sha256:df533af6f88080419c5a604d0d63b2c33b1c0c4409aba7d0cb6de305147ea8c8",
+ "sha256:e4aa948eb15018a657702fee0b9db47e908491c64d36b4a90f59a64741516e77",
+ "sha256:e5d842c73e4ef6ed8c1bd77806bf84a7cb535f9c0cf9b2c74d02ebda310070e1",
+ "sha256:ebec08091a22c2be870890913bdadd86fcd8e9f0f22bcb398abd3af914690c15",
+ "sha256:edc15fcfd77395e24543be48871c251f38132bb834d9fdfdad756adb6ea37679",
+ "sha256:f2b74784ed7e0bc2d02bd53e48ad6ba523c9b36c194260b7a5045071abbb1012",
+ "sha256:fa071559f14bd1e92077b1b5f6c22cf09756c6de7139370249eb372854ce51e6",
+ "sha256:fd52e796fee7171c4361d441796b64df1acfceb51f29e545e812f16d023c4bbc",
+ "sha256:fe976a0f1ef09b3638778024ab9fb8cde3118f203364212c198f71341c0715ca"
+ ],
+ "index": "pypi",
+ "version": "==4.5.0"
},
"markdownify": {
"hashes": [
@@ -266,13 +237,16 @@
"sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161",
"sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235",
"sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5",
+ "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42",
"sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff",
"sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b",
"sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1",
"sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e",
"sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183",
"sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66",
+ "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b",
"sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1",
+ "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15",
"sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1",
"sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e",
"sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b",
@@ -289,7 +263,9 @@
"sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6",
"sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f",
"sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f",
- "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7"
+ "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2",
+ "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7",
+ "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"
],
"version": "==1.1.1"
},
@@ -303,37 +279,25 @@
},
"multidict": {
"hashes": [
- "sha256:024b8129695a952ebd93373e45b5d341dbb87c17ce49637b34000093f243dd4f",
- "sha256:041e9442b11409be5e4fc8b6a97e4bcead758ab1e11768d1e69160bdde18acc3",
- "sha256:045b4dd0e5f6121e6f314d81759abd2c257db4634260abcfe0d3f7083c4908ef",
- "sha256:047c0a04e382ef8bd74b0de01407e8d8632d7d1b4db6f2561106af812a68741b",
- "sha256:068167c2d7bbeebd359665ac4fff756be5ffac9cda02375b5c5a7c4777038e73",
- "sha256:148ff60e0fffa2f5fad2eb25aae7bef23d8f3b8bdaf947a65cdbe84a978092bc",
- "sha256:1d1c77013a259971a72ddaa83b9f42c80a93ff12df6a4723be99d858fa30bee3",
- "sha256:1d48bc124a6b7a55006d97917f695effa9725d05abe8ee78fd60d6588b8344cd",
- "sha256:31dfa2fc323097f8ad7acd41aa38d7c614dd1960ac6681745b6da124093dc351",
- "sha256:34f82db7f80c49f38b032c5abb605c458bac997a6c3142e0d6c130be6fb2b941",
- "sha256:3d5dd8e5998fb4ace04789d1d008e2bb532de501218519d70bb672c4c5a2fc5d",
- "sha256:4a6ae52bd3ee41ee0f3acf4c60ceb3f44e0e3bc52ab7da1c2b2aa6703363a3d1",
- "sha256:4b02a3b2a2f01d0490dd39321c74273fed0568568ea0e7ea23e02bd1fb10a10b",
- "sha256:4b843f8e1dd6a3195679d9838eb4670222e8b8d01bc36c9894d6c3538316fa0a",
- "sha256:5de53a28f40ef3c4fd57aeab6b590c2c663de87a5af76136ced519923d3efbb3",
- "sha256:61b2b33ede821b94fa99ce0b09c9ece049c7067a33b279f343adfe35108a4ea7",
- "sha256:6a3a9b0f45fd75dc05d8e93dc21b18fc1670135ec9544d1ad4acbcf6b86781d0",
- "sha256:76ad8e4c69dadbb31bad17c16baee61c0d1a4a73bed2590b741b2e1a46d3edd0",
- "sha256:7ba19b777dc00194d1b473180d4ca89a054dd18de27d0ee2e42a103ec9b7d014",
- "sha256:7c1b7eab7a49aa96f3db1f716f0113a8a2e93c7375dd3d5d21c4941f1405c9c5",
- "sha256:7fc0eee3046041387cbace9314926aa48b681202f8897f8bff3809967a049036",
- "sha256:8ccd1c5fff1aa1427100ce188557fc31f1e0a383ad8ec42c559aabd4ff08802d",
- "sha256:8e08dd76de80539d613654915a2f5196dbccc67448df291e69a88712ea21e24a",
- "sha256:c18498c50c59263841862ea0501da9f2b3659c00db54abfbf823a80787fde8ce",
- "sha256:c49db89d602c24928e68c0d510f4fcf8989d77defd01c973d6cbe27e684833b1",
- "sha256:ce20044d0317649ddbb4e54dab3c1bcc7483c78c27d3f58ab3d0c7e6bc60d26a",
- "sha256:d1071414dd06ca2eafa90c85a079169bfeb0e5f57fd0b45d44c092546fcd6fd9",
- "sha256:d3be11ac43ab1a3e979dac80843b42226d5d3cccd3986f2e03152720a4297cd7",
- "sha256:db603a1c235d110c860d5f39988ebc8218ee028f07a7cbc056ba6424372ca31b"
- ],
- "version": "==4.5.2"
+ "sha256:13f3ebdb5693944f52faa7b2065b751cb7e578b8dd0a5bb8e4ab05ad0188b85e",
+ "sha256:26502cefa86d79b86752e96639352c7247846515c864d7c2eb85d036752b643c",
+ "sha256:4fba5204d32d5c52439f88437d33ad14b5f228e25072a192453f658bddfe45a7",
+ "sha256:527124ef435f39a37b279653ad0238ff606b58328ca7989a6df372fd75d7fe26",
+ "sha256:5414f388ffd78c57e77bd253cf829373721f450613de53dc85a08e34d806e8eb",
+ "sha256:5eee66f882ab35674944dfa0d28b57fa51e160b4dce0ce19e47f495fdae70703",
+ "sha256:63810343ea07f5cd86ba66ab66706243a6f5af075eea50c01e39b4ad6bc3c57a",
+ "sha256:6bd10adf9f0d6a98ccc792ab6f83d18674775986ba9bacd376b643fe35633357",
+ "sha256:83c6ddf0add57c6b8a7de0bc7e2d656be3eefeff7c922af9a9aae7e49f225625",
+ "sha256:93166e0f5379cf6cd29746989f8a594fa7204dcae2e9335ddba39c870a287e1c",
+ "sha256:9a7b115ee0b9b92d10ebc246811d8f55d0c57e82dbb6a26b23c9a9a6ad40ce0c",
+ "sha256:a38baa3046cce174a07a59952c9f876ae8875ef3559709639c17fdf21f7b30dd",
+ "sha256:a6d219f49821f4b2c85c6d426346a5d84dab6daa6f85ca3da6c00ed05b54022d",
+ "sha256:a8ed33e8f9b67e3b592c56567135bb42e7e0e97417a4b6a771e60898dfd5182b",
+ "sha256:d7d428488c67b09b26928950a395e41cc72bb9c3d5abfe9f0521940ee4f796d4",
+ "sha256:dcfed56aa085b89d644af17442cdc2debaa73388feba4b8026446d168ca8dad7",
+ "sha256:f29b885e4903bd57a7789f09fe9d60b6475a6c1a4c0eca874d8558f00f9d4b51"
+ ],
+ "version": "==4.7.4"
},
"ordered-set": {
"hashes": [
@@ -343,10 +307,10 @@
},
"packaging": {
"hashes": [
- "sha256:28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47",
- "sha256:d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108"
+ "sha256:170748228214b70b672c581a3dd610ee51f733018650740e98c7df862a583f73",
+ "sha256:e665345f9eef0c621aa0bf2f8d78cf6d21904eef16a93f020240b704a57f1334"
],
- "version": "==19.2"
+ "version": "==20.1"
},
"pamqp": {
"hashes": [
@@ -357,21 +321,37 @@
},
"pycares": {
"hashes": [
- "sha256:2ca080db265ea238dc45f997f94effb62b979a617569889e265c26a839ed6305",
- "sha256:6f79c6afb6ce603009db2042fddc2e348ad093ece9784cbe2daa809499871a23",
- "sha256:70918d06eb0603016d37092a5f2c0228509eb4e6c5a3faacb4184f6ab7be7650",
- "sha256:755187d28d24a9ea63aa2b4c0638be31d65fbf7f0ce16d41261b9f8cb55a1b99",
- "sha256:7baa4b1f2146eb8423ff8303ebde3a20fb444a60db761fba0430d104fe35ddbf",
- "sha256:90b27d4df86395f465a171386bc341098d6d47b65944df46518814ae298f6cc6",
- "sha256:9e090dd6b2afa65cb51c133883b2bf2240fd0f717b130b0048714b33fb0f47ce",
- "sha256:a11b7d63c3718775f6e805d6464cb10943780395ab042c7e5a0a7a9f612735dd",
- "sha256:b253f5dcaa0ac7076b79388a3ac80dd8f3bd979108f813baade40d3a9b8bf0bd",
- "sha256:c7f4f65e44ba35e35ad3febc844270665bba21cfb0fb7d749434e705b556e087",
- "sha256:cdb342e6a254f035bd976d95807a2184038fc088d957a5104dcaab8be602c093",
- "sha256:cf08e164f8bfb83b9fe633feb56f2754fae6baefcea663593794fa0518f8f98c",
- "sha256:df9bc694cf03673878ea8ce674082c5acd134991d64d6c306d4bd61c0c1df98f"
+ "sha256:050f00b39ed77ea8a4e555f09417d4b1a6b5baa24bb9531a3e15d003d2319b3f",
+ "sha256:0a24d2e580a8eb567140d7b69f12cb7de90c836bd7b6488ec69394d308605ac3",
+ "sha256:0c5bd1f6f885a219d5e972788d6eef7b8043b55c3375a845e5399638436e0bba",
+ "sha256:11c628402cc8fc8ef461076d4e47f88afc1f8609989ebbff0dbffcd54c97239f",
+ "sha256:18dfd4fd300f570d6c4536c1d987b7b7673b2a9d14346592c5d6ed716df0d104",
+ "sha256:1917b82494907a4a342db420bc4dd5bac355a5fa3984c35ba9bf51422b020b48",
+ "sha256:1b90fa00a89564df059fb18e796458864cc4e00cb55e364dbf921997266b7c55",
+ "sha256:1d8d177c40567de78108a7835170f570ab04f09084bfd32df9919c0eaec47aa1",
+ "sha256:236286f81664658b32c141c8e79d20afc3d54f6e2e49dfc8b702026be7265855",
+ "sha256:2e4f74677542737fb5af4ea9a2e415ec5ab31aa67e7b8c3c969fdb15c069f679",
+ "sha256:48a7750f04e69e1f304f4332b755728067e7c4b1abe2760bba1cacd9ff7a847a",
+ "sha256:7d86e62b700b21401ffe7fd1bbfe91e08489416fecae99c6570ab023c6896022",
+ "sha256:7e2d7effd08d2e5a3cb95d98a7286ebab71ab2fbce84fa93cc2dd56caf7240dd",
+ "sha256:81edb016d9e43dde7473bc3999c29cdfee3a6b67308fed1ea21049f458e83ae0",
+ "sha256:96c90e11b4a4c7c0b8ff5aaaae969c5035493136586043ff301979aae0623941",
+ "sha256:9a0a1845f8cb2e62332bca0aaa9ad5494603ac43fb60d510a61d5b5b170d7216",
+ "sha256:a05bbfdfd41f8410a905a818f329afe7510cbd9ee65c60f8860a72b6c64ce5dc",
+ "sha256:a5089fd660f0b0d228b14cdaa110d0d311edfa5a63f800618dbf1321dcaef66b",
+ "sha256:c457a709e6f2befea7e2996c991eda6d79705dd075f6521593ba6ebc1485b811",
+ "sha256:c5cb72644b04e5e5abfb1e10a0e7eb75da6684ea0e60871652f348e412cf3b11",
+ "sha256:cce46dd4717debfd2aab79d6d7f0cbdf6b1e982dc4d9bebad81658d59ede07c2",
+ "sha256:cfdd1f90bcf373b00f4b2c55ea47868616fe2f779f792fc913fa82a3d64ffe43",
+ "sha256:d88a279cbc5af613f73e86e19b3f63850f7a2e2736e249c51995dedcc830b1bb",
+ "sha256:eba9a9227438da5e78fc8eee32f32eb35d9a50cf0a0bd937eb6275c7cc3015fe",
+ "sha256:eee7b6a5f5b5af050cb7d66ab28179287b416f06d15a8974ac831437fec51336",
+ "sha256:f41ac1c858687e53242828c9f59c2e7b0b95dbcd5bdd09c7e5d3c48b0f89a25a",
+ "sha256:f8deaefefc3a589058df1b177275f79233e8b0eeee6734cf4336d80164ecd022",
+ "sha256:fa78e919f3bd7d6d075db262aa41079b4c02da315c6043c6f43881e2ebcdd623",
+ "sha256:fadb97d2e02dabdc15a0091591a972a938850d79ddde23d385d813c1731983f0"
],
- "version": "==3.0.0"
+ "version": "==3.1.1"
},
"pycparser": {
"hashes": [
@@ -381,17 +361,17 @@
},
"pygments": {
"hashes": [
- "sha256:71e430bc85c88a430f000ac1d9b331d2407f681d6f6aec95e8bcfbc3df5b0127",
- "sha256:881c4c157e45f30af185c1ffe8d549d48ac9127433f2c380c24b84572ad66297"
+ "sha256:2a3fe295e54a20164a9df49c75fa58526d3be48e14aceba6d6b1e8ac0bfd6f1b",
+ "sha256:98c8aa5a9f778fcd1026a17361ddaf7330d1b7c62ae97c3bb0ae73e0b9b6b0fe"
],
- "version": "==2.4.2"
+ "version": "==2.5.2"
},
"pyparsing": {
"hashes": [
- "sha256:20f995ecd72f2a1f4bf6b072b63b22e2eb457836601e76d6e5dfcd75436acc1f",
- "sha256:4ca62001be367f01bd3e92ecbb79070272a9d4964dce6a48a82ff0b8bc7e683a"
+ "sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f",
+ "sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec"
],
- "version": "==2.4.5"
+ "version": "==2.4.6"
},
"python-dateutil": {
"hashes": [
@@ -401,12 +381,6 @@
"index": "pypi",
"version": "==2.8.1"
},
- "python-json-logger": {
- "hashes": [
- "sha256:b7a31162f2a01965a5efb94453ce69230ed208468b0bbc7fdfc56e6d8df2e281"
- ],
- "version": "==0.1.11"
- },
"pytz": {
"hashes": [
"sha256:1c557d7d0e871de1f5ccd5833f60fb2550652da6be2693c1e02300743d21500d",
@@ -416,37 +390,43 @@
},
"pyyaml": {
"hashes": [
- "sha256:0113bc0ec2ad727182326b61326afa3d1d8280ae1122493553fd6f4397f33df9",
- "sha256:01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4",
- "sha256:5124373960b0b3f4aa7df1707e63e9f109b5263eca5976c66e08b1c552d4eaf8",
- "sha256:5ca4f10adbddae56d824b2c09668e91219bb178a1eee1faa56af6f99f11bf696",
- "sha256:7907be34ffa3c5a32b60b95f4d95ea25361c951383a894fec31be7252b2b6f34",
- "sha256:7ec9b2a4ed5cad025c2278a1e6a19c011c80a3caaac804fd2d329e9cc2c287c9",
- "sha256:87ae4c829bb25b9fe99cf71fbb2140c448f534e24c998cc60f39ae4f94396a73",
- "sha256:9de9919becc9cc2ff03637872a440195ac4241c80536632fffeb6a1e25a74299",
- "sha256:a5a85b10e450c66b49f98846937e8cfca1db3127a9d5d1e31ca45c3d0bef4c5b",
- "sha256:b0997827b4f6a7c286c01c5f60384d218dca4ed7d9efa945c3e1aa623d5709ae",
- "sha256:b631ef96d3222e62861443cc89d6563ba3eeb816eeb96b2629345ab795e53681",
- "sha256:bf47c0607522fdbca6c9e817a6e81b08491de50f3766a7a0e6a5be7905961b41",
- "sha256:f81025eddd0327c7d4cfe9b62cf33190e1e736cc6e97502b3ec425f574b3e7a8"
+ "sha256:059b2ee3194d718896c0ad077dd8c043e5e909d9180f387ce42012662a4946d6",
+ "sha256:1cf708e2ac57f3aabc87405f04b86354f66799c8e62c28c5fc5f88b5521b2dbf",
+ "sha256:24521fa2890642614558b492b473bee0ac1f8057a7263156b02e8b14c88ce6f5",
+ "sha256:4fee71aa5bc6ed9d5f116327c04273e25ae31a3020386916905767ec4fc5317e",
+ "sha256:70024e02197337533eef7b85b068212420f950319cc8c580261963aefc75f811",
+ "sha256:74782fbd4d4f87ff04159e986886931456a1894c61229be9eaf4de6f6e44b99e",
+ "sha256:940532b111b1952befd7db542c370887a8611660d2b9becff75d39355303d82d",
+ "sha256:cb1f2f5e426dc9f07a7681419fe39cee823bb74f723f36f70399123f439e9b20",
+ "sha256:dbbb2379c19ed6042e8f11f2a2c66d39cceb8aeace421bfc29d085d93eda3689",
+ "sha256:e3a057b7a64f1222b56e47bcff5e4b94c4f61faac04c7c4ecb1985e18caa3994",
+ "sha256:e9f45bd5b92c7974e59bcd2dcc8631a6b6cc380a904725fce7bc08872e691615"
],
"index": "pypi",
- "version": "==5.1.2"
+ "version": "==5.3"
},
"requests": {
"hashes": [
- "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4",
- "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"
+ "sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee",
+ "sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6"
],
"index": "pypi",
- "version": "==2.22.0"
+ "version": "==2.23.0"
+ },
+ "sentry-sdk": {
+ "hashes": [
+ "sha256:b06dd27391fd11fb32f84fe054e6a64736c469514a718a99fb5ce1dff95d6b28",
+ "sha256:e023da07cfbead3868e1e2ba994160517885a32dfd994fc455b118e37989479b"
+ ],
+ "index": "pypi",
+ "version": "==0.14.1"
},
"six": {
"hashes": [
- "sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd",
- "sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66"
+ "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a",
+ "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c"
],
- "version": "==1.13.0"
+ "version": "==1.14.0"
},
"snowballstemmer": {
"hashes": [
@@ -464,11 +444,11 @@
},
"sphinx": {
"hashes": [
- "sha256:31088dfb95359384b1005619827eaee3056243798c62724fd3fa4b84ee4d71bd",
- "sha256:52286a0b9d7caa31efee301ec4300dbdab23c3b05da1c9024b4e84896fb73d79"
+ "sha256:525527074f2e0c2585f68f73c99b4dc257c34bbe308b27f5f8c7a6e20642742f",
+ "sha256:543d39db5f82d83a5c1aa0c10c88f2b6cff2da3e711aa849b2c627b4b403bbd9"
],
"index": "pypi",
- "version": "==2.2.1"
+ "version": "==2.4.2"
},
"sphinxcontrib-applehelp": {
"hashes": [
@@ -522,48 +502,62 @@
},
"websockets": {
"hashes": [
- "sha256:0e2f7d6567838369af074f0ef4d0b802d19fa1fee135d864acc656ceefa33136",
- "sha256:2a16dac282b2fdae75178d0ed3d5b9bc3258dabfae50196cbb30578d84b6f6a6",
- "sha256:5a1fa6072405648cb5b3688e9ed3b94be683ce4a4e5723e6f5d34859dee495c1",
- "sha256:5c1f55a1274df9d6a37553fef8cff2958515438c58920897675c9bc70f5a0538",
- "sha256:669d1e46f165e0ad152ed8197f7edead22854a6c90419f544e0f234cc9dac6c4",
- "sha256:695e34c4dbea18d09ab2c258994a8bf6a09564e762655408241f6a14592d2908",
- "sha256:6b2e03d69afa8d20253455e67b64de1a82ff8612db105113cccec35d3f8429f0",
- "sha256:79ca7cdda7ad4e3663ea3c43bfa8637fc5d5604c7737f19a8964781abbd1148d",
- "sha256:7fd2dd9a856f72e6ed06f82facfce01d119b88457cd4b47b7ae501e8e11eba9c",
- "sha256:82c0354ac39379d836719a77ee360ef865377aa6fdead87909d50248d0f05f4d",
- "sha256:8f3b956d11c5b301206382726210dc1d3bee1a9ccf7aadf895aaf31f71c3716c",
- "sha256:91ec98640220ae05b34b79ee88abf27f97ef7c61cf525eec57ea8fcea9f7dddb",
- "sha256:952be9540d83dba815569d5cb5f31708801e0bbfc3a8c5aef1890b57ed7e58bf",
- "sha256:99ac266af38ba1b1fe13975aea01ac0e14bb5f3a3200d2c69f05385768b8568e",
- "sha256:9fa122e7adb24232247f8a89f2d9070bf64b7869daf93ac5e19546b409e47e96",
- "sha256:a0873eadc4b8ca93e2e848d490809e0123eea154aa44ecd0109c4d0171869584",
- "sha256:cb998bd4d93af46b8b49ecf5a72c0a98e5cc6d57fdca6527ba78ad89d6606484",
- "sha256:e02e57346f6a68523e3c43bbdf35dde5c440318d1f827208ae455f6a2ace446d",
- "sha256:e79a5a896bcee7fff24a788d72e5c69f13e61369d055f28113e71945a7eb1559",
- "sha256:ee55eb6bcf23ecc975e6b47c127c201b913598f38b6a300075f84eeef2d3baff",
- "sha256:f1414e6cbcea8d22843e7eafdfdfae3dd1aba41d1945f6ca66e4806c07c4f454"
- ],
- "version": "==6.0"
+ "sha256:0e4fb4de42701340bd2353bb2eee45314651caa6ccee80dbd5f5d5978888fed5",
+ "sha256:1d3f1bf059d04a4e0eb4985a887d49195e15ebabc42364f4eb564b1d065793f5",
+ "sha256:20891f0dddade307ffddf593c733a3fdb6b83e6f9eef85908113e628fa5a8308",
+ "sha256:295359a2cc78736737dd88c343cd0747546b2174b5e1adc223824bcaf3e164cb",
+ "sha256:2db62a9142e88535038a6bcfea70ef9447696ea77891aebb730a333a51ed559a",
+ "sha256:3762791ab8b38948f0c4d281c8b2ddfa99b7e510e46bd8dfa942a5fff621068c",
+ "sha256:3db87421956f1b0779a7564915875ba774295cc86e81bc671631379371af1170",
+ "sha256:3ef56fcc7b1ff90de46ccd5a687bbd13a3180132268c4254fc0fa44ecf4fc422",
+ "sha256:4f9f7d28ce1d8f1295717c2c25b732c2bc0645db3215cf757551c392177d7cb8",
+ "sha256:5c01fd846263a75bc8a2b9542606927cfad57e7282965d96b93c387622487485",
+ "sha256:5c65d2da8c6bce0fca2528f69f44b2f977e06954c8512a952222cea50dad430f",
+ "sha256:751a556205d8245ff94aeef23546a1113b1dd4f6e4d102ded66c39b99c2ce6c8",
+ "sha256:7ff46d441db78241f4c6c27b3868c9ae71473fe03341340d2dfdbe8d79310acc",
+ "sha256:965889d9f0e2a75edd81a07592d0ced54daa5b0785f57dc429c378edbcffe779",
+ "sha256:9b248ba3dd8a03b1a10b19efe7d4f7fa41d158fdaa95e2cf65af5a7b95a4f989",
+ "sha256:9bef37ee224e104a413f0780e29adb3e514a5b698aabe0d969a6ba426b8435d1",
+ "sha256:c1ec8db4fac31850286b7cd3b9c0e1b944204668b8eb721674916d4e28744092",
+ "sha256:c8a116feafdb1f84607cb3b14aa1418424ae71fee131642fc568d21423b51824",
+ "sha256:ce85b06a10fc65e6143518b96d3dca27b081a740bae261c2fb20375801a9d56d",
+ "sha256:d705f8aeecdf3262379644e4b55107a3b55860eb812b673b28d0fbc347a60c55",
+ "sha256:e898a0863421650f0bebac8ba40840fc02258ef4714cb7e1fd76b6a6354bda36",
+ "sha256:f8a7bff6e8664afc4e6c28b983845c5bc14965030e3fb98789734d416af77c4b"
+ ],
+ "version": "==8.1"
},
"yarl": {
"hashes": [
- "sha256:024ecdc12bc02b321bc66b41327f930d1c2c543fa9a561b39861da9388ba7aa9",
- "sha256:2f3010703295fbe1aec51023740871e64bb9664c789cba5a6bdf404e93f7568f",
- "sha256:3890ab952d508523ef4881457c4099056546593fa05e93da84c7250516e632eb",
- "sha256:3e2724eb9af5dc41648e5bb304fcf4891adc33258c6e14e2a7414ea32541e320",
- "sha256:5badb97dd0abf26623a9982cd448ff12cb39b8e4c94032ccdedf22ce01a64842",
- "sha256:73f447d11b530d860ca1e6b582f947688286ad16ca42256413083d13f260b7a0",
- "sha256:7ab825726f2940c16d92aaec7d204cfc34ac26c0040da727cf8ba87255a33829",
- "sha256:b25de84a8c20540531526dfbb0e2d2b648c13fd5dd126728c496d7c3fea33310",
- "sha256:c6e341f5a6562af74ba55205dbd56d248daf1b5748ec48a0200ba227bb9e33f4",
- "sha256:c9bb7c249c4432cd47e75af3864bc02d26c9594f49c82e2a28624417f0ae63b8",
- "sha256:e060906c0c585565c718d1c3841747b61c5439af2211e185f6739a9412dfbde1"
- ],
- "version": "==1.3.0"
+ "sha256:0c2ab325d33f1b824734b3ef51d4d54a54e0e7a23d13b86974507602334c2cce",
+ "sha256:0ca2f395591bbd85ddd50a82eb1fde9c1066fafe888c5c7cc1d810cf03fd3cc6",
+ "sha256:2098a4b4b9d75ee352807a95cdf5f10180db903bc5b7270715c6bbe2551f64ce",
+ "sha256:25e66e5e2007c7a39541ca13b559cd8ebc2ad8fe00ea94a2aad28a9b1e44e5ae",
+ "sha256:26d7c90cb04dee1665282a5d1a998defc1a9e012fdca0f33396f81508f49696d",
+ "sha256:308b98b0c8cd1dfef1a0311dc5e38ae8f9b58349226aa0533f15a16717ad702f",
+ "sha256:3ce3d4f7c6b69c4e4f0704b32eca8123b9c58ae91af740481aa57d7857b5e41b",
+ "sha256:58cd9c469eced558cd81aa3f484b2924e8897049e06889e8ff2510435b7ef74b",
+ "sha256:5b10eb0e7f044cf0b035112446b26a3a2946bca9d7d7edb5e54a2ad2f6652abb",
+ "sha256:6faa19d3824c21bcbfdfce5171e193c8b4ddafdf0ac3f129ccf0cdfcb083e462",
+ "sha256:944494be42fa630134bf907714d40207e646fd5a94423c90d5b514f7b0713fea",
+ "sha256:a161de7e50224e8e3de6e184707476b5a989037dcb24292b391a3d66ff158e70",
+ "sha256:a4844ebb2be14768f7994f2017f70aca39d658a96c786211be5ddbe1c68794c1",
+ "sha256:c2b509ac3d4b988ae8769901c66345425e361d518aecbe4acbfc2567e416626a",
+ "sha256:c9959d49a77b0e07559e579f38b2f3711c2b8716b8410b320bf9713013215a1b",
+ "sha256:d8cdee92bc930d8b09d8bd2043cedd544d9c8bd7436a77678dd602467a993080",
+ "sha256:e15199cdb423316e15f108f51249e44eb156ae5dba232cb73be555324a1d49c2"
+ ],
+ "version": "==1.4.2"
}
},
"develop": {
+ "appdirs": {
+ "hashes": [
+ "sha256:9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92",
+ "sha256:d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e"
+ ],
+ "version": "==1.4.3"
+ },
"aspy.yaml": {
"hashes": [
"sha256:463372c043f70160a9ec950c3f1e4c3a82db5fca01d334b6bc89c7164d744bdc",
@@ -580,17 +574,17 @@
},
"certifi": {
"hashes": [
- "sha256:e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50",
- "sha256:fd7c7c74727ddcf00e9acd26bba8da604ffec95bf1c2144e67aff7a8b50e6cef"
+ "sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3",
+ "sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f"
],
- "version": "==2019.9.11"
+ "version": "==2019.11.28"
},
"cfgv": {
"hashes": [
- "sha256:edb387943b665bf9c434f717bf630fa78aecd53d5900d2e05da6ad6048553144",
- "sha256:fbd93c9ab0a523bf7daec408f3be2ed99a980e20b2d19b50fc184ca6b820d289"
+ "sha256:04b093b14ddf9fd4d17c53ebfd55582d27b76ed30050193c14e560770c5360eb",
+ "sha256:f22b426ed59cd2ab2b54ff96608d846c33dfb8766a67f0b4a6ce130ce244414f"
],
- "version": "==2.0.1"
+ "version": "==3.0.0"
},
"chardet": {
"hashes": [
@@ -644,12 +638,19 @@
"index": "pypi",
"version": "==4.5.4"
},
+ "distlib": {
+ "hashes": [
+ "sha256:2e166e231a26b36d6dfe35a48c4464346620f8645ed0ace01ee31822b288de21"
+ ],
+ "version": "==0.3.0"
+ },
"dodgy": {
"hashes": [
- "sha256:65e13cf878d7aff129f1461c13cb5fd1bb6dfe66bb5327e09379c3877763280c"
+ "sha256:28323cbfc9352139fdd3d316fa17f325cc0e9ac74438cbba51d70f9b48f86c3a",
+ "sha256:51f54c0fd886fa3854387f354b19f429d38c04f984f38bc572558b703c0542a6"
],
"index": "pypi",
- "version": "==0.1.9"
+ "version": "==0.2.1"
},
"dparse": {
"hashes": [
@@ -665,6 +666,13 @@
],
"version": "==0.3"
},
+ "filelock": {
+ "hashes": [
+ "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59",
+ "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836"
+ ],
+ "version": "==3.0.12"
+ },
"flake8": {
"hashes": [
"sha256:45681a117ecc81e870cbf1262835ae4af5e7a8b08e40b944a8a6e6b895914cfb",
@@ -675,11 +683,11 @@
},
"flake8-annotations": {
"hashes": [
- "sha256:6ac7ca1e706307686b60af8043ff1db31dc2cfc1233c8210d67a3d9b8f364736",
- "sha256:b51131007000d67217608fa028a35ff80aa400b474e5972f1f99c2cf9d26bd2e"
+ "sha256:19a6637a5da1bb7ea7948483ca9e2b9e15b213e687e7bf5ff8c1bfc91c185006",
+ "sha256:bb033b72cdd3a2b0a530bbdf2081f12fbea7d70baeaaebb5899723a45f424b8e"
],
"index": "pypi",
- "version": "==1.1.0"
+ "version": "==2.0.0"
},
"flake8-bugbear": {
"hashes": [
@@ -707,11 +715,11 @@
},
"flake8-string-format": {
"hashes": [
- "sha256:68ea72a1a5b75e7018cae44d14f32473c798cf73d75cbaed86c6a9a907b770b2",
- "sha256:774d56103d9242ed968897455ef49b7d6de272000cfa83de5814273a868832f1"
+ "sha256:65f3da786a1461ef77fca3780b314edb2853c377f2e35069723348c8917deaa2",
+ "sha256:812ff431f10576a74c89be4e85b8e075a705be39bc40c4b4278b5b13e2afa9af"
],
"index": "pypi",
- "version": "==0.2.3"
+ "version": "==0.3.0"
},
"flake8-tidy-imports": {
"hashes": [
@@ -730,25 +738,25 @@
},
"identify": {
"hashes": [
- "sha256:4f1fe9a59df4e80fcb0213086fcf502bc1765a01ea4fe8be48da3b65afd2a017",
- "sha256:d8919589bd2a5f99c66302fec0ef9027b12ae150b0b0213999ad3f695fc7296e"
+ "sha256:1222b648251bdcb8deb240b294f450fbf704c7984e08baa92507e4ea10b436d5",
+ "sha256:d824ebe21f38325c771c41b08a95a761db1982f1fc0eee37c6c97df3f1636b96"
],
- "version": "==1.4.7"
+ "version": "==1.4.11"
},
"idna": {
"hashes": [
- "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
- "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ "sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb",
+ "sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa"
],
- "version": "==2.8"
+ "version": "==2.9"
},
"importlib-metadata": {
"hashes": [
- "sha256:aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26",
- "sha256:d5f18a79777f3aa179c145737780282e27b508fc8fd688cb17c7a813e8bd39af"
+ "sha256:06f5b3a99029c7134207dd882428a66992a9de2bef7c2b699b5641f9886c3302",
+ "sha256:b97607a1a18a5100839aec1dc26a1ea17ee0d93b20b0f008d80a5a050afb200b"
],
"markers": "python_version < '3.8'",
- "version": "==0.23"
+ "version": "==1.5.0"
},
"mccabe": {
"hashes": [
@@ -757,34 +765,26 @@
],
"version": "==0.6.1"
},
- "more-itertools": {
- "hashes": [
- "sha256:409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832",
- "sha256:92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4"
- ],
- "index": "pypi",
- "version": "==7.2.0"
- },
"nodeenv": {
"hashes": [
- "sha256:ad8259494cf1c9034539f6cced78a1da4840a4b157e23640bc4a0c0546b0cb7a"
+ "sha256:5b2438f2e42af54ca968dd1b374d14a1194848955187b0e5e4be1f73813a5212"
],
- "version": "==1.3.3"
+ "version": "==1.3.5"
},
"packaging": {
"hashes": [
- "sha256:28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47",
- "sha256:d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108"
+ "sha256:170748228214b70b672c581a3dd610ee51f733018650740e98c7df862a583f73",
+ "sha256:e665345f9eef0c621aa0bf2f8d78cf6d21904eef16a93f020240b704a57f1334"
],
- "version": "==19.2"
+ "version": "==20.1"
},
"pre-commit": {
"hashes": [
- "sha256:9f152687127ec90642a2cc3e4d9e1e6240c4eb153615cb02aa1ad41d331cbb6e",
- "sha256:c2e4810d2d3102d354947907514a78c5d30424d299dc0fe48f5aa049826e9b50"
+ "sha256:8f48d8637bdae6fa70cc97db9c1dd5aa7c5c8bf71968932a380628c25978b850",
+ "sha256:f92a359477f3252452ae2e8d3029de77aec59415c16ae4189bcfba40b757e029"
],
"index": "pypi",
- "version": "==1.20.0"
+ "version": "==1.21.0"
},
"pycodestyle": {
"hashes": [
@@ -795,10 +795,10 @@
},
"pydocstyle": {
"hashes": [
- "sha256:04c84e034ebb56eb6396c820442b8c4499ac5eb94a3bda88951ac3dc519b6058",
- "sha256:66aff87ffe34b1e49bff2dd03a88ce6843be2f3346b0c9814410d34987fbab59"
+ "sha256:da7831660b7355307b32778c4a0dbfb137d89254ef31a2b2978f50fc0b4d7586",
+ "sha256:f4f5d210610c2d153fae39093d44224c17429e2ad7da12a8b419aba5c2f614b5"
],
- "version": "==4.0.1"
+ "version": "==5.0.2"
},
"pyflakes": {
"hashes": [
@@ -809,37 +809,35 @@
},
"pyparsing": {
"hashes": [
- "sha256:20f995ecd72f2a1f4bf6b072b63b22e2eb457836601e76d6e5dfcd75436acc1f",
- "sha256:4ca62001be367f01bd3e92ecbb79070272a9d4964dce6a48a82ff0b8bc7e683a"
+ "sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f",
+ "sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec"
],
- "version": "==2.4.5"
+ "version": "==2.4.6"
},
"pyyaml": {
"hashes": [
- "sha256:0113bc0ec2ad727182326b61326afa3d1d8280ae1122493553fd6f4397f33df9",
- "sha256:01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4",
- "sha256:5124373960b0b3f4aa7df1707e63e9f109b5263eca5976c66e08b1c552d4eaf8",
- "sha256:5ca4f10adbddae56d824b2c09668e91219bb178a1eee1faa56af6f99f11bf696",
- "sha256:7907be34ffa3c5a32b60b95f4d95ea25361c951383a894fec31be7252b2b6f34",
- "sha256:7ec9b2a4ed5cad025c2278a1e6a19c011c80a3caaac804fd2d329e9cc2c287c9",
- "sha256:87ae4c829bb25b9fe99cf71fbb2140c448f534e24c998cc60f39ae4f94396a73",
- "sha256:9de9919becc9cc2ff03637872a440195ac4241c80536632fffeb6a1e25a74299",
- "sha256:a5a85b10e450c66b49f98846937e8cfca1db3127a9d5d1e31ca45c3d0bef4c5b",
- "sha256:b0997827b4f6a7c286c01c5f60384d218dca4ed7d9efa945c3e1aa623d5709ae",
- "sha256:b631ef96d3222e62861443cc89d6563ba3eeb816eeb96b2629345ab795e53681",
- "sha256:bf47c0607522fdbca6c9e817a6e81b08491de50f3766a7a0e6a5be7905961b41",
- "sha256:f81025eddd0327c7d4cfe9b62cf33190e1e736cc6e97502b3ec425f574b3e7a8"
+ "sha256:059b2ee3194d718896c0ad077dd8c043e5e909d9180f387ce42012662a4946d6",
+ "sha256:1cf708e2ac57f3aabc87405f04b86354f66799c8e62c28c5fc5f88b5521b2dbf",
+ "sha256:24521fa2890642614558b492b473bee0ac1f8057a7263156b02e8b14c88ce6f5",
+ "sha256:4fee71aa5bc6ed9d5f116327c04273e25ae31a3020386916905767ec4fc5317e",
+ "sha256:70024e02197337533eef7b85b068212420f950319cc8c580261963aefc75f811",
+ "sha256:74782fbd4d4f87ff04159e986886931456a1894c61229be9eaf4de6f6e44b99e",
+ "sha256:940532b111b1952befd7db542c370887a8611660d2b9becff75d39355303d82d",
+ "sha256:cb1f2f5e426dc9f07a7681419fe39cee823bb74f723f36f70399123f439e9b20",
+ "sha256:dbbb2379c19ed6042e8f11f2a2c66d39cceb8aeace421bfc29d085d93eda3689",
+ "sha256:e3a057b7a64f1222b56e47bcff5e4b94c4f61faac04c7c4ecb1985e18caa3994",
+ "sha256:e9f45bd5b92c7974e59bcd2dcc8631a6b6cc380a904725fce7bc08872e691615"
],
"index": "pypi",
- "version": "==5.1.2"
+ "version": "==5.3"
},
"requests": {
"hashes": [
- "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4",
- "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"
+ "sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee",
+ "sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6"
],
"index": "pypi",
- "version": "==2.22.0"
+ "version": "==2.23.0"
},
"safety": {
"hashes": [
@@ -851,10 +849,10 @@
},
"six": {
"hashes": [
- "sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd",
- "sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66"
+ "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a",
+ "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c"
],
- "version": "==1.13.0"
+ "version": "==1.14.0"
},
"snowballstemmer": {
"hashes": [
@@ -872,28 +870,30 @@
},
"typed-ast": {
"hashes": [
- "sha256:1170afa46a3799e18b4c977777ce137bb53c7485379d9706af8a59f2ea1aa161",
- "sha256:18511a0b3e7922276346bcb47e2ef9f38fb90fd31cb9223eed42c85d1312344e",
- "sha256:262c247a82d005e43b5b7f69aff746370538e176131c32dda9cb0f324d27141e",
- "sha256:2b907eb046d049bcd9892e3076c7a6456c93a25bebfe554e931620c90e6a25b0",
- "sha256:354c16e5babd09f5cb0ee000d54cfa38401d8b8891eefa878ac772f827181a3c",
- "sha256:48e5b1e71f25cfdef98b013263a88d7145879fbb2d5185f2a0c79fa7ebbeae47",
- "sha256:4e0b70c6fc4d010f8107726af5fd37921b666f5b31d9331f0bd24ad9a088e631",
- "sha256:630968c5cdee51a11c05a30453f8cd65e0cc1d2ad0d9192819df9978984529f4",
- "sha256:66480f95b8167c9c5c5c87f32cf437d585937970f3fc24386f313a4c97b44e34",
- "sha256:71211d26ffd12d63a83e079ff258ac9d56a1376a25bc80b1cdcdf601b855b90b",
- "sha256:7954560051331d003b4e2b3eb822d9dd2e376fa4f6d98fee32f452f52dd6ebb2",
- "sha256:838997f4310012cf2e1ad3803bce2f3402e9ffb71ded61b5ee22617b3a7f6b6e",
- "sha256:95bd11af7eafc16e829af2d3df510cecfd4387f6453355188342c3e79a2ec87a",
- "sha256:bc6c7d3fa1325a0c6613512a093bc2a2a15aeec350451cbdf9e1d4bffe3e3233",
- "sha256:cc34a6f5b426748a507dd5d1de4c1978f2eb5626d51326e43280941206c209e1",
- "sha256:d755f03c1e4a51e9b24d899561fec4ccaf51f210d52abdf8c07ee2849b212a36",
- "sha256:d7c45933b1bdfaf9f36c579671fec15d25b06c8398f113dab64c18ed1adda01d",
- "sha256:d896919306dd0aa22d0132f62a1b78d11aaf4c9fc5b3410d3c666b818191630a",
- "sha256:fdc1c9bbf79510b76408840e009ed65958feba92a88833cdceecff93ae8fff66",
- "sha256:ffde2fbfad571af120fcbfbbc61c72469e72f550d676c3342492a9dfdefb8f12"
- ],
- "version": "==1.4.0"
+ "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355",
+ "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919",
+ "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa",
+ "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652",
+ "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75",
+ "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01",
+ "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d",
+ "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1",
+ "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907",
+ "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c",
+ "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3",
+ "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b",
+ "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614",
+ "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb",
+ "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b",
+ "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41",
+ "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6",
+ "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34",
+ "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe",
+ "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4",
+ "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7"
+ ],
+ "markers": "python_version < '3.8'",
+ "version": "==1.4.1"
},
"unittest-xml-reporting": {
"hashes": [
@@ -913,17 +913,17 @@
},
"virtualenv": {
"hashes": [
- "sha256:11cb4608930d5fd3afb545ecf8db83fa50e1f96fc4fca80c94b07d2c83146589",
- "sha256:d257bb3773e48cac60e475a19b608996c73f4d333b3ba2e4e57d5ac6134e0136"
+ "sha256:08f3623597ce73b85d6854fb26608a6f39ee9d055c81178dc6583803797f8994",
+ "sha256:de2cbdd5926c48d7b84e0300dea9e8f276f61d186e8e49223d71d91250fbaebd"
],
- "version": "==16.7.7"
+ "version": "==20.0.4"
},
"zipp": {
"hashes": [
- "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e",
- "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335"
+ "sha256:12248a63bbdf7548f89cb4c7cda4681e537031eda29c02ea29674bc6854460c2",
+ "sha256:7c0f8e91abc0dc07a5068f315c52cb30c66bfbc581e5b50704c8a2f6ebae794a"
],
- "version": "==0.6.0"
+ "version": "==3.0.0"
}
}
}
diff --git a/README.md b/README.md
index 7a7f1b992..1e7b21271 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
# Python Utility Bot
-[![Discord](https://img.shields.io/discord/267624335836053506?color=%237289DA&label=Python%20Discord&logo=discord&logoColor=white)](https://discord.gg/2B963hn)
+[![Discord](https://img.shields.io/static/v1?label=Python%20Discord&logo=discord&message=%3E30k%20members&color=%237289DA&logoColor=white)](https://discord.gg/2B963hn)
[![Build Status](https://dev.azure.com/python-discord/Python%20Discord/_apis/build/status/Bot?branchName=master)](https://dev.azure.com/python-discord/Python%20Discord/_build/latest?definitionId=1&branchName=master)
[![Tests](https://img.shields.io/azure-devops/tests/python-discord/Python%20Discord/1?compact_message)](https://dev.azure.com/python-discord/Python%20Discord/_apis/build/status/Bot?branchName=master)
[![Coverage](https://img.shields.io/azure-devops/coverage/python-discord/Python%20Discord/1/master)](https://dev.azure.com/python-discord/Python%20Discord/_apis/build/status/Bot?branchName=master)
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index da3b06201..874364a6f 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -30,7 +30,7 @@ jobs:
- script: python -m flake8
displayName: 'Run linter'
- - script: BOT_API_KEY=foo BOT_TOKEN=bar WOLFRAM_API_KEY=baz coverage run -m xmlrunner
+ - script: BOT_API_KEY=foo BOT_SENTRY_DSN=blah BOT_TOKEN=bar WOLFRAM_API_KEY=baz REDDIT_CLIENT_ID=spam REDDIT_SECRET=ham coverage run -m xmlrunner
displayName: Run tests
- script: coverage report -m && coverage xml -o coverage.xml
diff --git a/bot/__init__.py b/bot/__init__.py
index 4a2df730d..f7a410706 100644
--- a/bot/__init__.py
+++ b/bot/__init__.py
@@ -4,10 +4,8 @@ import sys
from logging import Logger, StreamHandler, handlers
from pathlib import Path
-from logmatic import JsonFormatter
-
-logging.TRACE = 5
-logging.addLevelName(logging.TRACE, "TRACE")
+TRACE_LEVEL = logging.TRACE = 5
+logging.addLevelName(TRACE_LEVEL, "TRACE")
def monkeypatch_trace(self: logging.Logger, msg: str, *args, **kwargs) -> None:
@@ -19,75 +17,29 @@ def monkeypatch_trace(self: logging.Logger, msg: str, *args, **kwargs) -> None:
logger.trace("Houston, we have an %s", "interesting problem", exc_info=1)
"""
- if self.isEnabledFor(logging.TRACE):
- self._log(logging.TRACE, msg, args, **kwargs)
+ if self.isEnabledFor(TRACE_LEVEL):
+ self._log(TRACE_LEVEL, msg, args, **kwargs)
Logger.trace = monkeypatch_trace
-# Set up logging
-logging_handlers = []
-
-# We can't import this yet, so we have to define it ourselves
-DEBUG_MODE = True if 'local' in os.environ.get("SITE_URL", "local") else False
-
-LOG_DIR = Path("logs")
-LOG_DIR.mkdir(exist_ok=True)
-
-if DEBUG_MODE:
- logging_handlers.append(StreamHandler(stream=sys.stdout))
-
- json_handler = logging.FileHandler(filename=Path(LOG_DIR, "log.json"), mode="w")
- json_handler.formatter = JsonFormatter()
- logging_handlers.append(json_handler)
-else:
-
- logfile = Path(LOG_DIR, "bot.log")
- megabyte = 1048576
-
- filehandler = handlers.RotatingFileHandler(logfile, maxBytes=(megabyte*5), backupCount=7)
- logging_handlers.append(filehandler)
-
- json_handler = logging.StreamHandler(stream=sys.stdout)
- json_handler.formatter = JsonFormatter()
- logging_handlers.append(json_handler)
-
-
-logging.basicConfig(
- format="%(asctime)s Bot: | %(name)33s | %(levelname)8s | %(message)s",
- datefmt="%b %d %H:%M:%S",
- level=logging.TRACE if DEBUG_MODE else logging.INFO,
- handlers=logging_handlers
-)
-
-log = logging.getLogger(__name__)
-
-
-for key, value in logging.Logger.manager.loggerDict.items():
- # Force all existing loggers to the correct level and handlers
- # This happens long before we instantiate our loggers, so
- # those should still have the expected level
-
- if key == "bot":
- continue
-
- if not isinstance(value, logging.Logger):
- # There might be some logging.PlaceHolder objects in there
- continue
+DEBUG_MODE = 'local' in os.environ.get("SITE_URL", "local")
- if DEBUG_MODE:
- value.setLevel(logging.DEBUG)
- else:
- value.setLevel(logging.INFO)
+log_format = logging.Formatter("%(asctime)s | %(name)s | %(levelname)s | %(message)s")
- for handler in value.handlers.copy():
- value.removeHandler(handler)
+stream_handler = StreamHandler(stream=sys.stdout)
+stream_handler.setFormatter(log_format)
- for handler in logging_handlers:
- value.addHandler(handler)
+log_file = Path("logs", "bot.log")
+log_file.parent.mkdir(exist_ok=True)
+file_handler = handlers.RotatingFileHandler(log_file, maxBytes=5242880, backupCount=7)
+file_handler.setFormatter(log_format)
+root_log = logging.getLogger()
+root_log.setLevel(TRACE_LEVEL if DEBUG_MODE else logging.INFO)
+root_log.addHandler(stream_handler)
+root_log.addHandler(file_handler)
-# Silence irrelevant loggers
-logging.getLogger("aio_pika").setLevel(logging.ERROR)
-logging.getLogger("discord").setLevel(logging.ERROR)
-logging.getLogger("websockets").setLevel(logging.ERROR)
+logging.getLogger("discord").setLevel(logging.WARNING)
+logging.getLogger("websockets").setLevel(logging.WARNING)
+logging.getLogger(__name__)
diff --git a/bot/__main__.py b/bot/__main__.py
index f352cd60e..d21a1bcbc 100644
--- a/bot/__main__.py
+++ b/bot/__main__.py
@@ -1,17 +1,23 @@
-import asyncio
import logging
-import socket
import discord
-from aiohttp import AsyncResolver, ClientSession, TCPConnector
-from discord.ext.commands import Bot, when_mentioned_or
+import sentry_sdk
+from discord.ext.commands import when_mentioned_or
+from sentry_sdk.integrations.logging import LoggingIntegration
from bot import patches
-from bot.api import APIClient, APILoggingHandler
+from bot.bot import Bot
from bot.constants import Bot as BotConfig, DEBUG_MODE
+sentry_logging = LoggingIntegration(
+ level=logging.DEBUG,
+ event_level=logging.WARNING
+)
-log = logging.getLogger('bot')
+sentry_sdk.init(
+ dsn=BotConfig.sentry_dsn,
+ integrations=[sentry_logging]
+)
bot = Bot(
command_prefix=when_mentioned_or(BotConfig.prefix),
@@ -20,23 +26,12 @@ bot = Bot(
max_messages=10_000,
)
-# Global aiohttp session for all cogs
-# - Uses asyncio for DNS resolution instead of threads, so we don't spam threads
-# - Uses AF_INET as its socket family to prevent https related problems both locally and in prod.
-bot.http_session = ClientSession(
- connector=TCPConnector(
- resolver=AsyncResolver(),
- family=socket.AF_INET,
- )
-)
-bot.api_client = APIClient(loop=asyncio.get_event_loop())
-log.addHandler(APILoggingHandler(bot.api_client))
-
# Internal/debug
bot.load_extension("bot.cogs.error_handler")
bot.load_extension("bot.cogs.filtering")
bot.load_extension("bot.cogs.logging")
bot.load_extension("bot.cogs.security")
+bot.load_extension("bot.cogs.config_verifier")
# Commands, etc
bot.load_extension("bot.cogs.antimalware")
@@ -55,6 +50,7 @@ if not DEBUG_MODE:
bot.load_extension("bot.cogs.alias")
bot.load_extension("bot.cogs.defcon")
bot.load_extension("bot.cogs.eval")
+bot.load_extension("bot.cogs.duck_pond")
bot.load_extension("bot.cogs.free")
bot.load_extension("bot.cogs.information")
bot.load_extension("bot.cogs.jams")
@@ -76,6 +72,3 @@ if not hasattr(discord.message.Message, '_handle_edited_timestamp'):
patches.message_edited_at.apply_patch()
bot.run(BotConfig.token)
-
-# This calls a coroutine, so it doesn't do anything at the moment.
-# bot.http_session.close() # Close the aiohttp session when the bot finishes running
diff --git a/bot/api.py b/bot/api.py
index 7f26e5305..e59916114 100644
--- a/bot/api.py
+++ b/bot/api.py
@@ -32,7 +32,12 @@ class ResponseCodeError(ValueError):
class APIClient:
"""Django Site API wrapper."""
- def __init__(self, **kwargs):
+ # These are class attributes so they can be seen when being mocked for tests.
+ # See commit 22a55534ef13990815a6f69d361e2a12693075d5 for details.
+ session: Optional[aiohttp.ClientSession] = None
+ loop: asyncio.AbstractEventLoop = None
+
+ def __init__(self, loop: asyncio.AbstractEventLoop, **kwargs):
auth_headers = {
'Authorization': f"Token {Keys.site_api}"
}
@@ -42,12 +47,39 @@ class APIClient:
else:
kwargs['headers'] = auth_headers
- self.session = aiohttp.ClientSession(**kwargs)
+ self.session = None
+ self.loop = loop
+
+ self._ready = asyncio.Event(loop=loop)
+ self._creation_task = None
+ self._session_args = kwargs
+
+ self.recreate()
@staticmethod
def _url_for(endpoint: str) -> str:
return f"{URLs.site_schema}{URLs.site_api}/{quote_url(endpoint)}"
+ async def _create_session(self) -> None:
+ """Create the aiohttp session and set the ready event."""
+ self.session = aiohttp.ClientSession(**self._session_args)
+ self._ready.set()
+
+ async def close(self) -> None:
+ """Close the aiohttp session and unset the ready event."""
+ if not self._ready.is_set():
+ return
+
+ await self.session.close()
+ self._ready.clear()
+
+ def recreate(self) -> None:
+ """Schedule the aiohttp session to be created if it's been closed."""
+ if self.session is None or self.session.closed:
+ # Don't schedule a task if one is already in progress.
+ if self._creation_task is None or self._creation_task.done():
+ self._creation_task = self.loop.create_task(self._create_session())
+
async def maybe_raise_for_status(self, response: aiohttp.ClientResponse, should_raise: bool) -> None:
"""Raise ResponseCodeError for non-OK response if an exception should be raised."""
if should_raise and response.status >= 400:
@@ -58,33 +90,35 @@ class APIClient:
response_text = await response.text()
raise ResponseCodeError(response=response, response_text=response_text)
- async def get(self, endpoint: str, *args, raise_for_status: bool = True, **kwargs) -> dict:
- """Site API GET."""
- async with self.session.get(self._url_for(endpoint), *args, **kwargs) as resp:
+ async def request(self, method: str, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> dict:
+ """Send an HTTP request to the site API and return the JSON response."""
+ await self._ready.wait()
+
+ async with self.session.request(method.upper(), self._url_for(endpoint), **kwargs) as resp:
await self.maybe_raise_for_status(resp, raise_for_status)
return await resp.json()
- async def patch(self, endpoint: str, *args, raise_for_status: bool = True, **kwargs) -> dict:
+ async def get(self, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> dict:
+ """Site API GET."""
+ return await self.request("GET", endpoint, raise_for_status=raise_for_status, **kwargs)
+
+ async def patch(self, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> dict:
"""Site API PATCH."""
- async with self.session.patch(self._url_for(endpoint), *args, **kwargs) as resp:
- await self.maybe_raise_for_status(resp, raise_for_status)
- return await resp.json()
+ return await self.request("PATCH", endpoint, raise_for_status=raise_for_status, **kwargs)
- async def post(self, endpoint: str, *args, raise_for_status: bool = True, **kwargs) -> dict:
+ async def post(self, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> dict:
"""Site API POST."""
- async with self.session.post(self._url_for(endpoint), *args, **kwargs) as resp:
- await self.maybe_raise_for_status(resp, raise_for_status)
- return await resp.json()
+ return await self.request("POST", endpoint, raise_for_status=raise_for_status, **kwargs)
- async def put(self, endpoint: str, *args, raise_for_status: bool = True, **kwargs) -> dict:
+ async def put(self, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> dict:
"""Site API PUT."""
- async with self.session.put(self._url_for(endpoint), *args, **kwargs) as resp:
- await self.maybe_raise_for_status(resp, raise_for_status)
- return await resp.json()
+ return await self.request("PUT", endpoint, raise_for_status=raise_for_status, **kwargs)
- async def delete(self, endpoint: str, *args, raise_for_status: bool = True, **kwargs) -> Optional[dict]:
+ async def delete(self, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> Optional[dict]:
"""Site API DELETE."""
- async with self.session.delete(self._url_for(endpoint), *args, **kwargs) as resp:
+ await self._ready.wait()
+
+ async with self.session.delete(self._url_for(endpoint), **kwargs) as resp:
if resp.status == 204:
return None
@@ -104,77 +138,3 @@ def loop_is_running() -> bool:
except RuntimeError:
return False
return True
-
-
-class APILoggingHandler(logging.StreamHandler):
- """Site API logging handler."""
-
- def __init__(self, client: APIClient):
- logging.StreamHandler.__init__(self)
- self.client = client
-
- # internal batch of shipoff tasks that must not be scheduled
- # on the event loop yet - scheduled when the event loop is ready.
- self.queue = []
-
- async def ship_off(self, payload: dict) -> None:
- """Ship log payload to the logging API."""
- try:
- await self.client.post('logs', json=payload)
- except ResponseCodeError as err:
- log.warning(
- "Cannot send logging record to the site, got code %d.",
- err.response.status,
- extra={'via_handler': True}
- )
- except Exception as err:
- log.warning(
- "Cannot send logging record to the site: %r",
- err,
- extra={'via_handler': True}
- )
-
- def emit(self, record: logging.LogRecord) -> None:
- """
- Determine if a log record should be shipped to the logging API.
-
- If the asyncio event loop is not yet running, log records will instead be put in a queue
- which will be consumed once the event loop is running.
-
- The following two conditions are set:
- 1. Do not log anything below DEBUG (only applies to the monkeypatched `TRACE` level)
- 2. Ignore log records originating from this logging handler itself to prevent infinite recursion
- """
- if (
- record.levelno >= logging.DEBUG
- and not record.__dict__.get('via_handler')
- ):
- payload = {
- 'application': 'bot',
- 'logger_name': record.name,
- 'level': record.levelname.lower(),
- 'module': record.module,
- 'line': record.lineno,
- 'message': self.format(record)
- }
-
- task = self.ship_off(payload)
- if not loop_is_running():
- self.queue.append(task)
- else:
- asyncio.create_task(task)
- self.schedule_queued_tasks()
-
- def schedule_queued_tasks(self) -> None:
- """Consume the queue and schedule the logging of each queued record."""
- for task in self.queue:
- asyncio.create_task(task)
-
- if self.queue:
- log.debug(
- "Scheduled %d pending logging tasks.",
- len(self.queue),
- extra={'via_handler': True}
- )
-
- self.queue.clear()
diff --git a/bot/bot.py b/bot/bot.py
new file mode 100644
index 000000000..19b9035c4
--- /dev/null
+++ b/bot/bot.py
@@ -0,0 +1,97 @@
+import asyncio
+import logging
+import socket
+from typing import Optional
+
+import aiohttp
+import discord
+from discord.ext import commands
+
+from bot import api
+from bot import constants
+
+log = logging.getLogger('bot')
+
+
+class Bot(commands.Bot):
+ """A subclass of `discord.ext.commands.Bot` with an aiohttp session and an API client."""
+
+ def __init__(self, *args, **kwargs):
+ # Use asyncio for DNS resolution instead of threads so threads aren't spammed.
+ # Use AF_INET as its socket family to prevent HTTPS related problems both locally
+ # and in production.
+ self._connector = aiohttp.TCPConnector(
+ resolver=aiohttp.AsyncResolver(),
+ family=socket.AF_INET,
+ )
+
+ super().__init__(*args, connector=self._connector, **kwargs)
+
+ self._guild_available = asyncio.Event()
+
+ self.http_session: Optional[aiohttp.ClientSession] = None
+ self.api_client = api.APIClient(loop=self.loop, connector=self._connector)
+
+ def add_cog(self, cog: commands.Cog) -> None:
+ """Adds a "cog" to the bot and logs the operation."""
+ super().add_cog(cog)
+ log.info(f"Cog loaded: {cog.qualified_name}")
+
+ def clear(self) -> None:
+ """Clears the internal state of the bot and resets the API client."""
+ super().clear()
+ self.api_client.recreate()
+
+ async def close(self) -> None:
+ """Close the aiohttp session after closing the Discord connection."""
+ await super().close()
+
+ await self.http_session.close()
+ await self.api_client.close()
+
+ async def start(self, *args, **kwargs) -> None:
+ """Open an aiohttp session before logging in and connecting to Discord."""
+ self.http_session = aiohttp.ClientSession(connector=self._connector)
+
+ await super().start(*args, **kwargs)
+
+ async def on_guild_available(self, guild: discord.Guild) -> None:
+ """
+ Set the internal guild available event when constants.Guild.id becomes available.
+
+ If the cache appears to still be empty (no members, no channels, or no roles), the event
+ will not be set.
+ """
+ if guild.id != constants.Guild.id:
+ return
+
+ if not guild.roles or not guild.members or not guild.channels:
+ msg = "Guild available event was dispatched but the cache appears to still be empty!"
+ log.warning(msg)
+
+ try:
+ webhook = await self.fetch_webhook(constants.Webhooks.dev_log)
+ except discord.HTTPException as e:
+ log.error(f"Failed to fetch webhook to send empty cache warning: status {e.status}")
+ else:
+ await webhook.send(f"<@&{constants.Roles.admin}> {msg}")
+
+ return
+
+ self._guild_available.set()
+
+ async def on_guild_unavailable(self, guild: discord.Guild) -> None:
+ """Clear the internal guild available event when constants.Guild.id becomes unavailable."""
+ if guild.id != constants.Guild.id:
+ return
+
+ self._guild_available.clear()
+
+ async def wait_until_guild_available(self) -> None:
+ """
+ Wait until the constants.Guild.id guild is available (and the cache is ready).
+
+ The on_ready event is inadequate because it only waits 2 seconds for a GUILD_CREATE
+ gateway event before giving up and thus not populating the cache for unavailable guilds.
+ """
+ await self._guild_available.wait()
diff --git a/bot/cogs/alias.py b/bot/cogs/alias.py
index 5190c559b..0b800575f 100644
--- a/bot/cogs/alias.py
+++ b/bot/cogs/alias.py
@@ -1,13 +1,15 @@
import inspect
import logging
-from typing import Union
-from discord import Colour, Embed, Member, User
-from discord.ext.commands import Bot, Cog, Command, Context, clean_content, command, group
+from discord import Colour, Embed
+from discord.ext.commands import (
+ Cog, Command, Context, Greedy,
+ clean_content, command, group,
+)
+from bot.bot import Bot
from bot.cogs.extensions import Extension
-from bot.cogs.watchchannels.watchchannel import proxy_user
-from bot.converters import TagNameConverter
+from bot.converters import FetchedMember, TagNameConverter
from bot.pagination import LinePaginator
log = logging.getLogger(__name__)
@@ -60,12 +62,12 @@ class Alias (Cog):
await self.invoke(ctx, "site tools")
@command(name="watch", hidden=True)
- async def bigbrother_watch_alias(self, ctx: Context, user: Union[Member, User, proxy_user], *, reason: str) -> None:
+ async def bigbrother_watch_alias(self, ctx: Context, user: FetchedMember, *, reason: str) -> None:
"""Alias for invoking <prefix>bigbrother watch [user] [reason]."""
await self.invoke(ctx, "bigbrother watch", user, reason=reason)
@command(name="unwatch", hidden=True)
- async def bigbrother_unwatch_alias(self, ctx: Context, user: Union[User, proxy_user], *, reason: str) -> None:
+ async def bigbrother_unwatch_alias(self, ctx: Context, user: FetchedMember, *, reason: str) -> None:
"""Alias for invoking <prefix>bigbrother unwatch [user] [reason]."""
await self.invoke(ctx, "bigbrother unwatch", user, reason=reason)
@@ -80,7 +82,7 @@ class Alias (Cog):
await self.invoke(ctx, "site faq")
@command(name="rules", aliases=("rule",), hidden=True)
- async def site_rules_alias(self, ctx: Context, *rules: int) -> None:
+ async def site_rules_alias(self, ctx: Context, rules: Greedy[int], *_: str) -> None:
"""Alias for invoking <prefix>site rules."""
await self.invoke(ctx, "site rules", *rules)
@@ -131,12 +133,12 @@ class Alias (Cog):
await self.invoke(ctx, "docs get", symbol)
@command(name="nominate", hidden=True)
- async def nomination_add_alias(self, ctx: Context, user: Union[Member, User, proxy_user], *, reason: str) -> None:
+ async def nomination_add_alias(self, ctx: Context, user: FetchedMember, *, reason: str) -> None:
"""Alias for invoking <prefix>talentpool add [user] [reason]."""
await self.invoke(ctx, "talentpool add", user, reason=reason)
@command(name="unnominate", hidden=True)
- async def nomination_end_alias(self, ctx: Context, user: Union[User, proxy_user], *, reason: str) -> None:
+ async def nomination_end_alias(self, ctx: Context, user: FetchedMember, *, reason: str) -> None:
"""Alias for invoking <prefix>nomination end [user] [reason]."""
await self.invoke(ctx, "nomination end", user, reason=reason)
@@ -147,6 +149,5 @@ class Alias (Cog):
def setup(bot: Bot) -> None:
- """Alias cog load."""
+ """Load the Alias cog."""
bot.add_cog(Alias(bot))
- log.info("Cog loaded: Alias")
diff --git a/bot/cogs/antimalware.py b/bot/cogs/antimalware.py
index ababd6f18..9e9e81364 100644
--- a/bot/cogs/antimalware.py
+++ b/bot/cogs/antimalware.py
@@ -1,9 +1,10 @@
import logging
-from discord import Message, NotFound
-from discord.ext.commands import Bot, Cog
+from discord import Embed, Message, NotFound
+from discord.ext.commands import Cog
-from bot.constants import AntiMalware as AntiMalwareConfig, Channels
+from bot.bot import Bot
+from bot.constants import AntiMalware as AntiMalwareConfig, Channels, STAFF_ROLES, URLs
log = logging.getLogger(__name__)
@@ -17,31 +18,35 @@ class AntiMalware(Cog):
@Cog.listener()
async def on_message(self, message: Message) -> None:
"""Identify messages with prohibited attachments."""
- rejected_attachments = False
- detected_pyfile = False
+        # Return if the message has no attachments or was sent in a DM (we don't moderate DMs)
+ if not message.attachments or not message.guild:
+ return
+
+        # If the user is a staff member, return early
+ # Since we only care that roles exist to iterate over, check for the attr rather than a User/Member instance
+ if hasattr(message.author, "roles") and any(role.id in STAFF_ROLES for role in message.author.roles):
+ return
+
+ embed = Embed()
for attachment in message.attachments:
- if attachment.filename.lower().endswith('.py'):
- detected_pyfile = True
- break # Other detections irrelevant because we prioritize the .py message.
- if not attachment.filename.lower().endswith(tuple(AntiMalwareConfig.whitelist)):
- rejected_attachments = True
-
- if detected_pyfile or rejected_attachments:
- # Send a message to the user indicating the problem (with special treatment for .py)
- author = message.author
- if detected_pyfile:
- msg = (
- f"{author.mention}, it looks like you tried to attach a Python file - please "
- f"use a code-pasting service such as https://paste.pythondiscord.com/ instead."
+ filename = attachment.filename.lower()
+ if filename.endswith('.py'):
+ embed.description = (
+ f"It looks like you tried to attach a Python file - please "
+ f"use a code-pasting service such as {URLs.site_schema}{URLs.site_paste}"
)
- else:
+ break # Other detections irrelevant because we prioritize the .py message.
+ if not filename.endswith(tuple(AntiMalwareConfig.whitelist)):
+ whitelisted_types = ', '.join(AntiMalwareConfig.whitelist)
meta_channel = self.bot.get_channel(Channels.meta)
- msg = (
- f"{author.mention}, it looks like you tried to attach a file type we don't "
- f"allow. Feel free to ask in {meta_channel.mention} if you think this is a mistake."
+ embed.description = (
+ f"It looks like you tried to attach a file type that we "
+ f"do not allow. We currently allow the following file "
+ f"types: **{whitelisted_types}**. \n\n Feel free to ask "
+ f"in {meta_channel.mention} if you think this is a mistake."
)
-
- await message.channel.send(msg)
+ if embed.description:
+ await message.channel.send(f"Hey {message.author.mention}!", embed=embed)
# Delete the offending message:
try:
@@ -51,6 +56,5 @@ class AntiMalware(Cog):
def setup(bot: Bot) -> None:
- """Antimalware cog load."""
+ """Load the AntiMalware cog."""
bot.add_cog(AntiMalware(bot))
- log.info("Cog loaded: AntiMalware")
diff --git a/bot/cogs/antispam.py b/bot/cogs/antispam.py
index 1340eb608..baa6b9459 100644
--- a/bot/cogs/antispam.py
+++ b/bot/cogs/antispam.py
@@ -7,9 +7,10 @@ from operator import itemgetter
from typing import Dict, Iterable, List, Set
from discord import Colour, Member, Message, NotFound, Object, TextChannel
-from discord.ext.commands import Bot, Cog
+from discord.ext.commands import Cog
from bot import rules
+from bot.bot import Bot
from bot.cogs.moderation import ModLog
from bot.constants import (
AntiSpam as AntiSpamConfig, Channels,
@@ -18,6 +19,7 @@ from bot.constants import (
STAFF_ROLES,
)
from bot.converters import Duration
+from bot.utils.messages import send_attachments
log = logging.getLogger(__name__)
@@ -44,8 +46,9 @@ class DeletionContext:
members: Dict[int, Member] = field(default_factory=dict)
rules: Set[str] = field(default_factory=set)
messages: Dict[int, Message] = field(default_factory=dict)
+ attachments: List[List[str]] = field(default_factory=list)
- def add(self, rule_name: str, members: Iterable[Member], messages: Iterable[Message]) -> None:
+ async def add(self, rule_name: str, members: Iterable[Member], messages: Iterable[Message]) -> None:
"""Adds new rule violation events to the deletion context."""
self.rules.add(rule_name)
@@ -57,6 +60,11 @@ class DeletionContext:
if message.id not in self.messages:
self.messages[message.id] = message
+ # Re-upload attachments
+ destination = message.guild.get_channel(Channels.attachment_log)
+ urls = await send_attachments(message, destination, link_large=False)
+ self.attachments.append(urls)
+
async def upload_messages(self, actor_id: int, modlog: ModLog) -> None:
"""Method that takes care of uploading the queue and posting modlog alert."""
triggered_by_users = ", ".join(f"{m} (`{m.id}`)" for m in self.members.values())
@@ -69,7 +77,7 @@ class DeletionContext:
# For multiple messages or those with excessive newlines, use the logs API
if len(self.messages) > 1 or 'newlines' in self.rules:
- url = await modlog.upload_log(self.messages.values(), actor_id)
+ url = await modlog.upload_log(self.messages.values(), actor_id, self.attachments)
mod_alert_message += f"A complete log of the offending messages can be found [here]({url})"
else:
mod_alert_message += "Message:\n"
@@ -97,7 +105,7 @@ class DeletionContext:
class AntiSpam(Cog):
"""Cog that controls our anti-spam measures."""
- def __init__(self, bot: Bot, validation_errors: bool) -> None:
+ def __init__(self, bot: Bot, validation_errors: Dict[str, str]) -> None:
self.bot = bot
self.validation_errors = validation_errors
role_id = AntiSpamConfig.punishment['role_id']
@@ -105,7 +113,6 @@ class AntiSpam(Cog):
self.expiration_date_converter = Duration()
self.message_deletion_queue = dict()
- self.queue_consumption_tasks = dict()
self.bot.loop.create_task(self.alert_on_validation_error())
@@ -116,7 +123,7 @@ class AntiSpam(Cog):
async def alert_on_validation_error(self) -> None:
"""Unloads the cog and alerts admins if configuration validation failed."""
- await self.bot.wait_until_ready()
+ await self.bot.wait_until_guild_available()
if self.validation_errors:
body = "**The following errors were encountered:**\n"
body += "\n".join(f"- {error}" for error in self.validation_errors.values())
@@ -179,15 +186,14 @@ class AntiSpam(Cog):
full_reason = f"`{rule_name}` rule: {reason}"
# If there's no spam event going on for this channel, start a new Message Deletion Context
- if message.channel.id not in self.message_deletion_queue:
- log.trace(f"Creating queue for channel `{message.channel.id}`")
- self.message_deletion_queue[message.channel.id] = DeletionContext(channel=message.channel)
- self.queue_consumption_tasks = self.bot.loop.create_task(
- self._process_deletion_context(message.channel.id)
- )
+ channel = message.channel
+ if channel.id not in self.message_deletion_queue:
+ log.trace(f"Creating queue for channel `{channel.id}`")
+ self.message_deletion_queue[message.channel.id] = DeletionContext(channel)
+ self.bot.loop.create_task(self._process_deletion_context(message.channel.id))
         # Add the relevant messages of this trigger to the Deletion Context
- self.message_deletion_queue[message.channel.id].add(
+ await self.message_deletion_queue[message.channel.id].add(
rule_name=rule_name,
members=members,
messages=relevant_messages
@@ -201,7 +207,7 @@ class AntiSpam(Cog):
self.punish(message, member, full_reason)
)
- await self.maybe_delete_messages(message.channel, relevant_messages)
+ await self.maybe_delete_messages(channel, relevant_messages)
break
async def punish(self, msg: Message, member: Member, reason: str) -> None:
@@ -254,10 +260,10 @@ class AntiSpam(Cog):
await deletion_context.upload_messages(self.bot.user.id, self.mod_log)
-def validate_config(rules: Mapping = AntiSpamConfig.rules) -> Dict[str, str]:
+def validate_config(rules_: Mapping = AntiSpamConfig.rules) -> Dict[str, str]:
"""Validates the antispam configs."""
validation_errors = {}
- for name, config in rules.items():
+ for name, config in rules_.items():
if name not in RULE_FUNCTION_MAPPING:
log.error(
f"Unrecognized antispam rule `{name}`. "
@@ -276,7 +282,6 @@ def validate_config(rules: Mapping = AntiSpamConfig.rules) -> Dict[str, str]:
def setup(bot: Bot) -> None:
- """Antispam cog load."""
+ """Validate the AntiSpam configs and load the AntiSpam cog."""
validation_errors = validate_config()
bot.add_cog(AntiSpam(bot, validation_errors))
- log.info("Cog loaded: AntiSpam")
diff --git a/bot/cogs/bot.py b/bot/cogs/bot.py
index 7583b2f2d..73b1e8f41 100644
--- a/bot/cogs/bot.py
+++ b/bot/cogs/bot.py
@@ -4,9 +4,11 @@ import re
import time
from typing import Optional, Tuple
-from discord import Embed, Message, RawMessageUpdateEvent
-from discord.ext.commands import Bot, Cog, Context, command, group
+from discord import Embed, Message, RawMessageUpdateEvent, TextChannel
+from discord.ext.commands import Cog, Context, command, group
+from bot.bot import Bot
+from bot.cogs.token_remover import TokenRemover
from bot.constants import Channels, DEBUG_MODE, Guild, MODERATION_ROLES, Roles, URLs
from bot.decorators import with_role
from bot.utils.messages import wait_for_deletion
@@ -16,7 +18,7 @@ log = logging.getLogger(__name__)
RE_MARKDOWN = re.compile(r'([*_~`|>])')
-class Bot(Cog):
+class BotCog(Cog, name="Bot"):
"""Bot information commands."""
def __init__(self, bot: Bot):
@@ -71,9 +73,12 @@ class Bot(Cog):
@command(name='echo', aliases=('print',))
@with_role(*MODERATION_ROLES)
- async def echo_command(self, ctx: Context, *, text: str) -> None:
- """Send the input verbatim to the current channel."""
- await ctx.send(text)
+ async def echo_command(self, ctx: Context, channel: Optional[TextChannel], *, text: str) -> None:
+ """Repeat the given message in either a specified channel or the current channel."""
+ if channel is None:
+ await ctx.send(text)
+ else:
+ await channel.send(text)
@command(name='embed')
@with_role(*MODERATION_ROLES)
@@ -235,9 +240,10 @@ class Bot(Cog):
)
and not msg.author.bot
and len(msg.content.splitlines()) > 3
+ and not TokenRemover.is_token_in_message(msg)
)
- if parse_codeblock:
+ if parse_codeblock: # no token in the msg
on_cooldown = (time.time() - self.channel_cooldowns.get(msg.channel.id, 0)) < 300
if not on_cooldown or DEBUG_MODE:
try:
@@ -370,10 +376,9 @@ class Bot(Cog):
bot_message = await channel.fetch_message(self.codeblock_message_ids[payload.message_id])
await bot_message.delete()
del self.codeblock_message_ids[payload.message_id]
- log.trace("User's incorrect code block has been fixed. Removing bot formatting message.")
+ log.trace("User's incorrect code block has been fixed. Removing bot formatting message.")
def setup(bot: Bot) -> None:
- """Bot cog load."""
- bot.add_cog(Bot(bot))
- log.info("Cog loaded: Bot")
+ """Load the Bot cog."""
+ bot.add_cog(BotCog(bot))
diff --git a/bot/cogs/clean.py b/bot/cogs/clean.py
index dca411d01..2104efe57 100644
--- a/bot/cogs/clean.py
+++ b/bot/cogs/clean.py
@@ -3,9 +3,10 @@ import random
import re
from typing import Optional
-from discord import Colour, Embed, Message, User
-from discord.ext.commands import Bot, Cog, Context, group
+from discord import Colour, Embed, Message, TextChannel, User
+from discord.ext.commands import Cog, Context, group
+from bot.bot import Bot
from bot.cogs.moderation import ModLog
from bot.constants import (
Channels, CleanMessages, Colours, Event,
@@ -37,9 +38,13 @@ class Clean(Cog):
return self.bot.get_cog("ModLog")
async def _clean_messages(
- self, amount: int, ctx: Context,
- bots_only: bool = False, user: User = None,
- regex: Optional[str] = None
+ self,
+ amount: int,
+ ctx: Context,
+ bots_only: bool = False,
+ user: User = None,
+ regex: Optional[str] = None,
+ channel: Optional[TextChannel] = None
) -> None:
"""A helper function that does the actual message cleaning."""
def predicate_bots_only(message: Message) -> bool:
@@ -104,6 +109,10 @@ class Clean(Cog):
else:
predicate = None # Delete all messages
+ # Default to using the invoking context's channel
+ if not channel:
+ channel = ctx.channel
+
# Look through the history and retrieve message data
messages = []
message_ids = []
@@ -111,7 +120,7 @@ class Clean(Cog):
invocation_deleted = False
# To account for the invocation message, we index `amount + 1` messages.
- async for message in ctx.channel.history(limit=amount + 1):
+ async for message in channel.history(limit=amount + 1):
# If at any point the cancel command is invoked, we should stop.
if not self.cleaning:
@@ -135,7 +144,7 @@ class Clean(Cog):
self.mod_log.ignore(Event.message_delete, *message_ids)
# Use bulk delete to actually do the cleaning. It's far faster.
- await ctx.channel.purge(
+ await channel.purge(
limit=amount,
check=predicate
)
@@ -155,7 +164,7 @@ class Clean(Cog):
# Build the embed and send it
message = (
- f"**{len(message_ids)}** messages deleted in <#{ctx.channel.id}> by **{ctx.author.name}**\n\n"
+ f"**{len(message_ids)}** messages deleted in <#{channel.id}> by **{ctx.author.name}**\n\n"
f"A log of the deleted messages can be found [here]({log_url})."
)
@@ -167,7 +176,7 @@ class Clean(Cog):
channel_id=Channels.modlog,
)
- @group(invoke_without_command=True, name="clean", hidden=True)
+ @group(invoke_without_command=True, name="clean", aliases=["purge"])
@with_role(*MODERATION_ROLES)
async def clean_group(self, ctx: Context) -> None:
"""Commands for cleaning messages in channels."""
@@ -175,27 +184,49 @@ class Clean(Cog):
@clean_group.command(name="user", aliases=["users"])
@with_role(*MODERATION_ROLES)
- async def clean_user(self, ctx: Context, user: User, amount: int = 10) -> None:
+ async def clean_user(
+ self,
+ ctx: Context,
+ user: User,
+ amount: Optional[int] = 10,
+ channel: TextChannel = None
+ ) -> None:
"""Delete messages posted by the provided user, stop cleaning after traversing `amount` messages."""
- await self._clean_messages(amount, ctx, user=user)
+ await self._clean_messages(amount, ctx, user=user, channel=channel)
@clean_group.command(name="all", aliases=["everything"])
@with_role(*MODERATION_ROLES)
- async def clean_all(self, ctx: Context, amount: int = 10) -> None:
+ async def clean_all(
+ self,
+ ctx: Context,
+ amount: Optional[int] = 10,
+ channel: TextChannel = None
+ ) -> None:
"""Delete all messages, regardless of poster, stop cleaning after traversing `amount` messages."""
- await self._clean_messages(amount, ctx)
+ await self._clean_messages(amount, ctx, channel=channel)
@clean_group.command(name="bots", aliases=["bot"])
@with_role(*MODERATION_ROLES)
- async def clean_bots(self, ctx: Context, amount: int = 10) -> None:
+ async def clean_bots(
+ self,
+ ctx: Context,
+ amount: Optional[int] = 10,
+ channel: TextChannel = None
+ ) -> None:
"""Delete all messages posted by a bot, stop cleaning after traversing `amount` messages."""
- await self._clean_messages(amount, ctx, bots_only=True)
+ await self._clean_messages(amount, ctx, bots_only=True, channel=channel)
@clean_group.command(name="regex", aliases=["word", "expression"])
@with_role(*MODERATION_ROLES)
- async def clean_regex(self, ctx: Context, regex: str, amount: int = 10) -> None:
+ async def clean_regex(
+ self,
+ ctx: Context,
+ regex: str,
+ amount: Optional[int] = 10,
+ channel: TextChannel = None
+ ) -> None:
"""Delete all messages that match a certain regex, stop cleaning after traversing `amount` messages."""
- await self._clean_messages(amount, ctx, regex=regex)
+ await self._clean_messages(amount, ctx, regex=regex, channel=channel)
@clean_group.command(name="stop", aliases=["cancel", "abort"])
@with_role(*MODERATION_ROLES)
@@ -211,6 +242,5 @@ class Clean(Cog):
def setup(bot: Bot) -> None:
- """Clean cog load."""
+ """Load the Clean cog."""
bot.add_cog(Clean(bot))
- log.info("Cog loaded: Clean")
diff --git a/bot/cogs/config_verifier.py b/bot/cogs/config_verifier.py
new file mode 100644
index 000000000..d72c6c22e
--- /dev/null
+++ b/bot/cogs/config_verifier.py
@@ -0,0 +1,40 @@
+import logging
+
+from discord.ext.commands import Cog
+
+from bot import constants
+from bot.bot import Bot
+
+
+log = logging.getLogger(__name__)
+
+
+class ConfigVerifier(Cog):
+ """Verify config on startup."""
+
+ def __init__(self, bot: Bot):
+ self.bot = bot
+ self.channel_verify_task = self.bot.loop.create_task(self.verify_channels())
+
+ async def verify_channels(self) -> None:
+ """
+ Verify channels.
+
+        If any channels in the config aren't present in the server, log them in a warning.
+ """
+ await self.bot.wait_until_guild_available()
+ server = self.bot.get_guild(constants.Guild.id)
+
+ server_channel_ids = {channel.id for channel in server.channels}
+ invalid_channels = [
+ channel_name for channel_name, channel_id in constants.Channels
+ if channel_id not in server_channel_ids
+ ]
+
+ if invalid_channels:
+ log.warning(f"Configured channels do not exist in server: {', '.join(invalid_channels)}.")
+
+
+def setup(bot: Bot) -> None:
+ """Load the ConfigVerifier cog."""
+ bot.add_cog(ConfigVerifier(bot))
diff --git a/bot/cogs/defcon.py b/bot/cogs/defcon.py
index bedd70c86..20961e0a2 100644
--- a/bot/cogs/defcon.py
+++ b/bot/cogs/defcon.py
@@ -6,8 +6,9 @@ from datetime import datetime, timedelta
from enum import Enum
from discord import Colour, Embed, Member
-from discord.ext.commands import Bot, Cog, Context, group
+from discord.ext.commands import Cog, Context, group
+from bot.bot import Bot
from bot.cogs.moderation import ModLog
from bot.constants import Channels, Colours, Emojis, Event, Icons, Roles
from bot.decorators import with_role
@@ -58,7 +59,7 @@ class Defcon(Cog):
async def sync_settings(self) -> None:
"""On cog load, try to synchronize DEFCON settings to the API."""
- await self.bot.wait_until_ready()
+ await self.bot.wait_until_guild_available()
self.channel = await self.bot.fetch_channel(Channels.defcon)
try:
@@ -75,12 +76,12 @@ class Defcon(Cog):
if data["enabled"]:
self.enabled = True
self.days = timedelta(days=data["days"])
- log.warning(f"DEFCON enabled: {self.days.days} days")
+ log.info(f"DEFCON enabled: {self.days.days} days")
else:
self.enabled = False
self.days = timedelta(days=0)
- log.warning(f"DEFCON disabled")
+ log.info(f"DEFCON disabled")
await self.update_channel_topic()
@@ -236,6 +237,5 @@ class Defcon(Cog):
def setup(bot: Bot) -> None:
- """DEFCON cog load."""
+ """Load the Defcon cog."""
bot.add_cog(Defcon(bot))
- log.info("Cog loaded: Defcon")
diff --git a/bot/cogs/doc.py b/bot/cogs/doc.py
index e5b3a4062..204cffb37 100644
--- a/bot/cogs/doc.py
+++ b/bot/cogs/doc.py
@@ -5,6 +5,7 @@ import re
import textwrap
from collections import OrderedDict
from contextlib import suppress
+from types import SimpleNamespace
from typing import Any, Callable, Optional, Tuple
import discord
@@ -17,6 +18,7 @@ from requests import ConnectTimeout, ConnectionError, HTTPError
from sphinx.ext import intersphinx
from urllib3.exceptions import ProtocolError
+from bot.bot import Bot
from bot.constants import MODERATION_ROLES, RedirectOutput
from bot.converters import ValidPythonIdentifier, ValidURL
from bot.decorators import with_role
@@ -26,6 +28,16 @@ from bot.pagination import LinePaginator
log = logging.getLogger(__name__)
logging.getLogger('urllib3').setLevel(logging.WARNING)
+# Since Intersphinx is intended to be used with Sphinx,
+# we need to mock its configuration.
+SPHINX_MOCK_APP = SimpleNamespace(
+ config=SimpleNamespace(
+ intersphinx_timeout=3,
+ tls_verify=True,
+ user_agent="python3:python-discord/bot:1.0.0"
+ )
+)
+
NO_OVERRIDE_GROUPS = (
"2to3fixer",
"token",
@@ -101,18 +113,6 @@ def markdownify(html: str) -> DocMarkdownConverter:
return DocMarkdownConverter(bullets='•').convert(html)
-class DummyObject(object):
- """A dummy object which supports assigning anything, which the builtin `object()` does not support normally."""
-
-
-class SphinxConfiguration:
- """Dummy configuration for use with intersphinx."""
-
- config = DummyObject()
- config.intersphinx_timeout = 3
- config.tls_verify = True
-
-
class InventoryURL(commands.Converter):
"""
Represents an Intersphinx inventory URL.
@@ -127,7 +127,7 @@ class InventoryURL(commands.Converter):
async def convert(ctx: commands.Context, url: str) -> str:
"""Convert url to Intersphinx inventory URL."""
try:
- intersphinx.fetch_inventory(SphinxConfiguration(), '', url)
+ intersphinx.fetch_inventory(SPHINX_MOCK_APP, '', url)
except AttributeError:
raise commands.BadArgument(f"Failed to fetch Intersphinx inventory from URL `{url}`.")
except ConnectionError:
@@ -147,7 +147,7 @@ class InventoryURL(commands.Converter):
class Doc(commands.Cog):
"""A set of commands for querying & displaying documentation."""
- def __init__(self, bot: commands.Bot):
+ def __init__(self, bot: Bot):
self.base_urls = {}
self.bot = bot
self.inventories = {}
@@ -157,11 +157,11 @@ class Doc(commands.Cog):
async def init_refresh_inventory(self) -> None:
"""Refresh documentation inventory on cog initialization."""
- await self.bot.wait_until_ready()
+ await self.bot.wait_until_guild_available()
await self.refresh_inventory()
async def update_single(
- self, package_name: str, base_url: str, inventory_url: str, config: SphinxConfiguration
+ self, package_name: str, base_url: str, inventory_url: str
) -> None:
"""
Rebuild the inventory for a single package.
@@ -172,12 +172,10 @@ class Doc(commands.Cog):
absolute paths that link to specific symbols
* `inventory_url` is the absolute URL to the intersphinx inventory, fetched by running
`intersphinx.fetch_inventory` in an executor on the bot's event loop
- * `config` is a `SphinxConfiguration` instance to mock the regular sphinx
- project layout, required for use with intersphinx
"""
self.base_urls[package_name] = base_url
- package = await self._fetch_inventory(inventory_url, config)
+ package = await self._fetch_inventory(inventory_url)
if not package:
return None
@@ -219,15 +217,11 @@ class Doc(commands.Cog):
self.renamed_symbols.clear()
async_cache.cache = OrderedDict()
- # Since Intersphinx is intended to be used with Sphinx,
- # we need to mock its configuration.
- config = SphinxConfiguration()
-
# Run all coroutines concurrently - since each of them performs a HTTP
# request, this speeds up fetching the inventory data heavily.
coros = [
self.update_single(
- package["package"], package["base_url"], package["inventory_url"], config
+ package["package"], package["base_url"], package["inventory_url"]
) for package in await self.bot.api_client.get('bot/documentation-links')
]
await asyncio.gather(*coros)
@@ -305,10 +299,17 @@ class Doc(commands.Cog):
# of a double newline (interpreted as a paragraph) before index 1000.
if len(description) > 1000:
shortened = description[:1000]
- last_paragraph_end = shortened.rfind('\n\n', 100)
- if last_paragraph_end == -1:
- last_paragraph_end = shortened.rfind('. ')
- description = description[:last_paragraph_end]
+ description_cutoff = shortened.rfind('\n\n', 100)
+ if description_cutoff == -1:
+ # Search the shortened version for cutoff points in decreasing desirability,
+ # cutoff at 1000 if none are found.
+ for string in (". ", ", ", ",", " "):
+ description_cutoff = shortened.rfind(string)
+ if description_cutoff != -1:
+ break
+ else:
+ description_cutoff = 1000
+ description = description[:description_cutoff]
# If there is an incomplete code block, cut it out
if description.count("```") % 2:
@@ -317,7 +318,6 @@ class Doc(commands.Cog):
description += f"... [read more]({permalink})"
description = WHITESPACE_AFTER_NEWLINES_RE.sub('', description)
-
if signatures is None:
# If symbol is a module, don't show signature.
embed_description = description
@@ -469,9 +469,9 @@ class Doc(commands.Cog):
)
await ctx.send(embed=embed)
- async def _fetch_inventory(self, inventory_url: str, config: SphinxConfiguration) -> Optional[dict]:
+ async def _fetch_inventory(self, inventory_url: str) -> Optional[dict]:
"""Get and return inventory from `inventory_url`. If fetching fails, return None."""
- fetch_func = functools.partial(intersphinx.fetch_inventory, config, '', inventory_url)
+ fetch_func = functools.partial(intersphinx.fetch_inventory, SPHINX_MOCK_APP, '', inventory_url)
for retry in range(1, FAILED_REQUEST_RETRY_AMOUNT+1):
try:
package = await self.bot.loop.run_in_executor(None, fetch_func)
@@ -506,7 +506,6 @@ class Doc(commands.Cog):
return tag.name == "table"
-def setup(bot: commands.Bot) -> None:
- """Doc cog load."""
+def setup(bot: Bot) -> None:
+ """Load the Doc cog."""
bot.add_cog(Doc(bot))
- log.info("Cog loaded: Doc")
diff --git a/bot/cogs/duck_pond.py b/bot/cogs/duck_pond.py
new file mode 100644
index 000000000..1f84a0609
--- /dev/null
+++ b/bot/cogs/duck_pond.py
@@ -0,0 +1,182 @@
+import logging
+from typing import Optional, Union
+
+import discord
+from discord import Color, Embed, Member, Message, RawReactionActionEvent, User, errors
+from discord.ext.commands import Cog
+
+from bot import constants
+from bot.bot import Bot
+from bot.utils.messages import send_attachments
+
+log = logging.getLogger(__name__)
+
+
+class DuckPond(Cog):
+ """Relays messages to #duck-pond whenever a certain number of duck reactions have been achieved."""
+
+ def __init__(self, bot: Bot):
+ self.bot = bot
+ self.webhook_id = constants.Webhooks.duck_pond
+ self.bot.loop.create_task(self.fetch_webhook())
+
+ async def fetch_webhook(self) -> None:
+ """Fetches the webhook object, so we can post to it."""
+ await self.bot.wait_until_guild_available()
+
+ try:
+ self.webhook = await self.bot.fetch_webhook(self.webhook_id)
+ except discord.HTTPException:
+ log.exception(f"Failed to fetch webhook with id `{self.webhook_id}`")
+
+ @staticmethod
+ def is_staff(member: Union[User, Member]) -> bool:
+ """Check if a specific member or user is staff."""
+ if hasattr(member, "roles"):
+ for role in member.roles:
+ if role.id in constants.STAFF_ROLES:
+ return True
+ return False
+
+ async def has_green_checkmark(self, message: Message) -> bool:
+ """Check if the message has a green checkmark reaction."""
+ for reaction in message.reactions:
+ if reaction.emoji == "✅":
+ async for user in reaction.users():
+ if user == self.bot.user:
+ return True
+ return False
+
+ async def send_webhook(
+ self,
+ content: Optional[str] = None,
+ username: Optional[str] = None,
+ avatar_url: Optional[str] = None,
+ embed: Optional[Embed] = None,
+ ) -> None:
+ """Send a webhook to the duck_pond channel."""
+ try:
+ await self.webhook.send(
+ content=content,
+ username=username,
+ avatar_url=avatar_url,
+ embed=embed
+ )
+ except discord.HTTPException:
+ log.exception("Failed to send a message to the Duck Pool webhook")
+
+ async def count_ducks(self, message: Message) -> int:
+ """
+ Count the number of ducks in the reactions of a specific message.
+
+ Only counts ducks added by staff members.
+ """
+ duck_count = 0
+ duck_reactors = []
+
+ for reaction in message.reactions:
+ async for user in reaction.users():
+
+ # Is the user a staff member and not already counted as reactor?
+ if not self.is_staff(user) or user.id in duck_reactors:
+ continue
+
+ # Is the emoji a duck?
+ if hasattr(reaction.emoji, "id"):
+ if reaction.emoji.id in constants.DuckPond.custom_emojis:
+ duck_count += 1
+ duck_reactors.append(user.id)
+ elif isinstance(reaction.emoji, str):
+ if reaction.emoji == "🦆":
+ duck_count += 1
+ duck_reactors.append(user.id)
+ return duck_count
+
+ async def relay_message(self, message: Message) -> None:
+ """Relays the message's content and attachments to the duck pond channel."""
+ clean_content = message.clean_content
+
+ if clean_content:
+ await self.send_webhook(
+ content=message.clean_content,
+ username=message.author.display_name,
+ avatar_url=message.author.avatar_url
+ )
+
+ if message.attachments:
+ try:
+ await send_attachments(message, self.webhook)
+ except (errors.Forbidden, errors.NotFound):
+ e = Embed(
+ description=":x: **This message contained an attachment, but it could not be retrieved**",
+ color=Color.red()
+ )
+ await self.send_webhook(
+ embed=e,
+ username=message.author.display_name,
+ avatar_url=message.author.avatar_url
+ )
+ except discord.HTTPException:
+ log.exception(f"Failed to send an attachment to the webhook")
+
+ await message.add_reaction("✅")
+
+ @staticmethod
+ def _payload_has_duckpond_emoji(payload: RawReactionActionEvent) -> bool:
+ """Test if the RawReactionActionEvent payload contains a duckpond emoji."""
+ if payload.emoji.is_custom_emoji():
+ if payload.emoji.id in constants.DuckPond.custom_emojis:
+ return True
+ elif payload.emoji.name == "🦆":
+ return True
+
+ return False
+
+ @Cog.listener()
+ async def on_raw_reaction_add(self, payload: RawReactionActionEvent) -> None:
+ """
+ Determine if a message should be sent to the duck pond.
+
+ This will count the number of duck reactions on the message, and if this amount meets the
+ amount of ducks specified in the config under duck_pond/threshold, it will
+ send the message off to the duck pond.
+ """
+ # Is the emoji in the reaction a duck?
+ if not self._payload_has_duckpond_emoji(payload):
+ return
+
+ channel = discord.utils.get(self.bot.get_all_channels(), id=payload.channel_id)
+ message = await channel.fetch_message(payload.message_id)
+ member = discord.utils.get(message.guild.members, id=payload.user_id)
+
+ # Is the member a human and a staff member?
+ if not self.is_staff(member) or member.bot:
+ return
+
+ # Does the message already have a green checkmark?
+ if await self.has_green_checkmark(message):
+ return
+
+ # Time to count our ducks!
+ duck_count = await self.count_ducks(message)
+
+ # If we've got more than the required amount of ducks, send the message to the duck_pond.
+ if duck_count >= constants.DuckPond.threshold:
+ await self.relay_message(message)
+
+ @Cog.listener()
+ async def on_raw_reaction_remove(self, payload: RawReactionActionEvent) -> None:
+ """Ensure that people don't remove the green checkmark from duck ponded messages."""
+ channel = discord.utils.get(self.bot.get_all_channels(), id=payload.channel_id)
+
+ # Prevent the green checkmark from being removed
+ if payload.emoji.name == "✅":
+ message = await channel.fetch_message(payload.message_id)
+ duck_count = await self.count_ducks(message)
+ if duck_count >= constants.DuckPond.threshold:
+ await message.add_reaction("✅")
+
+
+def setup(bot: Bot) -> None:
+ """Load the DuckPond cog."""
+ bot.add_cog(DuckPond(bot))
diff --git a/bot/cogs/error_handler.py b/bot/cogs/error_handler.py
index 49411814c..0abb7e521 100644
--- a/bot/cogs/error_handler.py
+++ b/bot/cogs/error_handler.py
@@ -14,9 +14,11 @@ from discord.ext.commands import (
NoPrivateMessage,
UserInputError,
)
-from discord.ext.commands import Bot, Cog, Context
+from discord.ext.commands import Cog, Context
+from sentry_sdk import push_scope
from bot.api import ResponseCodeError
+from bot.bot import Bot
from bot.constants import Channels
from bot.decorators import InChannelCheckFailure
@@ -75,6 +77,16 @@ class ErrorHandler(Cog):
tags_get_command = self.bot.get_command("tags get")
ctx.invoked_from_error_handler = True
+ log_msg = "Cancelling attempt to fall back to a tag due to failed checks."
+ try:
+ if not await tags_get_command.can_run(ctx):
+ log.debug(log_msg)
+ return
+ except CommandError as tag_error:
+ log.debug(log_msg)
+ await self.on_command_error(ctx, tag_error)
+ return
+
# Return to not raise the exception
with contextlib.suppress(ResponseCodeError):
await ctx.invoke(tags_get_command, tag_name=ctx.invoked_with)
@@ -136,13 +148,28 @@ class ErrorHandler(Cog):
f"Sorry, an unexpected error occurred. Please let us know!\n\n"
f"```{e.__class__.__name__}: {e}```"
)
- log.error(
- f"Error executing command invoked by {ctx.message.author}: {ctx.message.content}"
- )
- raise e
+
+ with push_scope() as scope:
+ scope.user = {
+ "id": ctx.author.id,
+ "username": str(ctx.author)
+ }
+
+ scope.set_tag("command", ctx.command.qualified_name)
+ scope.set_tag("message_id", ctx.message.id)
+ scope.set_tag("channel_id", ctx.channel.id)
+
+ scope.set_extra("full_message", ctx.message.content)
+
+ if ctx.guild is not None:
+ scope.set_extra(
+ "jump_to",
+ f"https://discordapp.com/channels/{ctx.guild.id}/{ctx.channel.id}/{ctx.message.id}"
+ )
+
+ log.error(f"Error executing command invoked by {ctx.message.author}: {ctx.message.content}", exc_info=e)
def setup(bot: Bot) -> None:
- """Error handler cog load."""
+ """Load the ErrorHandler cog."""
bot.add_cog(ErrorHandler(bot))
- log.info("Cog loaded: Events")
diff --git a/bot/cogs/eval.py b/bot/cogs/eval.py
index 00b988dde..9c729f28a 100644
--- a/bot/cogs/eval.py
+++ b/bot/cogs/eval.py
@@ -9,8 +9,9 @@ from io import StringIO
from typing import Any, Optional, Tuple
import discord
-from discord.ext.commands import Bot, Cog, Context, group
+from discord.ext.commands import Cog, Context, group
+from bot.bot import Bot
from bot.constants import Roles
from bot.decorators import with_role
from bot.interpreter import Interpreter
@@ -197,6 +198,5 @@ async def func(): # (None,) -> Any
def setup(bot: Bot) -> None:
- """Code eval cog load."""
+ """Load the CodeEval cog."""
bot.add_cog(CodeEval(bot))
- log.info("Cog loaded: Eval")
diff --git a/bot/cogs/extensions.py b/bot/cogs/extensions.py
index bb66e0b8e..f16e79fb7 100644
--- a/bot/cogs/extensions.py
+++ b/bot/cogs/extensions.py
@@ -6,8 +6,9 @@ from pkgutil import iter_modules
from discord import Colour, Embed
from discord.ext import commands
-from discord.ext.commands import Bot, Context, group
+from discord.ext.commands import Context, group
+from bot.bot import Bot
from bot.constants import Emojis, MODERATION_ROLES, Roles, URLs
from bot.pagination import LinePaginator
from bot.utils.checks import with_role_check
@@ -233,4 +234,3 @@ class Extensions(commands.Cog):
def setup(bot: Bot) -> None:
"""Load the Extensions cog."""
bot.add_cog(Extensions(bot))
- log.info("Cog loaded: Extensions")
diff --git a/bot/cogs/filtering.py b/bot/cogs/filtering.py
index 3d02073d6..38c28dd00 100644
--- a/bot/cogs/filtering.py
+++ b/bot/cogs/filtering.py
@@ -5,8 +5,9 @@ from typing import Optional, Union
import discord.errors
from dateutil.relativedelta import relativedelta
from discord import Colour, DMChannel, Member, Message, TextChannel
-from discord.ext.commands import Bot, Cog
+from discord.ext.commands import Cog
+from bot.bot import Bot
from bot.cogs.moderation import ModLog
from bot.constants import (
Channels, Colours,
@@ -381,6 +382,5 @@ class Filtering(Cog):
def setup(bot: Bot) -> None:
- """Filtering cog load."""
+ """Load the Filtering cog."""
bot.add_cog(Filtering(bot))
- log.info("Cog loaded: Filtering")
diff --git a/bot/cogs/free.py b/bot/cogs/free.py
index 82285656b..49cab6172 100644
--- a/bot/cogs/free.py
+++ b/bot/cogs/free.py
@@ -3,8 +3,9 @@ from datetime import datetime
from operator import itemgetter
from discord import Colour, Embed, Member, utils
-from discord.ext.commands import Bot, Cog, Context, command
+from discord.ext.commands import Cog, Context, command
+from bot.bot import Bot
from bot.constants import Categories, Channels, Free, STAFF_ROLES
from bot.decorators import redirect_output
@@ -98,6 +99,5 @@ class Free(Cog):
def setup(bot: Bot) -> None:
- """Free cog load."""
+ """Load the Free cog."""
bot.add_cog(Free())
- log.info("Cog loaded: Free")
diff --git a/bot/cogs/help.py b/bot/cogs/help.py
index 9607dbd8d..fd5bbc3ca 100644
--- a/bot/cogs/help.py
+++ b/bot/cogs/help.py
@@ -6,24 +6,26 @@ from typing import Union
from discord import Colour, Embed, HTTPException, Message, Reaction, User
from discord.ext import commands
-from discord.ext.commands import Bot, CheckFailure, Cog as DiscordCog, Command, Context
+from discord.ext.commands import CheckFailure, Cog as DiscordCog, Command, Context
from fuzzywuzzy import fuzz, process
from bot import constants
-from bot.constants import Channels, STAFF_ROLES
+from bot.bot import Bot
+from bot.constants import Channels, Emojis, STAFF_ROLES
from bot.decorators import redirect_output
from bot.pagination import (
- DELETE_EMOJI, FIRST_EMOJI, LAST_EMOJI,
+ FIRST_EMOJI, LAST_EMOJI,
LEFT_EMOJI, LinePaginator, RIGHT_EMOJI,
)
+DELETE_EMOJI = Emojis.trashcan
REACTIONS = {
FIRST_EMOJI: 'first',
LEFT_EMOJI: 'back',
RIGHT_EMOJI: 'next',
LAST_EMOJI: 'end',
- DELETE_EMOJI: 'stop'
+ DELETE_EMOJI: 'stop',
}
Cog = namedtuple('Cog', ['name', 'description', 'commands'])
diff --git a/bot/cogs/information.py b/bot/cogs/information.py
index 530453600..13c8aabaa 100644
--- a/bot/cogs/information.py
+++ b/bot/cogs/information.py
@@ -2,17 +2,16 @@ import colorsys
import logging
import pprint
import textwrap
-import typing
-from collections import defaultdict
-from typing import Any, Mapping, Optional
-
-import discord
-from discord import CategoryChannel, Colour, Embed, Member, Role, TextChannel, VoiceChannel, utils
-from discord.ext import commands
-from discord.ext.commands import Bot, BucketType, Cog, Context, command, group
+from collections import Counter, defaultdict
+from string import Template
+from typing import Any, Mapping, Optional, Union
+
+from discord import Colour, Embed, Member, Message, Role, Status, utils
+from discord.ext.commands import BucketType, Cog, Context, Paginator, command, group
from discord.utils import escape_markdown
from bot import constants
+from bot.bot import Bot
from bot.decorators import InChannelCheckFailure, in_channel, with_role
from bot.utils.checks import cooldown_with_role_bypass, with_role_check
from bot.utils.time import time_since
@@ -31,8 +30,7 @@ class Information(Cog):
async def roles_info(self, ctx: Context) -> None:
"""Returns a list of all roles and their corresponding IDs."""
# Sort the roles alphabetically and remove the @everyone role
- roles = sorted(ctx.guild.roles, key=lambda role: role.name)
- roles = [role for role in roles if role.name != "@everyone"]
+ roles = sorted(ctx.guild.roles[1:], key=lambda role: role.name)
# Build a string
role_string = ""
@@ -45,20 +43,20 @@ class Information(Cog):
colour=Colour.blurple(),
description=role_string
)
-
embed.set_footer(text=f"Total roles: {len(roles)}")
await ctx.send(embed=embed)
@with_role(*constants.MODERATION_ROLES)
@command(name="role")
- async def role_info(self, ctx: Context, *roles: typing.Union[Role, str]) -> None:
+ async def role_info(self, ctx: Context, *roles: Union[Role, str]) -> None:
"""
Return information on a role or list of roles.
To specify multiple roles just add to the arguments, delimit roles with spaces in them using quotation marks.
"""
parsed_roles = []
+ failed_roles = []
for role_name in roles:
if isinstance(role_name, Role):
@@ -69,29 +67,29 @@ class Information(Cog):
role = utils.find(lambda r: r.name.lower() == role_name.lower(), ctx.guild.roles)
if not role:
- await ctx.send(f":x: Could not convert `{role_name}` to a role")
+ failed_roles.append(role_name)
continue
parsed_roles.append(role)
+ if failed_roles:
+ await ctx.send(
+                ":x: I could not convert the following role names to a role: \n- " +
+                "\n- ".join(failed_roles)
+ )
+
for role in parsed_roles:
+ h, s, v = colorsys.rgb_to_hsv(*role.colour.to_rgb())
+
embed = Embed(
title=f"{role.name} info",
colour=role.colour,
)
-
embed.add_field(name="ID", value=role.id, inline=True)
-
embed.add_field(name="Colour (RGB)", value=f"#{role.colour.value:0>6x}", inline=True)
-
- h, s, v = colorsys.rgb_to_hsv(*role.colour.to_rgb())
-
embed.add_field(name="Colour (HSV)", value=f"{h:.2f} {s:.2f} {v}", inline=True)
-
embed.add_field(name="Member count", value=len(role.members), inline=True)
-
embed.add_field(name="Position", value=role.position)
-
embed.add_field(name="Permission code", value=role.permissions.value, inline=True)
await ctx.send(embed=embed)
@@ -103,40 +101,23 @@ class Information(Cog):
features = ", ".join(ctx.guild.features)
region = ctx.guild.region
- # How many of each type of channel?
roles = len(ctx.guild.roles)
- channels = ctx.guild.channels
- text_channels = 0
- category_channels = 0
- voice_channels = 0
- for channel in channels:
- if type(channel) == TextChannel:
- text_channels += 1
- elif type(channel) == CategoryChannel:
- category_channels += 1
- elif type(channel) == VoiceChannel:
- voice_channels += 1
-
- # How many of each user status?
member_count = ctx.guild.member_count
- members = ctx.guild.members
- online = 0
- dnd = 0
- idle = 0
- offline = 0
- for member in members:
- if str(member.status) == "online":
- online += 1
- elif str(member.status) == "offline":
- offline += 1
- elif str(member.status) == "idle":
- idle += 1
- elif str(member.status) == "dnd":
- dnd += 1
- embed = Embed(
- colour=Colour.blurple(),
- description=textwrap.dedent(f"""
+ # How many of each type of channel?
+ channels = Counter(c.type for c in ctx.guild.channels)
+ channel_counts = "".join(sorted(f"{str(ch).title()} channels: {channels[ch]}\n" for ch in channels)).strip()
+
+ # How many of each user status?
+ statuses = Counter(member.status for member in ctx.guild.members)
+ embed = Embed(colour=Colour.blurple())
+
+ # Because channel_counts lacks leading whitespace, it breaks the dedent if it's inserted directly by the
+        # f-string. While this is correctly formatted by Discord, it makes unit testing difficult. To keep the formatting
+ # without joining a tuple of strings we can use a Template string to insert the already-formatted channel_counts
+ # after the dedent is made.
+ embed.description = Template(
+ textwrap.dedent(f"""
**Server information**
Created: {created}
Voice region: {region}
@@ -145,18 +126,15 @@ class Information(Cog):
**Counts**
Members: {member_count:,}
Roles: {roles}
- Text: {text_channels}
- Voice: {voice_channels}
- Channel categories: {category_channels}
+ $channel_counts
**Members**
- {constants.Emojis.status_online} {online}
- {constants.Emojis.status_idle} {idle}
- {constants.Emojis.status_dnd} {dnd}
- {constants.Emojis.status_offline} {offline}
+ {constants.Emojis.status_online} {statuses[Status.online]:,}
+ {constants.Emojis.status_idle} {statuses[Status.idle]:,}
+ {constants.Emojis.status_dnd} {statuses[Status.dnd]:,}
+ {constants.Emojis.status_offline} {statuses[Status.offline]:,}
""")
- )
-
+ ).substitute({"channel_counts": channel_counts})
embed.set_thumbnail(url=ctx.guild.icon_url)
await ctx.send(embed=embed)
@@ -168,7 +146,7 @@ class Information(Cog):
user = ctx.author
# Do a role check if this is being executed on someone other than the caller
- if user != ctx.author and not with_role_check(ctx, *constants.MODERATION_ROLES):
+ elif user != ctx.author and not with_role_check(ctx, *constants.MODERATION_ROLES):
await ctx.send("You may not use this command on users other than yourself.")
return
@@ -188,7 +166,11 @@ class Information(Cog):
# Custom status
custom_status = ''
for activity in user.activities:
- if activity.name == 'Custom Status':
+            # Check activity.state for a None value if the user has a custom status set
+ # This guards against a custom status with an emoji but no text, which will cause
+ # escape_markdown to raise an exception
+ # This can be reworked after a move to d.py 1.3.0+, which adds a CustomActivity class
+ if activity.name == 'Custom Status' and activity.state:
state = escape_markdown(activity.state)
custom_status = f'Status: {state}\n'
@@ -197,7 +179,7 @@ class Information(Cog):
name = f"{user.nick} ({name})"
joined = time_since(user.joined_at, precision="days")
- roles = ", ".join(role.mention for role in user.roles if role.name != "@everyone")
+ roles = ", ".join(role.mention for role in user.roles[1:])
description = [
textwrap.dedent(f"""
@@ -351,13 +333,13 @@ class Information(Cog):
@cooldown_with_role_bypass(2, 60 * 3, BucketType.member, bypass_roles=constants.STAFF_ROLES)
@group(invoke_without_command=True)
@in_channel(constants.Channels.bot, bypass_roles=constants.STAFF_ROLES)
- async def raw(self, ctx: Context, *, message: discord.Message, json: bool = False) -> None:
+ async def raw(self, ctx: Context, *, message: Message, json: bool = False) -> None:
"""Shows information about the raw API response."""
# I *guess* it could be deleted right as the command is invoked but I felt like it wasn't worth handling
# doing this extra request is also much easier than trying to convert everything back into a dictionary again
raw_data = await ctx.bot.http.get_message(message.channel.id, message.id)
- paginator = commands.Paginator()
+ paginator = Paginator()
def add_content(title: str, content: str) -> None:
paginator.add_line(f'== {title} ==\n')
@@ -385,12 +367,11 @@ class Information(Cog):
await ctx.send(page)
@raw.command()
- async def json(self, ctx: Context, message: discord.Message) -> None:
+ async def json(self, ctx: Context, message: Message) -> None:
"""Shows information about the raw API response in a copy-pasteable Python format."""
await ctx.invoke(self.raw, message=message, json=True)
def setup(bot: Bot) -> None:
- """Information cog load."""
+ """Load the Information cog."""
bot.add_cog(Information(bot))
- log.info("Cog loaded: Information")
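The comment in the server_info hunk above explains why string.Template is paired with textwrap.dedent: the pre-formatted channel_counts block has no leading whitespace, so interpolating it directly in the f-string would break the dedent. A minimal, stdlib-only sketch of that pattern (the channel types and counts below are made up):

    import textwrap
    from collections import Counter
    from string import Template

    # Made-up channel types standing in for ctx.guild.channels.
    channels = Counter(["text", "text", "text", "voice", "category"])
    channel_counts = "".join(
        sorted(f"{kind.title()} channels: {count}\n" for kind, count in channels.items())
    ).strip()

    # Substitute only after the dedent has run, so the template's own indentation stays intact.
    description = Template(
        textwrap.dedent("""
            **Counts**
            $channel_counts
        """)
    ).substitute(channel_counts=channel_counts)
    print(description)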
diff --git a/bot/cogs/jams.py b/bot/cogs/jams.py
index be9d33e3e..985f28ce5 100644
--- a/bot/cogs/jams.py
+++ b/bot/cogs/jams.py
@@ -4,6 +4,7 @@ from discord import Member, PermissionOverwrite, utils
from discord.ext import commands
from more_itertools import unique_everseen
+from bot.bot import Bot
from bot.constants import Roles
from bot.decorators import with_role
@@ -13,7 +14,7 @@ log = logging.getLogger(__name__)
class CodeJams(commands.Cog):
"""Manages the code-jam related parts of our server."""
- def __init__(self, bot: commands.Bot):
+ def __init__(self, bot: Bot):
self.bot = bot
@commands.command()
@@ -108,7 +109,6 @@ class CodeJams(commands.Cog):
)
-def setup(bot: commands.Bot) -> None:
- """Code Jams cog load."""
+def setup(bot: Bot) -> None:
+ """Load the CodeJams cog."""
bot.add_cog(CodeJams(bot))
- log.info("Cog loaded: CodeJams")
diff --git a/bot/cogs/logging.py b/bot/cogs/logging.py
index c92b619ff..dbd76672f 100644
--- a/bot/cogs/logging.py
+++ b/bot/cogs/logging.py
@@ -1,8 +1,9 @@
import logging
from discord import Embed
-from discord.ext.commands import Bot, Cog
+from discord.ext.commands import Cog
+from bot.bot import Bot
from bot.constants import Channels, DEBUG_MODE
@@ -19,7 +20,7 @@ class Logging(Cog):
async def startup_greeting(self) -> None:
"""Announce our presence to the configured devlog channel."""
- await self.bot.wait_until_ready()
+ await self.bot.wait_until_guild_available()
log.info("Bot connected!")
embed = Embed(description="Connected!")
@@ -37,6 +38,5 @@ class Logging(Cog):
def setup(bot: Bot) -> None:
- """Logging cog load."""
+ """Load the Logging cog."""
bot.add_cog(Logging(bot))
- log.info("Cog loaded: Logging")
diff --git a/bot/cogs/moderation/__init__.py b/bot/cogs/moderation/__init__.py
index 7383ed44e..5243cb92d 100644
--- a/bot/cogs/moderation/__init__.py
+++ b/bot/cogs/moderation/__init__.py
@@ -1,25 +1,13 @@
-import logging
-
-from discord.ext.commands import Bot
-
+from bot.bot import Bot
from .infractions import Infractions
from .management import ModManagement
from .modlog import ModLog
from .superstarify import Superstarify
-log = logging.getLogger(__name__)
-
def setup(bot: Bot) -> None:
- """Load the moderation extension (Infractions, ModManagement, ModLog, & Superstarify cogs)."""
+ """Load the Infractions, ModManagement, ModLog, and Superstarify cogs."""
bot.add_cog(Infractions(bot))
- log.info("Cog loaded: Infractions")
-
bot.add_cog(ModLog(bot))
- log.info("Cog loaded: ModLog")
-
bot.add_cog(ModManagement(bot))
- log.info("Cog loaded: ModManagement")
-
bot.add_cog(Superstarify(bot))
- log.info("Cog loaded: Superstarify")
diff --git a/bot/cogs/moderation/infractions.py b/bot/cogs/moderation/infractions.py
index 2713a1b68..f4e296df9 100644
--- a/bot/cogs/moderation/infractions.py
+++ b/bot/cogs/moderation/infractions.py
@@ -7,17 +7,17 @@ from discord.ext import commands
from discord.ext.commands import Context, command
from bot import constants
+from bot.bot import Bot
from bot.constants import Event
+from bot.converters import Expiry, FetchedMember
from bot.decorators import respect_role_hierarchy
from bot.utils.checks import with_role_check
from . import utils
from .scheduler import InfractionScheduler
-from .utils import MemberObject
+from .utils import UserSnowflake
log = logging.getLogger(__name__)
-MemberConverter = t.Union[utils.UserTypes, utils.proxy_user]
-
class Infractions(InfractionScheduler, commands.Cog):
"""Apply and pardon infractions on users for moderation purposes."""
@@ -25,7 +25,7 @@ class Infractions(InfractionScheduler, commands.Cog):
category = "Moderation"
category_description = "Server moderation tools."
- def __init__(self, bot: commands.Bot):
+ def __init__(self, bot: Bot):
super().__init__(bot, supported_infractions={"ban", "kick", "mute", "note", "warning"})
self.category = "Moderation"
@@ -66,7 +66,7 @@ class Infractions(InfractionScheduler, commands.Cog):
await self.apply_kick(ctx, user, reason, active=False)
@command()
- async def ban(self, ctx: Context, user: MemberConverter, *, reason: str = None) -> None:
+ async def ban(self, ctx: Context, user: FetchedMember, *, reason: str = None) -> None:
"""Permanently ban a user for the given reason."""
await self.apply_ban(ctx, user, reason)
@@ -74,7 +74,7 @@ class Infractions(InfractionScheduler, commands.Cog):
# region: Temporary infractions
@command(aliases=["mute"])
- async def tempmute(self, ctx: Context, user: Member, duration: utils.Expiry, *, reason: str = None) -> None:
+ async def tempmute(self, ctx: Context, user: Member, duration: Expiry, *, reason: str = None) -> None:
"""
Temporarily mute a user for the given reason and duration.
@@ -93,7 +93,7 @@ class Infractions(InfractionScheduler, commands.Cog):
await self.apply_mute(ctx, user, reason, expires_at=duration)
@command()
- async def tempban(self, ctx: Context, user: MemberConverter, duration: utils.Expiry, *, reason: str = None) -> None:
+ async def tempban(self, ctx: Context, user: FetchedMember, duration: Expiry, *, reason: str = None) -> None:
"""
Temporarily ban a user for the given reason and duration.
@@ -115,7 +115,7 @@ class Infractions(InfractionScheduler, commands.Cog):
# region: Permanent shadow infractions
@command(hidden=True)
- async def note(self, ctx: Context, user: MemberConverter, *, reason: str = None) -> None:
+ async def note(self, ctx: Context, user: FetchedMember, *, reason: str = None) -> None:
"""Create a private note for a user with the given reason without notifying the user."""
infraction = await utils.post_infraction(ctx, user, "note", reason, hidden=True, active=False)
if infraction is None:
@@ -129,7 +129,7 @@ class Infractions(InfractionScheduler, commands.Cog):
await self.apply_kick(ctx, user, reason, hidden=True, active=False)
@command(hidden=True, aliases=['shadowban', 'sban'])
- async def shadow_ban(self, ctx: Context, user: MemberConverter, *, reason: str = None) -> None:
+ async def shadow_ban(self, ctx: Context, user: FetchedMember, *, reason: str = None) -> None:
"""Permanently ban a user for the given reason without notifying the user."""
await self.apply_ban(ctx, user, reason, hidden=True)
@@ -137,7 +137,7 @@ class Infractions(InfractionScheduler, commands.Cog):
# region: Temporary shadow infractions
@command(hidden=True, aliases=["shadowtempmute, stempmute", "shadowmute", "smute"])
- async def shadow_tempmute(self, ctx: Context, user: Member, duration: utils.Expiry, *, reason: str = None) -> None:
+ async def shadow_tempmute(self, ctx: Context, user: Member, duration: Expiry, *, reason: str = None) -> None:
"""
Temporarily mute a user for the given reason and duration without notifying the user.
@@ -159,8 +159,8 @@ class Infractions(InfractionScheduler, commands.Cog):
async def shadow_tempban(
self,
ctx: Context,
- user: MemberConverter,
- duration: utils.Expiry,
+ user: FetchedMember,
+ duration: Expiry,
*,
reason: str = None
) -> None:
@@ -185,12 +185,12 @@ class Infractions(InfractionScheduler, commands.Cog):
# region: Remove infractions (un- commands)
@command()
- async def unmute(self, ctx: Context, user: MemberConverter) -> None:
+ async def unmute(self, ctx: Context, user: FetchedMember) -> None:
"""Prematurely end the active mute infraction for the user."""
await self.pardon_infraction(ctx, "mute", user)
@command()
- async def unban(self, ctx: Context, user: MemberConverter) -> None:
+ async def unban(self, ctx: Context, user: FetchedMember) -> None:
"""Prematurely end the active ban infraction for the user."""
await self.pardon_infraction(ctx, "ban", user)
@@ -202,19 +202,24 @@ class Infractions(InfractionScheduler, commands.Cog):
if await utils.has_active_infraction(ctx, user, "mute"):
return
- infraction = await utils.post_infraction(ctx, user, "mute", reason, **kwargs)
+ infraction = await utils.post_infraction(ctx, user, "mute", reason, active=True, **kwargs)
if infraction is None:
return
self.mod_log.ignore(Event.member_update, user.id)
- action = user.add_roles(self._muted_role, reason=reason)
- await self.apply_infraction(ctx, infraction, user, action)
+ async def action() -> None:
+ await user.add_roles(self._muted_role, reason=reason)
+
+ log.trace(f"Attempting to kick {user} from voice because they've been muted.")
+ await user.move_to(None, reason=reason)
+
+ await self.apply_infraction(ctx, infraction, user, action())
@respect_role_hierarchy()
async def apply_kick(self, ctx: Context, user: Member, reason: str, **kwargs) -> None:
"""Apply a kick infraction with kwargs passed to `post_infraction`."""
- infraction = await utils.post_infraction(ctx, user, "kick", reason, **kwargs)
+ infraction = await utils.post_infraction(ctx, user, "kick", reason, active=False, **kwargs)
if infraction is None:
return
@@ -224,12 +229,12 @@ class Infractions(InfractionScheduler, commands.Cog):
await self.apply_infraction(ctx, infraction, user, action)
@respect_role_hierarchy()
- async def apply_ban(self, ctx: Context, user: MemberObject, reason: str, **kwargs) -> None:
+ async def apply_ban(self, ctx: Context, user: UserSnowflake, reason: str, **kwargs) -> None:
"""Apply a ban infraction with kwargs passed to `post_infraction`."""
if await utils.has_active_infraction(ctx, user, "ban"):
return
- infraction = await utils.post_infraction(ctx, user, "ban", reason, **kwargs)
+ infraction = await utils.post_infraction(ctx, user, "ban", reason, active=True, **kwargs)
if infraction is None:
return
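In the apply_mute hunk above, the single add_roles coroutine formerly passed to apply_infraction becomes a local async function, because the mute now needs two awaits (adding the role, then disconnecting the member from voice) wrapped into one awaitable. A self-contained sketch of that pattern, with print-based stand-ins for the discord.py calls:

    import asyncio

    async def add_roles(role: str) -> None:
        # Stand-in for Member.add_roles(...).
        print(f"added role {role}")

    async def move_to(channel: object) -> None:
        # Stand-in for Member.move_to(None, ...), i.e. a voice disconnect.
        print(f"moved member to {channel}")

    async def apply_infraction(action) -> None:
        # Stand-in for InfractionScheduler.apply_infraction, which awaits the given awaitable.
        if action is not None:
            await action

    async def main() -> None:
        # Bundle several awaits into one awaitable by defining and calling a local coroutine function.
        async def action() -> None:
            await add_roles("Muted")
            await move_to(None)

        await apply_infraction(action())

    asyncio.run(main())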
diff --git a/bot/cogs/moderation/management.py b/bot/cogs/moderation/management.py
index 44a508436..f74089056 100644
--- a/bot/cogs/moderation/management.py
+++ b/bot/cogs/moderation/management.py
@@ -2,13 +2,15 @@ import asyncio
import logging
import textwrap
import typing as t
+from datetime import datetime
import discord
from discord.ext import commands
from discord.ext.commands import Context
from bot import constants
-from bot.converters import InfractionSearchQuery
+from bot.bot import Bot
+from bot.converters import Expiry, InfractionSearchQuery, allowed_strings, proxy_user
from bot.pagination import LinePaginator
from bot.utils import time
from bot.utils.checks import in_channel_check, with_role_check
@@ -18,24 +20,13 @@ from .modlog import ModLog
log = logging.getLogger(__name__)
-UserConverter = t.Union[discord.User, utils.proxy_user]
-
-
-def permanent_duration(expires_at: str) -> str:
- """Only allow an expiration to be 'permanent' if it is a string."""
- expires_at = expires_at.lower()
- if expires_at != "permanent":
- raise commands.BadArgument
- else:
- return expires_at
-
class ModManagement(commands.Cog):
"""Management of infractions."""
category = "Moderation"
- def __init__(self, bot: commands.Bot):
+ def __init__(self, bot: Bot):
self.bot = bot
@property
@@ -59,8 +50,8 @@ class ModManagement(commands.Cog):
async def infraction_edit(
self,
ctx: Context,
- infraction_id: int,
- duration: t.Union[utils.Expiry, permanent_duration, None],
+ infraction_id: t.Union[int, allowed_strings("l", "last", "recent")],
+ duration: t.Union[Expiry, allowed_strings("p", "permanent"), None],
*,
reason: str = None
) -> None:
@@ -77,26 +68,45 @@ class ModManagement(commands.Cog):
\u2003`M` - minutes∗
\u2003`s` - seconds
- Use "permanent" to mark the infraction as permanent. Alternatively, an ISO 8601 timestamp
- can be provided for the duration.
+ Use "l", "last", or "recent" as the infraction ID to specify that the most recent infraction
+ authored by the command invoker should be edited.
+
+ Use "p" or "permanent" to mark the infraction as permanent. Alternatively, an ISO 8601
+ timestamp can be provided for the duration.
"""
if duration is None and reason is None:
# Unlike UserInputError, the error handler will show a specified message for BadArgument
raise commands.BadArgument("Neither a new expiry nor a new reason was specified.")
# Retrieve the previous infraction for its information.
- old_infraction = await self.bot.api_client.get(f'bot/infractions/{infraction_id}')
+ if isinstance(infraction_id, str):
+ params = {
+ "actor__id": ctx.author.id,
+ "ordering": "-inserted_at"
+ }
+            infractions = await self.bot.api_client.get("bot/infractions", params=params)
+
+ if infractions:
+ old_infraction = infractions[0]
+ infraction_id = old_infraction["id"]
+ else:
+ await ctx.send(
+                    ":x: Couldn't find most recent infraction; you have never given an infraction."
+ )
+ return
+ else:
+ old_infraction = await self.bot.api_client.get(f"bot/infractions/{infraction_id}")
request_data = {}
confirm_messages = []
log_text = ""
- if duration == "permanent":
+ if isinstance(duration, str):
request_data['expires_at'] = None
confirm_messages.append("marked as permanent")
elif duration is not None:
request_data['expires_at'] = duration.isoformat()
- expiry = duration.strftime(time.INFRACTION_FORMAT)
+ expiry = time.format_infraction_with_duration(request_data['expires_at'])
confirm_messages.append(f"set to expire on {expiry}")
else:
confirm_messages.append("expiry unchanged")
@@ -119,16 +129,22 @@ class ModManagement(commands.Cog):
# Re-schedule infraction if the expiration has been updated
if 'expires_at' in request_data:
- self.infractions_cog.cancel_task(new_infraction['id'])
- loop = asyncio.get_event_loop()
- self.infractions_cog.schedule_task(loop, new_infraction['id'], new_infraction)
+ # A scheduled task should only exist if the old infraction wasn't permanent
+ if old_infraction['expires_at']:
+ self.infractions_cog.cancel_task(new_infraction['id'])
+
+ # If the infraction was not marked as permanent, schedule a new expiration task
+ if request_data['expires_at']:
+ loop = asyncio.get_event_loop()
+ self.infractions_cog.schedule_task(loop, new_infraction['id'], new_infraction)
log_text += f"""
Previous expiry: {old_infraction['expires_at'] or "Permanent"}
New expiry: {new_infraction['expires_at'] or "Permanent"}
""".rstrip()
- await ctx.send(f":ok_hand: Updated infraction: {' & '.join(confirm_messages)}")
+ changes = ' & '.join(confirm_messages)
+ await ctx.send(f":ok_hand: Updated infraction #{infraction_id}: {changes}")
# Get information about the infraction's user
user_id = new_infraction['user']
@@ -169,7 +185,7 @@ class ModManagement(commands.Cog):
await ctx.invoke(self.search_reason, query)
@infraction_search_group.command(name="user", aliases=("member", "id"))
- async def search_user(self, ctx: Context, user: UserConverter) -> None:
+ async def search_user(self, ctx: Context, user: t.Union[discord.User, proxy_user]) -> None:
"""Search for infractions by member."""
infraction_list = await self.bot.api_client.get(
'bot/infractions',
@@ -231,10 +247,17 @@ class ModManagement(commands.Cog):
user_id = infraction["user"]
hidden = infraction["hidden"]
created = time.format_infraction(infraction["inserted_at"])
+
+ if active:
+ remaining = time.until_expiration(infraction["expires_at"]) or "Expired"
+ else:
+ remaining = "Inactive"
+
if infraction["expires_at"] is None:
expires = "*Permanent*"
else:
- expires = time.format_infraction(infraction["expires_at"])
+ date_from = datetime.strptime(created, time.INFRACTION_FORMAT)
+ expires = time.format_infraction_with_duration(infraction["expires_at"], date_from)
lines = textwrap.dedent(f"""
{"**===============**" if active else "==============="}
@@ -245,6 +268,7 @@ class ModManagement(commands.Cog):
Reason: {infraction["reason"] or "*None*"}
Created: {created}
Expires: {expires}
+ Remaining: {remaining}
Actor: {actor.mention if actor else actor_id}
ID: `{infraction["id"]}`
{"**===============**" if active else "==============="}
diff --git a/bot/cogs/moderation/modlog.py b/bot/cogs/moderation/modlog.py
index 88f2b6c67..e8ae0dbe6 100644
--- a/bot/cogs/moderation/modlog.py
+++ b/bot/cogs/moderation/modlog.py
@@ -1,18 +1,21 @@
import asyncio
+import difflib
+import itertools
import logging
import typing as t
from datetime import datetime
+from itertools import zip_longest
import discord
from dateutil.relativedelta import relativedelta
from deepdiff import DeepDiff
from discord import Colour
from discord.abc import GuildChannel
-from discord.ext.commands import Bot, Cog, Context
+from discord.ext.commands import Cog, Context
+from bot.bot import Bot
from bot.constants import Channels, Colours, Emojis, Event, Guild as GuildConstant, Icons, URLs
from bot.utils.time import humanize_delta
-from .utils import UserTypes
log = logging.getLogger(__name__)
@@ -23,6 +26,12 @@ CHANNEL_CHANGES_SUPPRESSED = ("_overwrites", "position")
MEMBER_CHANGES_SUPPRESSED = ("status", "activities", "_client_status", "nick")
ROLE_CHANGES_UNSUPPORTED = ("colour", "permissions")
+VOICE_STATE_ATTRIBUTES = {
+ "channel.name": "Channel",
+ "self_stream": "Streaming",
+ "self_video": "Broadcasting",
+}
+
class ModLog(Cog, name="ModLog"):
"""Logging for server events and staff actions."""
@@ -34,14 +43,16 @@ class ModLog(Cog, name="ModLog"):
self._cached_deletes = []
self._cached_edits = []
- async def upload_log(self, messages: t.List[discord.Message], actor_id: int) -> str:
- """
- Uploads the log data to the database via an API endpoint for uploading logs.
-
- Used in several mod log embeds.
+ async def upload_log(
+ self,
+ messages: t.Iterable[discord.Message],
+ actor_id: int,
+ attachments: t.Iterable[t.List[str]] = None
+ ) -> str:
+ """Upload message logs to the database and return a URL to a page for viewing the logs."""
+ if attachments is None:
+ attachments = []
- Returns a URL that can be used to view the log.
- """
response = await self.bot.api_client.post(
'bot/deleted-messages',
json={
@@ -53,9 +64,10 @@ class ModLog(Cog, name="ModLog"):
'author': message.author.id,
'channel_id': message.channel.id,
'content': message.content,
- 'embeds': [embed.to_dict() for embed in message.embeds]
+ 'embeds': [embed.to_dict() for embed in message.embeds],
+ 'attachments': attachment,
}
- for message in messages
+ for message, attachment in zip_longest(messages, attachments)
]
}
)
@@ -203,7 +215,7 @@ class ModLog(Cog, name="ModLog"):
new = value["new_value"]
old = value["old_value"]
- changes.append(f"**{key.title()}:** `{old}` **->** `{new}`")
+ changes.append(f"**{key.title()}:** `{old}` **→** `{new}`")
done.append(key)
@@ -281,7 +293,7 @@ class ModLog(Cog, name="ModLog"):
new = value["new_value"]
old = value["old_value"]
- changes.append(f"**{key.title()}:** `{old}` **->** `{new}`")
+ changes.append(f"**{key.title()}:** `{old}` **→** `{new}`")
done.append(key)
@@ -331,7 +343,7 @@ class ModLog(Cog, name="ModLog"):
new = value["new_value"]
old = value["old_value"]
- changes.append(f"**{key.title()}:** `{old}` **->** `{new}`")
+ changes.append(f"**{key.title()}:** `{old}` **→** `{new}`")
done.append(key)
@@ -352,7 +364,7 @@ class ModLog(Cog, name="ModLog"):
)
@Cog.listener()
- async def on_member_ban(self, guild: discord.Guild, member: UserTypes) -> None:
+ async def on_member_ban(self, guild: discord.Guild, member: discord.Member) -> None:
"""Log ban event to user log."""
if guild.id != GuildConstant.id:
return
@@ -484,23 +496,23 @@ class ModLog(Cog, name="ModLog"):
old = value.get("old_value")
if new and old:
- changes.append(f"**{key.title()}:** `{old}` **->** `{new}`")
+ changes.append(f"**{key.title()}:** `{old}` **→** `{new}`")
done.append(key)
if before.name != after.name:
changes.append(
- f"**Username:** `{before.name}` **->** `{after.name}`"
+ f"**Username:** `{before.name}` **→** `{after.name}`"
)
if before.discriminator != after.discriminator:
changes.append(
- f"**Discriminator:** `{before.discriminator}` **->** `{after.discriminator}`"
+ f"**Discriminator:** `{before.discriminator}` **→** `{after.discriminator}`"
)
if before.display_name != after.display_name:
changes.append(
- f"**Display name:** `{before.display_name}` **->** `{after.display_name}`"
+ f"**Display name:** `{before.display_name}` **→** `{after.display_name}`"
)
if not changes:
@@ -618,80 +630,81 @@ class ModLog(Cog, name="ModLog"):
)
@Cog.listener()
- async def on_message_edit(self, before: discord.Message, after: discord.Message) -> None:
+ async def on_message_edit(self, msg_before: discord.Message, msg_after: discord.Message) -> None:
"""Log message edit event to message change log."""
if (
- not before.guild
- or before.guild.id != GuildConstant.id
- or before.channel.id in GuildConstant.ignored
- or before.author.bot
+ not msg_before.guild
+ or msg_before.guild.id != GuildConstant.id
+ or msg_before.channel.id in GuildConstant.ignored
+ or msg_before.author.bot
):
return
- self._cached_edits.append(before.id)
+ self._cached_edits.append(msg_before.id)
- if before.content == after.content:
+ if msg_before.content == msg_after.content:
return
- author = before.author
- channel = before.channel
+ author = msg_before.author
+ channel = msg_before.channel
+ channel_name = f"{channel.category}/#{channel.name}" if channel.category else f"#{channel.name}"
- if channel.category:
- before_response = (
- f"**Author:** {author} (`{author.id}`)\n"
- f"**Channel:** {channel.category}/#{channel.name} (`{channel.id}`)\n"
- f"**Message ID:** `{before.id}`\n"
- "\n"
- f"{before.clean_content}"
- )
-
- after_response = (
- f"**Author:** {author} (`{author.id}`)\n"
- f"**Channel:** {channel.category}/#{channel.name} (`{channel.id}`)\n"
- f"**Message ID:** `{before.id}`\n"
- "\n"
- f"{after.clean_content}"
- )
- else:
- before_response = (
- f"**Author:** {author} (`{author.id}`)\n"
- f"**Channel:** #{channel.name} (`{channel.id}`)\n"
- f"**Message ID:** `{before.id}`\n"
- "\n"
- f"{before.clean_content}"
- )
+        # Get the difference per word and group the words by type - add, remove, same
+        # Note that this grouping is intentional and the groups are not sorted
+ diff = difflib.ndiff(msg_before.clean_content.split(), msg_after.clean_content.split())
+ diff_groups = tuple(
+ (diff_type, tuple(s[2:] for s in diff_words))
+ for diff_type, diff_words in itertools.groupby(diff, key=lambda s: s[0])
+ )
- after_response = (
- f"**Author:** {author} (`{author.id}`)\n"
- f"**Channel:** #{channel.name} (`{channel.id}`)\n"
- f"**Message ID:** `{before.id}`\n"
- "\n"
- f"{after.clean_content}"
- )
+ content_before: t.List[str] = []
+ content_after: t.List[str] = []
+
+ for index, (diff_type, words) in enumerate(diff_groups):
+ sub = ' '.join(words)
+ if diff_type == '-':
+ content_before.append(f"[{sub}](http://o.hi)")
+ elif diff_type == '+':
+ content_after.append(f"[{sub}](http://o.hi)")
+ elif diff_type == ' ':
+ if len(words) > 2:
+ sub = (
+ f"{words[0] if index > 0 else ''}"
+ " ... "
+ f"{words[-1] if index < len(diff_groups) - 1 else ''}"
+ )
+ content_before.append(sub)
+ content_after.append(sub)
+
+ response = (
+ f"**Author:** {author} (`{author.id}`)\n"
+ f"**Channel:** {channel_name} (`{channel.id}`)\n"
+ f"**Message ID:** `{msg_before.id}`\n"
+ "\n"
+ f"**Before**:\n{' '.join(content_before)}\n"
+ f"**After**:\n{' '.join(content_after)}\n"
+ "\n"
+ f"[Jump to message]({msg_after.jump_url})"
+ )
- if before.edited_at:
+ if msg_before.edited_at:
# Message was previously edited, to assist with self-bot detection, use the edited_at
# datetime as the baseline and create a human-readable delta between this edit event
# and the last time the message was edited
- timestamp = before.edited_at
- delta = humanize_delta(relativedelta(after.edited_at, before.edited_at))
+ timestamp = msg_before.edited_at
+ delta = humanize_delta(relativedelta(msg_after.edited_at, msg_before.edited_at))
footer = f"Last edited {delta} ago"
else:
# Message was not previously edited, use the created_at datetime as the baseline, no
# delta calculation needed
- timestamp = before.created_at
+ timestamp = msg_before.created_at
footer = None
await self.send_log_message(
- Icons.message_edit, Colour.blurple(), "Message edited (Before)", before_response,
+ Icons.message_edit, Colour.blurple(), "Message edited", response,
channel_id=Channels.message_log, timestamp_override=timestamp, footer=footer
)
- await self.send_log_message(
- Icons.message_edit, Colour.blurple(), "Message edited (After)", after_response,
- channel_id=Channels.message_log, timestamp_override=after.edited_at
- )
-
@Cog.listener()
async def on_raw_message_edit(self, event: discord.RawMessageUpdateEvent) -> None:
"""Log raw message edit event to message change log."""
@@ -718,39 +731,23 @@ class ModLog(Cog, name="ModLog"):
author = message.author
channel = message.channel
+ channel_name = f"{channel.category}/#{channel.name}" if channel.category else f"#{channel.name}"
+
+ before_response = (
+ f"**Author:** {author} (`{author.id}`)\n"
+ f"**Channel:** {channel_name} (`{channel.id}`)\n"
+ f"**Message ID:** `{message.id}`\n"
+ "\n"
+ "This message was not cached, so the message content cannot be displayed."
+ )
- if channel.category:
- before_response = (
- f"**Author:** {author} (`{author.id}`)\n"
- f"**Channel:** {channel.category}/#{channel.name} (`{channel.id}`)\n"
- f"**Message ID:** `{message.id}`\n"
- "\n"
- "This message was not cached, so the message content cannot be displayed."
- )
-
- after_response = (
- f"**Author:** {author} (`{author.id}`)\n"
- f"**Channel:** {channel.category}/#{channel.name} (`{channel.id}`)\n"
- f"**Message ID:** `{message.id}`\n"
- "\n"
- f"{message.clean_content}"
- )
- else:
- before_response = (
- f"**Author:** {author} (`{author.id}`)\n"
- f"**Channel:** #{channel.name} (`{channel.id}`)\n"
- f"**Message ID:** `{message.id}`\n"
- "\n"
- "This message was not cached, so the message content cannot be displayed."
- )
-
- after_response = (
- f"**Author:** {author} (`{author.id}`)\n"
- f"**Channel:** #{channel.name} (`{channel.id}`)\n"
- f"**Message ID:** `{message.id}`\n"
- "\n"
- f"{message.clean_content}"
- )
+ after_response = (
+ f"**Author:** {author} (`{author.id}`)\n"
+ f"**Channel:** {channel_name} (`{channel.id}`)\n"
+ f"**Message ID:** `{message.id}`\n"
+ "\n"
+ f"{message.clean_content}"
+ )
await self.send_log_message(
Icons.message_edit, Colour.blurple(), "Message edited (Before)",
@@ -761,3 +758,76 @@ class ModLog(Cog, name="ModLog"):
Icons.message_edit, Colour.blurple(), "Message edited (After)",
after_response, channel_id=Channels.message_log
)
+
+ @Cog.listener()
+ async def on_voice_state_update(
+ self,
+ member: discord.Member,
+ before: discord.VoiceState,
+ after: discord.VoiceState
+ ) -> None:
+ """Log member voice state changes to the voice log channel."""
+ if (
+ member.guild.id != GuildConstant.id
+ or (before.channel and before.channel.id in GuildConstant.ignored)
+ ):
+ return
+
+ if member.id in self._ignored[Event.voice_state_update]:
+ self._ignored[Event.voice_state_update].remove(member.id)
+ return
+
+ # Exclude all channel attributes except the name.
+ diff = DeepDiff(
+ before,
+ after,
+ exclude_paths=("root.session_id", "root.afk"),
+ exclude_regex_paths=r"root\.channel\.(?!name)",
+ )
+
+        # A type change seems to always take precedence over a value change. Furthermore, it will
+ # include the value change along with the type change anyway. Therefore, it's OK to
+ # "overwrite" values_changed; in practice there will never even be anything to overwrite.
+ diff_values = {**diff.get("values_changed", {}), **diff.get("type_changes", {})}
+
+ icon = Icons.voice_state_blue
+ colour = Colour.blurple()
+ changes = []
+
+ for attr, values in diff_values.items():
+ if not attr: # Not sure why, but it happens.
+ continue
+
+ old = values["old_value"]
+ new = values["new_value"]
+
+ attr = attr[5:] # Remove "root." prefix.
+ attr = VOICE_STATE_ATTRIBUTES.get(attr, attr.replace("_", " ").capitalize())
+
+ changes.append(f"**{attr}:** `{old}` **→** `{new}`")
+
+ # Set the embed icon and colour depending on which attribute changed.
+ if any(name in attr for name in ("Channel", "deaf", "mute")):
+ if new is None or new is True:
+ # Left a channel or was muted/deafened.
+ icon = Icons.voice_state_red
+ colour = Colours.soft_red
+ elif old is None or old is True:
+ # Joined a channel or was unmuted/undeafened.
+ icon = Icons.voice_state_green
+ colour = Colours.soft_green
+
+ if not changes:
+ return
+
+ message = "\n".join(f"{Emojis.bullet} {item}" for item in sorted(changes))
+ message = f"**{member}** (`{member.id}`)\n{message}"
+
+ await self.send_log_message(
+ icon_url=icon,
+ colour=colour,
+ title="Voice state updated",
+ text=message,
+ thumbnail=member.avatar_url_as(static_format="png"),
+ channel_id=Channels.voice_log
+ )
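The rewritten on_message_edit above builds a single before/after view of an edit by splitting both versions into words, running difflib.ndiff over them, and grouping the output by its one-character prefix. A stdlib-only sketch of that grouping step:

    import difflib
    import itertools

    before = "the quick brown fox".split()
    after = "the quick red fox".split()

    diff = difflib.ndiff(before, after)
    diff_groups = tuple(
        # Strip the two-character "- ", "+ " or "  " prefix from each word.
        (diff_type, tuple(word[2:] for word in words))
        for diff_type, words in itertools.groupby(diff, key=lambda s: s[0])
    )

    for diff_type, words in diff_groups:
        label = {"-": "removed", "+": "added", " ": "unchanged"}.get(diff_type, "hint")
        print(label, words)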
diff --git a/bot/cogs/moderation/scheduler.py b/bot/cogs/moderation/scheduler.py
index 49b61f35e..3c5185468 100644
--- a/bot/cogs/moderation/scheduler.py
+++ b/bot/cogs/moderation/scheduler.py
@@ -7,16 +7,17 @@ from gettext import ngettext
import dateutil.parser
import discord
-from discord.ext.commands import Bot, Context
+from discord.ext.commands import Context
from bot import constants
from bot.api import ResponseCodeError
+from bot.bot import Bot
from bot.constants import Colours, STAFF_CHANNELS
from bot.utils import time
from bot.utils.scheduling import Scheduler
from . import utils
from .modlog import ModLog
-from .utils import MemberObject
+from .utils import UserSnowflake
log = logging.getLogger(__name__)
@@ -37,7 +38,7 @@ class InfractionScheduler(Scheduler):
async def reschedule_infractions(self, supported_infractions: t.Container[str]) -> None:
"""Schedule expiration for previous infractions."""
- await self.bot.wait_until_ready()
+ await self.bot.wait_until_guild_available()
log.trace(f"Rescheduling infractions for {self.__class__.__name__}.")
@@ -76,21 +77,18 @@ class InfractionScheduler(Scheduler):
self,
ctx: Context,
infraction: utils.Infraction,
- user: MemberObject,
+ user: UserSnowflake,
action_coro: t.Optional[t.Awaitable] = None
) -> None:
"""Apply an infraction to the user, log the infraction, and optionally notify the user."""
infr_type = infraction["type"]
icon = utils.INFRACTION_ICONS[infr_type][0]
reason = infraction["reason"]
- expiry = infraction["expires_at"]
+ expiry = time.format_infraction_with_duration(infraction["expires_at"])
id_ = infraction['id']
log.trace(f"Applying {infr_type} infraction #{id_} to {user}.")
- if expiry:
- expiry = time.format_infraction(expiry)
-
# Default values for the confirmation message and mod log.
confirm_msg = f":ok_hand: applied"
@@ -108,16 +106,20 @@ class InfractionScheduler(Scheduler):
# DM the user about the infraction if it's not a shadow/hidden infraction.
if not infraction["hidden"]:
- # Sometimes user is a discord.Object; make it a proper user.
- user = await self.bot.fetch_user(user.id)
+ dm_result = f"{constants.Emojis.failmail} "
+ dm_log_text = "\nDM: **Failed**"
- # Accordingly display whether the user was successfully notified via DM.
- if await utils.notify_infraction(user, infr_type, expiry, reason, icon):
- dm_result = ":incoming_envelope: "
- dm_log_text = "\nDM: Sent"
+ # Sometimes user is a discord.Object; make it a proper user.
+ try:
+ if not isinstance(user, (discord.Member, discord.User)):
+ user = await self.bot.fetch_user(user.id)
+ except discord.HTTPException as e:
+ log.error(f"Failed to DM {user.id}: could not fetch user (status {e.status})")
else:
- dm_log_text = "\nDM: **Failed**"
- log_content = ctx.author.mention
+ # Accordingly display whether the user was successfully notified via DM.
+ if await utils.notify_infraction(user, infr_type, expiry, reason, icon):
+ dm_result = ":incoming_envelope: "
+ dm_log_text = "\nDM: Sent"
if infraction["actor"] == self.bot.user.id:
log.trace(
@@ -149,14 +151,18 @@ class InfractionScheduler(Scheduler):
if expiry:
# Schedule the expiration of the infraction.
self.schedule_task(ctx.bot.loop, infraction["id"], infraction)
- except discord.Forbidden:
+ except discord.HTTPException as e:
# Accordingly display that applying the infraction failed.
confirm_msg = f":x: failed to apply"
expiry_msg = ""
log_content = ctx.author.mention
log_title = "failed to apply"
- log.warning(f"Failed to apply {infr_type} infraction #{id_} to {user}.")
+ log_msg = f"Failed to apply {infr_type} infraction #{id_} to {user}"
+ if isinstance(e, discord.Forbidden):
+ log.warning(f"{log_msg}: bot lacks permissions.")
+ else:
+ log.exception(log_msg)
# Send a confirmation message to the invoking context.
log.trace(f"Sending infraction #{id_} confirmation message.")
@@ -183,7 +189,7 @@ class InfractionScheduler(Scheduler):
log.info(f"Applied {infr_type} infraction #{id_} to {user}.")
- async def pardon_infraction(self, ctx: Context, infr_type: str, user: MemberObject) -> None:
+ async def pardon_infraction(self, ctx: Context, infr_type: str, user: UserSnowflake) -> None:
"""Prematurely end an infraction for a user and log the action in the mod log."""
log.trace(f"Pardoning {infr_type} infraction for {user}.")
@@ -253,8 +259,7 @@ class InfractionScheduler(Scheduler):
if log_text.get("DM") == "Sent":
dm_emoji = ":incoming_envelope: "
elif "DM" in log_text:
- # Mention the actor because the DM failed to send.
- log_content = ctx.author.mention
+ dm_emoji = f"{constants.Emojis.failmail} "
# Accordingly display whether the pardon failed.
if "Failure" in log_text:
@@ -304,16 +309,23 @@ class InfractionScheduler(Scheduler):
guild = self.bot.get_guild(constants.Guild.id)
mod_role = guild.get_role(constants.Roles.moderator)
user_id = infraction["user"]
+ actor = infraction["actor"]
type_ = infraction["type"]
id_ = infraction["id"]
+ inserted_at = infraction["inserted_at"]
+ expiry = infraction["expires_at"]
log.info(f"Marking infraction #{id_} as inactive (expired).")
+ expiry = dateutil.parser.isoparse(expiry).replace(tzinfo=None) if expiry else None
+ created = time.format_infraction_with_duration(inserted_at, expiry)
+
log_content = None
log_text = {
- "Member": str(user_id),
- "Actor": str(self.bot.user),
- "Reason": infraction["reason"]
+ "Member": f"<@{user_id}>",
+ "Actor": str(self.bot.get_user(actor) or actor),
+ "Reason": infraction["reason"],
+ "Created": created,
}
try:
@@ -327,12 +339,12 @@ class InfractionScheduler(Scheduler):
f"Attempted to deactivate an unsupported infraction #{id_} ({type_})!"
)
except discord.Forbidden:
- log.warning(f"Failed to deactivate infraction #{id_} ({type_}): bot lacks permissions")
+ log.warning(f"Failed to deactivate infraction #{id_} ({type_}): bot lacks permissions.")
log_text["Failure"] = f"The bot lacks permissions to do this (role hierarchy?)"
log_content = mod_role.mention
except discord.HTTPException as e:
log.exception(f"Failed to deactivate infraction #{id_} ({type_})")
- log_text["Failure"] = f"HTTPException with code {e.code}."
+ log_text["Failure"] = f"HTTPException with status {e.status} and code {e.code}."
log_content = mod_role.mention
# Check if the user is currently being watched by Big Brother.
@@ -379,14 +391,19 @@ class InfractionScheduler(Scheduler):
if send_log:
log_title = f"expiration failed" if "Failure" in log_text else "expired"
+ user = self.bot.get_user(user_id)
+ avatar = user.avatar_url_as(static_format="png") if user else None
+
log.trace(f"Sending deactivation mod log for infraction #{id_}.")
await self.mod_log.send_log_message(
icon_url=utils.INFRACTION_ICONS[type_][1],
colour=Colours.soft_green,
title=f"Infraction {log_title}: {type_}",
+ thumbnail=avatar,
text="\n".join(f"{k}: {v}" for k, v in log_text.items()),
footer=f"ID: {id_}",
content=log_content,
)
return log_text
diff --git a/bot/cogs/moderation/superstarify.py b/bot/cogs/moderation/superstarify.py
index 9b3c62403..c41874a95 100644
--- a/bot/cogs/moderation/superstarify.py
+++ b/bot/cogs/moderation/superstarify.py
@@ -6,9 +6,11 @@ import typing as t
from pathlib import Path
from discord import Colour, Embed, Member
-from discord.ext.commands import Bot, Cog, Context, command
+from discord.ext.commands import Cog, Context, command
from bot import constants
+from bot.bot import Bot
+from bot.converters import Expiry
from bot.utils.checks import with_role_check
from bot.utils.time import format_infraction
from . import utils
@@ -106,8 +108,9 @@ class Superstarify(InfractionScheduler, Cog):
self,
ctx: Context,
member: Member,
- duration: utils.Expiry,
- reason: str = None
+ duration: Expiry,
+ *,
+ reason: str = None,
) -> None:
"""
Temporarily force a random superstar name (like Taylor Swift) to be the user's nickname.
@@ -132,7 +135,7 @@ class Superstarify(InfractionScheduler, Cog):
# Post the infraction to the API
reason = reason or f"old nick: {member.display_name}"
- infraction = await utils.post_infraction(ctx, member, "superstar", reason, duration)
+ infraction = await utils.post_infraction(ctx, member, "superstar", reason, duration, active=True)
id_ = infraction["id"]
old_nick = member.display_name
diff --git a/bot/cogs/moderation/utils.py b/bot/cogs/moderation/utils.py
index 325b9567a..5052b9048 100644
--- a/bot/cogs/moderation/utils.py
+++ b/bot/cogs/moderation/utils.py
@@ -4,12 +4,10 @@ import typing as t
from datetime import datetime
import discord
-from discord.ext import commands
from discord.ext.commands import Context
from bot.api import ResponseCodeError
from bot.constants import Colours, Icons
-from bot.converters import Duration, ISODateTime
log = logging.getLogger(__name__)
@@ -25,40 +23,49 @@ INFRACTION_ICONS = {
RULES_URL = "https://pythondiscord.com/pages/rules"
APPEALABLE_INFRACTIONS = ("ban", "mute")
-UserTypes = t.Union[discord.Member, discord.User]
-MemberObject = t.Union[UserTypes, discord.Object]
+# Type aliases
+UserObject = t.Union[discord.Member, discord.User]
+UserSnowflake = t.Union[UserObject, discord.Object]
Infraction = t.Dict[str, t.Union[str, int, bool]]
-Expiry = t.Union[Duration, ISODateTime]
-def proxy_user(user_id: str) -> discord.Object:
+async def post_user(ctx: Context, user: UserSnowflake) -> t.Optional[dict]:
"""
- Create a proxy user object from the given id.
+ Create a new user in the database.
- Used when a Member or User object cannot be resolved.
+    Used when an infraction needs to be applied to a user who is absent from the guild.
"""
- log.trace(f"Attempting to create a proxy user for the user id {user_id}.")
+ log.trace(f"Attempting to add user {user.id} to the database.")
- try:
- user_id = int(user_id)
- except ValueError:
- raise commands.BadArgument
+ if not isinstance(user, (discord.Member, discord.User)):
+ log.warning("The user being added to the DB is not a Member or User object.")
- user = discord.Object(user_id)
- user.mention = user.id
- user.avatar_url_as = lambda static_format: None
+ payload = {
+ 'avatar_hash': getattr(user, 'avatar', 0),
+ 'discriminator': int(getattr(user, 'discriminator', 0)),
+ 'id': user.id,
+ 'in_guild': False,
+ 'name': getattr(user, 'name', 'Name unknown'),
+ 'roles': []
+ }
- return user
+ try:
+ response = await ctx.bot.api_client.post('bot/users', json=payload)
+ log.info(f"User {user.id} added to the DB.")
+ return response
+ except ResponseCodeError as e:
+ log.error(f"Failed to add user {user.id} to the DB. {e}")
+ await ctx.send(f":x: The attempt to add the user to the DB failed: status {e.status}")
async def post_infraction(
ctx: Context,
- user: MemberObject,
+ user: UserSnowflake,
infr_type: str,
reason: str,
expires_at: datetime = None,
hidden: bool = False,
- active: bool = True,
+ active: bool = True
) -> t.Optional[dict]:
"""Posts an infraction to the API."""
log.trace(f"Posting {infr_type} infraction for {user} to the API.")
@@ -74,27 +81,23 @@ async def post_infraction(
if expires_at:
payload['expires_at'] = expires_at.isoformat()
- try:
- response = await ctx.bot.api_client.post('bot/infractions', json=payload)
- except ResponseCodeError as exp:
- if exp.status == 400 and 'user' in exp.response_json:
- log.info(
- f"{ctx.author} tried to add a {infr_type} infraction to `{user.id}`, "
- "but that user id was not found in the database."
- )
- await ctx.send(
- f":x: Cannot add infraction, the specified user is not known to the database."
- )
- return
- else:
- log.exception("An unexpected ResponseCodeError occurred while adding an infraction:")
- await ctx.send(":x: There was an error adding the infraction.")
- return
-
- return response
-
-
-async def has_active_infraction(ctx: Context, user: MemberObject, infr_type: str) -> bool:
+    # Try to post the infraction. If it fails because the user is unknown, add the user to the DB and retry once.
+ for should_post_user in (True, False):
+ try:
+ response = await ctx.bot.api_client.post('bot/infractions', json=payload)
+ return response
+ except ResponseCodeError as e:
+ if e.status == 400 and 'user' in e.response_json:
+                # Only attempt to add the user to the database on the first pass:
+ if not should_post_user or await post_user(ctx, user) is None:
+ return
+ else:
+ log.exception(f"Unexpected error while adding an infraction for {user}:")
+ await ctx.send(f":x: There was an error adding the infraction: status {e.status}.")
+ return
+
+
+async def has_active_infraction(ctx: Context, user: UserSnowflake, infr_type: str) -> bool:
"""Checks if a user already has an active infraction of the given type."""
log.trace(f"Checking if {user} has active infractions of type {infr_type}.")
@@ -119,7 +122,7 @@ async def has_active_infraction(ctx: Context, user: MemberObject, infr_type: str
async def notify_infraction(
- user: UserTypes,
+ user: UserObject,
infr_type: str,
expires_at: t.Optional[str] = None,
reason: t.Optional[str] = None,
@@ -150,7 +153,7 @@ async def notify_infraction(
async def notify_pardon(
- user: UserTypes,
+ user: UserObject,
title: str,
content: str,
icon_url: str = Icons.user_verified
@@ -168,7 +171,7 @@ async def notify_pardon(
return await send_private_embed(user, embed)
-async def send_private_embed(user: UserTypes, embed: discord.Embed) -> bool:
+async def send_private_embed(user: UserObject, embed: discord.Embed) -> bool:
"""
A helper method for sending an embed to a user's DMs.
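post_infraction above now loops over (True, False) so that a 400 "unknown user" response triggers a single post_user call followed by one retry. A compact sketch of that retry-once shape, with a hypothetical in-memory client standing in for the bot's api_client:

    import asyncio
    import typing as t

    class FakeResponseError(Exception):
        # Hypothetical stand-in for bot.api.ResponseCodeError.
        def __init__(self, status: int, response_json: dict):
            self.status = status
            self.response_json = response_json

    class FakeAPIClient:
        def __init__(self) -> None:
            self.known_users = set()

        async def post_infraction(self, payload: dict) -> dict:
            if payload["user"] not in self.known_users:
                raise FakeResponseError(400, {"user": ["unknown"]})
            return {"id": 1, **payload}

        async def post_user(self, user_id: int) -> None:
            self.known_users.add(user_id)

    async def post_infraction(api: FakeAPIClient, payload: dict) -> t.Optional[dict]:
        # Try to post the infraction; if the user is unknown, add the user once and retry.
        for should_post_user in (True, False):
            try:
                return await api.post_infraction(payload)
            except FakeResponseError as e:
                if e.status == 400 and "user" in e.response_json and should_post_user:
                    await api.post_user(payload["user"])
                else:
                    return None

    print(asyncio.run(post_infraction(FakeAPIClient(), {"user": 1234, "type": "note"})))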
diff --git a/bot/cogs/off_topic_names.py b/bot/cogs/off_topic_names.py
index 78792240f..81511f99d 100644
--- a/bot/cogs/off_topic_names.py
+++ b/bot/cogs/off_topic_names.py
@@ -4,9 +4,10 @@ import logging
from datetime import datetime, timedelta
from discord import Colour, Embed
-from discord.ext.commands import BadArgument, Bot, Cog, Context, Converter, group
+from discord.ext.commands import BadArgument, Cog, Context, Converter, group
from bot.api import ResponseCodeError
+from bot.bot import Bot
from bot.constants import Channels, MODERATION_ROLES
from bot.decorators import with_role
from bot.pagination import LinePaginator
@@ -87,7 +88,7 @@ class OffTopicNames(Cog):
async def init_offtopic_updater(self) -> None:
"""Start off-topic channel updating event loop if it hasn't already started."""
- await self.bot.wait_until_ready()
+ await self.bot.wait_until_guild_available()
if self.updater_task is None:
coro = update_names(self.bot)
self.updater_task = self.bot.loop.create_task(coro)
@@ -184,6 +185,5 @@ class OffTopicNames(Cog):
def setup(bot: Bot) -> None:
- """Off topic names cog load."""
+ """Load the OffTopicNames cog."""
bot.add_cog(OffTopicNames(bot))
- log.info("Cog loaded: OffTopicNames")
diff --git a/bot/cogs/reddit.py b/bot/cogs/reddit.py
index 0d06e9c26..5a7fa100f 100644
--- a/bot/cogs/reddit.py
+++ b/bot/cogs/reddit.py
@@ -2,13 +2,16 @@ import asyncio
import logging
import random
import textwrap
+from collections import namedtuple
from datetime import datetime, timedelta
from typing import List
+from aiohttp import BasicAuth, ClientError
from discord import Colour, Embed, TextChannel
-from discord.ext.commands import Bot, Cog, Context, group
+from discord.ext.commands import Cog, Context, group
from discord.ext.tasks import loop
+from bot.bot import Bot
from bot.constants import Channels, ERROR_REPLIES, Emojis, Reddit as RedditConfig, STAFF_ROLES, Webhooks
from bot.converters import Subreddit
from bot.decorators import with_role
@@ -16,29 +19,36 @@ from bot.pagination import LinePaginator
log = logging.getLogger(__name__)
+AccessToken = namedtuple("AccessToken", ["token", "expires_at"])
+
class Reddit(Cog):
"""Track subreddit posts and show detailed statistics about them."""
- HEADERS = {"User-Agent": "Discord Bot: PythonDiscord (https://pythondiscord.com/)"}
+ HEADERS = {"User-Agent": "python3:python-discord/bot:1.0.0 (by /u/PythonDiscord)"}
URL = "https://www.reddit.com"
- MAX_FETCH_RETRIES = 3
+ OAUTH_URL = "https://oauth.reddit.com"
+ MAX_RETRIES = 3
def __init__(self, bot: Bot):
self.bot = bot
- self.webhook = None # set in on_ready
- bot.loop.create_task(self.init_reddit_ready())
+ self.webhook = None
+ self.access_token = None
+ self.client_auth = BasicAuth(RedditConfig.client_id, RedditConfig.secret)
+ bot.loop.create_task(self.init_reddit_ready())
self.auto_poster_loop.start()
def cog_unload(self) -> None:
- """Stops the loops when the cog is unloaded."""
+ """Stop the loop task and revoke the access token when the cog is unloaded."""
self.auto_poster_loop.cancel()
+ if self.access_token and self.access_token.expires_at > datetime.utcnow():
+ asyncio.create_task(self.revoke_access_token())
async def init_reddit_ready(self) -> None:
"""Sets the reddit webhook when the cog is loaded."""
- await self.bot.wait_until_ready()
+ await self.bot.wait_until_guild_available()
if not self.webhook:
self.webhook = await self.bot.fetch_webhook(Webhooks.reddit)
@@ -47,20 +57,82 @@ class Reddit(Cog):
"""Get the #reddit channel object from the bot's cache."""
return self.bot.get_channel(Channels.reddit)
+ async def get_access_token(self) -> None:
+ """
+ Get a Reddit API OAuth2 access token and assign it to self.access_token.
+
+        A token is valid for 1 hour. Retrieval is attempted up to MAX_RETRIES times; if all attempts
+        fail, the cog is unloaded and a ClientError is raised.
+ """
+ for i in range(1, self.MAX_RETRIES + 1):
+ response = await self.bot.http_session.post(
+ url=f"{self.URL}/api/v1/access_token",
+ headers=self.HEADERS,
+ auth=self.client_auth,
+ data={
+ "grant_type": "client_credentials",
+ "duration": "temporary"
+ }
+ )
+
+ if response.status == 200 and response.content_type == "application/json":
+ content = await response.json()
+ expiration = int(content["expires_in"]) - 60 # Subtract 1 minute for leeway.
+ self.access_token = AccessToken(
+ token=content["access_token"],
+ expires_at=datetime.utcnow() + timedelta(seconds=expiration)
+ )
+
+ log.debug(f"New token acquired; expires on UTC {self.access_token.expires_at}")
+ return
+ else:
+ log.debug(
+ f"Failed to get an access token: "
+ f"status {response.status} & content type {response.content_type}; "
+ f"retrying ({i}/{self.MAX_RETRIES})"
+ )
+
+ await asyncio.sleep(3)
+
+ self.bot.remove_cog(self.qualified_name)
+ raise ClientError("Authentication with the Reddit API failed. Unloading the cog.")
+
+ async def revoke_access_token(self) -> None:
+ """
+ Revoke the OAuth2 access token for the Reddit API.
+
+ For security reasons, it's good practice to revoke the token when it's no longer being used.
+ """
+ response = await self.bot.http_session.post(
+ url=f"{self.URL}/api/v1/revoke_token",
+ headers=self.HEADERS,
+ auth=self.client_auth,
+ data={
+ "token": self.access_token.token,
+ "token_type_hint": "access_token"
+ }
+ )
+
+ if response.status == 204 and response.content_type == "application/json":
+ self.access_token = None
+ else:
+ log.warning(f"Unable to revoke access token: status {response.status}.")
+
async def fetch_posts(self, route: str, *, amount: int = 25, params: dict = None) -> List[dict]:
"""A helper method to fetch a certain amount of Reddit posts at a given route."""
# Reddit's JSON responses only provide 25 posts at most.
if not 25 >= amount > 0:
raise ValueError("Invalid amount of subreddit posts requested.")
- if params is None:
- params = {}
+ # Renew the token if necessary.
+ if not self.access_token or self.access_token.expires_at < datetime.utcnow():
+ await self.get_access_token()
- url = f"{self.URL}/{route}.json"
- for _ in range(self.MAX_FETCH_RETRIES):
+ url = f"{self.OAUTH_URL}/{route}"
+ for _ in range(self.MAX_RETRIES):
response = await self.bot.http_session.get(
url=url,
- headers=self.HEADERS,
+ headers={**self.HEADERS, "Authorization": f"bearer {self.access_token.token}"},
params=params
)
if response.status == 200 and response.content_type == 'application/json':
@@ -136,7 +208,7 @@ class Reddit(Cog):
await asyncio.sleep(seconds_until)
- await self.bot.wait_until_ready()
+ await self.bot.wait_until_guild_available()
if not self.webhook:
await self.bot.fetch_webhook(Webhooks.reddit)
@@ -217,6 +289,8 @@ class Reddit(Cog):
def setup(bot: Bot) -> None:
- """Reddit cog load."""
+ """Load the Reddit cog."""
+ if not RedditConfig.secret or not RedditConfig.client_id:
+ log.error("Credentials not provided, cog not loaded.")
+ return
bot.add_cog(Reddit(bot))
- log.info("Cog loaded: Reddit")
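The Reddit cog above now talks to oauth.reddit.com with a client-credentials token cached in an AccessToken namedtuple and renewed about a minute before expiry. A reduced sketch of that caching check, with a hypothetical fetch_token coroutine in place of the real POST to /api/v1/access_token:

    import asyncio
    from collections import namedtuple
    from datetime import datetime, timedelta

    AccessToken = namedtuple("AccessToken", ["token", "expires_at"])

    _cached_token = None

    async def fetch_token() -> AccessToken:
        # Hypothetical stand-in for posting to /api/v1/access_token with BasicAuth credentials.
        expires_in = 3600
        return AccessToken(
            token="example-token",
            # Keep a minute of leeway, as the cog does.
            expires_at=datetime.utcnow() + timedelta(seconds=expires_in - 60),
        )

    async def get_token() -> AccessToken:
        global _cached_token
        # Renew only when there is no cached token or the cached one has expired.
        if _cached_token is None or _cached_token.expires_at < datetime.utcnow():
            _cached_token = await fetch_token()
        return _cached_token

    print(asyncio.run(get_token()).token)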
diff --git a/bot/cogs/reminders.py b/bot/cogs/reminders.py
index 81990704b..041791056 100644
--- a/bot/cogs/reminders.py
+++ b/bot/cogs/reminders.py
@@ -2,15 +2,17 @@ import asyncio
import logging
import random
import textwrap
+import typing as t
from datetime import datetime, timedelta
from operator import itemgetter
-from typing import Optional
+import discord
+from dateutil.parser import isoparse
from dateutil.relativedelta import relativedelta
-from discord import Colour, Embed, Message
-from discord.ext.commands import Bot, Cog, Context, group
+from discord.ext.commands import Cog, Context, group
-from bot.constants import Channels, Icons, NEGATIVE_REPLIES, POSITIVE_REPLIES, STAFF_ROLES
+from bot.bot import Bot
+from bot.constants import Guild, Icons, NEGATIVE_REPLIES, POSITIVE_REPLIES, STAFF_ROLES
from bot.converters import Duration
from bot.pagination import LinePaginator
from bot.utils.checks import without_role_check
@@ -19,7 +21,7 @@ from bot.utils.time import humanize_delta, wait_until
log = logging.getLogger(__name__)
-WHITELISTED_CHANNELS = (Channels.bot,)
+WHITELISTED_CHANNELS = Guild.reminder_whitelist
MAXIMUM_REMINDERS = 5
@@ -34,7 +36,7 @@ class Reminders(Scheduler, Cog):
async def reschedule_reminders(self) -> None:
"""Get all current reminders from the API and reschedule them."""
- await self.bot.wait_until_ready()
+ await self.bot.wait_until_guild_available()
response = await self.bot.api_client.get(
'bot/reminders',
params={'active': 'true'}
@@ -44,29 +46,64 @@ class Reminders(Scheduler, Cog):
loop = asyncio.get_event_loop()
for reminder in response:
- remind_at = datetime.fromisoformat(reminder['expiration'][:-1])
+ is_valid, *_ = self.ensure_valid_reminder(reminder, cancel_task=False)
+ if not is_valid:
+ continue
+
+ remind_at = isoparse(reminder['expiration']).replace(tzinfo=None)
# If the reminder is already overdue ...
if remind_at < now:
late = relativedelta(now, remind_at)
await self.send_reminder(reminder, late)
-
else:
self.schedule_task(loop, reminder["id"], reminder)
+ def ensure_valid_reminder(
+ self,
+ reminder: dict,
+ cancel_task: bool = True
+ ) -> t.Tuple[bool, discord.User, discord.TextChannel]:
+        """Ensure the reminder's author and channel can be fetched; otherwise, delete the reminder."""
+ user = self.bot.get_user(reminder['author'])
+ channel = self.bot.get_channel(reminder['channel_id'])
+ is_valid = True
+ if not user or not channel:
+ is_valid = False
+ log.info(
+ f"Reminder {reminder['id']} invalid: "
+ f"User {reminder['author']}={user}, Channel {reminder['channel_id']}={channel}."
+ )
+ asyncio.create_task(self._delete_reminder(reminder['id'], cancel_task))
+
+ return is_valid, user, channel
+
@staticmethod
- async def _send_confirmation(ctx: Context, on_success: str) -> None:
+ async def _send_confirmation(
+ ctx: Context,
+ on_success: str,
+ reminder_id: str,
+ delivery_dt: t.Optional[datetime],
+ ) -> None:
"""Send an embed confirming the reminder change was made successfully."""
- embed = Embed()
- embed.colour = Colour.green()
+ embed = discord.Embed()
+ embed.colour = discord.Colour.green()
embed.title = random.choice(POSITIVE_REPLIES)
embed.description = on_success
+
+ footer_str = f"ID: {reminder_id}"
+ if delivery_dt:
+ # Reminder deletion will have a `None` `delivery_dt`
+ footer_str = f"{footer_str}, Due: {delivery_dt.strftime('%Y-%m-%dT%H:%M:%S')}"
+
+ embed.set_footer(text=footer_str)
+
await ctx.send(embed=embed)
async def _scheduled_task(self, reminder: dict) -> None:
"""A coroutine which sends the reminder once the time is reached, and cancels the running task."""
reminder_id = reminder["id"]
- reminder_datetime = datetime.fromisoformat(reminder['expiration'][:-1])
+ reminder_datetime = isoparse(reminder['expiration']).replace(tzinfo=None)
# Send the reminder message once the desired duration has passed
await wait_until(reminder_datetime)
@@ -78,12 +115,13 @@ class Reminders(Scheduler, Cog):
# Now we can begone with it from our schedule list.
self.cancel_task(reminder_id)
- async def _delete_reminder(self, reminder_id: str) -> None:
+ async def _delete_reminder(self, reminder_id: str, cancel_task: bool = True) -> None:
"""Delete a reminder from the database, given its ID, and cancel the running task."""
await self.bot.api_client.delete('bot/reminders/' + str(reminder_id))
- # Now we can remove it from the schedule list
- self.cancel_task(reminder_id)
+ if cancel_task:
+ # Now we can remove it from the schedule list
+ self.cancel_task(reminder_id)
async def _reschedule_reminder(self, reminder: dict) -> None:
"""Reschedule a reminder object."""
@@ -94,11 +132,12 @@ class Reminders(Scheduler, Cog):
async def send_reminder(self, reminder: dict, late: relativedelta = None) -> None:
"""Send the reminder."""
- channel = self.bot.get_channel(reminder["channel_id"])
- user = self.bot.get_user(reminder["author"])
+ is_valid, user, channel = self.ensure_valid_reminder(reminder)
+ if not is_valid:
+ return
- embed = Embed()
- embed.colour = Colour.blurple()
+ embed = discord.Embed()
+ embed.colour = discord.Colour.blurple()
embed.set_author(
icon_url=Icons.remind_blurple,
name="It has arrived!"
@@ -110,7 +149,7 @@ class Reminders(Scheduler, Cog):
embed.description += f"\n[Jump back to when you created the reminder]({reminder['jump_url']})"
if late:
- embed.colour = Colour.red()
+ embed.colour = discord.Colour.red()
embed.set_author(
icon_url=Icons.remind_red,
name=f"Sorry it arrived {humanize_delta(late, max_units=2)} late!"
@@ -128,20 +167,20 @@ class Reminders(Scheduler, Cog):
await ctx.invoke(self.new_reminder, expiration=expiration, content=content)
@remind_group.command(name="new", aliases=("add", "create"))
- async def new_reminder(self, ctx: Context, expiration: Duration, *, content: str) -> Optional[Message]:
+ async def new_reminder(self, ctx: Context, expiration: Duration, *, content: str) -> t.Optional[discord.Message]:
"""
Set yourself a simple reminder.
Expiration is parsed per: http://strftime.org/
"""
- embed = Embed()
+ embed = discord.Embed()
# If the user is not staff, we need to verify whether or not to make a reminder at all.
if without_role_check(ctx, *STAFF_ROLES):
# If they don't have permission to set a reminder in this channel
if ctx.channel.id not in WHITELISTED_CHANNELS:
- embed.colour = Colour.red()
+ embed.colour = discord.Colour.red()
embed.title = random.choice(NEGATIVE_REPLIES)
embed.description = "Sorry, you can't do that here!"
@@ -158,7 +197,7 @@ class Reminders(Scheduler, Cog):
# Let's limit this, so we don't get 10 000
# reminders from kip or something like that :P
if len(active_reminders) > MAXIMUM_REMINDERS:
- embed.colour = Colour.red()
+ embed.colour = discord.Colour.red()
embed.title = random.choice(NEGATIVE_REPLIES)
embed.description = "You have too many active reminders!"
@@ -177,18 +216,21 @@ class Reminders(Scheduler, Cog):
)
now = datetime.utcnow() - timedelta(seconds=1)
+ humanized_delta = humanize_delta(relativedelta(expiration, now))
# Confirm to the user that it worked.
await self._send_confirmation(
ctx,
- on_success=f"Your reminder will arrive in {humanize_delta(relativedelta(expiration, now))}!"
+ on_success=f"Your reminder will arrive in {humanized_delta}!",
+ reminder_id=reminder["id"],
+ delivery_dt=expiration,
)
loop = asyncio.get_event_loop()
self.schedule_task(loop, reminder["id"], reminder)
@remind_group.command(name="list")
- async def list_reminders(self, ctx: Context) -> Optional[Message]:
+ async def list_reminders(self, ctx: Context) -> t.Optional[discord.Message]:
"""View a paginated embed of all reminders for your user."""
# Get all the user's reminders from the database.
data = await self.bot.api_client.get(
@@ -211,7 +253,7 @@ class Reminders(Scheduler, Cog):
for content, remind_at, id_ in reminders:
# Parse and humanize the time, make it pretty :D
- remind_datetime = datetime.fromisoformat(remind_at[:-1])
+ remind_datetime = isoparse(remind_at).replace(tzinfo=None)
time = humanize_delta(relativedelta(remind_datetime, now))
text = textwrap.dedent(f"""
@@ -221,8 +263,8 @@ class Reminders(Scheduler, Cog):
lines.append(text)
- embed = Embed()
- embed.colour = Colour.blurple()
+ embed = discord.Embed()
+ embed.colour = discord.Colour.blurple()
embed.title = f"Reminders for {ctx.author}"
# Remind the user that they have no reminders :^)
@@ -231,7 +273,7 @@ class Reminders(Scheduler, Cog):
return await ctx.send(embed=embed)
# Construct the embed and paginate it.
- embed.colour = Colour.blurple()
+ embed.colour = discord.Colour.blurple()
await LinePaginator.paginate(
lines,
@@ -260,7 +302,10 @@ class Reminders(Scheduler, Cog):
# Send a confirmation message to the channel
await self._send_confirmation(
- ctx, on_success="That reminder has been edited successfully!"
+ ctx,
+ on_success="That reminder has been edited successfully!",
+ reminder_id=id_,
+ delivery_dt=expiration,
)
await self._reschedule_reminder(reminder)
@@ -274,22 +319,30 @@ class Reminders(Scheduler, Cog):
json={'content': content}
)
+ # Parse the reminder expiration back into a datetime for the confirmation message
+ expiration = isoparse(reminder['expiration']).replace(tzinfo=None)
+
# Send a confirmation message to the channel
await self._send_confirmation(
- ctx, on_success="That reminder has been edited successfully!"
+ ctx,
+ on_success="That reminder has been edited successfully!",
+ reminder_id=id_,
+ delivery_dt=expiration,
)
await self._reschedule_reminder(reminder)
- @remind_group.command("delete", aliases=("remove",))
+ @remind_group.command("delete", aliases=("remove", "cancel"))
async def delete_reminder(self, ctx: Context, id_: int) -> None:
"""Delete one of your active reminders."""
await self._delete_reminder(id_)
await self._send_confirmation(
- ctx, on_success="That reminder has been deleted successfully!"
+ ctx,
+ on_success="That reminder has been deleted successfully!",
+ reminder_id=id_,
+ delivery_dt=None,
)
def setup(bot: Bot) -> None:
- """Reminders cog load."""
+ """Load the Reminders cog."""
bot.add_cog(Reminders(bot))
- log.info("Cog loaded: Reminders")
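Several hunks above replace `datetime.fromisoformat(value[:-1])` with `isoparse(value).replace(tzinfo=None)`. A small sketch of the difference, assuming python-dateutil is installed; the timestamp literal is invented:

from datetime import datetime

from dateutil.parser import isoparse

expiration = "2020-02-27T22:16:05Z"  # shape of the API's expiration strings

# isoparse accepts the trailing "Z" (or any UTC offset) that fromisoformat could
# not parse on the Python versions in use, which is why the old code sliced off
# the last character. Dropping tzinfo keeps the result comparable with the naive
# datetime.utcnow() used throughout the cog.
remind_at = isoparse(expiration).replace(tzinfo=None)
print(remind_at, remind_at < datetime.utcnow())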
diff --git a/bot/cogs/security.py b/bot/cogs/security.py
index 316b33d6b..c680c5e27 100644
--- a/bot/cogs/security.py
+++ b/bot/cogs/security.py
@@ -1,6 +1,8 @@
import logging
-from discord.ext.commands import Bot, Cog, Context, NoPrivateMessage
+from discord.ext.commands import Cog, Context, NoPrivateMessage
+
+from bot.bot import Bot
log = logging.getLogger(__name__)
@@ -25,6 +27,5 @@ class Security(Cog):
def setup(bot: Bot) -> None:
- """Security cog load."""
+ """Load the Security cog."""
bot.add_cog(Security(bot))
- log.info("Cog loaded: Security")
diff --git a/bot/cogs/site.py b/bot/cogs/site.py
index 683613788..853e29568 100644
--- a/bot/cogs/site.py
+++ b/bot/cogs/site.py
@@ -1,8 +1,9 @@
import logging
from discord import Colour, Embed
-from discord.ext.commands import Bot, Cog, Context, group
+from discord.ext.commands import Cog, Context, group
+from bot.bot import Bot
from bot.constants import URLs
from bot.pagination import LinePaginator
@@ -58,7 +59,7 @@ class Site(Cog):
@site_group.command(name="tools")
async def site_tools(self, ctx: Context) -> None:
"""Info about the site's Tools page."""
- tools_url = f"{PAGES_URL}/tools"
+ tools_url = f"{PAGES_URL}/resources/tools"
embed = Embed(title="Tools")
embed.set_footer(text=f"{tools_url}")
@@ -73,7 +74,7 @@ class Site(Cog):
@site_group.command(name="help")
async def site_help(self, ctx: Context) -> None:
"""Info about the site's Getting Help page."""
- url = f"{PAGES_URL}/asking-good-questions"
+ url = f"{PAGES_URL}/resources/guides/asking-good-questions"
embed = Embed(title="Asking Good Questions")
embed.set_footer(text=url)
@@ -138,6 +139,5 @@ class Site(Cog):
def setup(bot: Bot) -> None:
- """Site cog load."""
+ """Load the Site cog."""
bot.add_cog(Site(bot))
- log.info("Cog loaded: Site")
diff --git a/bot/cogs/snekbox.py b/bot/cogs/snekbox.py
index 362968bd0..da33e27b2 100644
--- a/bot/cogs/snekbox.py
+++ b/bot/cogs/snekbox.py
@@ -5,8 +5,9 @@ import textwrap
from signal import Signals
from typing import Optional, Tuple
-from discord.ext.commands import Bot, Cog, Context, command, guild_only
+from discord.ext.commands import Cog, Context, command, guild_only
+from bot.bot import Bot
from bot.constants import Channels, Roles, URLs
from bot.decorators import in_channel
from bot.utils.messages import wait_for_deletion
@@ -176,7 +177,7 @@ class Snekbox(Cog):
@command(name="eval", aliases=("e",))
@guild_only()
- @in_channel(Channels.bot, bypass_roles=EVAL_ROLES)
+ @in_channel(Channels.bot, hidden_channels=(Channels.esoteric,), bypass_roles=EVAL_ROLES)
async def eval_command(self, ctx: Context, *, code: str = None) -> None:
"""
Run Python code and get the results.
@@ -227,6 +228,5 @@ class Snekbox(Cog):
def setup(bot: Bot) -> None:
- """Snekbox cog load."""
+ """Load the Snekbox cog."""
bot.add_cog(Snekbox(bot))
- log.info("Cog loaded: Snekbox")
diff --git a/bot/cogs/sync/__init__.py b/bot/cogs/sync/__init__.py
index d4565f848..fe7df4e9b 100644
--- a/bot/cogs/sync/__init__.py
+++ b/bot/cogs/sync/__init__.py
@@ -1,13 +1,7 @@
-import logging
-
-from discord.ext.commands import Bot
-
+from bot.bot import Bot
from .cog import Sync
-log = logging.getLogger(__name__)
-
def setup(bot: Bot) -> None:
- """Sync cog load."""
+ """Load the Sync cog."""
bot.add_cog(Sync(bot))
- log.info("Cog loaded: Sync")
diff --git a/bot/cogs/sync/cog.py b/bot/cogs/sync/cog.py
index aaa581f96..5708be3f4 100644
--- a/bot/cogs/sync/cog.py
+++ b/bot/cogs/sync/cog.py
@@ -1,12 +1,13 @@
import logging
-from typing import Callable, Iterable
+from typing import Any, Dict
-from discord import Guild, Member, Role
+from discord import Member, Role, User
from discord.ext import commands
-from discord.ext.commands import Bot, Cog, Context
+from discord.ext.commands import Cog, Context
from bot import constants
from bot.api import ResponseCodeError
+from bot.bot import Bot
from bot.cogs.sync import syncers
log = logging.getLogger(__name__)
@@ -15,40 +16,32 @@ log = logging.getLogger(__name__)
class Sync(Cog):
"""Captures relevant events and sends them to the site."""
- # The server to synchronize events on.
- # Note that setting this wrongly will result in things getting deleted
- # that possibly shouldn't be.
- SYNC_SERVER_ID = constants.Guild.id
-
- # An iterable of callables that are called when the bot is ready.
- ON_READY_SYNCERS: Iterable[Callable[[Bot, Guild], None]] = (
- syncers.sync_roles,
- syncers.sync_users
- )
-
def __init__(self, bot: Bot) -> None:
self.bot = bot
+ self.role_syncer = syncers.RoleSyncer(self.bot)
+ self.user_syncer = syncers.UserSyncer(self.bot)
self.bot.loop.create_task(self.sync_guild())
async def sync_guild(self) -> None:
"""Syncs the roles/users of the guild with the database."""
- await self.bot.wait_until_ready()
- guild = self.bot.get_guild(self.SYNC_SERVER_ID)
- if guild is not None:
- for syncer in self.ON_READY_SYNCERS:
- syncer_name = syncer.__name__[5:] # drop off `sync_`
- log.info("Starting `%s` syncer.", syncer_name)
- total_created, total_updated, total_deleted = await syncer(self.bot, guild)
- if total_deleted is None:
- log.info(
- f"`{syncer_name}` syncer finished, created `{total_created}`, updated `{total_updated}`."
- )
- else:
- log.info(
- f"`{syncer_name}` syncer finished, created `{total_created}`, updated `{total_updated}`, "
- f"deleted `{total_deleted}`."
- )
+ await self.bot.wait_until_guild_available()
+
+ guild = self.bot.get_guild(constants.Guild.id)
+ if guild is None:
+ return
+
+ for syncer in (self.role_syncer, self.user_syncer):
+ await syncer.sync(guild)
+
+ async def patch_user(self, user_id: int, updated_information: Dict[str, Any]) -> None:
+ """Send a PATCH request to partially update a user in the database."""
+ try:
+ await self.bot.api_client.patch(f"bot/users/{user_id}", json=updated_information)
+ except ResponseCodeError as e:
+ if e.response.status != 404:
+ raise
+ log.warning("Unable to update user, got 404. Assuming race condition from join event.")
@Cog.listener()
async def on_guild_role_create(self, role: Role) -> None:
@@ -72,12 +65,14 @@ class Sync(Cog):
@Cog.listener()
async def on_guild_role_update(self, before: Role, after: Role) -> None:
"""Syncs role with the database if any of the stored attributes were updated."""
- if (
- before.name != after.name
- or before.colour != after.colour
- or before.permissions != after.permissions
- or before.position != after.position
- ):
+ was_updated = (
+ before.name != after.name
+ or before.colour != after.colour
+ or before.permissions != after.permissions
+ or before.position != after.position
+ )
+
+ if was_updated:
await self.bot.api_client.put(
f'bot/roles/{after.id}',
json={
@@ -127,48 +122,27 @@ class Sync(Cog):
@Cog.listener()
async def on_member_remove(self, member: Member) -> None:
- """Updates the user information when a member leaves the guild."""
- await self.bot.api_client.put(
- f'bot/users/{member.id}',
- json={
- 'avatar_hash': member.avatar,
- 'discriminator': int(member.discriminator),
- 'id': member.id,
- 'in_guild': False,
- 'name': member.name,
- 'roles': sorted(role.id for role in member.roles)
- }
- )
+ """Set the in_guild field to False when a member leaves the guild."""
+ await self.patch_user(member.id, updated_information={"in_guild": False})
@Cog.listener()
async def on_member_update(self, before: Member, after: Member) -> None:
- """Updates the user information if any of relevant attributes have changed."""
- if (
- before.name != after.name
- or before.avatar != after.avatar
- or before.discriminator != after.discriminator
- or before.roles != after.roles
- ):
- try:
- await self.bot.api_client.put(
- 'bot/users/' + str(after.id),
- json={
- 'avatar_hash': after.avatar,
- 'discriminator': int(after.discriminator),
- 'id': after.id,
- 'in_guild': True,
- 'name': after.name,
- 'roles': sorted(role.id for role in after.roles)
- }
- )
- except ResponseCodeError as e:
- if e.response.status != 404:
- raise
-
- log.warning(
- "Unable to update user, got 404. "
- "Assuming race condition from join event."
- )
+ """Update the roles of the member in the database if a change is detected."""
+ if before.roles != after.roles:
+ updated_information = {"roles": sorted(role.id for role in after.roles)}
+ await self.patch_user(after.id, updated_information=updated_information)
+
+ @Cog.listener()
+ async def on_user_update(self, before: User, after: User) -> None:
+ """Update the user information in the database if a relevant change is detected."""
+ attrs = ("name", "discriminator", "avatar")
+ if any(getattr(before, attr) != getattr(after, attr) for attr in attrs):
+ updated_information = {
+ "name": after.name,
+ "discriminator": int(after.discriminator),
+ "avatar_hash": after.avatar,
+ }
+ await self.patch_user(after.id, updated_information=updated_information)
@commands.group(name='sync')
@commands.has_permissions(administrator=True)
@@ -178,25 +152,11 @@ class Sync(Cog):
@sync_group.command(name='roles')
@commands.has_permissions(administrator=True)
async def sync_roles_command(self, ctx: Context) -> None:
- """Manually synchronize the guild's roles with the roles on the site."""
- initial_response = await ctx.send("📊 Synchronizing roles.")
- total_created, total_updated, total_deleted = await syncers.sync_roles(self.bot, ctx.guild)
- await initial_response.edit(
- content=(
- f"👌 Role synchronization complete, created **{total_created}** "
- f", updated **{total_created}** roles, and deleted **{total_deleted}** roles."
- )
- )
+ """Manually synchronise the guild's roles with the roles on the site."""
+ await self.role_syncer.sync(ctx.guild, ctx)
@sync_group.command(name='users')
@commands.has_permissions(administrator=True)
async def sync_users_command(self, ctx: Context) -> None:
- """Manually synchronize the guild's users with the users on the site."""
- initial_response = await ctx.send("📊 Synchronizing users.")
- total_created, total_updated, total_deleted = await syncers.sync_users(self.bot, ctx.guild)
- await initial_response.edit(
- content=(
- f"👌 User synchronization complete, created **{total_created}** "
- f"and updated **{total_created}** users."
- )
- )
+ """Manually synchronise the guild's users with the users on the site."""
+ await self.user_syncer.sync(ctx.guild, ctx)
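The listeners above now funnel their partial updates through `patch_user`, which tolerates 404s caused by the join-event race. A self-contained sketch of that pattern; `ResponseCodeError` and the API client below are simplified stand-ins for the objects in `bot.api`:

import asyncio
import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)


class ResponseCodeError(Exception):
    """Stand-in error carrying the status of a failed API response."""

    def __init__(self, status: int):
        super().__init__(f"API responded with {status}")
        self.status = status


class FakeAPIClient:
    """Stand-in client whose PATCH always 404s, as happens in the join/leave race."""

    async def patch(self, endpoint: str, json: dict) -> None:
        raise ResponseCodeError(status=404)


async def patch_user(api_client, user_id: int, updated_information: dict) -> None:
    """Partially update a user, ignoring 404s from the join-event race condition."""
    try:
        await api_client.patch(f"bot/users/{user_id}", json=updated_information)
    except ResponseCodeError as e:
        if e.status != 404:
            raise
        log.warning("Unable to update user, got 404. Assuming race condition from join event.")


asyncio.run(patch_user(FakeAPIClient(), 1234, {"in_guild": False}))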
diff --git a/bot/cogs/sync/syncers.py b/bot/cogs/sync/syncers.py
index 2cc5a66e1..6715ad6fb 100644
--- a/bot/cogs/sync/syncers.py
+++ b/bot/cogs/sync/syncers.py
@@ -1,234 +1,342 @@
+import abc
+import logging
+import typing as t
from collections import namedtuple
-from typing import Dict, Set, Tuple
+from functools import partial
-from discord import Guild
-from discord.ext.commands import Bot
+from discord import Guild, HTTPException, Member, Message, Reaction, User
+from discord.ext.commands import Context
+
+from bot import constants
+from bot.api import ResponseCodeError
+from bot.bot import Bot
+
+log = logging.getLogger(__name__)
# These objects are declared as namedtuples because tuples are hashable,
# something that we make use of when diffing site roles against guild roles.
-Role = namedtuple('Role', ('id', 'name', 'colour', 'permissions', 'position'))
-User = namedtuple('User', ('id', 'name', 'discriminator', 'avatar_hash', 'roles', 'in_guild'))
-
-
-def get_roles_for_sync(
- guild_roles: Set[Role], api_roles: Set[Role]
-) -> Tuple[Set[Role], Set[Role], Set[Role]]:
- """
- Determine which roles should be created or updated on the site.
-
- Arguments:
- guild_roles (Set[Role]):
- Roles that were found on the guild at startup.
-
- api_roles (Set[Role]):
- Roles that were retrieved from the API at startup.
-
- Returns:
- Tuple[Set[Role], Set[Role]. Set[Role]]:
- A tuple with three elements. The first element represents
- roles to be created on the site, meaning that they were
- present on the cached guild but not on the API. The second
- element represents roles to be updated, meaning they were
- present on both the cached guild and the API but non-ID
- fields have changed inbetween. The third represents roles
- to be deleted on the site, meaning the roles are present on
- the API but not in the cached guild.
- """
- guild_role_ids = {role.id for role in guild_roles}
- api_role_ids = {role.id for role in api_roles}
- new_role_ids = guild_role_ids - api_role_ids
- deleted_role_ids = api_role_ids - guild_role_ids
-
- # New roles are those which are on the cached guild but not on the
- # API guild, going by the role ID. We need to send them in for creation.
- roles_to_create = {role for role in guild_roles if role.id in new_role_ids}
- roles_to_update = guild_roles - api_roles - roles_to_create
- roles_to_delete = {role for role in api_roles if role.id in deleted_role_ids}
- return roles_to_create, roles_to_update, roles_to_delete
-
-
-async def sync_roles(bot: Bot, guild: Guild) -> Tuple[int, int, int]:
- """
- Synchronize roles found on the given `guild` with the ones on the API.
-
- Arguments:
- bot (discord.ext.commands.Bot):
- The bot instance that we're running with.
-
- guild (discord.Guild):
- The guild instance from the bot's cache
- to synchronize roles with.
-
- Returns:
- Tuple[int, int, int]:
- A tuple with three integers representing how many roles were created
- (element `0`) , how many roles were updated (element `1`), and how many
- roles were deleted (element `2`) on the API.
- """
- roles = await bot.api_client.get('bot/roles')
-
- # Pack API roles and guild roles into one common format,
- # which is also hashable. We need hashability to be able
- # to compare these easily later using sets.
- api_roles = {Role(**role_dict) for role_dict in roles}
- guild_roles = {
- Role(
- id=role.id, name=role.name,
- colour=role.colour.value, permissions=role.permissions.value,
- position=role.position,
- )
- for role in guild.roles
- }
- roles_to_create, roles_to_update, roles_to_delete = get_roles_for_sync(guild_roles, api_roles)
-
- for role in roles_to_create:
- await bot.api_client.post(
- 'bot/roles',
- json={
- 'id': role.id,
- 'name': role.name,
- 'colour': role.colour,
- 'permissions': role.permissions,
- 'position': role.position,
- }
- )
+_Role = namedtuple('Role', ('id', 'name', 'colour', 'permissions', 'position'))
+_User = namedtuple('User', ('id', 'name', 'discriminator', 'avatar_hash', 'roles', 'in_guild'))
+_Diff = namedtuple('Diff', ('created', 'updated', 'deleted'))
- for role in roles_to_update:
- await bot.api_client.put(
- f'bot/roles/{role.id}',
- json={
- 'id': role.id,
- 'name': role.name,
- 'colour': role.colour,
- 'permissions': role.permissions,
- 'position': role.position,
- }
- )
- for role in roles_to_delete:
- await bot.api_client.delete(f'bot/roles/{role.id}')
-
- return len(roles_to_create), len(roles_to_update), len(roles_to_delete)
-
-
-def get_users_for_sync(
- guild_users: Dict[int, User], api_users: Dict[int, User]
-) -> Tuple[Set[User], Set[User]]:
- """
- Determine which users should be created or updated on the website.
-
- Arguments:
- guild_users (Dict[int, User]):
- A mapping of user IDs to user data, populated from the
- guild cached on the running bot instance.
-
- api_users (Dict[int, User]):
- A mapping of user IDs to user data, populated from the API's
- current inventory of all users.
-
- Returns:
- Tuple[Set[User], Set[User]]:
- Two user sets as a tuple. The first element represents users
- to be created on the website, these are users that are present
- in the cached guild data but not in the API at all, going by
- their ID. The second element represents users to update. It is
- populated by users which are present on both the API and the
- guild, but where the attribute of a user on the API is not
- equal to the attribute of the user on the guild.
- """
- users_to_create = set()
- users_to_update = set()
-
- for api_user in api_users.values():
- guild_user = guild_users.get(api_user.id)
- if guild_user is not None:
- if api_user != guild_user:
- users_to_update.add(guild_user)
-
- elif api_user.in_guild:
- # The user is known on the API but not the guild, and the
- # API currently specifies that the user is a member of the guild.
- # This means that the user has left since the last sync.
- # Update the `in_guild` attribute of the user on the site
- # to signify that the user left.
- new_api_user = api_user._replace(in_guild=False)
- users_to_update.add(new_api_user)
-
- new_user_ids = set(guild_users.keys()) - set(api_users.keys())
- for user_id in new_user_ids:
- # The user is known on the guild but not on the API. This means
- # that the user has joined since the last sync. Create it.
- new_user = guild_users[user_id]
- users_to_create.add(new_user)
-
- return users_to_create, users_to_update
-
-
-async def sync_users(bot: Bot, guild: Guild) -> Tuple[int, int, None]:
- """
- Synchronize users found in the given `guild` with the ones in the API.
-
- Arguments:
- bot (discord.ext.commands.Bot):
- The bot instance that we're running with.
-
- guild (discord.Guild):
- The guild instance from the bot's cache
- to synchronize roles with.
-
- Returns:
- Tuple[int, int, None]:
- A tuple with two integers, representing how many users were created
- (element `0`) and how many users were updated (element `1`), and `None`
- to indicate that a user sync never deletes entries from the API.
- """
- current_users = await bot.api_client.get('bot/users')
-
- # Pack API users and guild users into one common format,
- # which is also hashable. We need hashability to be able
- # to compare these easily later using sets.
- api_users = {
- user_dict['id']: User(
- roles=tuple(sorted(user_dict.pop('roles'))),
- **user_dict
- )
- for user_dict in current_users
- }
- guild_users = {
- member.id: User(
- id=member.id, name=member.name,
- discriminator=int(member.discriminator), avatar_hash=member.avatar,
- roles=tuple(sorted(role.id for role in member.roles)), in_guild=True
- )
- for member in guild.members
- }
-
- users_to_create, users_to_update = get_users_for_sync(guild_users, api_users)
-
- for user in users_to_create:
- await bot.api_client.post(
- 'bot/users',
- json={
- 'avatar_hash': user.avatar_hash,
- 'discriminator': user.discriminator,
- 'id': user.id,
- 'in_guild': user.in_guild,
- 'name': user.name,
- 'roles': list(user.roles)
- }
+class Syncer(abc.ABC):
+ """Base class for synchronising the database with objects in the Discord cache."""
+
+ _CORE_DEV_MENTION = f"<@&{constants.Roles.core_developer}> "
+ _REACTION_EMOJIS = (constants.Emojis.check_mark, constants.Emojis.cross_mark)
+
+ def __init__(self, bot: Bot) -> None:
+ self.bot = bot
+
+ @property
+ @abc.abstractmethod
+ def name(self) -> str:
+ """The name of the syncer; used in output messages and logging."""
+ raise NotImplementedError # pragma: no cover
+
+ async def _send_prompt(self, message: t.Optional[Message] = None) -> t.Optional[Message]:
+ """
+ Send a prompt to confirm or abort a sync using reactions and return the sent message.
+
+ If a message is given, it is edited to display the prompt and reactions. Otherwise, a new
+ message is sent to the dev-core channel and mentions the core developers role. If the
+ channel cannot be retrieved, return None.
+ """
+ log.trace(f"Sending {self.name} sync confirmation prompt.")
+
+ msg_content = (
+ f'Possible cache issue while syncing {self.name}s. '
+ f'More than {constants.Sync.max_diff} {self.name}s were changed. '
+ f'React to confirm or abort the sync.'
)
- for user in users_to_update:
- await bot.api_client.put(
- f'bot/users/{user.id}',
- json={
- 'avatar_hash': user.avatar_hash,
- 'discriminator': user.discriminator,
- 'id': user.id,
- 'in_guild': user.in_guild,
- 'name': user.name,
- 'roles': list(user.roles)
- }
+ # Send to core developers if it's an automatic sync.
+ if not message:
+ log.trace("Message not provided for confirmation; creating a new one in dev-core.")
+ channel = self.bot.get_channel(constants.Channels.devcore)
+
+ if not channel:
+ log.debug("Failed to get the dev-core channel from cache; attempting to fetch it.")
+ try:
+ channel = await self.bot.fetch_channel(constants.Channels.devcore)
+ except HTTPException:
+ log.exception(
+ f"Failed to fetch channel for sending sync confirmation prompt; "
+ f"aborting {self.name} sync."
+ )
+ return None
+
+ message = await channel.send(f"{self._CORE_DEV_MENTION}{msg_content}")
+ else:
+ await message.edit(content=msg_content)
+
+ # Add the initial reactions.
+ log.trace(f"Adding reactions to {self.name} syncer confirmation prompt.")
+ for emoji in self._REACTION_EMOJIS:
+ await message.add_reaction(emoji)
+
+ return message
+
+ def _reaction_check(
+ self,
+ author: Member,
+ message: Message,
+ reaction: Reaction,
+ user: t.Union[Member, User]
+ ) -> bool:
+ """
+ Return True if the `reaction` is a valid confirmation or abort reaction on `message`.
+
+ If the `author` of the prompt is a bot, then a reaction by any core developer will be
+ considered valid. Otherwise, the author of the reaction (`user`) will have to be the
+ `author` of the prompt.
+ """
+ # For automatic syncs, check for the core dev role instead of an exact author
+ has_role = any(constants.Roles.core_developer == role.id for role in user.roles)
+ return (
+ reaction.message.id == message.id
+ and not user.bot
+ and (has_role if author.bot else user == author)
+ and str(reaction.emoji) in self._REACTION_EMOJIS
)
- return len(users_to_create), len(users_to_update), None
+ async def _wait_for_confirmation(self, author: Member, message: Message) -> bool:
+ """
+ Wait for a confirmation reaction by `author` on `message` and return True if confirmed.
+
+ Uses the `_reaction_check` function to determine if a reaction is valid.
+
+ If there is no reaction within `bot.constants.Sync.confirm_timeout` seconds, return False.
+ To acknowledge the reaction (or lack thereof), `message` will be edited.
+ """
+ # Preserve the core-dev role mention in the message edits so users aren't confused about
+ # where notifications came from.
+ mention = self._CORE_DEV_MENTION if author.bot else ""
+
+ reaction = None
+ try:
+ log.trace(f"Waiting for a reaction to the {self.name} syncer confirmation prompt.")
+ reaction, _ = await self.bot.wait_for(
+ 'reaction_add',
+ check=partial(self._reaction_check, author, message),
+ timeout=constants.Sync.confirm_timeout
+ )
+ except TimeoutError:
+            # reaction will remain None, so the sync will be aborted in the finally block below.
+ log.debug(f"The {self.name} syncer confirmation prompt timed out.")
+ finally:
+ if str(reaction) == constants.Emojis.check_mark:
+ log.trace(f"The {self.name} syncer was confirmed.")
+ await message.edit(content=f':ok_hand: {mention}{self.name} sync will proceed.')
+ return True
+ else:
+ log.warning(f"The {self.name} syncer was aborted or timed out!")
+ await message.edit(
+ content=f':warning: {mention}{self.name} sync aborted or timed out!'
+ )
+ return False
+
+ @abc.abstractmethod
+ async def _get_diff(self, guild: Guild) -> _Diff:
+ """Return the difference between the cache of `guild` and the database."""
+ raise NotImplementedError # pragma: no cover
+
+ @abc.abstractmethod
+ async def _sync(self, diff: _Diff) -> None:
+ """Perform the API calls for synchronisation."""
+ raise NotImplementedError # pragma: no cover
+
+ async def _get_confirmation_result(
+ self,
+ diff_size: int,
+ author: Member,
+ message: t.Optional[Message] = None
+ ) -> t.Tuple[bool, t.Optional[Message]]:
+ """
+ Prompt for confirmation and return a tuple of the result and the prompt message.
+
+ `diff_size` is the size of the diff of the sync. If it is greater than
+        `bot.constants.Sync.max_diff`, the prompt will be sent. The `author` is the invoker of the
+        sync and `message` is an existing message to edit to display the prompt.
+
+ If confirmed or no confirmation was needed, the result is True. The returned message will
+ either be the given `message` or a new one which was created when sending the prompt.
+ """
+ log.trace(f"Determining if confirmation prompt should be sent for {self.name} syncer.")
+ if diff_size > constants.Sync.max_diff:
+ message = await self._send_prompt(message)
+ if not message:
+ return False, None # Couldn't get channel.
+
+ confirmed = await self._wait_for_confirmation(author, message)
+ if not confirmed:
+ return False, message # Sync aborted.
+
+ return True, message
+
+ async def sync(self, guild: Guild, ctx: t.Optional[Context] = None) -> None:
+ """
+ Synchronise the database with the cache of `guild`.
+
+ If the differences between the cache and the database are greater than
+ `bot.constants.Sync.max_diff`, then a confirmation prompt will be sent to the dev-core
+        channel. The confirmation can optionally be redirected to `ctx` instead.
+ """
+ log.info(f"Starting {self.name} syncer.")
+
+ message = None
+ author = self.bot.user
+ if ctx:
+ message = await ctx.send(f"📊 Synchronising {self.name}s.")
+ author = ctx.author
+
+ diff = await self._get_diff(guild)
+ diff_dict = diff._asdict() # Ugly method for transforming the NamedTuple into a dict
+ totals = {k: len(v) for k, v in diff_dict.items() if v is not None}
+ diff_size = sum(totals.values())
+
+ confirmed, message = await self._get_confirmation_result(diff_size, author, message)
+ if not confirmed:
+ return
+
+ # Preserve the core-dev role mention in the message edits so users aren't confused about
+ # where notifications came from.
+ mention = self._CORE_DEV_MENTION if author.bot else ""
+
+ try:
+ await self._sync(diff)
+ except ResponseCodeError as e:
+ log.exception(f"{self.name} syncer failed!")
+
+ # Don't show response text because it's probably some really long HTML.
+ results = f"status {e.status}\n```{e.response_json or 'See log output for details'}```"
+ content = f":x: {mention}Synchronisation of {self.name}s failed: {results}"
+ else:
+ results = ", ".join(f"{name} `{total}`" for name, total in totals.items())
+ log.info(f"{self.name} syncer finished: {results}.")
+ content = f":ok_hand: {mention}Synchronisation of {self.name}s complete: {results}"
+
+ if message:
+ await message.edit(content=content)
+
+
+class RoleSyncer(Syncer):
+ """Synchronise the database with roles in the cache."""
+
+ name = "role"
+
+ async def _get_diff(self, guild: Guild) -> _Diff:
+ """Return the difference of roles between the cache of `guild` and the database."""
+ log.trace("Getting the diff for roles.")
+ roles = await self.bot.api_client.get('bot/roles')
+
+ # Pack DB roles and guild roles into one common, hashable format.
+ # They're hashable so that they're easily comparable with sets later.
+ db_roles = {_Role(**role_dict) for role_dict in roles}
+ guild_roles = {
+ _Role(
+ id=role.id,
+ name=role.name,
+ colour=role.colour.value,
+ permissions=role.permissions.value,
+ position=role.position,
+ )
+ for role in guild.roles
+ }
+
+ guild_role_ids = {role.id for role in guild_roles}
+ api_role_ids = {role.id for role in db_roles}
+ new_role_ids = guild_role_ids - api_role_ids
+ deleted_role_ids = api_role_ids - guild_role_ids
+
+ # New roles are those which are on the cached guild but not on the
+ # DB guild, going by the role ID. We need to send them in for creation.
+ roles_to_create = {role for role in guild_roles if role.id in new_role_ids}
+ roles_to_update = guild_roles - db_roles - roles_to_create
+ roles_to_delete = {role for role in db_roles if role.id in deleted_role_ids}
+
+ return _Diff(roles_to_create, roles_to_update, roles_to_delete)
+
+ async def _sync(self, diff: _Diff) -> None:
+ """Synchronise the database with the role cache of `guild`."""
+ log.trace("Syncing created roles...")
+ for role in diff.created:
+ await self.bot.api_client.post('bot/roles', json=role._asdict())
+
+ log.trace("Syncing updated roles...")
+ for role in diff.updated:
+ await self.bot.api_client.put(f'bot/roles/{role.id}', json=role._asdict())
+
+ log.trace("Syncing deleted roles...")
+ for role in diff.deleted:
+ await self.bot.api_client.delete(f'bot/roles/{role.id}')
+
+
+class UserSyncer(Syncer):
+ """Synchronise the database with users in the cache."""
+
+ name = "user"
+
+ async def _get_diff(self, guild: Guild) -> _Diff:
+ """Return the difference of users between the cache of `guild` and the database."""
+ log.trace("Getting the diff for users.")
+ users = await self.bot.api_client.get('bot/users')
+
+        # Pack DB users and guild users into one common, hashable format.
+        # They're hashable so that they're easily comparable with sets later.
+ db_users = {
+ user_dict['id']: _User(
+ roles=tuple(sorted(user_dict.pop('roles'))),
+ **user_dict
+ )
+ for user_dict in users
+ }
+ guild_users = {
+ member.id: _User(
+ id=member.id,
+ name=member.name,
+ discriminator=int(member.discriminator),
+ avatar_hash=member.avatar,
+ roles=tuple(sorted(role.id for role in member.roles)),
+ in_guild=True
+ )
+ for member in guild.members
+ }
+
+ users_to_create = set()
+ users_to_update = set()
+
+ for db_user in db_users.values():
+ guild_user = guild_users.get(db_user.id)
+ if guild_user is not None:
+ if db_user != guild_user:
+ users_to_update.add(guild_user)
+
+ elif db_user.in_guild:
+ # The user is known in the DB but not the guild, and the
+ # DB currently specifies that the user is a member of the guild.
+ # This means that the user has left since the last sync.
+ # Update the `in_guild` attribute of the user on the site
+ # to signify that the user left.
+ new_api_user = db_user._replace(in_guild=False)
+ users_to_update.add(new_api_user)
+
+ new_user_ids = set(guild_users.keys()) - set(db_users.keys())
+ for user_id in new_user_ids:
+ # The user is known on the guild but not on the API. This means
+ # that the user has joined since the last sync. Create it.
+ new_user = guild_users[user_id]
+ users_to_create.add(new_user)
+
+ return _Diff(users_to_create, users_to_update, None)
+
+ async def _sync(self, diff: _Diff) -> None:
+ """Synchronise the database with the user cache of `guild`."""
+ log.trace("Syncing created users...")
+ for user in diff.created:
+ await self.bot.api_client.post('bot/users', json=user._asdict())
+
+ log.trace("Syncing updated users...")
+ for user in diff.updated:
+ await self.bot.api_client.put(f'bot/users/{user.id}', json=user._asdict())
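At its core, the rewritten RoleSyncer reduces synchronisation to a set diff over hashable namedtuples: IDs present only on the guild become creations, IDs present only in the database become deletions, and tuples that share an ID but differ elsewhere become updates. A runnable sketch with invented sample data:

from collections import namedtuple

Role = namedtuple("Role", ("id", "name", "colour", "permissions", "position"))
Diff = namedtuple("Diff", ("created", "updated", "deleted"))

db_roles = {
    Role(1, "Admins", 0xFF0000, 8, 3),
    Role(2, "Helpers", 0x00FF00, 0, 2),
    Role(9, "Retired", 0x000000, 0, 1),   # no longer on the guild -> delete
}
guild_roles = {
    Role(1, "Admins", 0xFF0000, 8, 3),    # identical -> ignored
    Role(2, "Helpers", 0x0000FF, 0, 2),   # same ID, new colour -> update
    Role(3, "Members", 0x888888, 0, 1),   # only on the guild -> create
}

guild_ids = {role.id for role in guild_roles}
db_ids = {role.id for role in db_roles}

created = {role for role in guild_roles if role.id in guild_ids - db_ids}
updated = guild_roles - db_roles - created  # shared ID, differing non-ID fields
deleted = {role for role in db_roles if role.id in db_ids - guild_ids}

print(Diff(created, updated, deleted))

Because the tuples are hashable they diff cleanly with sets, and `_sync` can still serialise each bucket with `role._asdict()` as the JSON body, which is why the module keeps namedtuples rather than plain dicts.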
diff --git a/bot/cogs/tags.py b/bot/cogs/tags.py
index cd70e783a..b6360dfae 100644
--- a/bot/cogs/tags.py
+++ b/bot/cogs/tags.py
@@ -1,15 +1,17 @@
import logging
+import re
import time
+from typing import Dict, List, Optional
from discord import Colour, Embed
-from discord.ext.commands import Bot, Cog, Context, group
+from discord.ext.commands import Cog, Context, group
+from bot.bot import Bot
from bot.constants import Channels, Cooldowns, MODERATION_ROLES, Roles
from bot.converters import TagContentConverter, TagNameConverter
from bot.decorators import with_role
from bot.pagination import LinePaginator
-
log = logging.getLogger(__name__)
TEST_CHANNELS = (
@@ -18,6 +20,8 @@ TEST_CHANNELS = (
Channels.helpers
)
+REGEX_NON_ALPHABET = re.compile(r"[^a-z]", re.MULTILINE | re.IGNORECASE)
+
class Tags(Cog):
"""Save new tags and fetch existing tags."""
@@ -26,6 +30,63 @@ class Tags(Cog):
self.bot = bot
self.tag_cooldowns = {}
+ self._cache = {}
+ self._last_fetch: float = 0.0
+
+ async def _get_tags(self, is_forced: bool = False) -> None:
+ """Get all tags."""
+        # Refresh only when more than 5 minutes have passed since the last call.
+ time_now: float = time.time()
+ if is_forced or not self._last_fetch or time_now - self._last_fetch > 5 * 60:
+ tags = await self.bot.api_client.get('bot/tags')
+ self._cache = {tag['title'].lower(): tag for tag in tags}
+ self._last_fetch = time_now
+
+ @staticmethod
+    def _fuzzy_search(search: str, target: str) -> float:
+ """A simple scoring algorithm based on how many letters are found / total, with order in mind."""
+ current, index = 0, 0
+ _search = REGEX_NON_ALPHABET.sub('', search.lower())
+ _targets = iter(REGEX_NON_ALPHABET.split(target.lower()))
+ _target = next(_targets)
+ try:
+ while True:
+ while index < len(_target) and _search[current] == _target[index]:
+ current += 1
+ index += 1
+ index, _target = 0, next(_targets)
+ except (StopIteration, IndexError):
+ pass
+ return current / len(_search) * 100
+
+ def _get_suggestions(self, tag_name: str, thresholds: Optional[List[int]] = None) -> List[str]:
+ """Return a list of suggested tags."""
+ scores: Dict[str, int] = {
+ tag_title: Tags._fuzzy_search(tag_name, tag['title'])
+ for tag_title, tag in self._cache.items()
+ }
+
+ thresholds = thresholds or [100, 90, 80, 70, 60]
+
+ for threshold in thresholds:
+ suggestions = [
+ self._cache[tag_title]
+ for tag_title, matching_score in scores.items()
+ if matching_score >= threshold
+ ]
+ if suggestions:
+ return suggestions
+
+ return []
+
+ async def _get_tag(self, tag_name: str) -> list:
+ """Get a specific tag."""
+ await self._get_tags()
+ found = [self._cache.get(tag_name.lower(), None)]
+ if not found[0]:
+ return self._get_suggestions(tag_name)
+ return found
+
@group(name='tags', aliases=('tag', 't'), invoke_without_command=True)
async def tags_group(self, ctx: Context, *, tag_name: TagNameConverter = None) -> None:
"""Show all known tags, a single tag, or run a subcommand."""
@@ -55,21 +116,33 @@ class Tags(Cog):
if _command_on_cooldown(tag_name):
time_left = Cooldowns.tags - (time.time() - self.tag_cooldowns[tag_name]["time"])
- log.warning(f"{ctx.author} tried to get the '{tag_name}' tag, but the tag is on cooldown. "
- f"Cooldown ends in {time_left:.1f} seconds.")
+ log.info(
+ f"{ctx.author} tried to get the '{tag_name}' tag, but the tag is on cooldown. "
+ f"Cooldown ends in {time_left:.1f} seconds."
+ )
return
+ await self._get_tags()
+
if tag_name is not None:
- tag = await self.bot.api_client.get(f'bot/tags/{tag_name}')
- if ctx.channel.id not in TEST_CHANNELS:
- self.tag_cooldowns[tag_name] = {
- "time": time.time(),
- "channel": ctx.channel.id
- }
- await ctx.send(embed=Embed.from_dict(tag['embed']))
+ founds = await self._get_tag(tag_name)
+
+ if len(founds) == 1:
+ tag = founds[0]
+ if ctx.channel.id not in TEST_CHANNELS:
+ self.tag_cooldowns[tag_name] = {
+ "time": time.time(),
+ "channel": ctx.channel.id
+ }
+ await ctx.send(embed=Embed.from_dict(tag['embed']))
+ elif founds and len(tag_name) >= 3:
+ await ctx.send(embed=Embed(
+ title='Did you mean ...',
+ description='\n'.join(tag['title'] for tag in founds[:10])
+ ))
else:
- tags = await self.bot.api_client.get('bot/tags')
+ tags = self._cache.values()
if not tags:
await ctx.send(embed=Embed(
description="**There are no tags in the database!**",
@@ -105,6 +178,7 @@ class Tags(Cog):
}
await self.bot.api_client.post('bot/tags', json=body)
+ self._cache[tag_name.lower()] = await self.bot.api_client.get(f'bot/tags/{tag_name}')
log.debug(f"{ctx.author} successfully added the following tag to our database: \n"
f"tag_name: {tag_name}\n"
@@ -134,6 +208,7 @@ class Tags(Cog):
}
await self.bot.api_client.patch(f'bot/tags/{tag_name}', json=body)
+ self._cache[tag_name.lower()] = await self.bot.api_client.get(f'bot/tags/{tag_name}')
log.debug(f"{ctx.author} successfully edited the following tag in our database: \n"
f"tag_name: {tag_name}\n"
@@ -150,6 +225,7 @@ class Tags(Cog):
async def delete_command(self, ctx: Context, *, tag_name: TagNameConverter) -> None:
"""Remove a tag from the database."""
await self.bot.api_client.delete(f'bot/tags/{tag_name}')
+ self._cache.pop(tag_name.lower(), None)
log.debug(f"{ctx.author} successfully deleted the tag called '{tag_name}'")
await ctx.send(embed=Embed(
@@ -160,6 +236,5 @@ class Tags(Cog):
def setup(bot: Bot) -> None:
- """Tags cog load."""
+ """Load the Tags cog."""
bot.add_cog(Tags(bot))
- log.info("Cog loaded: Tags")
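When an exact title is missing, the new tag lookup falls back to an in-order letter-matching score and offers "Did you mean ..." suggestions. A standalone sketch of that scoring with made-up tag names; the regex and loop mirror `_fuzzy_search`:

import re

REGEX_NON_ALPHABET = re.compile(r"[^a-z]", re.IGNORECASE)


def fuzzy_search(search: str, target: str) -> float:
    """Score how much of `search` appears, in order, across the words of `target`."""
    current, index = 0, 0
    _search = REGEX_NON_ALPHABET.sub("", search.lower())
    _targets = iter(REGEX_NON_ALPHABET.split(target.lower()))
    _target = next(_targets)
    try:
        while True:
            while index < len(_target) and _search[current] == _target[index]:
                current += 1
                index += 1
            index, _target = 0, next(_targets)
    except (StopIteration, IndexError):
        pass
    return current / len(_search) * 100


print(fuzzy_search("f-string", "f-strings"))  # 100.0: every letter found in order
print(fuzzy_search("pathlib", "paths"))       # ~57.1: partial, in-order match
print(fuzzy_search("zen", "f-strings"))       # 0.0: nothing matches

`_get_suggestions` then walks the thresholds 100, 90, 80, 70 and 60 and returns the first bucket of tags whose score clears the bar.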
diff --git a/bot/cogs/token_remover.py b/bot/cogs/token_remover.py
index 5a0d20e57..82c01ae96 100644
--- a/bot/cogs/token_remover.py
+++ b/bot/cogs/token_remover.py
@@ -6,9 +6,10 @@ import struct
from datetime import datetime
from discord import Colour, Message
-from discord.ext.commands import Bot, Cog
+from discord.ext.commands import Cog
from discord.utils import snowflake_time
+from bot.bot import Bot
from bot.cogs.moderation import ModLog
from bot.constants import Channels, Colours, Event, Icons
@@ -52,39 +53,60 @@ class TokenRemover(Cog):
See: https://discordapp.com/developers/docs/reference#snowflakes
"""
+ if self.is_token_in_message(msg):
+ await self.take_action(msg)
+
+ @Cog.listener()
+ async def on_message_edit(self, before: Message, after: Message) -> None:
+ """
+ Check each edit for a string that matches Discord's token pattern.
+
+ See: https://discordapp.com/developers/docs/reference#snowflakes
+ """
+ if self.is_token_in_message(after):
+ await self.take_action(after)
+
+ async def take_action(self, msg: Message) -> None:
+        """Remove the `msg` containing a token and send a mod_log message."""
+ user_id, creation_timestamp, hmac = TOKEN_RE.search(msg.content).group(0).split('.')
+ self.mod_log.ignore(Event.message_delete, msg.id)
+ await msg.delete()
+ await msg.channel.send(DELETION_MESSAGE_TEMPLATE.format(mention=msg.author.mention))
+
+ message = (
+ "Censored a seemingly valid token sent by "
+ f"{msg.author} (`{msg.author.id}`) in {msg.channel.mention}, token was "
+ f"`{user_id}.{creation_timestamp}.{'x' * len(hmac)}`"
+ )
+ log.debug(message)
+
+ # Send pretty mod log embed to mod-alerts
+ await self.mod_log.send_log_message(
+ icon_url=Icons.token_removed,
+ colour=Colour(Colours.soft_red),
+ title="Token removed!",
+ text=message,
+ thumbnail=msg.author.avatar_url_as(static_format="png"),
+ channel_id=Channels.mod_alerts,
+ )
+
+ @classmethod
+ def is_token_in_message(cls, msg: Message) -> bool:
+        """Check if `msg` contains a seemingly valid token."""
if msg.author.bot:
- return
+ return False
maybe_match = TOKEN_RE.search(msg.content)
if maybe_match is None:
- return
+ return False
try:
user_id, creation_timestamp, hmac = maybe_match.group(0).split('.')
except ValueError:
- return
-
- if self.is_valid_user_id(user_id) and self.is_valid_timestamp(creation_timestamp):
- self.mod_log.ignore(Event.message_delete, msg.id)
- await msg.delete()
- await msg.channel.send(DELETION_MESSAGE_TEMPLATE.format(mention=msg.author.mention))
-
- message = (
- "Censored a seemingly valid token sent by "
- f"{msg.author} (`{msg.author.id}`) in {msg.channel.mention}, token was "
- f"`{user_id}.{creation_timestamp}.{'x' * len(hmac)}`"
- )
- log.debug(message)
-
- # Send pretty mod log embed to mod-alerts
- await self.mod_log.send_log_message(
- icon_url=Icons.token_removed,
- colour=Colour(Colours.soft_red),
- title="Token removed!",
- text=message,
- thumbnail=msg.author.avatar_url_as(static_format="png"),
- channel_id=Channels.mod_alerts,
- )
+ return False
+
+        return cls.is_valid_user_id(user_id) and cls.is_valid_timestamp(creation_timestamp)
@staticmethod
def is_valid_user_id(b64_content: str) -> bool:
@@ -119,6 +141,5 @@ class TokenRemover(Cog):
def setup(bot: Bot) -> None:
- """Token Remover cog load."""
+ """Load the TokenRemover cog."""
bot.add_cog(TokenRemover(bot))
- log.info("Cog loaded: TokenRemover")
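The refactor above splits detection from action: `is_token_in_message` is now a plain predicate that both `on_message` and `on_message_edit` feed into `take_action`. A simplified, self-contained sketch of that predicate shape; the regex and base64 checks here are rough stand-ins for the cog's stricter versions:

import base64
import re

# Rough token shape only: three dot-separated base64-ish segments.
TOKEN_RE = re.compile(r"[\w-]{20,}\.[\w-]{6,}\.[\w-]{20,}")


def b64_decode(segment: str) -> bytes:
    # Discord strips base64 padding, so restore it before decoding.
    return base64.urlsafe_b64decode(segment + "=" * (-len(segment) % 4))


def is_valid_user_id(segment: str) -> bool:
    try:
        return b64_decode(segment).decode("utf-8").isnumeric()
    except ValueError:
        return False


def is_valid_timestamp(segment: str) -> bool:
    try:
        b64_decode(segment)
        return True
    except ValueError:
        return False


def is_token_in_message(content: str) -> bool:
    """Return True when `content` holds something shaped like a valid token."""
    match = TOKEN_RE.search(content)
    if match is None:
        return False
    try:
        user_id, timestamp, _hmac = match.group(0).split(".")
    except ValueError:
        return False
    return is_valid_user_id(user_id) and is_valid_timestamp(timestamp)


print(is_token_in_message("hi NDcyMjY1OTQzMDYyMTM4ODgx.XrVqbg.fake-hmac-fake-hmac-fake"))  # True
print(is_token_in_message("no tokens here"))  # False

Keeping the check side-effect free is what lets the edit listener reuse it without duplicating the delete-and-log behaviour in `take_action`.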
diff --git a/bot/cogs/utils.py b/bot/cogs/utils.py
index 793fe4c1a..da278011a 100644
--- a/bot/cogs/utils.py
+++ b/bot/cogs/utils.py
@@ -8,8 +8,9 @@ from typing import Tuple
from dateutil import relativedelta
from discord import Colour, Embed, Message, Role
-from discord.ext.commands import Bot, Cog, Context, command
+from discord.ext.commands import Cog, Context, command
+from bot.bot import Bot
from bot.constants import Channels, MODERATION_ROLES, Mention, STAFF_ROLES
from bot.decorators import in_channel, with_role
from bot.utils.time import humanize_delta
@@ -61,14 +62,12 @@ class Utils(Cog):
pep_embed.set_thumbnail(url="https://www.python.org/static/opengraph-icon-200x200.png")
# Add the interesting information
- if "Status" in pep_header:
- pep_embed.add_field(name="Status", value=pep_header["Status"])
- if "Python-Version" in pep_header:
- pep_embed.add_field(name="Python-Version", value=pep_header["Python-Version"])
- if "Created" in pep_header:
- pep_embed.add_field(name="Created", value=pep_header["Created"])
- if "Type" in pep_header:
- pep_embed.add_field(name="Type", value=pep_header["Type"])
+ fields_to_check = ("Status", "Python-Version", "Created", "Type")
+ for field in fields_to_check:
+ # Check for a PEP metadata field that is present but has an empty value
+            # Only add the field if it is present with a non-empty value,
+            # since embed field values can't contain an empty string.
+ pep_embed.add_field(name=field, value=pep_header[field])
elif response.status != 404:
             # any response except 200 and 404 is unexpected
@@ -176,6 +175,5 @@ class Utils(Cog):
def setup(bot: Bot) -> None:
- """Utils cog load."""
+ """Load the Utils cog."""
bot.add_cog(Utils(bot))
- log.info("Cog loaded: Utils")
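The PEP command change above replaces four copy-pasted `if "X" in pep_header:` branches with one loop that also skips fields whose value is empty. A tiny sketch with an invented `pep_header` mapping:

pep_header = {
    "Status": "Final",
    "Python-Version": "",  # present but empty: embeds reject empty field values
    "Created": "01-Jul-1999",
    "Type": "Standards Track",
}

embed_fields = []
for field in ("Status", "Python-Version", "Created", "Type"):
    # Keep a field only if it is present with a non-empty value.
    if pep_header.get(field, ""):
        embed_fields.append((field, pep_header[field]))

print(embed_fields)  # Python-Version is dropped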
diff --git a/bot/cogs/verification.py b/bot/cogs/verification.py
index b5e8d4357..e3c396863 100644
--- a/bot/cogs/verification.py
+++ b/bot/cogs/verification.py
@@ -1,17 +1,20 @@
import logging
+from contextlib import suppress
from datetime import datetime
-from discord import Colour, Message, NotFound, Object
+from discord import Colour, Forbidden, Message, NotFound, Object
from discord.ext import tasks
-from discord.ext.commands import Bot, Cog, Context, command
+from discord.ext.commands import Cog, Context, command
+from bot.bot import Bot
from bot.cogs.moderation import ModLog
from bot.constants import (
Bot as BotConfig,
Channels, Colours, Event,
- Filter, Icons, Roles
+ Filter, Icons, MODERATION_ROLES, Roles
)
from bot.decorators import InChannelCheckFailure, in_channel, without_role
+from bot.utils.checks import without_role_check
log = logging.getLogger(__name__)
@@ -37,6 +40,7 @@ PERIODIC_PING = (
f"@everyone To verify that you have read our rules, please type `{BotConfig.prefix}accept`."
f" If you encounter any problems during the verification process, ping the <@&{Roles.admin}> role in this channel."
)
+BOT_MESSAGE_DELETE_DELAY = 10
class Verification(Cog):
@@ -54,12 +58,16 @@ class Verification(Cog):
@Cog.listener()
async def on_message(self, message: Message) -> None:
"""Check new message event for messages to the checkpoint channel & process."""
- if message.author.bot:
- return # They're a bot, ignore
-
if message.channel.id != Channels.verification:
return # Only listen for #checkpoint messages
+ if message.author.bot:
+ # They're a bot, delete their message after the delay.
+ # But not the periodic ping; we like that one.
+ if message.content != PERIODIC_PING:
+ await message.delete(delay=BOT_MESSAGE_DELETE_DELAY)
+ return
+
# if a user mentions a role or guild member
# alert the mods in mod-alerts channel
if message.mentions or message.role_mentions:
@@ -85,19 +93,21 @@ class Verification(Cog):
ping_everyone=Filter.ping_everyone,
)
- ctx = await self.bot.get_context(message) # type: Context
-
+ ctx: Context = await self.bot.get_context(message)
if ctx.command is not None and ctx.command.name == "accept":
- return # They used the accept command
+ return
- for role in ctx.author.roles:
- if role.id == Roles.verified:
- log.warning(f"{ctx.author} posted '{ctx.message.content}' "
- "in the verification channel, but is already verified.")
- return # They're already verified
+ if any(r.id == Roles.verified for r in ctx.author.roles):
+ log.info(
+ f"{ctx.author} posted '{ctx.message.content}' "
+ "in the verification channel, but is already verified."
+ )
+ return
- log.debug(f"{ctx.author} posted '{ctx.message.content}' in the verification "
- "channel. We are providing instructions how to verify.")
+ log.debug(
+ f"{ctx.author} posted '{ctx.message.content}' in the verification "
+            "channel. We are providing instructions on how to verify."
+ )
await ctx.send(
f"{ctx.author.mention} Please type `!accept` to verify that you accept our rules, "
f"and gain access to the rest of the server.",
@@ -105,11 +115,8 @@ class Verification(Cog):
)
log.trace(f"Deleting the message posted by {ctx.author}")
-
- try:
+ with suppress(NotFound):
await ctx.message.delete()
- except NotFound:
- log.trace("No message found, it must have been deleted by another bot.")
@command(name='accept', aliases=('verify', 'verified', 'accepted'), hidden=True)
@without_role(Roles.verified)
@@ -120,17 +127,13 @@ class Verification(Cog):
await ctx.author.add_roles(Object(Roles.verified), reason="Accepted the rules")
try:
await ctx.author.send(WELCOME_MESSAGE)
- except Exception:
- # Catch the exception, in case they have DMs off or something
- log.exception(f"Unable to send welcome message to user {ctx.author}.")
-
- log.trace(f"Deleting the message posted by {ctx.author}.")
-
- try:
- self.mod_log.ignore(Event.message_delete, ctx.message.id)
- await ctx.message.delete()
- except NotFound:
- log.trace("No message found, it must have been deleted by another bot.")
+ except Forbidden:
+ log.info(f"Sending welcome message failed for {ctx.author}.")
+ finally:
+ log.trace(f"Deleting accept message by {ctx.author}.")
+ with suppress(NotFound):
+ self.mod_log.ignore(Event.message_delete, ctx.message.id)
+ await ctx.message.delete()
@command(name='subscribe')
@in_channel(Channels.bot)
@@ -189,7 +192,7 @@ class Verification(Cog):
@staticmethod
def bot_check(ctx: Context) -> bool:
"""Block any command within the verification channel that is not !accept."""
- if ctx.channel.id == Channels.verification:
+ if ctx.channel.id == Channels.verification and without_role_check(ctx, *MODERATION_ROLES):
return ctx.command.name == "accept"
else:
return True
@@ -216,7 +219,7 @@ class Verification(Cog):
@periodic_ping.before_loop
async def before_ping(self) -> None:
"""Only start the loop when the bot is ready."""
- await self.bot.wait_until_ready()
+ await self.bot.wait_until_guild_available()
def cog_unload(self) -> None:
"""Cancel the periodic ping task when the cog is unloaded."""
@@ -224,6 +227,5 @@ class Verification(Cog):
def setup(bot: Bot) -> None:
- """Verification cog load."""
+ """Load the Verification cog."""
bot.add_cog(Verification(bot))
- log.info("Cog loaded: Verification")
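The verification changes swap try/except/log blocks for `contextlib.suppress(NotFound)` around best-effort message deletion. A tiny runnable sketch of the pattern, using a stand-in message class instead of a real discord.Message:

import asyncio
from contextlib import suppress


class NotFound(Exception):
    """Stand-in for discord.NotFound."""


class FakeMessage:
    """Stand-in for a message another bot has already removed."""

    async def delete(self, delay: float = None) -> None:
        raise NotFound("message already deleted")


async def main() -> None:
    message = FakeMessage()
    # A message that is already gone is not an error worth handling, so only
    # that one exception type is swallowed; anything else still propagates.
    with suppress(NotFound):
        await message.delete()
    print("carried on without a dedicated try/except/log block")


asyncio.run(main())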
diff --git a/bot/cogs/watchchannels/__init__.py b/bot/cogs/watchchannels/__init__.py
index 86e1050fa..69d118df6 100644
--- a/bot/cogs/watchchannels/__init__.py
+++ b/bot/cogs/watchchannels/__init__.py
@@ -1,18 +1,9 @@
-import logging
-
-from discord.ext.commands import Bot
-
+from bot.bot import Bot
from .bigbrother import BigBrother
from .talentpool import TalentPool
-log = logging.getLogger(__name__)
-
-
def setup(bot: Bot) -> None:
- """Monitoring cogs load."""
+ """Load the BigBrother and TalentPool cogs."""
bot.add_cog(BigBrother(bot))
- log.info("Cog loaded: BigBrother")
-
bot.add_cog(TalentPool(bot))
- log.info("Cog loaded: TalentPool")
diff --git a/bot/cogs/watchchannels/bigbrother.py b/bot/cogs/watchchannels/bigbrother.py
index 49783bb09..c601e0d4d 100644
--- a/bot/cogs/watchchannels/bigbrother.py
+++ b/bot/cogs/watchchannels/bigbrother.py
@@ -1,14 +1,14 @@
import logging
from collections import ChainMap
-from typing import Union
-from discord import User
-from discord.ext.commands import Bot, Cog, Context, group
+from discord.ext.commands import Cog, Context, group
+from bot.bot import Bot
from bot.cogs.moderation.utils import post_infraction
from bot.constants import Channels, MODERATION_ROLES, Webhooks
+from bot.converters import FetchedMember
from bot.decorators import with_role
-from .watchchannel import WatchChannel, proxy_user
+from .watchchannel import WatchChannel
log = logging.getLogger(__name__)
@@ -45,7 +45,7 @@ class BigBrother(WatchChannel, Cog, name="Big Brother"):
@bigbrother_group.command(name='watch', aliases=('w',))
@with_role(*MODERATION_ROLES)
- async def watch_command(self, ctx: Context, user: Union[User, proxy_user], *, reason: str) -> None:
+ async def watch_command(self, ctx: Context, user: FetchedMember, *, reason: str) -> None:
"""
Relay messages sent by the given `user` to the `#big-brother` channel.
@@ -61,10 +61,10 @@ class BigBrother(WatchChannel, Cog, name="Big Brother"):
return
if user.id in self.watched_users:
- await ctx.send(":x: The specified user is already being watched.")
+ await ctx.send(f":x: {user} is already being watched.")
return
- response = await post_infraction(ctx, user, 'watch', reason, hidden=True)
+ response = await post_infraction(ctx, user, 'watch', reason, hidden=True, active=True)
if response is not None:
self.watched_users[user.id] = response
@@ -92,7 +92,7 @@ class BigBrother(WatchChannel, Cog, name="Big Brother"):
@bigbrother_group.command(name='unwatch', aliases=('uw',))
@with_role(*MODERATION_ROLES)
- async def unwatch_command(self, ctx: Context, user: Union[User, proxy_user], *, reason: str) -> None:
+ async def unwatch_command(self, ctx: Context, user: FetchedMember, *, reason: str) -> None:
"""Stop relaying messages by the given `user`."""
active_watches = await self.bot.api_client.get(
self.api_endpoint,
diff --git a/bot/cogs/watchchannels/talentpool.py b/bot/cogs/watchchannels/talentpool.py
index 4ec42dcc1..ad0c51fa6 100644
--- a/bot/cogs/watchchannels/talentpool.py
+++ b/bot/cogs/watchchannels/talentpool.py
@@ -1,17 +1,18 @@
import logging
import textwrap
from collections import ChainMap
-from typing import Union
-from discord import Color, Embed, Member, User
-from discord.ext.commands import Bot, Cog, Context, group
+from discord import Color, Embed, Member
+from discord.ext.commands import Cog, Context, group
from bot.api import ResponseCodeError
+from bot.bot import Bot
from bot.constants import Channels, Guild, MODERATION_ROLES, STAFF_ROLES, Webhooks
+from bot.converters import FetchedMember
from bot.decorators import with_role
from bot.pagination import LinePaginator
from bot.utils import time
-from .watchchannel import WatchChannel, proxy_user
+from .watchchannel import WatchChannel
log = logging.getLogger(__name__)
@@ -48,7 +49,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):
@nomination_group.command(name='watch', aliases=('w', 'add', 'a'))
@with_role(*STAFF_ROLES)
- async def watch_command(self, ctx: Context, user: Union[Member, User, proxy_user], *, reason: str) -> None:
+ async def watch_command(self, ctx: Context, user: FetchedMember, *, reason: str) -> None:
"""
Relay messages sent by the given `user` to the `#talent-pool` channel.
@@ -68,7 +69,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):
return
if user.id in self.watched_users:
- await ctx.send(":x: The specified user is already being watched in the talent pool")
+ await ctx.send(f":x: {user} is already being watched in the talent pool")
return
# Manual request with `raise_for_status` as False because we want the actual response
@@ -113,7 +114,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):
@nomination_group.command(name='history', aliases=('info', 'search'))
@with_role(*MODERATION_ROLES)
- async def history_command(self, ctx: Context, user: Union[User, proxy_user]) -> None:
+ async def history_command(self, ctx: Context, user: FetchedMember) -> None:
"""Shows the specified user's nomination history."""
result = await self.bot.api_client.get(
self.api_endpoint,
@@ -142,7 +143,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):
@nomination_group.command(name='unwatch', aliases=('end', ))
@with_role(*MODERATION_ROLES)
- async def unwatch_command(self, ctx: Context, user: Union[User, proxy_user], *, reason: str) -> None:
+ async def unwatch_command(self, ctx: Context, user: FetchedMember, *, reason: str) -> None:
"""
Ends the active nomination of the specified user with the given reason.
diff --git a/bot/cogs/watchchannels/watchchannel.py b/bot/cogs/watchchannels/watchchannel.py
index 0bf75a924..3667a80e8 100644
--- a/bot/cogs/watchchannels/watchchannel.py
+++ b/bot/cogs/watchchannels/watchchannel.py
@@ -9,10 +9,11 @@ from typing import Optional
import dateutil.parser
import discord
-from discord import Color, Embed, HTTPException, Message, Object, errors
-from discord.ext.commands import BadArgument, Bot, Cog, Context
+from discord import Color, Embed, HTTPException, Message, errors
+from discord.ext.commands import Cog, Context
from bot.api import ResponseCodeError
+from bot.bot import Bot
from bot.cogs.moderation import ModLog
from bot.constants import BigBrother as BigBrotherConfig, Guild as GuildConfig, Icons
from bot.pagination import LinePaginator
@@ -24,22 +25,6 @@ log = logging.getLogger(__name__)
URL_RE = re.compile(r"(https?://[^\s]+)")
-def proxy_user(user_id: str) -> Object:
- """A proxy user object that mocks a real User instance for when the later is not available."""
- try:
- user_id = int(user_id)
- except ValueError:
- raise BadArgument
-
- user = Object(user_id)
- user.mention = user.id
- user.display_name = f"<@{user.id}>"
- user.avatar_url_as = lambda static_format: None
- user.bot = False
-
- return user
-
-
@dataclass
class MessageHistory:
"""Represents a watch channel's message history."""
@@ -106,7 +91,7 @@ class WatchChannel(metaclass=CogABCMeta):
async def start_watchchannel(self) -> None:
"""Starts the watch channel by getting the channel, webhook, and user cache ready."""
- await self.bot.wait_until_ready()
+ await self.bot.wait_until_guild_available()
try:
self.channel = await self.bot.fetch_channel(self.destination)
diff --git a/bot/cogs/wolfram.py b/bot/cogs/wolfram.py
index ab0ed2472..5d6b4630b 100644
--- a/bot/cogs/wolfram.py
+++ b/bot/cogs/wolfram.py
@@ -7,8 +7,9 @@ import discord
from dateutil.relativedelta import relativedelta
from discord import Embed
from discord.ext import commands
-from discord.ext.commands import Bot, BucketType, Cog, Context, check, group
+from discord.ext.commands import BucketType, Cog, Context, check, group
+from bot.bot import Bot
from bot.constants import Colours, STAFF_ROLES, Wolfram
from bot.pagination import ImagePaginator
from bot.utils.time import humanize_delta
@@ -151,7 +152,7 @@ async def get_pod_pages(ctx: Context, bot: Bot, query: str) -> Optional[List[Tup
class Wolfram(Cog):
"""Commands for interacting with the Wolfram|Alpha API."""
- def __init__(self, bot: commands.Bot):
+ def __init__(self, bot: Bot):
self.bot = bot
@group(name="wolfram", aliases=("wolf", "wa"), invoke_without_command=True)
@@ -266,7 +267,6 @@ class Wolfram(Cog):
await send_embed(ctx, message, color)
-def setup(bot: commands.Bot) -> None:
- """Wolfram cog load."""
+def setup(bot: Bot) -> None:
+ """Load the Wolfram cog."""
bot.add_cog(Wolfram(bot))
- log.info("Cog loaded: Wolfram")
diff --git a/bot/constants.py b/bot/constants.py
index 45f42cf81..ebd3b3d96 100644
--- a/bot/constants.py
+++ b/bot/constants.py
@@ -186,6 +186,11 @@ class YAMLGetter(type):
def __getitem__(cls, name):
return cls.__getattr__(name)
+ def __iter__(cls):
+ """Return generator of key: value pairs of current constants class' config values."""
+ for name in cls.__annotations__:
+ yield name, getattr(cls, name)
+
# Dataclasses
class Bot(metaclass=YAMLGetter):
@@ -193,7 +198,7 @@ class Bot(metaclass=YAMLGetter):
prefix: str
token: str
-
+ sentry_dsn: str
class Filter(metaclass=YAMLGetter):
section = "filter"
@@ -236,6 +241,13 @@ class Colours(metaclass=YAMLGetter):
soft_orange: int
+class DuckPond(metaclass=YAMLGetter):
+ section = "duck_pond"
+
+ threshold: int
+ custom_emojis: List[int]
+
+
class Emojis(metaclass=YAMLGetter):
section = "style"
subsection = "emojis"
@@ -244,20 +256,33 @@ class Emojis(metaclass=YAMLGetter):
defcon_enabled: str # noqa: E704
defcon_updated: str # noqa: E704
- green_chevron: str
- red_chevron: str
- white_chevron: str
- bb_message: str
-
status_online: str
status_offline: str
status_idle: str
status_dnd: str
+ failmail: str
+ trashcan: str
+
bullet: str
new: str
pencil: str
cross_mark: str
+ check_mark: str
+
+ ducky_yellow: int
+ ducky_blurple: int
+ ducky_regal: int
+ ducky_camo: int
+ ducky_ninja: int
+ ducky_devil: int
+ ducky_tube: int
+ ducky_hunt: int
+ ducky_wizard: int
+ ducky_party: int
+ ducky_angel: int
+ ducky_maul: int
+ ducky_santa: int
upvotes: str
comments: str
@@ -315,6 +340,10 @@ class Icons(metaclass=YAMLGetter):
superstarify: str
unsuperstarify: str
+ voice_state_blue: str
+ voice_state_green: str
+ voice_state_red: str
+
class CleanMessages(metaclass=YAMLGetter):
section = "bot"
@@ -337,12 +366,16 @@ class Channels(metaclass=YAMLGetter):
admins: int
admin_spam: int
announcements: int
+ attachment_log: int
big_brother_logs: int
bot: int
checkpoint_test: int
defcon: int
+ devcontrib: int
+ devcore: int
devlog: int
devtest: int
+ esoteric: int
help_0: int
help_1: int
help_2: int
@@ -368,6 +401,7 @@ class Channels(metaclass=YAMLGetter):
userlog: int
user_event_a: int
verification: int
+ voice_log: int
class Webhooks(metaclass=YAMLGetter):
@@ -377,6 +411,8 @@ class Webhooks(metaclass=YAMLGetter):
talent_pool: int
big_brother: int
reddit: int
+ duck_pond: int
+ dev_log: int
class Roles(metaclass=YAMLGetter):
@@ -405,7 +441,7 @@ class Guild(metaclass=YAMLGetter):
id: int
ignored: List[int]
staff_channels: List[int]
-
+ reminder_whitelist: List[int]
class Keys(metaclass=YAMLGetter):
section = "keys"
@@ -453,6 +489,8 @@ class Reddit(metaclass=YAMLGetter):
section = "reddit"
subreddits: list
+ client_id: str
+ secret: str
class Wolfram(metaclass=YAMLGetter):
@@ -508,6 +546,39 @@ class RedirectOutput(metaclass=YAMLGetter):
delete_delay: int
+class Sync(metaclass=YAMLGetter):
+ section = 'sync'
+
+ confirm_timeout: int
+ max_diff: int
+
+
+class Event(Enum):
+ """
+ Event names. This does not include every event (for example, raw
+ events aren't here), but only events used in ModLog for now.
+ """
+
+ guild_channel_create = "guild_channel_create"
+ guild_channel_delete = "guild_channel_delete"
+ guild_channel_update = "guild_channel_update"
+ guild_role_create = "guild_role_create"
+ guild_role_delete = "guild_role_delete"
+ guild_role_update = "guild_role_update"
+ guild_update = "guild_update"
+
+ member_join = "member_join"
+ member_remove = "member_remove"
+ member_ban = "member_ban"
+ member_unban = "member_unban"
+ member_update = "member_update"
+
+ message_delete = "message_delete"
+ message_edit = "message_edit"
+
+ voice_state_update = "voice_state_update"
+
+
# Debug mode
DEBUG_MODE = True if 'local' in os.environ.get("SITE_URL", "local") else False
@@ -579,27 +650,3 @@ ERROR_REPLIES = [
"Noooooo!!",
"I can't believe you've done this",
]
-
-
-class Event(Enum):
- """
- Event names. This does not include every event (for example, raw
- events aren't here), but only events used in ModLog for now.
- """
-
- guild_channel_create = "guild_channel_create"
- guild_channel_delete = "guild_channel_delete"
- guild_channel_update = "guild_channel_update"
- guild_role_create = "guild_role_create"
- guild_role_delete = "guild_role_delete"
- guild_role_update = "guild_role_update"
- guild_update = "guild_update"
-
- member_join = "member_join"
- member_remove = "member_remove"
- member_ban = "member_ban"
- member_unban = "member_unban"
- member_update = "member_update"
-
- message_delete = "message_delete"
- message_edit = "message_edit"
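Note: with the `__iter__` hook added to YAMLGetter above, every constants class can now be iterated as (name, value) pairs of its annotated config keys. A small usage sketch (the values depend on the loaded config; those in the comments are illustrative only):

from bot.constants import DuckPond, Sync

# A constants class can now be turned into a plain dict of its config values...
duck_pond_config = dict(DuckPond)  # e.g. {"threshold": 5, "custom_emojis": [...]}

# ...or iterated directly.
for name, value in Sync:
    print(f"sync.{name} = {value}")  # confirm_timeout and max_diff from config-default.yml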
diff --git a/bot/converters.py b/bot/converters.py
index cf0496541..cca57a02d 100644
--- a/bot/converters.py
+++ b/bot/converters.py
@@ -1,20 +1,39 @@
import logging
import re
+import typing as t
from datetime import datetime
from ssl import CertificateError
-from typing import Union
import dateutil.parser
import dateutil.tz
import discord
from aiohttp import ClientConnectorError
from dateutil.relativedelta import relativedelta
-from discord.ext.commands import BadArgument, Context, Converter
+from discord.ext.commands import BadArgument, Context, Converter, UserConverter
log = logging.getLogger(__name__)
+def allowed_strings(*values, preserve_case: bool = False) -> t.Callable[[str], str]:
+ """
+ Return a converter which only allows arguments equal to one of the given values.
+
+    Unless preserve_case is True, the argument is converted to lowercase. In that case, the
+    allowed values are expected to be given in lowercase as well.
+ """
+ def converter(arg: str) -> str:
+ if not preserve_case:
+ arg = arg.lower()
+
+ if arg not in values:
+ raise BadArgument(f"Only the following values are allowed:\n```{', '.join(values)}```")
+ else:
+ return arg
+
+ return converter
+
+
class ValidPythonIdentifier(Converter):
"""
A converter that checks whether the given string is a valid Python identifier.
@@ -70,7 +89,7 @@ class InfractionSearchQuery(Converter):
"""A converter that checks if the argument is a Discord user, and if not, falls back to a string."""
@staticmethod
- async def convert(ctx: Context, arg: str) -> Union[discord.Member, str]:
+ async def convert(ctx: Context, arg: str) -> t.Union[discord.Member, str]:
"""Check if the argument is a Discord user, and if not, falls back to a string."""
try:
maybe_snowflake = arg.strip("<@!>")
@@ -259,3 +278,75 @@ class ISODateTime(Converter):
dt = dt.replace(tzinfo=None)
return dt
+
+
+def proxy_user(user_id: str) -> discord.Object:
+ """
+ Create a proxy user object from the given id.
+
+ Used when a Member or User object cannot be resolved.
+ """
+ log.trace(f"Attempting to create a proxy user for the user id {user_id}.")
+
+ try:
+ user_id = int(user_id)
+ except ValueError:
+ log.debug(f"Failed to create proxy user {user_id}: could not convert to int.")
+ raise BadArgument(f"User ID `{user_id}` is invalid - could not convert to an integer.")
+
+ user = discord.Object(user_id)
+ user.mention = user.id
+ user.display_name = f"<@{user.id}>"
+ user.avatar_url_as = lambda static_format: None
+ user.bot = False
+
+ return user
+
+
+class FetchedUser(UserConverter):
+ """
+ Converts to a `discord.User` or, if it fails, a `discord.Object`.
+
+ Unlike the default `UserConverter`, which only does lookups via the global user cache, this
+    converter attempts to fetch the user via an API call to Discord when the cache lookup is
+ unsuccessful.
+
+ If the fetch also fails and the error doesn't imply the user doesn't exist, then a
+    `discord.Object` is returned via the `proxy_user` converter.
+
+ The lookup strategy is as follows (in order):
+
+ 1. Lookup by ID.
+ 2. Lookup by mention.
+ 3. Lookup by name#discrim
+ 4. Lookup by name
+ 5. Lookup via API
+ 6. Create a proxy user with discord.Object
+ """
+
+ async def convert(self, ctx: Context, arg: str) -> t.Union[discord.User, discord.Object]:
+ """Convert the `arg` to a `discord.User` or `discord.Object`."""
+ try:
+ return await super().convert(ctx, arg)
+ except BadArgument:
+ pass
+
+ try:
+ user_id = int(arg)
+ log.trace(f"Fetching user {user_id}...")
+ return await ctx.bot.fetch_user(user_id)
+ except ValueError:
+ log.debug(f"Failed to fetch user {arg}: could not convert to int.")
+ raise BadArgument(f"The provided argument can't be turned into integer: `{arg}`")
+ except discord.HTTPException as e:
+ # If the Discord error isn't `Unknown user`, return a proxy instead
+ if e.code != 10013:
+ log.warning(f"Failed to fetch user, returning a proxy instead: status {e.status}")
+ return proxy_user(arg)
+
+ log.debug(f"Failed to fetch user {arg}: user does not exist.")
+ raise BadArgument(f"User `{arg}` does not exist")
+
+
+Expiry = t.Union[Duration, ISODateTime]
+FetchedMember = t.Union[discord.Member, FetchedUser]
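Note: the converters above are meant to be used as command parameter annotations. A short, hypothetical sketch of how they compose (the cog and command are made up; `FetchedMember` falls back to an API fetch and finally a `proxy_user` Object, while `allowed_strings` restricts an argument to a fixed set of values):

from discord.ext.commands import Cog, Context, command

from bot.bot import Bot
from bot.converters import FetchedMember, allowed_strings


class ExampleCog(Cog):
    """Hypothetical cog demonstrating the new converter annotations."""

    def __init__(self, bot: Bot):
        self.bot = bot

    @command(name="lookup")
    async def lookup(
        self,
        ctx: Context,
        user: FetchedMember,                        # cached Member, else fetched User, else proxy Object
        scope: allowed_strings("guild", "global"),  # only these (lowercased) values are accepted
    ) -> None:
        await ctx.send(f"{user} ({scope})")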
diff --git a/bot/decorators.py b/bot/decorators.py
index 935df4af0..2d18eaa6a 100644
--- a/bot/decorators.py
+++ b/bot/decorators.py
@@ -27,11 +27,23 @@ class InChannelCheckFailure(CheckFailure):
super().__init__(f"Sorry, but you may only use this command within {channels_str}.")
-def in_channel(*channels: int, bypass_roles: Container[int] = None) -> Callable:
- """Checks that the message is in a whitelisted channel or optionally has a bypass role."""
+def in_channel(
+ *channels: int,
+ hidden_channels: Container[int] = None,
+ bypass_roles: Container[int] = None
+) -> Callable:
+ """
+ Checks that the message is in a whitelisted channel or optionally has a bypass role.
+
+ Hidden channels are channels which will not be displayed in the InChannelCheckFailure error
+ message.
+ """
+ hidden_channels = hidden_channels or []
+ bypass_roles = bypass_roles or []
+
def predicate(ctx: Context) -> bool:
"""In-channel checker predicate."""
- if ctx.channel.id in channels:
+ if ctx.channel.id in channels or ctx.channel.id in hidden_channels:
log.debug(f"{ctx.author} tried to call the '{ctx.command.name}' command. "
f"The command was used in a whitelisted channel.")
return True
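Note: a hypothetical application of the extended decorator. Hidden channels still pass the check but are omitted from the InChannelCheckFailure message shown to users, and bypass roles may use the command anywhere (the command itself is made up; the channel and role constants are the real ones from bot.constants):

from discord.ext.commands import Cog, Context, command

from bot.constants import Channels, STAFF_ROLES
from bot.decorators import in_channel


class ExampleCog(Cog):
    """Hypothetical cog showing in_channel with hidden_channels and bypass_roles."""

    @command(name="poll")
    @in_channel(Channels.bot, hidden_channels=(Channels.esoteric,), bypass_roles=STAFF_ROLES)
    async def poll(self, ctx: Context) -> None:
        # Allowed in #bot-commands, quietly allowed in #esoteric, and staff may use it anywhere.
        await ctx.send("Polling!")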
diff --git a/bot/interpreter.py b/bot/interpreter.py
index 76a3fc293..8b7268746 100644
--- a/bot/interpreter.py
+++ b/bot/interpreter.py
@@ -2,7 +2,9 @@ from code import InteractiveInterpreter
from io import StringIO
from typing import Any
-from discord.ext.commands import Bot, Context
+from discord.ext.commands import Context
+
+from bot.bot import Bot
CODE_TEMPLATE = """
async def _func():
diff --git a/bot/pagination.py b/bot/pagination.py
index 76082f459..90c8f849c 100644
--- a/bot/pagination.py
+++ b/bot/pagination.py
@@ -1,18 +1,21 @@
import asyncio
import logging
-from typing import Iterable, List, Optional, Tuple
+import typing as t
+from contextlib import suppress
-from discord import Embed, Member, Message, Reaction
+import discord
from discord.abc import User
from discord.ext.commands import Context, Paginator
+from bot import constants
+
FIRST_EMOJI = "\u23EE" # [:track_previous:]
LEFT_EMOJI = "\u2B05" # [:arrow_left:]
RIGHT_EMOJI = "\u27A1" # [:arrow_right:]
LAST_EMOJI = "\u23ED" # [:track_next:]
-DELETE_EMOJI = "\u274c" # [:x:]
+DELETE_EMOJI = constants.Emojis.trashcan # [:trashcan:]
-PAGINATION_EMOJI = [FIRST_EMOJI, LEFT_EMOJI, RIGHT_EMOJI, LAST_EMOJI, DELETE_EMOJI]
+PAGINATION_EMOJI = (FIRST_EMOJI, LEFT_EMOJI, RIGHT_EMOJI, LAST_EMOJI, DELETE_EMOJI)
log = logging.getLogger(__name__)
@@ -87,12 +90,12 @@ class LinePaginator(Paginator):
@classmethod
async def paginate(
cls,
- lines: Iterable[str],
+ lines: t.List[str],
ctx: Context,
- embed: Embed,
+ embed: discord.Embed,
prefix: str = "",
suffix: str = "",
- max_lines: Optional[int] = None,
+ max_lines: t.Optional[int] = None,
max_size: int = 500,
empty: bool = True,
restrict_to_user: User = None,
@@ -100,7 +103,7 @@ class LinePaginator(Paginator):
footer_text: str = None,
url: str = None,
exception_on_empty_embed: bool = False
- ) -> Optional[Message]:
+ ) -> t.Optional[discord.Message]:
"""
Use a paginator and set of reactions to provide pagination over a set of lines.
@@ -112,11 +115,11 @@ class LinePaginator(Paginator):
Pagination will also be removed automatically if no reaction is added for five minutes (300 seconds).
Example:
- >>> embed = Embed()
+ >>> embed = discord.Embed()
>>> embed.set_author(name="Some Operation", url=url, icon_url=icon)
- >>> await LinePaginator.paginate((line for line in lines), ctx, embed)
+ >>> await LinePaginator.paginate([line for line in lines], ctx, embed)
"""
- def event_check(reaction_: Reaction, user_: Member) -> bool:
+ def event_check(reaction_: discord.Reaction, user_: discord.Member) -> bool:
"""Make sure that this reaction is what we want to operate on."""
no_restrictions = (
# Pagination is not restricted
@@ -131,7 +134,7 @@ class LinePaginator(Paginator):
# Reaction is on this message
reaction_.message.id == message.id,
# Reaction is one of the pagination emotes
- reaction_.emoji in PAGINATION_EMOJI,
+ str(reaction_.emoji) in PAGINATION_EMOJI,
# Reaction was not made by the Bot
user_.id != ctx.bot.user.id,
# There were no restrictions
@@ -203,9 +206,9 @@ class LinePaginator(Paginator):
log.debug("Timed out waiting for a reaction")
break # We're done, no reactions for the last 5 minutes
- if reaction.emoji == DELETE_EMOJI:
+ if str(reaction.emoji) == DELETE_EMOJI:
log.debug("Got delete reaction")
- break
+ return await message.delete()
if reaction.emoji == FIRST_EMOJI:
await message.remove_reaction(reaction.emoji, user)
@@ -279,8 +282,9 @@ class LinePaginator(Paginator):
await message.edit(embed=embed)
- log.debug("Ending pagination and removing all reactions...")
- await message.clear_reactions()
+ log.debug("Ending pagination and clearing reactions.")
+ with suppress(discord.NotFound):
+ await message.clear_reactions()
class ImagePaginator(Paginator):
@@ -297,6 +301,7 @@ class ImagePaginator(Paginator):
self._current_page = [prefix]
self.images = []
self._pages = []
+ self._count = 0
def add_line(self, line: str = '', *, empty: bool = False) -> None:
"""Adds a line to each page."""
@@ -314,13 +319,13 @@ class ImagePaginator(Paginator):
@classmethod
async def paginate(
cls,
- pages: List[Tuple[str, str]],
- ctx: Context, embed: Embed,
+ pages: t.List[t.Tuple[str, str]],
+ ctx: Context, embed: discord.Embed,
prefix: str = "",
suffix: str = "",
timeout: int = 300,
exception_on_empty_embed: bool = False
- ) -> Optional[Message]:
+ ) -> t.Optional[discord.Message]:
"""
Use a paginator and set of reactions to provide pagination over a set of title/image pairs.
@@ -332,17 +337,17 @@ class ImagePaginator(Paginator):
Note: Pagination will be removed automatically if no reaction is added for five minutes (300 seconds).
Example:
- >>> embed = Embed()
+ >>> embed = discord.Embed()
>>> embed.set_author(name="Some Operation", url=url, icon_url=icon)
>>> await ImagePaginator.paginate(pages, ctx, embed)
"""
- def check_event(reaction_: Reaction, member: Member) -> bool:
+ def check_event(reaction_: discord.Reaction, member: discord.Member) -> bool:
"""Checks each reaction added, if it matches our conditions pass the wait_for."""
return all((
# Reaction is on the same message sent
reaction_.message.id == message.id,
# The reaction is part of the navigation menu
- reaction_.emoji in PAGINATION_EMOJI,
+ str(reaction_.emoji) in PAGINATION_EMOJI,
# The reactor is not a bot
not member.bot
))
@@ -388,10 +393,10 @@ class ImagePaginator(Paginator):
# Deletes the users reaction
await message.remove_reaction(reaction.emoji, user)
- # Delete reaction press - [:x:]
- if reaction.emoji == DELETE_EMOJI:
+ # Delete reaction press - [:trashcan:]
+ if str(reaction.emoji) == DELETE_EMOJI:
log.debug("Got delete reaction")
- break
+ return await message.delete()
# First reaction press - [:track_previous:]
if reaction.emoji == FIRST_EMOJI:
@@ -408,7 +413,7 @@ class ImagePaginator(Paginator):
log.debug("Got last page reaction, but we're on the last page - ignoring")
continue
- current_page = len(paginator.pages - 1)
+ current_page = len(paginator.pages) - 1
reaction_type = "last"
# Previous reaction press - [:arrow_left: ]
@@ -443,5 +448,6 @@ class ImagePaginator(Paginator):
await message.edit(embed=embed)
- log.debug("Ending pagination and removing all reactions...")
- await message.clear_reactions()
+ log.debug("Ending pagination and clearing reactions.")
+ with suppress(discord.NotFound):
+ await message.clear_reactions()
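Note: the new str() casts are needed because the delete emoji is now a custom guild emoji (the trashcan constant), so `reaction.emoji` is a discord Emoji/PartialEmoji object rather than a plain string. A minimal sketch of the comparison (the helper name is hypothetical):

import discord


def is_pagination_emoji(reaction: discord.Reaction, pagination_emoji: tuple) -> bool:
    """Hypothetical helper mirroring the membership checks in the paginators."""
    # str() renders a custom emoji as "<:name:id>", which matches constants.Emojis.trashcan.
    return str(reaction.emoji) in pagination_emoji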
diff --git a/bot/rules/attachments.py b/bot/rules/attachments.py
index c550aed76..8903c385c 100644
--- a/bot/rules/attachments.py
+++ b/bot/rules/attachments.py
@@ -7,19 +7,19 @@ async def apply(
last_message: Message, recent_messages: List[Message], config: Dict[str, int]
) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]:
"""Detects total attachments exceeding the limit sent by a single user."""
- relevant_messages = [last_message] + [
+ relevant_messages = tuple(
msg
for msg in recent_messages
if (
msg.author == last_message.author
and len(msg.attachments) > 0
)
- ]
+ )
total_recent_attachments = sum(len(msg.attachments) for msg in relevant_messages)
if total_recent_attachments > config['max']:
return (
- f"sent {total_recent_attachments} attachments in {config['max']}s",
+ f"sent {total_recent_attachments} attachments in {config['interval']}s",
(last_message.author,),
relevant_messages
)
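Note: a rough sketch of how this rule is exercised, assuming the antispam config supplies `interval` (seconds) and `max` (attachment count) for the attachments rule; the real call site lives in the antispam cog, which is outside this hunk, and the config values below are illustrative only.

from typing import List

from discord import Message

from bot.rules.attachments import apply


async def check_attachment_spam(last_message: Message, recent_messages: List[Message]) -> None:
    config = {"interval": 10, "max": 9}  # hypothetical: at most 9 attachments per 10 seconds

    result = await apply(last_message, recent_messages, config)
    if result is not None:
        reason, culprits, relevant = result
        # With the fix above, the reason reports the interval, e.g. "sent 12 attachments in 10s".
        print(reason, [str(user) for user in culprits], len(relevant))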
diff --git a/bot/utils/__init__.py b/bot/utils/__init__.py
index 8184be824..3e4b15ce4 100644
--- a/bot/utils/__init__.py
+++ b/bot/utils/__init__.py
@@ -1,5 +1,5 @@
from abc import ABCMeta
-from typing import Any, Generator, Hashable, Iterable
+from typing import Any, Hashable
from discord.ext.commands import CogMeta
@@ -64,13 +64,3 @@ class CaseInsensitiveDict(dict):
for k in list(self.keys()):
v = super(CaseInsensitiveDict, self).pop(k)
self.__setitem__(k, v)
-
-
-def chunks(iterable: Iterable, size: int) -> Generator[Any, None, None]:
- """
- Generator that allows you to iterate over any indexable collection in `size`-length chunks.
-
- Found: https://stackoverflow.com/a/312464/4022104
- """
- for i in range(0, len(iterable), size):
- yield iterable[i:i + size]
diff --git a/bot/utils/messages.py b/bot/utils/messages.py
index 549b33ca6..a36edc774 100644
--- a/bot/utils/messages.py
+++ b/bot/utils/messages.py
@@ -1,7 +1,8 @@
import asyncio
import contextlib
+import logging
from io import BytesIO
-from typing import Optional, Sequence, Union
+from typing import List, Optional, Sequence, Union
from discord import Client, Embed, File, Member, Message, Reaction, TextChannel, Webhook
from discord.abc import Snowflake
@@ -9,13 +10,13 @@ from discord.errors import HTTPException
from bot.constants import Emojis
-MAX_SIZE = 1024 * 1024 * 8 # 8 Mebibytes
+log = logging.getLogger(__name__)
async def wait_for_deletion(
message: Message,
user_ids: Sequence[Snowflake],
- deletion_emojis: Sequence[str] = (Emojis.cross_mark,),
+ deletion_emojis: Sequence[str] = (Emojis.trashcan,),
timeout: float = 60 * 5,
attach_emojis: bool = True,
client: Optional[Client] = None
@@ -39,10 +40,10 @@ async def wait_for_deletion(
await message.add_reaction(emoji)
def check(reaction: Reaction, user: Member) -> bool:
- """Check that the deletion emoji is reacted by the approprite user."""
+ """Check that the deletion emoji is reacted by the appropriate user."""
return (
reaction.message.id == message.id
- and reaction.emoji in deletion_emojis
+ and str(reaction.emoji) in deletion_emojis
and user.id in user_ids
)
@@ -51,42 +52,58 @@ async def wait_for_deletion(
await message.delete()
-async def send_attachments(message: Message, destination: Union[TextChannel, Webhook]) -> None:
+async def send_attachments(
+ message: Message,
+ destination: Union[TextChannel, Webhook],
+ link_large: bool = True
+) -> List[str]:
"""
- Re-uploads each attachment in a message to the given channel or webhook.
+ Re-upload the message's attachments to the destination and return a list of their new URLs.
- Each attachment is sent as a separate message to more easily comply with the 8 MiB request size limit.
- If attachments are too large, they are instead grouped into a single embed which links to them.
+ Each attachment is sent as a separate message to more easily comply with the request/file size
+ limit. If link_large is True, attachments which are too large are instead grouped into a single
+ embed which links to them.
"""
large = []
+ urls = []
for attachment in message.attachments:
+ failure_msg = (
+ f"Failed to re-upload attachment {attachment.filename} from message {message.id}"
+ )
+
try:
- # This should avoid most files that are too large, but some may get through hence the try-catch.
# Allow 512 bytes of leeway for the rest of the request.
- if attachment.size <= MAX_SIZE - 512:
+ # This should avoid most files that are too large,
+ # but some may get through hence the try-catch.
+ if attachment.size <= destination.guild.filesize_limit - 512:
with BytesIO() as file:
- await attachment.save(file)
+ await attachment.save(file, use_cached=True)
attachment_file = File(file, filename=attachment.filename)
if isinstance(destination, TextChannel):
- await destination.send(file=attachment_file)
+ msg = await destination.send(file=attachment_file)
+ urls.append(msg.attachments[0].url)
else:
await destination.send(
file=attachment_file,
username=message.author.display_name,
avatar_url=message.author.avatar_url
)
- else:
+ elif link_large:
large.append(attachment)
+ else:
+ log.warning(f"{failure_msg} because it's too large.")
except HTTPException as e:
- if e.status == 413:
+ if link_large and e.status == 413:
large.append(attachment)
else:
- raise
+ log.warning(f"{failure_msg} with status {e.status}.")
- if large:
- embed = Embed(description=f"\n".join(f"[{attachment.filename}]({attachment.url})" for attachment in large))
+ if link_large and large:
+ desc = f"\n".join(f"[{attachment.filename}]({attachment.url})" for attachment in large)
+ embed = Embed(description=desc)
embed.set_footer(text="Attachments exceed upload size limit.")
+
if isinstance(destination, TextChannel):
await destination.send(embed=embed)
else:
@@ -95,3 +112,5 @@ async def send_attachments(message: Message, destination: Union[TextChannel, Web
username=message.author.display_name,
avatar_url=message.author.avatar_url
)
+
+ return urls
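Note: send_attachments now returns the re-uploaded attachments' URLs and reads the size cap from the destination guild's filesize_limit. A small, hypothetical usage sketch:

from discord import Message, TextChannel

from bot.utils.messages import send_attachments


async def mirror_attachments(message: Message, log_channel: TextChannel) -> None:
    """Hypothetical helper: re-upload a message's attachments and log their new URLs."""
    # With link_large=False, oversized files are skipped (and logged) instead of being
    # collected into a link embed.
    urls = await send_attachments(message, log_channel, link_large=False)
    if urls:
        await log_channel.send("\n".join(urls))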
diff --git a/bot/utils/time.py b/bot/utils/time.py
index 2aea2c099..77060143c 100644
--- a/bot/utils/time.py
+++ b/bot/utils/time.py
@@ -111,3 +111,65 @@ async def wait_until(time: datetime.datetime, start: Optional[datetime.datetime]
def format_infraction(timestamp: str) -> str:
"""Format an infraction timestamp to a more readable ISO 8601 format."""
return dateutil.parser.isoparse(timestamp).strftime(INFRACTION_FORMAT)
+
+
+def format_infraction_with_duration(
+ date_to: Optional[str],
+ date_from: Optional[datetime.datetime] = None,
+ max_units: int = 2,
+ absolute: bool = True
+) -> Optional[str]:
+ """
+ Return `date_to` formatted as a readable ISO-8601 with the humanized duration since `date_from`.
+
+    `date_to` must be an ISO-8601 formatted timestamp. The duration is calculated from
+    `date_from` until `date_to` with a precision of seconds. If `date_from` is unspecified, the
+ current time is used.
+
+ `max_units` specifies the maximum number of units of time to include in the duration. For
+ example, a value of 1 may include days but not hours.
+
+ If `absolute` is True, the absolute value of the duration delta is used. This prevents negative
+ values in the case that `date_to` is in the past relative to `date_from`.
+ """
+ if not date_to:
+ return None
+
+ date_to_formatted = format_infraction(date_to)
+
+ date_from = date_from or datetime.datetime.utcnow()
+ date_to = dateutil.parser.isoparse(date_to).replace(tzinfo=None, microsecond=0)
+
+ delta = relativedelta(date_to, date_from)
+ if absolute:
+ delta = abs(delta)
+
+ duration = humanize_delta(delta, max_units=max_units)
+ duration_formatted = f" ({duration})" if duration else ""
+
+ return f"{date_to_formatted}{duration_formatted}"
+
+
+def until_expiration(
+ expiry: Optional[str],
+ now: Optional[datetime.datetime] = None,
+ max_units: int = 2
+) -> Optional[str]:
+ """
+    Get the remaining time until an infraction's expiration, as a human-readable version of the relativedelta.
+
+ Returns a human-readable version of the remaining duration between datetime.utcnow() and an expiry.
+    Unlike `humanize_delta`, this function does not accept a `precision` argument; the default of `seconds` is always used.
+ `max_units` specifies the maximum number of units of time to include (e.g. 1 may include days but not hours).
+ By default, max_units is 2.
+ """
+ if not expiry:
+ return None
+
+ now = now or datetime.datetime.utcnow()
+ since = dateutil.parser.isoparse(expiry).replace(tzinfo=None, microsecond=0)
+
+ if since < now:
+ return None
+
+ return humanize_delta(relativedelta(since, now), max_units=max_units)
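Note: a worked example of the two new helpers. The exact strings depend on INFRACTION_FORMAT and humanize_delta, so the outputs in the comments are approximate:

import datetime

from bot.utils.time import format_infraction_with_duration, until_expiration

date_from = datetime.datetime(2020, 2, 26, 12, 0, 0)
expiry = "2020-02-27T12:00:00Z"

print(format_infraction_with_duration(expiry, date_from))  # e.g. "2020-02-27 12:00 (1 day)"
print(until_expiration(expiry, now=date_from))             # e.g. "1 day"
print(format_infraction_with_duration(None))               # None, since no expiry was given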
diff --git a/config-default.yml b/config-default.yml
index ee9f8a06b..f70fe3c34 100644
--- a/config-default.yml
+++ b/config-default.yml
@@ -1,6 +1,7 @@
bot:
prefix: "!"
token: !ENV "BOT_TOKEN"
+ sentry_dsn: !ENV "BOT_SENTRY_DSN"
cooldowns:
# Per channel, per tag.
@@ -22,20 +23,33 @@ style:
defcon_enabled: "<:defconenabled:470326274213150730>"
defcon_updated: "<:defconsettingsupdated:470326274082996224>"
- green_chevron: "<:greenchevron:418104310329769993>"
- red_chevron: "<:redchevron:418112778184818698>"
- white_chevron: "<:whitechevron:418110396973711363>"
- bb_message: "<:bbmessage:476273120999636992>"
-
status_online: "<:status_online:470326272351010816>"
status_idle: "<:status_idle:470326266625785866>"
status_dnd: "<:status_dnd:470326272082313216>"
status_offline: "<:status_offline:470326266537705472>"
+ failmail: "<:failmail:633660039931887616>"
+ trashcan: "<:trashcan:637136429717389331>"
+
bullet: "\u2022"
pencil: "\u270F"
new: "\U0001F195"
cross_mark: "\u274C"
+ check_mark: "\u2705"
+
+ ducky_yellow: &DUCKY_YELLOW 574951975574175744
+ ducky_blurple: &DUCKY_BLURPLE 574951975310065675
+ ducky_regal: &DUCKY_REGAL 637883439185395712
+ ducky_camo: &DUCKY_CAMO 637914731566596096
+ ducky_ninja: &DUCKY_NINJA 637923502535606293
+ ducky_devil: &DUCKY_DEVIL 637925314982576139
+ ducky_tube: &DUCKY_TUBE 637881368008851456
+ ducky_hunt: &DUCKY_HUNT 639355090909528084
+ ducky_wizard: &DUCKY_WIZARD 639355996954689536
+ ducky_party: &DUCKY_PARTY 639468753440210977
+ ducky_angel: &DUCKY_ANGEL 640121935610511361
+ ducky_maul: &DUCKY_MAUL 640137724958867467
+ ducky_santa: &DUCKY_SANTA 655360331002019870
upvotes: "<:upvotes:638729835245731840>"
comments: "<:comments:638729835073765387>"
@@ -89,6 +103,10 @@ style:
superstarify: "https://cdn.discordapp.com/emojis/636288153044516874.png"
unsuperstarify: "https://cdn.discordapp.com/emojis/636288201258172446.png"
+ voice_state_blue: "https://cdn.discordapp.com/emojis/656899769662439456.png"
+ voice_state_green: "https://cdn.discordapp.com/emojis/656899770094452754.png"
+ voice_state_red: "https://cdn.discordapp.com/emojis/656899769905709076.png"
+
guild:
id: 267624335836053506
@@ -98,13 +116,18 @@ guild:
channels:
admins: &ADMINS 365960823622991872
admin_spam: &ADMIN_SPAM 563594791770914816
+ admins_voice: &ADMINS_VOICE 500734494840717332
announcements: 354619224620138496
+ attachment_log: &ATTCH_LOG 649243850006855680
big_brother_logs: &BBLOGS 468507907357409333
- bot: 267659945086812160
+ bot: &BOT_CMD 267659945086812160
checkpoint_test: 422077681434099723
defcon: &DEFCON 464469101889454091
+ devcontrib: &DEV_CONTRIB 635950537262759947
+ devcore: 411200599653351425
devlog: &DEVLOG 622895325144940554
devtest: &DEVTEST 414574275865870337
+ esoteric: 470884583684964352
help_0: 303906576991780866
help_1: 303906556754395136
help_2: 303906514266226689
@@ -127,13 +150,16 @@ guild:
python: 267624335836053506
reddit: 458224812528238616
staff_lounge: &STAFF_LOUNGE 464905259261755392
+ staff_voice: &STAFF_VOICE 412375055910043655
talent_pool: &TALENT_POOL 534321732593647616
userlog: 528976905546760203
user_event_a: &USER_EVENT_A 592000283102674944
verification: 352442727016693763
+ voice_log: 640292421988646961
staff_channels: [*ADMINS, *ADMIN_SPAM, *MOD_SPAM, *MODS, *HELPERS, *ORGANISATION, *DEFCON]
- ignored: [*ADMINS, *MESSAGE_LOG, *MODLOG]
+ ignored: [*ADMINS, *MESSAGE_LOG, *MODLOG, *ADMINS_VOICE, *STAFF_VOICE, *ATTCH_LOG]
+ reminder_whitelist: [*BOT_CMD, *DEV_CONTRIB]
roles:
admin: &ADMIN_ROLE 267628507062992896
@@ -142,7 +168,7 @@ guild:
contributor: 295488872404484098
core_developer: 587606783669829632
helpers: 267630620367257601
- jammer: 423054537079783434
+ jammer: 591786436651646989
moderator: &MOD_ROLE 267629731250176001
muted: &MUTED_ROLE 277914926603829249
owner: &OWNER_ROLE 267627879762755584
@@ -155,6 +181,8 @@ guild:
talent_pool: 569145364800602132
big_brother: 569133704568373283
reddit: 635408384794951680
+ duck_pond: 637821475327311927
+ dev_log: 680501655111729222
filter:
@@ -188,6 +216,13 @@ filter:
- 544525886180032552 # kennethreitz.org
- 590806733924859943 # Discord Hack Week
- 423249981340778496 # Kivy
+ - 197038439483310086 # Discord Testers
+ - 286633898581164032 # Ren'Py
+ - 349505959032389632 # PyGame
+ - 438622377094414346 # Pyglet
+ - 524691714909274162 # Panda3D
+ - 336642139381301249 # discord.py
+ - 405403391410438165 # Sentdex
domain_blacklist:
- pornhub.com
@@ -274,7 +309,7 @@ urls:
paste_service: !JOIN [*SCHEMA, *PASTE, "/{key}"]
# Snekbox
- snekbox_eval_api: "https://snekbox.pythondiscord.com/eval"
+ snekbox_eval_api: "http://snekbox:8060/eval"
# Discord API URLs
discord_api: &DISCORD_API "https://discordapp.com/api/v7/"
@@ -355,11 +390,22 @@ anti_malware:
- '.png'
- '.tiff'
- '.wmv'
+ - '.svg'
+ - '.psd' # Photoshop
+ - '.ai' # Illustrator
+ - '.aep' # After Effects
+ - '.xcf' # GIMP
+ - '.mp3'
+ - '.wav'
+ - '.ogg'
+ - '.md'
reddit:
subreddits:
- 'r/Python'
+ client_id: !ENV "REDDIT_CLIENT_ID"
+ secret: !ENV "REDDIT_SECRET"
wolfram:
@@ -389,5 +435,13 @@ redirect_output:
delete_invocation: true
delete_delay: 15
+sync:
+ confirm_timeout: 300
+ max_diff: 10
+
+duck_pond:
+ threshold: 5
+ custom_emojis: [*DUCKY_YELLOW, *DUCKY_BLURPLE, *DUCKY_CAMO, *DUCKY_DEVIL, *DUCKY_NINJA, *DUCKY_REGAL, *DUCKY_TUBE, *DUCKY_HUNT, *DUCKY_WIZARD, *DUCKY_PARTY, *DUCKY_ANGEL, *DUCKY_MAUL, *DUCKY_SANTA]
+
config:
required_keys: ['bot.token']
diff --git a/docker-compose.yml b/docker-compose.yml
index f79fdba58..11deceae8 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -23,6 +23,7 @@ services:
- staff.web
ports:
- "127.0.0.1:8000:8000"
+ tty: true
depends_on:
- postgres
environment:
@@ -37,8 +38,11 @@ services:
volumes:
- ./logs:/bot/logs
- .:/bot:ro
+ tty: true
depends_on:
- web
environment:
BOT_TOKEN: ${BOT_TOKEN}
BOT_API_KEY: badbot13m0n8f570f942013fc818f234916ca531
+ REDDIT_CLIENT_ID: ${REDDIT_CLIENT_ID}
+ REDDIT_SECRET: ${REDDIT_SECRET}
diff --git a/tests/README.md b/tests/README.md
index 6ab9bc93e..be78821bf 100644
--- a/tests/README.md
+++ b/tests/README.md
@@ -2,7 +2,7 @@
Our bot is one of the most important tools we have for running our community. As we don't want that tool to break, we decided that we wanted to write unit tests for it. We hope that in the future, we'll have 100% test coverage for the bot. This guide will help you get started with writing the tests needed to achieve that.
-_**Note:** This is a practical guide to getting started with writing tests for our bot, not a general introduction to writing unit tests in Python. If you're looking for a more general introduction, you may like Corey Schafer's [Python Tutorial: Unit Testing Your Code with the unittest Module](https://www.youtube.com/watch?v=6tNS--WetLI) or Ned Batchelder's PyCon talk [Getting Started Testing](https://www.youtube.com/watch?v=FxSsnHeWQBY)._
+_**Note:** This is a practical guide to getting started with writing tests for our bot, not a general introduction to writing unit tests in Python. If you're looking for a more general introduction, you can take a look at the [Additional resources](#additional-resources) section at the bottom of this page._
## Tools
@@ -15,6 +15,7 @@ We are using the following modules and packages for our unit tests:
To ensure the results you obtain on your personal machine are comparable to those generated in the Azure pipeline, please make sure to run your tests with the virtual environment defined by our [Pipfile](/Pipfile). To run your tests with `pipenv`, we've provided two "scripts" shortcuts:
- `pipenv run test` will run `unittest` with `coverage.py`
+- `pipenv run test path/to/test.py` will run a specific test.
- `pipenv run report` will generate a coverage report of the tests you've run with `pipenv run test`. If you append the `-m` flag to this command, the report will include the lines and branches not covered by tests in addition to the test coverage report.
If you want a coverage report, make sure to run the tests with `pipenv run test` *first*.
@@ -211,3 +212,10 @@ All in all, it's not only important to consider if all statements or branches we
Another restriction of unit testing is that it tests, well, in units. Even if we can guarantee that the units work as they should independently, we have no guarantee that they will actually work well together. What's more, while the mocking described above gives us a lot of flexibility in factoring out external code, we are working under the implicit assumption that we fully understand those external parts and use them correctly. What if our mocked `Context` object works with a `send` method, but `discord.py` has changed it to a `send_message` method in a recent update? It could mean our tests are passing, but the code they're testing still doesn't work in production.
The answer to this is that we also need to make sure that the individual parts come together into a working application. In addition, we will also need to make sure that the application communicates correctly with external applications. Since we currently have no automated integration tests or functional tests, that means **it's still very important to fire up the bot and test the code you've written manually** in addition to the unit tests you've written.
+
+## Additional resources
+
+* [Ned Batchelder's PyCon talk: Getting Started Testing](https://www.youtube.com/watch?v=FxSsnHeWQBY)
+* [Corey Schafer video about unittest](https://youtu.be/6tNS--WetLI)
+* [RealPython tutorial on unittest testing](https://realpython.com/python-testing/)
+* [RealPython tutorial on mocking](https://realpython.com/python-mock-library/)
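Note: as a concrete illustration of the workflow described above, a minimal, entirely hypothetical test could look roughly like this. It assumes (as the test modules added in this merge do) that the mocked attributes of the `tests.helpers` classes are awaitable:

import unittest

from tests import helpers


async def greet(ctx) -> None:
    """Trivial, hypothetical coroutine standing in for a command callback."""
    await ctx.send("Hello!")


class ExampleGreetTests(unittest.TestCase):
    """Hypothetical test demonstrating the mocked Context workflow."""

    @helpers.async_test
    async def test_greet_sends_a_message(self):
        ctx = helpers.MockContext()
        await greet(ctx)
        ctx.send.assert_called_once_with("Hello!")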
diff --git a/tests/base.py b/tests/base.py
index 029a249ed..88693f382 100644
--- a/tests/base.py
+++ b/tests/base.py
@@ -1,6 +1,12 @@
import logging
import unittest
from contextlib import contextmanager
+from typing import Dict
+
+import discord
+from discord.ext import commands
+
+from tests import helpers
class _CaptureLogHandler(logging.Handler):
@@ -65,3 +71,31 @@ class LoggingTestCase(unittest.TestCase):
standard_message = self._truncateMessage(base_message, record_message)
msg = self._formatMessage(msg, standard_message)
self.fail(msg)
+
+
+class CommandTestCase(unittest.TestCase):
+ """TestCase with additional assertions that are useful for testing Discord commands."""
+
+ @helpers.async_test
+ async def assertHasPermissionsCheck(
+ self,
+ cmd: commands.Command,
+ permissions: Dict[str, bool],
+ ) -> None:
+ """
+ Test that `cmd` raises a `MissingPermissions` exception if author lacks `permissions`.
+
+ Every permission in `permissions` is expected to be reported as missing. In other words, do
+ not include permissions which should not raise an exception along with those which should.
+ """
+ # Invert permission values because it's more intuitive to pass to this assertion the same
+ # permissions as those given to the check decorator.
+ permissions = {k: not v for k, v in permissions.items()}
+
+ ctx = helpers.MockContext()
+ ctx.channel.permissions_for.return_value = discord.Permissions(**permissions)
+
+ with self.assertRaises(commands.MissingPermissions) as cm:
+ await cmd.can_run(ctx)
+
+ self.assertCountEqual(permissions.keys(), cm.exception.missing_perms)
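Note: a hypothetical usage sketch for the new assertion. The cog and command below are made up; any command guarded by a has_permissions check would work the same way:

from discord.ext import commands

from tests.base import CommandTestCase


class ExampleCog(commands.Cog):
    """Hypothetical cog with a permissions-guarded command."""

    @commands.command(name="prune")
    @commands.has_permissions(manage_messages=True)
    async def prune(self, ctx: commands.Context) -> None:
        await ctx.send("Pruned!")


class ExamplePermissionsTests(CommandTestCase):
    """Hypothetical example of CommandTestCase.assertHasPermissionsCheck."""

    def test_prune_requires_manage_messages(self):
        cog = ExampleCog()
        # The assertion awaits cmd.can_run() on a mocked context and expects
        # commands.MissingPermissions for exactly these permissions.
        self.assertHasPermissionsCheck(cog.prune, {"manage_messages": True})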
diff --git a/tests/bot/cogs/sync/test_base.py b/tests/bot/cogs/sync/test_base.py
new file mode 100644
index 000000000..e6a6f9688
--- /dev/null
+++ b/tests/bot/cogs/sync/test_base.py
@@ -0,0 +1,412 @@
+import unittest
+from unittest import mock
+
+import discord
+
+from bot import constants
+from bot.api import ResponseCodeError
+from bot.cogs.sync.syncers import Syncer, _Diff
+from tests import helpers
+
+
+class TestSyncer(Syncer):
+ """Syncer subclass with mocks for abstract methods for testing purposes."""
+
+ name = "test"
+ _get_diff = helpers.AsyncMock()
+ _sync = helpers.AsyncMock()
+
+
+class SyncerBaseTests(unittest.TestCase):
+ """Tests for the syncer base class."""
+
+ def setUp(self):
+ self.bot = helpers.MockBot()
+
+ def test_instantiation_fails_without_abstract_methods(self):
+ """The class must have abstract methods implemented."""
+ with self.assertRaisesRegex(TypeError, "Can't instantiate abstract class"):
+ Syncer(self.bot)
+
+
+class SyncerSendPromptTests(unittest.TestCase):
+ """Tests for sending the sync confirmation prompt."""
+
+ def setUp(self):
+ self.bot = helpers.MockBot()
+ self.syncer = TestSyncer(self.bot)
+
+ def mock_get_channel(self):
+ """Fixture to return a mock channel and message for when `get_channel` is used."""
+ self.bot.reset_mock()
+
+ mock_channel = helpers.MockTextChannel()
+ mock_message = helpers.MockMessage()
+
+ mock_channel.send.return_value = mock_message
+ self.bot.get_channel.return_value = mock_channel
+
+ return mock_channel, mock_message
+
+ def mock_fetch_channel(self):
+ """Fixture to return a mock channel and message for when `fetch_channel` is used."""
+ self.bot.reset_mock()
+
+ mock_channel = helpers.MockTextChannel()
+ mock_message = helpers.MockMessage()
+
+ self.bot.get_channel.return_value = None
+ mock_channel.send.return_value = mock_message
+ self.bot.fetch_channel.return_value = mock_channel
+
+ return mock_channel, mock_message
+
+ @helpers.async_test
+ async def test_send_prompt_edits_and_returns_message(self):
+ """The given message should be edited to display the prompt and then should be returned."""
+ msg = helpers.MockMessage()
+ ret_val = await self.syncer._send_prompt(msg)
+
+ msg.edit.assert_called_once()
+ self.assertIn("content", msg.edit.call_args[1])
+ self.assertEqual(ret_val, msg)
+
+ @helpers.async_test
+ async def test_send_prompt_gets_dev_core_channel(self):
+ """The dev-core channel should be retrieved if an extant message isn't given."""
+ subtests = (
+ (self.bot.get_channel, self.mock_get_channel),
+ (self.bot.fetch_channel, self.mock_fetch_channel),
+ )
+
+ for method, mock_ in subtests:
+ with self.subTest(method=method, msg=mock_.__name__):
+ mock_()
+ await self.syncer._send_prompt()
+
+ method.assert_called_once_with(constants.Channels.devcore)
+
+ @helpers.async_test
+ async def test_send_prompt_returns_None_if_channel_fetch_fails(self):
+ """None should be returned if there's an HTTPException when fetching the channel."""
+ self.bot.get_channel.return_value = None
+ self.bot.fetch_channel.side_effect = discord.HTTPException(mock.MagicMock(), "test error!")
+
+ ret_val = await self.syncer._send_prompt()
+
+ self.assertIsNone(ret_val)
+
+ @helpers.async_test
+ async def test_send_prompt_sends_and_returns_new_message_if_not_given(self):
+ """A new message mentioning core devs should be sent and returned if message isn't given."""
+ for mock_ in (self.mock_get_channel, self.mock_fetch_channel):
+ with self.subTest(msg=mock_.__name__):
+ mock_channel, mock_message = mock_()
+ ret_val = await self.syncer._send_prompt()
+
+ mock_channel.send.assert_called_once()
+ self.assertIn(self.syncer._CORE_DEV_MENTION, mock_channel.send.call_args[0][0])
+ self.assertEqual(ret_val, mock_message)
+
+ @helpers.async_test
+ async def test_send_prompt_adds_reactions(self):
+ """The message should have reactions for confirmation added."""
+ extant_message = helpers.MockMessage()
+ subtests = (
+ (extant_message, lambda: (None, extant_message)),
+ (None, self.mock_get_channel),
+ (None, self.mock_fetch_channel),
+ )
+
+ for message_arg, mock_ in subtests:
+ subtest_msg = "Extant message" if mock_.__name__ == "<lambda>" else mock_.__name__
+
+ with self.subTest(msg=subtest_msg):
+ _, mock_message = mock_()
+ await self.syncer._send_prompt(message_arg)
+
+ calls = [mock.call(emoji) for emoji in self.syncer._REACTION_EMOJIS]
+ mock_message.add_reaction.assert_has_calls(calls)
+
+
+class SyncerConfirmationTests(unittest.TestCase):
+ """Tests for waiting for a sync confirmation reaction on the prompt."""
+
+ def setUp(self):
+ self.bot = helpers.MockBot()
+ self.syncer = TestSyncer(self.bot)
+ self.core_dev_role = helpers.MockRole(id=constants.Roles.core_developer)
+
+ @staticmethod
+ def get_message_reaction(emoji):
+ """Fixture to return a mock message an reaction from the given `emoji`."""
+ message = helpers.MockMessage()
+ reaction = helpers.MockReaction(emoji=emoji, message=message)
+
+ return message, reaction
+
+ def test_reaction_check_for_valid_emoji_and_authors(self):
+ """Should return True if authors are identical or are a bot and a core dev, respectively."""
+ user_subtests = (
+ (
+ helpers.MockMember(id=77),
+ helpers.MockMember(id=77),
+ "identical users",
+ ),
+ (
+ helpers.MockMember(id=77, bot=True),
+ helpers.MockMember(id=43, roles=[self.core_dev_role]),
+ "bot author and core-dev reactor",
+ ),
+ )
+
+ for emoji in self.syncer._REACTION_EMOJIS:
+ for author, user, msg in user_subtests:
+ with self.subTest(author=author, user=user, emoji=emoji, msg=msg):
+ message, reaction = self.get_message_reaction(emoji)
+ ret_val = self.syncer._reaction_check(author, message, reaction, user)
+
+ self.assertTrue(ret_val)
+
+ def test_reaction_check_for_invalid_reactions(self):
+ """Should return False for invalid reaction events."""
+ valid_emoji = self.syncer._REACTION_EMOJIS[0]
+ subtests = (
+ (
+ helpers.MockMember(id=77),
+ *self.get_message_reaction(valid_emoji),
+ helpers.MockMember(id=43, roles=[self.core_dev_role]),
+ "users are not identical",
+ ),
+ (
+ helpers.MockMember(id=77, bot=True),
+ *self.get_message_reaction(valid_emoji),
+ helpers.MockMember(id=43),
+ "reactor lacks the core-dev role",
+ ),
+ (
+ helpers.MockMember(id=77, bot=True, roles=[self.core_dev_role]),
+ *self.get_message_reaction(valid_emoji),
+ helpers.MockMember(id=77, bot=True, roles=[self.core_dev_role]),
+ "reactor is a bot",
+ ),
+ (
+ helpers.MockMember(id=77),
+ helpers.MockMessage(id=95),
+ helpers.MockReaction(emoji=valid_emoji, message=helpers.MockMessage(id=26)),
+ helpers.MockMember(id=77),
+ "messages are not identical",
+ ),
+ (
+ helpers.MockMember(id=77),
+ *self.get_message_reaction("InVaLiD"),
+ helpers.MockMember(id=77),
+ "emoji is invalid",
+ ),
+ )
+
+ for *args, msg in subtests:
+ kwargs = dict(zip(("author", "message", "reaction", "user"), args))
+ with self.subTest(**kwargs, msg=msg):
+ ret_val = self.syncer._reaction_check(*args)
+ self.assertFalse(ret_val)
+
+ @helpers.async_test
+ async def test_wait_for_confirmation(self):
+ """The message should always be edited and only return True if the emoji is a check mark."""
+ subtests = (
+ (constants.Emojis.check_mark, True, None),
+ ("InVaLiD", False, None),
+ (None, False, TimeoutError),
+ )
+
+ for emoji, ret_val, side_effect in subtests:
+ for bot in (True, False):
+ with self.subTest(emoji=emoji, ret_val=ret_val, side_effect=side_effect, bot=bot):
+ # Set up mocks
+ message = helpers.MockMessage()
+ member = helpers.MockMember(bot=bot)
+
+ self.bot.wait_for.reset_mock()
+ self.bot.wait_for.return_value = (helpers.MockReaction(emoji=emoji), None)
+ self.bot.wait_for.side_effect = side_effect
+
+ # Call the function
+ actual_return = await self.syncer._wait_for_confirmation(member, message)
+
+ # Perform assertions
+ self.bot.wait_for.assert_called_once()
+ self.assertIn("reaction_add", self.bot.wait_for.call_args[0])
+
+ message.edit.assert_called_once()
+ kwargs = message.edit.call_args[1]
+ self.assertIn("content", kwargs)
+
+ # Core devs should only be mentioned if the author is a bot.
+ if bot:
+ self.assertIn(self.syncer._CORE_DEV_MENTION, kwargs["content"])
+ else:
+ self.assertNotIn(self.syncer._CORE_DEV_MENTION, kwargs["content"])
+
+ self.assertIs(actual_return, ret_val)
+
+
+class SyncerSyncTests(unittest.TestCase):
+ """Tests for main function orchestrating the sync."""
+
+ def setUp(self):
+ self.bot = helpers.MockBot(user=helpers.MockMember(bot=True))
+ self.syncer = TestSyncer(self.bot)
+
+ @helpers.async_test
+ async def test_sync_respects_confirmation_result(self):
+ """The sync should abort if confirmation fails and continue if confirmed."""
+ mock_message = helpers.MockMessage()
+ subtests = (
+ (True, mock_message),
+ (False, None),
+ )
+
+ for confirmed, message in subtests:
+ with self.subTest(confirmed=confirmed):
+ self.syncer._sync.reset_mock()
+ self.syncer._get_diff.reset_mock()
+
+ diff = _Diff({1, 2, 3}, {4, 5}, None)
+ self.syncer._get_diff.return_value = diff
+ self.syncer._get_confirmation_result = helpers.AsyncMock(
+ return_value=(confirmed, message)
+ )
+
+ guild = helpers.MockGuild()
+ await self.syncer.sync(guild)
+
+ self.syncer._get_diff.assert_called_once_with(guild)
+ self.syncer._get_confirmation_result.assert_called_once()
+
+ if confirmed:
+ self.syncer._sync.assert_called_once_with(diff)
+ else:
+ self.syncer._sync.assert_not_called()
+
+ @helpers.async_test
+ async def test_sync_diff_size(self):
+ """The diff size should be correctly calculated."""
+ subtests = (
+ (6, _Diff({1, 2}, {3, 4}, {5, 6})),
+ (5, _Diff({1, 2, 3}, None, {4, 5})),
+ (0, _Diff(None, None, None)),
+ (0, _Diff(set(), set(), set())),
+ )
+
+ for size, diff in subtests:
+ with self.subTest(size=size, diff=diff):
+ self.syncer._get_diff.reset_mock()
+ self.syncer._get_diff.return_value = diff
+ self.syncer._get_confirmation_result = helpers.AsyncMock(return_value=(False, None))
+
+ guild = helpers.MockGuild()
+ await self.syncer.sync(guild)
+
+ self.syncer._get_diff.assert_called_once_with(guild)
+ self.syncer._get_confirmation_result.assert_called_once()
+ self.assertEqual(self.syncer._get_confirmation_result.call_args[0][0], size)
+
+ @helpers.async_test
+ async def test_sync_message_edited(self):
+ """The message should be edited if one was sent, even if the sync has an API error."""
+ subtests = (
+ (None, None, False),
+ (helpers.MockMessage(), None, True),
+ (helpers.MockMessage(), ResponseCodeError(mock.MagicMock()), True),
+ )
+
+ for message, side_effect, should_edit in subtests:
+ with self.subTest(message=message, side_effect=side_effect, should_edit=should_edit):
+ self.syncer._sync.side_effect = side_effect
+ self.syncer._get_confirmation_result = helpers.AsyncMock(
+ return_value=(True, message)
+ )
+
+ guild = helpers.MockGuild()
+ await self.syncer.sync(guild)
+
+ if should_edit:
+ message.edit.assert_called_once()
+ self.assertIn("content", message.edit.call_args[1])
+
+ @helpers.async_test
+ async def test_sync_confirmation_context_redirect(self):
+ """If ctx is given, a new message should be sent and author should be ctx's author."""
+ mock_member = helpers.MockMember()
+ subtests = (
+ (None, self.bot.user, None),
+ (helpers.MockContext(author=mock_member), mock_member, helpers.MockMessage()),
+ )
+
+ for ctx, author, message in subtests:
+ with self.subTest(ctx=ctx, author=author, message=message):
+ if ctx is not None:
+ ctx.send.return_value = message
+
+ self.syncer._get_confirmation_result = helpers.AsyncMock(return_value=(False, None))
+
+ guild = helpers.MockGuild()
+ await self.syncer.sync(guild, ctx)
+
+ if ctx is not None:
+ ctx.send.assert_called_once()
+
+ self.syncer._get_confirmation_result.assert_called_once()
+ self.assertEqual(self.syncer._get_confirmation_result.call_args[0][1], author)
+ self.assertEqual(self.syncer._get_confirmation_result.call_args[0][2], message)
+
+ @mock.patch.object(constants.Sync, "max_diff", new=3)
+ @helpers.async_test
+ async def test_confirmation_result_small_diff(self):
+ """Should always return True and the given message if the diff size is too small."""
+ author = helpers.MockMember()
+ expected_message = helpers.MockMessage()
+
+ for size in (3, 2):
+ with self.subTest(size=size):
+ self.syncer._send_prompt = helpers.AsyncMock()
+ self.syncer._wait_for_confirmation = helpers.AsyncMock()
+
+ coro = self.syncer._get_confirmation_result(size, author, expected_message)
+ result, actual_message = await coro
+
+ self.assertTrue(result)
+ self.assertEqual(actual_message, expected_message)
+ self.syncer._send_prompt.assert_not_called()
+ self.syncer._wait_for_confirmation.assert_not_called()
+
+ @mock.patch.object(constants.Sync, "max_diff", new=3)
+ @helpers.async_test
+ async def test_confirmation_result_large_diff(self):
+ """Should return True if confirmed and False if _send_prompt fails or aborted."""
+ author = helpers.MockMember()
+ mock_message = helpers.MockMessage()
+
+ subtests = (
+ (True, mock_message, True, "confirmed"),
+ (False, None, False, "_send_prompt failed"),
+ (False, mock_message, False, "aborted"),
+ )
+
+ for expected_result, expected_message, confirmed, msg in subtests:
+ with self.subTest(msg=msg):
+ self.syncer._send_prompt = helpers.AsyncMock(return_value=expected_message)
+ self.syncer._wait_for_confirmation = helpers.AsyncMock(return_value=confirmed)
+
+ coro = self.syncer._get_confirmation_result(4, author)
+ actual_result, actual_message = await coro
+
+ self.syncer._send_prompt.assert_called_once_with(None) # message defaults to None
+ self.assertIs(actual_result, expected_result)
+ self.assertEqual(actual_message, expected_message)
+
+ if expected_message:
+ self.syncer._wait_for_confirmation.assert_called_once_with(
+ author, expected_message
+ )
diff --git a/tests/bot/cogs/sync/test_cog.py b/tests/bot/cogs/sync/test_cog.py
new file mode 100644
index 000000000..98c9afc0d
--- /dev/null
+++ b/tests/bot/cogs/sync/test_cog.py
@@ -0,0 +1,395 @@
+import unittest
+from unittest import mock
+
+import discord
+
+from bot import constants
+from bot.api import ResponseCodeError
+from bot.cogs import sync
+from bot.cogs.sync.syncers import Syncer
+from tests import helpers
+from tests.base import CommandTestCase
+
+
+class MockSyncer(helpers.CustomMockMixin, mock.MagicMock):
+ """
+ A MagicMock subclass to mock Syncer objects.
+
+ Instances of this class will follow the specifications of `bot.cogs.sync.syncers.Syncer`
+ instances. For more information, see the `MockGuild` docstring.
+ """
+
+ def __init__(self, **kwargs) -> None:
+ super().__init__(spec_set=Syncer, **kwargs)
+
+
+class SyncExtensionTests(unittest.TestCase):
+ """Tests for the sync extension."""
+
+ @staticmethod
+ def test_extension_setup():
+ """The Sync cog should be added."""
+ bot = helpers.MockBot()
+ sync.setup(bot)
+ bot.add_cog.assert_called_once()
+
+
+class SyncCogTestCase(unittest.TestCase):
+ """Base class for Sync cog tests. Sets up patches for syncers."""
+
+ def setUp(self):
+ self.bot = helpers.MockBot()
+
+        # These patch the type. When the type is called, a MockSyncer instance is returned.
+ # MockSyncer is needed so that our custom AsyncMock is used.
+ # TODO: Use autospec instead in 3.8, which will automatically use AsyncMock when needed.
+ self.role_syncer_patcher = mock.patch(
+ "bot.cogs.sync.syncers.RoleSyncer",
+ new=mock.MagicMock(return_value=MockSyncer())
+ )
+ self.user_syncer_patcher = mock.patch(
+ "bot.cogs.sync.syncers.UserSyncer",
+ new=mock.MagicMock(return_value=MockSyncer())
+ )
+ self.RoleSyncer = self.role_syncer_patcher.start()
+ self.UserSyncer = self.user_syncer_patcher.start()
+
+ self.cog = sync.Sync(self.bot)
+
+ def tearDown(self):
+ self.role_syncer_patcher.stop()
+ self.user_syncer_patcher.stop()
+
+ @staticmethod
+ def response_error(status: int) -> ResponseCodeError:
+ """Fixture to return a ResponseCodeError with the given status code."""
+ response = mock.MagicMock()
+ response.status = status
+
+ return ResponseCodeError(response)
+
+
+class SyncCogTests(SyncCogTestCase):
+ """Tests for the Sync cog."""
+
+ @mock.patch.object(sync.Sync, "sync_guild")
+ def test_sync_cog_init(self, sync_guild):
+ """Should instantiate syncers and run a sync for the guild."""
+ # Reset because a Sync cog was already instantiated in setUp.
+ self.RoleSyncer.reset_mock()
+ self.UserSyncer.reset_mock()
+ self.bot.loop.create_task.reset_mock()
+
+ mock_sync_guild_coro = mock.MagicMock()
+ sync_guild.return_value = mock_sync_guild_coro
+
+ sync.Sync(self.bot)
+
+ self.RoleSyncer.assert_called_once_with(self.bot)
+ self.UserSyncer.assert_called_once_with(self.bot)
+ sync_guild.assert_called_once_with()
+ self.bot.loop.create_task.assert_called_once_with(mock_sync_guild_coro)
+
+ @helpers.async_test
+ async def test_sync_cog_sync_guild(self):
+ """Roles and users should be synced only if a guild is successfully retrieved."""
+ for guild in (helpers.MockGuild(), None):
+ with self.subTest(guild=guild):
+ self.bot.reset_mock()
+ self.cog.role_syncer.reset_mock()
+ self.cog.user_syncer.reset_mock()
+
+ self.bot.get_guild = mock.MagicMock(return_value=guild)
+
+ await self.cog.sync_guild()
+
+ self.bot.wait_until_guild_available.assert_called_once()
+ self.bot.get_guild.assert_called_once_with(constants.Guild.id)
+
+ if guild is None:
+ self.cog.role_syncer.sync.assert_not_called()
+ self.cog.user_syncer.sync.assert_not_called()
+ else:
+ self.cog.role_syncer.sync.assert_called_once_with(guild)
+ self.cog.user_syncer.sync.assert_called_once_with(guild)
+
+ async def patch_user_helper(self, side_effect: BaseException) -> None:
+ """Helper to set a side effect for bot.api_client.patch and then assert it is called."""
+ self.bot.api_client.patch.reset_mock(side_effect=True)
+ self.bot.api_client.patch.side_effect = side_effect
+
+ user_id, updated_information = 5, {"key": 123}
+ await self.cog.patch_user(user_id, updated_information)
+
+ self.bot.api_client.patch.assert_called_once_with(
+ f"bot/users/{user_id}",
+ json=updated_information,
+ )
+
+ @helpers.async_test
+ async def test_sync_cog_patch_user(self):
+ """A PATCH request should be sent and 404 errors ignored."""
+ for side_effect in (None, self.response_error(404)):
+ with self.subTest(side_effect=side_effect):
+ await self.patch_user_helper(side_effect)
+
+ @helpers.async_test
+ async def test_sync_cog_patch_user_non_404(self):
+ """A PATCH request should be sent and the error raised if it's not a 404."""
+ with self.assertRaises(ResponseCodeError):
+ await self.patch_user_helper(self.response_error(500))
+
+
+class SyncCogListenerTests(SyncCogTestCase):
+ """Tests for the listeners of the Sync cog."""
+
+ def setUp(self):
+ super().setUp()
+ self.cog.patch_user = helpers.AsyncMock(spec_set=self.cog.patch_user)
+
+ @helpers.async_test
+ async def test_sync_cog_on_guild_role_create(self):
+ """A POST request should be sent with the new role's data."""
+ self.assertTrue(self.cog.on_guild_role_create.__cog_listener__)
+
+ role_data = {
+ "colour": 49,
+ "id": 777,
+ "name": "rolename",
+ "permissions": 8,
+ "position": 23,
+ }
+ role = helpers.MockRole(**role_data)
+ await self.cog.on_guild_role_create(role)
+
+ self.bot.api_client.post.assert_called_once_with("bot/roles", json=role_data)
+
+ @helpers.async_test
+ async def test_sync_cog_on_guild_role_delete(self):
+ """A DELETE request should be sent."""
+ self.assertTrue(self.cog.on_guild_role_delete.__cog_listener__)
+
+ role = helpers.MockRole(id=99)
+ await self.cog.on_guild_role_delete(role)
+
+ self.bot.api_client.delete.assert_called_once_with("bot/roles/99")
+
+ @helpers.async_test
+ async def test_sync_cog_on_guild_role_update(self):
+ """A PUT request should be sent if the colour, name, permissions, or position changes."""
+ self.assertTrue(self.cog.on_guild_role_update.__cog_listener__)
+
+ role_data = {
+ "colour": 49,
+ "id": 777,
+ "name": "rolename",
+ "permissions": 8,
+ "position": 23,
+ }
+ subtests = (
+ (True, ("colour", "name", "permissions", "position")),
+ (False, ("hoist", "mentionable")),
+ )
+
+ for should_put, attributes in subtests:
+ for attribute in attributes:
+ with self.subTest(should_put=should_put, changed_attribute=attribute):
+ self.bot.api_client.put.reset_mock()
+
+ after_role_data = role_data.copy()
+ after_role_data[attribute] = 876
+
+ before_role = helpers.MockRole(**role_data)
+ after_role = helpers.MockRole(**after_role_data)
+
+ await self.cog.on_guild_role_update(before_role, after_role)
+
+ if should_put:
+ self.bot.api_client.put.assert_called_once_with(
+ f"bot/roles/{after_role.id}",
+ json=after_role_data
+ )
+ else:
+ self.bot.api_client.put.assert_not_called()
+
+ @helpers.async_test
+ async def test_sync_cog_on_member_remove(self):
+        """Member should be patched to set in_guild to False."""
+ self.assertTrue(self.cog.on_member_remove.__cog_listener__)
+
+ member = helpers.MockMember()
+ await self.cog.on_member_remove(member)
+
+ self.cog.patch_user.assert_called_once_with(
+ member.id,
+ updated_information={"in_guild": False}
+ )
+
+ @helpers.async_test
+ async def test_sync_cog_on_member_update_roles(self):
+ """Members should be patched if their roles have changed."""
+ self.assertTrue(self.cog.on_member_update.__cog_listener__)
+
+ # Roles are intentionally unsorted.
+ before_roles = [helpers.MockRole(id=12), helpers.MockRole(id=30), helpers.MockRole(id=20)]
+ before_member = helpers.MockMember(roles=before_roles)
+ after_member = helpers.MockMember(roles=before_roles[1:])
+
+ await self.cog.on_member_update(before_member, after_member)
+
+ data = {"roles": sorted(role.id for role in after_member.roles)}
+ self.cog.patch_user.assert_called_once_with(after_member.id, updated_information=data)
+
+ @helpers.async_test
+ async def test_sync_cog_on_member_update_other(self):
+ """Members should not be patched if other attributes have changed."""
+ self.assertTrue(self.cog.on_member_update.__cog_listener__)
+
+ subtests = (
+ ("activities", discord.Game("Pong"), discord.Game("Frogger")),
+ ("nick", "old nick", "new nick"),
+ ("status", discord.Status.online, discord.Status.offline),
+ )
+
+ for attribute, old_value, new_value in subtests:
+ with self.subTest(attribute=attribute):
+ self.cog.patch_user.reset_mock()
+
+ before_member = helpers.MockMember(**{attribute: old_value})
+ after_member = helpers.MockMember(**{attribute: new_value})
+
+ await self.cog.on_member_update(before_member, after_member)
+
+ self.cog.patch_user.assert_not_called()
+
+ @helpers.async_test
+ async def test_sync_cog_on_user_update(self):
+ """A user should be patched only if the name, discriminator, or avatar changes."""
+ self.assertTrue(self.cog.on_user_update.__cog_listener__)
+
+ before_data = {
+ "name": "old name",
+ "discriminator": "1234",
+ "avatar": "old avatar",
+ "bot": False,
+ }
+
+ subtests = (
+ (True, "name", "name", "new name", "new name"),
+ (True, "discriminator", "discriminator", "8765", 8765),
+ (True, "avatar", "avatar_hash", "9j2e9", "9j2e9"),
+ (False, "bot", "bot", True, True),
+ )
+
+ for should_patch, attribute, api_field, value, api_value in subtests:
+ with self.subTest(attribute=attribute):
+ self.cog.patch_user.reset_mock()
+
+ after_data = before_data.copy()
+ after_data[attribute] = value
+ before_user = helpers.MockUser(**before_data)
+ after_user = helpers.MockUser(**after_data)
+
+ await self.cog.on_user_update(before_user, after_user)
+
+ if should_patch:
+ self.cog.patch_user.assert_called_once()
+
+ # Don't care if *all* keys are present; only the changed one is required
+ call_args = self.cog.patch_user.call_args
+ self.assertEqual(call_args[0][0], after_user.id)
+ self.assertIn("updated_information", call_args[1])
+
+ updated_information = call_args[1]["updated_information"]
+ self.assertIn(api_field, updated_information)
+ self.assertEqual(updated_information[api_field], api_value)
+ else:
+ self.cog.patch_user.assert_not_called()
+
+ async def on_member_join_helper(self, side_effect: Exception) -> dict:
+ """
+ Helper to set `side_effect` for on_member_join and assert a PUT request was sent.
+
+ The request data for the mock member is returned. All exceptions will be re-raised.
+ """
+ member = helpers.MockMember(
+ discriminator="1234",
+ roles=[helpers.MockRole(id=22), helpers.MockRole(id=12)],
+ )
+
+ data = {
+ "avatar_hash": member.avatar,
+ "discriminator": int(member.discriminator),
+ "id": member.id,
+ "in_guild": True,
+ "name": member.name,
+ "roles": sorted(role.id for role in member.roles)
+ }
+
+ self.bot.api_client.put.reset_mock(side_effect=True)
+ self.bot.api_client.put.side_effect = side_effect
+
+ try:
+ await self.cog.on_member_join(member)
+ except Exception:
+ raise
+ finally:
+ self.bot.api_client.put.assert_called_once_with(
+ f"bot/users/{member.id}",
+ json=data
+ )
+
+ return data
+
+ @helpers.async_test
+ async def test_sync_cog_on_member_join(self):
+ """Should PUT user's data or POST it if the user doesn't exist."""
+ for side_effect in (None, self.response_error(404)):
+ with self.subTest(side_effect=side_effect):
+ self.bot.api_client.post.reset_mock()
+ data = await self.on_member_join_helper(side_effect)
+
+ if side_effect:
+ self.bot.api_client.post.assert_called_once_with("bot/users", json=data)
+ else:
+ self.bot.api_client.post.assert_not_called()
+
+ @helpers.async_test
+ async def test_sync_cog_on_member_join_non_404(self):
+ """ResponseCodeError should be re-raised if status code isn't a 404."""
+ with self.assertRaises(ResponseCodeError):
+ await self.on_member_join_helper(self.response_error(500))
+
+ self.bot.api_client.post.assert_not_called()
+
+
+class SyncCogCommandTests(SyncCogTestCase, CommandTestCase):
+ """Tests for the commands in the Sync cog."""
+
+ @helpers.async_test
+ async def test_sync_roles_command(self):
+ """sync() should be called on the RoleSyncer."""
+ ctx = helpers.MockContext()
+ await self.cog.sync_roles_command.callback(self.cog, ctx)
+
+ self.cog.role_syncer.sync.assert_called_once_with(ctx.guild, ctx)
+
+ @helpers.async_test
+ async def test_sync_users_command(self):
+ """sync() should be called on the UserSyncer."""
+ ctx = helpers.MockContext()
+ await self.cog.sync_users_command.callback(self.cog, ctx)
+
+ self.cog.user_syncer.sync.assert_called_once_with(ctx.guild, ctx)
+
+ def test_commands_require_admin(self):
+ """The sync commands should only run if the author has the administrator permission."""
+ cmds = (
+ self.cog.sync_group,
+ self.cog.sync_roles_command,
+ self.cog.sync_users_command,
+ )
+
+ for cmd in cmds:
+ with self.subTest(cmd=cmd):
+ self.assertHasPermissionsCheck(cmd, {"administrator": True})
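The setUp above patches the syncer classes with plain MagicMocks that return MockSyncer instances because autospeccing only produces async-aware mocks from Python 3.8 onwards. A minimal sketch of the autospec alternative the TODO comment refers to, assuming Python 3.8+ (where autospecced coroutine methods become AsyncMock automatically); this is illustrative only and not part of this diff:

    # Sketch, assuming Python 3.8+: autospec turns coroutine methods such as
    # Syncer.sync into AsyncMock objects, so the MockSyncer helper is no longer needed.
    self.role_syncer_patcher = mock.patch("bot.cogs.sync.syncers.RoleSyncer", autospec=True)
    self.user_syncer_patcher = mock.patch("bot.cogs.sync.syncers.UserSyncer", autospec=True)
    self.RoleSyncer = self.role_syncer_patcher.start()
    self.UserSyncer = self.user_syncer_patcher.start()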
diff --git a/tests/bot/cogs/sync/test_roles.py b/tests/bot/cogs/sync/test_roles.py
index 27ae27639..14fb2577a 100644
--- a/tests/bot/cogs/sync/test_roles.py
+++ b/tests/bot/cogs/sync/test_roles.py
@@ -1,126 +1,165 @@
import unittest
+from unittest import mock
-from bot.cogs.sync.syncers import Role, get_roles_for_sync
-
-
-class GetRolesForSyncTests(unittest.TestCase):
- """Tests constructing the roles to synchronize with the site."""
-
- def test_get_roles_for_sync_empty_return_for_equal_roles(self):
- """No roles should be synced when no diff is found."""
- api_roles = {Role(id=41, name='name', colour=33, permissions=0x8, position=1)}
- guild_roles = {Role(id=41, name='name', colour=33, permissions=0x8, position=1)}
-
- self.assertEqual(
- get_roles_for_sync(guild_roles, api_roles),
- (set(), set(), set())
- )
-
- def test_get_roles_for_sync_returns_roles_to_update_with_non_id_diff(self):
- """Roles to be synced are returned when non-ID attributes differ."""
- api_roles = {Role(id=41, name='old name', colour=35, permissions=0x8, position=1)}
- guild_roles = {Role(id=41, name='new name', colour=33, permissions=0x8, position=2)}
-
- self.assertEqual(
- get_roles_for_sync(guild_roles, api_roles),
- (set(), guild_roles, set())
- )
-
- def test_get_roles_only_returns_roles_that_require_update(self):
- """Roles that require an update should be returned as the second tuple element."""
- api_roles = {
- Role(id=41, name='old name', colour=33, permissions=0x8, position=1),
- Role(id=53, name='other role', colour=55, permissions=0, position=3)
- }
- guild_roles = {
- Role(id=41, name='new name', colour=35, permissions=0x8, position=2),
- Role(id=53, name='other role', colour=55, permissions=0, position=3)
- }
-
- self.assertEqual(
- get_roles_for_sync(guild_roles, api_roles),
- (
- set(),
- {Role(id=41, name='new name', colour=35, permissions=0x8, position=2)},
- set(),
- )
- )
-
- def test_get_roles_returns_new_roles_in_first_tuple_element(self):
- """Newly created roles are returned as the first tuple element."""
- api_roles = {
- Role(id=41, name='name', colour=35, permissions=0x8, position=1),
- }
- guild_roles = {
- Role(id=41, name='name', colour=35, permissions=0x8, position=1),
- Role(id=53, name='other role', colour=55, permissions=0, position=2)
- }
-
- self.assertEqual(
- get_roles_for_sync(guild_roles, api_roles),
- (
- {Role(id=53, name='other role', colour=55, permissions=0, position=2)},
- set(),
- set(),
- )
- )
-
- def test_get_roles_returns_roles_to_update_and_new_roles(self):
- """Newly created and updated roles should be returned together."""
- api_roles = {
- Role(id=41, name='old name', colour=35, permissions=0x8, position=1),
- }
- guild_roles = {
- Role(id=41, name='new name', colour=40, permissions=0x16, position=2),
- Role(id=53, name='other role', colour=55, permissions=0, position=3)
- }
-
- self.assertEqual(
- get_roles_for_sync(guild_roles, api_roles),
- (
- {Role(id=53, name='other role', colour=55, permissions=0, position=3)},
- {Role(id=41, name='new name', colour=40, permissions=0x16, position=2)},
- set(),
- )
- )
-
- def test_get_roles_returns_roles_to_delete(self):
- """Roles to be deleted should be returned as the third tuple element."""
- api_roles = {
- Role(id=41, name='name', colour=35, permissions=0x8, position=1),
- Role(id=61, name='to delete', colour=99, permissions=0x9, position=2),
- }
- guild_roles = {
- Role(id=41, name='name', colour=35, permissions=0x8, position=1),
- }
-
- self.assertEqual(
- get_roles_for_sync(guild_roles, api_roles),
- (
- set(),
- set(),
- {Role(id=61, name='to delete', colour=99, permissions=0x9, position=2)},
- )
- )
-
- def test_get_roles_returns_roles_to_delete_update_and_new_roles(self):
- """When roles were added, updated, and removed, all of them are returned properly."""
- api_roles = {
- Role(id=41, name='not changed', colour=35, permissions=0x8, position=1),
- Role(id=61, name='to delete', colour=99, permissions=0x9, position=2),
- Role(id=71, name='to update', colour=99, permissions=0x9, position=3),
- }
- guild_roles = {
- Role(id=41, name='not changed', colour=35, permissions=0x8, position=1),
- Role(id=81, name='to create', colour=99, permissions=0x9, position=4),
- Role(id=71, name='updated', colour=101, permissions=0x5, position=3),
- }
-
- self.assertEqual(
- get_roles_for_sync(guild_roles, api_roles),
- (
- {Role(id=81, name='to create', colour=99, permissions=0x9, position=4)},
- {Role(id=71, name='updated', colour=101, permissions=0x5, position=3)},
- {Role(id=61, name='to delete', colour=99, permissions=0x9, position=2)},
- )
- )
+import discord
+
+from bot.cogs.sync.syncers import RoleSyncer, _Diff, _Role
+from tests import helpers
+
+
+def fake_role(**kwargs):
+ """Fixture to return a dictionary representing a role with default values set."""
+ kwargs.setdefault("id", 9)
+ kwargs.setdefault("name", "fake role")
+ kwargs.setdefault("colour", 7)
+ kwargs.setdefault("permissions", 0)
+ kwargs.setdefault("position", 55)
+
+ return kwargs
+
+
+class RoleSyncerDiffTests(unittest.TestCase):
+ """Tests for determining differences between roles in the DB and roles in the Guild cache."""
+
+ def setUp(self):
+ self.bot = helpers.MockBot()
+ self.syncer = RoleSyncer(self.bot)
+
+ @staticmethod
+ def get_guild(*roles):
+ """Fixture to return a guild object with the given roles."""
+ guild = helpers.MockGuild()
+ guild.roles = []
+
+ for role in roles:
+ mock_role = helpers.MockRole(**role)
+ mock_role.colour = discord.Colour(role["colour"])
+ mock_role.permissions = discord.Permissions(role["permissions"])
+ guild.roles.append(mock_role)
+
+ return guild
+
+ @helpers.async_test
+ async def test_empty_diff_for_identical_roles(self):
+ """No differences should be found if the roles in the guild and DB are identical."""
+ self.bot.api_client.get.return_value = [fake_role()]
+ guild = self.get_guild(fake_role())
+
+ actual_diff = await self.syncer._get_diff(guild)
+ expected_diff = (set(), set(), set())
+
+ self.assertEqual(actual_diff, expected_diff)
+
+ @helpers.async_test
+ async def test_diff_for_updated_roles(self):
+ """Only updated roles should be added to the 'updated' set of the diff."""
+ updated_role = fake_role(id=41, name="new")
+
+ self.bot.api_client.get.return_value = [fake_role(id=41, name="old"), fake_role()]
+ guild = self.get_guild(updated_role, fake_role())
+
+ actual_diff = await self.syncer._get_diff(guild)
+ expected_diff = (set(), {_Role(**updated_role)}, set())
+
+ self.assertEqual(actual_diff, expected_diff)
+
+ @helpers.async_test
+ async def test_diff_for_new_roles(self):
+ """Only new roles should be added to the 'created' set of the diff."""
+ new_role = fake_role(id=41, name="new")
+
+ self.bot.api_client.get.return_value = [fake_role()]
+ guild = self.get_guild(fake_role(), new_role)
+
+ actual_diff = await self.syncer._get_diff(guild)
+ expected_diff = ({_Role(**new_role)}, set(), set())
+
+ self.assertEqual(actual_diff, expected_diff)
+
+ @helpers.async_test
+ async def test_diff_for_deleted_roles(self):
+ """Only deleted roles should be added to the 'deleted' set of the diff."""
+ deleted_role = fake_role(id=61, name="deleted")
+
+ self.bot.api_client.get.return_value = [fake_role(), deleted_role]
+ guild = self.get_guild(fake_role())
+
+ actual_diff = await self.syncer._get_diff(guild)
+ expected_diff = (set(), set(), {_Role(**deleted_role)})
+
+ self.assertEqual(actual_diff, expected_diff)
+
+ @helpers.async_test
+ async def test_diff_for_new_updated_and_deleted_roles(self):
+ """When roles are added, updated, and removed, all of them are returned properly."""
+ new = fake_role(id=41, name="new")
+ updated = fake_role(id=71, name="updated")
+ deleted = fake_role(id=61, name="deleted")
+
+ self.bot.api_client.get.return_value = [
+ fake_role(),
+ fake_role(id=71, name="updated name"),
+ deleted,
+ ]
+ guild = self.get_guild(fake_role(), new, updated)
+
+ actual_diff = await self.syncer._get_diff(guild)
+ expected_diff = ({_Role(**new)}, {_Role(**updated)}, {_Role(**deleted)})
+
+ self.assertEqual(actual_diff, expected_diff)
+
+
+class RoleSyncerSyncTests(unittest.TestCase):
+ """Tests for the API requests that sync roles."""
+
+ def setUp(self):
+ self.bot = helpers.MockBot()
+ self.syncer = RoleSyncer(self.bot)
+
+ @helpers.async_test
+ async def test_sync_created_roles(self):
+ """Only POST requests should be made with the correct payload."""
+ roles = [fake_role(id=111), fake_role(id=222)]
+
+ role_tuples = {_Role(**role) for role in roles}
+ diff = _Diff(role_tuples, set(), set())
+ await self.syncer._sync(diff)
+
+ calls = [mock.call("bot/roles", json=role) for role in roles]
+ self.bot.api_client.post.assert_has_calls(calls, any_order=True)
+ self.assertEqual(self.bot.api_client.post.call_count, len(roles))
+
+ self.bot.api_client.put.assert_not_called()
+ self.bot.api_client.delete.assert_not_called()
+
+ @helpers.async_test
+ async def test_sync_updated_roles(self):
+ """Only PUT requests should be made with the correct payload."""
+ roles = [fake_role(id=111), fake_role(id=222)]
+
+ role_tuples = {_Role(**role) for role in roles}
+ diff = _Diff(set(), role_tuples, set())
+ await self.syncer._sync(diff)
+
+ calls = [mock.call(f"bot/roles/{role['id']}", json=role) for role in roles]
+ self.bot.api_client.put.assert_has_calls(calls, any_order=True)
+ self.assertEqual(self.bot.api_client.put.call_count, len(roles))
+
+ self.bot.api_client.post.assert_not_called()
+ self.bot.api_client.delete.assert_not_called()
+
+ @helpers.async_test
+ async def test_sync_deleted_roles(self):
+ """Only DELETE requests should be made with the correct payload."""
+ roles = [fake_role(id=111), fake_role(id=222)]
+
+ role_tuples = {_Role(**role) for role in roles}
+ diff = _Diff(set(), set(), role_tuples)
+ await self.syncer._sync(diff)
+
+ calls = [mock.call(f"bot/roles/{role['id']}") for role in roles]
+ self.bot.api_client.delete.assert_has_calls(calls, any_order=True)
+ self.assertEqual(self.bot.api_client.delete.call_count, len(roles))
+
+ self.bot.api_client.post.assert_not_called()
+ self.bot.api_client.put.assert_not_called()
diff --git a/tests/bot/cogs/sync/test_users.py b/tests/bot/cogs/sync/test_users.py
index ccaf67490..421bf6bb6 100644
--- a/tests/bot/cogs/sync/test_users.py
+++ b/tests/bot/cogs/sync/test_users.py
@@ -1,84 +1,169 @@
import unittest
+from unittest import mock
-from bot.cogs.sync.syncers import User, get_users_for_sync
+from bot.cogs.sync.syncers import UserSyncer, _Diff, _User
+from tests import helpers
def fake_user(**kwargs):
- kwargs.setdefault('id', 43)
- kwargs.setdefault('name', 'bob the test man')
- kwargs.setdefault('discriminator', 1337)
- kwargs.setdefault('avatar_hash', None)
- kwargs.setdefault('roles', (666,))
- kwargs.setdefault('in_guild', True)
- return User(**kwargs)
-
-
-class GetUsersForSyncTests(unittest.TestCase):
- """Tests constructing the users to synchronize with the site."""
-
- def test_get_users_for_sync_returns_nothing_for_empty_params(self):
- """When no users are given, none are returned."""
- self.assertEqual(
- get_users_for_sync({}, {}),
- (set(), set())
- )
-
- def test_get_users_for_sync_returns_nothing_for_equal_users(self):
- """When no users are updated, none are returned."""
- api_users = {43: fake_user()}
- guild_users = {43: fake_user()}
-
- self.assertEqual(
- get_users_for_sync(guild_users, api_users),
- (set(), set())
- )
-
- def test_get_users_for_sync_returns_users_to_update_on_non_id_field_diff(self):
- """When a non-ID-field differs, the user to update is returned."""
- api_users = {43: fake_user()}
- guild_users = {43: fake_user(name='new fancy name')}
-
- self.assertEqual(
- get_users_for_sync(guild_users, api_users),
- (set(), {fake_user(name='new fancy name')})
- )
-
- def test_get_users_for_sync_returns_users_to_create_with_new_ids_on_guild(self):
- """When new users join the guild, they are returned as the first tuple element."""
- api_users = {43: fake_user()}
- guild_users = {43: fake_user(), 63: fake_user(id=63)}
-
- self.assertEqual(
- get_users_for_sync(guild_users, api_users),
- ({fake_user(id=63)}, set())
- )
-
- def test_get_users_for_sync_updates_in_guild_field_on_user_leave(self):
+ """Fixture to return a dictionary representing a user with default values set."""
+ kwargs.setdefault("id", 43)
+ kwargs.setdefault("name", "bob the test man")
+ kwargs.setdefault("discriminator", 1337)
+ kwargs.setdefault("avatar_hash", None)
+ kwargs.setdefault("roles", (666,))
+ kwargs.setdefault("in_guild", True)
+
+ return kwargs
+
+
+class UserSyncerDiffTests(unittest.TestCase):
+ """Tests for determining differences between users in the DB and users in the Guild cache."""
+
+ def setUp(self):
+ self.bot = helpers.MockBot()
+ self.syncer = UserSyncer(self.bot)
+
+ @staticmethod
+ def get_guild(*members):
+ """Fixture to return a guild object with the given members."""
+ guild = helpers.MockGuild()
+ guild.members = []
+
+ for member in members:
+ member = member.copy()
+ member["avatar"] = member.pop("avatar_hash")
+ del member["in_guild"]
+
+ mock_member = helpers.MockMember(**member)
+ mock_member.roles = [helpers.MockRole(id=role_id) for role_id in member["roles"]]
+
+ guild.members.append(mock_member)
+
+ return guild
+
+ @helpers.async_test
+ async def test_empty_diff_for_no_users(self):
+ """When no users are given, an empty diff should be returned."""
+ guild = self.get_guild()
+
+ actual_diff = await self.syncer._get_diff(guild)
+ expected_diff = (set(), set(), None)
+
+ self.assertEqual(actual_diff, expected_diff)
+
+ @helpers.async_test
+ async def test_empty_diff_for_identical_users(self):
+ """No differences should be found if the users in the guild and DB are identical."""
+ self.bot.api_client.get.return_value = [fake_user()]
+ guild = self.get_guild(fake_user())
+
+ actual_diff = await self.syncer._get_diff(guild)
+ expected_diff = (set(), set(), None)
+
+ self.assertEqual(actual_diff, expected_diff)
+
+ @helpers.async_test
+ async def test_diff_for_updated_users(self):
+ """Only updated users should be added to the 'updated' set of the diff."""
+ updated_user = fake_user(id=99, name="new")
+
+ self.bot.api_client.get.return_value = [fake_user(id=99, name="old"), fake_user()]
+ guild = self.get_guild(updated_user, fake_user())
+
+ actual_diff = await self.syncer._get_diff(guild)
+ expected_diff = (set(), {_User(**updated_user)}, None)
+
+ self.assertEqual(actual_diff, expected_diff)
+
+ @helpers.async_test
+ async def test_diff_for_new_users(self):
+ """Only new users should be added to the 'created' set of the diff."""
+ new_user = fake_user(id=99, name="new")
+
+ self.bot.api_client.get.return_value = [fake_user()]
+ guild = self.get_guild(fake_user(), new_user)
+
+ actual_diff = await self.syncer._get_diff(guild)
+ expected_diff = ({_User(**new_user)}, set(), None)
+
+ self.assertEqual(actual_diff, expected_diff)
+
+ @helpers.async_test
+ async def test_diff_sets_in_guild_false_for_leaving_users(self):
"""When a user leaves the guild, the `in_guild` flag is updated to `False`."""
- api_users = {43: fake_user(), 63: fake_user(id=63)}
- guild_users = {43: fake_user()}
-
- self.assertEqual(
- get_users_for_sync(guild_users, api_users),
- (set(), {fake_user(id=63, in_guild=False)})
- )
-
- def test_get_users_for_sync_updates_and_creates_users_as_needed(self):
- """When one user left and another one was updated, both are returned."""
- api_users = {43: fake_user()}
- guild_users = {63: fake_user(id=63)}
-
- self.assertEqual(
- get_users_for_sync(guild_users, api_users),
- ({fake_user(id=63)}, {fake_user(in_guild=False)})
- )
-
- def test_get_users_for_sync_does_not_duplicate_update_users(self):
- """When the API knows a user the guild doesn't, nothing is performed."""
- api_users = {43: fake_user(in_guild=False)}
- guild_users = {}
-
- self.assertEqual(
- get_users_for_sync(guild_users, api_users),
- (set(), set())
- )
+ leaving_user = fake_user(id=63, in_guild=False)
+
+ self.bot.api_client.get.return_value = [fake_user(), fake_user(id=63)]
+ guild = self.get_guild(fake_user())
+
+ actual_diff = await self.syncer._get_diff(guild)
+ expected_diff = (set(), {_User(**leaving_user)}, None)
+
+ self.assertEqual(actual_diff, expected_diff)
+
+ @helpers.async_test
+ async def test_diff_for_new_updated_and_leaving_users(self):
+ """When users are added, updated, and removed, all of them are returned properly."""
+ new_user = fake_user(id=99, name="new")
+ updated_user = fake_user(id=55, name="updated")
+ leaving_user = fake_user(id=63, in_guild=False)
+
+ self.bot.api_client.get.return_value = [fake_user(), fake_user(id=55), fake_user(id=63)]
+ guild = self.get_guild(fake_user(), new_user, updated_user)
+
+ actual_diff = await self.syncer._get_diff(guild)
+ expected_diff = ({_User(**new_user)}, {_User(**updated_user), _User(**leaving_user)}, None)
+
+ self.assertEqual(actual_diff, expected_diff)
+
+ @helpers.async_test
+ async def test_empty_diff_for_db_users_not_in_guild(self):
+ """When the DB knows a user the guild doesn't, no difference is found."""
+ self.bot.api_client.get.return_value = [fake_user(), fake_user(id=63, in_guild=False)]
+ guild = self.get_guild(fake_user())
+
+ actual_diff = await self.syncer._get_diff(guild)
+ expected_diff = (set(), set(), None)
+
+ self.assertEqual(actual_diff, expected_diff)
+
+
+class UserSyncerSyncTests(unittest.TestCase):
+ """Tests for the API requests that sync users."""
+
+ def setUp(self):
+ self.bot = helpers.MockBot()
+ self.syncer = UserSyncer(self.bot)
+
+ @helpers.async_test
+ async def test_sync_created_users(self):
+ """Only POST requests should be made with the correct payload."""
+ users = [fake_user(id=111), fake_user(id=222)]
+
+ user_tuples = {_User(**user) for user in users}
+ diff = _Diff(user_tuples, set(), None)
+ await self.syncer._sync(diff)
+
+ calls = [mock.call("bot/users", json=user) for user in users]
+ self.bot.api_client.post.assert_has_calls(calls, any_order=True)
+ self.assertEqual(self.bot.api_client.post.call_count, len(users))
+
+ self.bot.api_client.put.assert_not_called()
+ self.bot.api_client.delete.assert_not_called()
+
+ @helpers.async_test
+ async def test_sync_updated_users(self):
+ """Only PUT requests should be made with the correct payload."""
+ users = [fake_user(id=111), fake_user(id=222)]
+
+ user_tuples = {_User(**user) for user in users}
+ diff = _Diff(set(), user_tuples, None)
+ await self.syncer._sync(diff)
+
+ calls = [mock.call(f"bot/users/{user['id']}", json=user) for user in users]
+ self.bot.api_client.put.assert_has_calls(calls, any_order=True)
+ self.assertEqual(self.bot.api_client.put.call_count, len(users))
+
+ self.bot.api_client.post.assert_not_called()
+ self.bot.api_client.delete.assert_not_called()
diff --git a/tests/bot/cogs/test_duck_pond.py b/tests/bot/cogs/test_duck_pond.py
new file mode 100644
index 000000000..5b0a3b8c3
--- /dev/null
+++ b/tests/bot/cogs/test_duck_pond.py
@@ -0,0 +1,584 @@
+import asyncio
+import logging
+import typing
+import unittest
+from unittest.mock import MagicMock, patch
+
+import discord
+
+from bot import constants
+from bot.cogs import duck_pond
+from tests import base
+from tests import helpers
+
+MODULE_PATH = "bot.cogs.duck_pond"
+
+
+class DuckPondTests(base.LoggingTestCase):
+ """Tests for DuckPond functionality."""
+
+ @classmethod
+ def setUpClass(cls):
+ """Sets up the objects that only have to be initialized once."""
+ cls.nonstaff_member = helpers.MockMember(name="Non-staffer")
+
+ cls.staff_role = helpers.MockRole(name="Staff role", id=constants.STAFF_ROLES[0])
+ cls.staff_member = helpers.MockMember(name="staffer", roles=[cls.staff_role])
+
+ cls.checkmark_emoji = "\N{White Heavy Check Mark}"
+ cls.thumbs_up_emoji = "\N{Thumbs Up Sign}"
+ cls.unicode_duck_emoji = "\N{Duck}"
+ cls.duck_pond_emoji = helpers.MockPartialEmoji(id=constants.DuckPond.custom_emojis[0])
+ cls.non_duck_custom_emoji = helpers.MockPartialEmoji(id=123)
+
+ def setUp(self):
+ """Sets up the objects that need to be refreshed before each test."""
+ self.bot = helpers.MockBot(user=helpers.MockMember(id=46692))
+ self.cog = duck_pond.DuckPond(bot=self.bot)
+
+ def test_duck_pond_correctly_initializes(self):
+        """`__init__` should set `bot` and `webhook_id` attributes and schedule `fetch_webhook`."""
+ bot = helpers.MockBot()
+ cog = MagicMock()
+
+ duck_pond.DuckPond.__init__(cog, bot)
+
+ self.assertEqual(cog.bot, bot)
+ self.assertEqual(cog.webhook_id, constants.Webhooks.duck_pond)
+        bot.loop.create_task.assert_called_once_with(cog.fetch_webhook())
+
+ def test_fetch_webhook_succeeds_without_connectivity_issues(self):
+        """The `fetch_webhook` method waits until the guild is available and sets the `webhook` attribute."""
+ self.bot.fetch_webhook.return_value = "dummy webhook"
+ self.cog.webhook_id = 1
+
+ asyncio.run(self.cog.fetch_webhook())
+
+ self.bot.wait_until_guild_available.assert_called_once()
+ self.bot.fetch_webhook.assert_called_once_with(1)
+ self.assertEqual(self.cog.webhook, "dummy webhook")
+
+ def test_fetch_webhook_logs_when_unable_to_fetch_webhook(self):
+ """The `fetch_webhook` method should log an exception when it fails to fetch the webhook."""
+ self.bot.fetch_webhook.side_effect = discord.HTTPException(response=MagicMock(), message="Not found.")
+ self.cog.webhook_id = 1
+
+ log = logging.getLogger('bot.cogs.duck_pond')
+ with self.assertLogs(logger=log, level=logging.ERROR) as log_watcher:
+ asyncio.run(self.cog.fetch_webhook())
+
+ self.bot.wait_until_guild_available.assert_called_once()
+ self.bot.fetch_webhook.assert_called_once_with(1)
+
+ self.assertEqual(len(log_watcher.records), 1)
+
+ record = log_watcher.records[0]
+ self.assertEqual(record.levelno, logging.ERROR)
+
+ def test_is_staff_returns_correct_values_based_on_instance_passed(self):
+ """The `is_staff` method should return correct values based on the instance passed."""
+ test_cases = (
+ (helpers.MockUser(name="User instance"), False),
+ (helpers.MockMember(name="Member instance without staff role"), False),
+ (helpers.MockMember(name="Member instance with staff role", roles=[self.staff_role]), True)
+ )
+
+ for user, expected_return in test_cases:
+ actual_return = self.cog.is_staff(user)
+ with self.subTest(user_type=user.name, expected_return=expected_return, actual_return=actual_return):
+ self.assertEqual(expected_return, actual_return)
+
+ @helpers.async_test
+ async def test_has_green_checkmark_correctly_detects_presence_of_green_checkmark_emoji(self):
+ """The `has_green_checkmark` method should only return `True` if one is present."""
+ test_cases = (
+ (
+ "No reactions", helpers.MockMessage(), False
+ ),
+ (
+ "No green check mark reactions",
+ helpers.MockMessage(reactions=[
+ helpers.MockReaction(emoji=self.unicode_duck_emoji, users=[self.bot.user]),
+ helpers.MockReaction(emoji=self.thumbs_up_emoji, users=[self.bot.user])
+ ]),
+ False
+ ),
+ (
+ "Green check mark reaction, but not from our bot",
+ helpers.MockMessage(reactions=[
+ helpers.MockReaction(emoji=self.unicode_duck_emoji, users=[self.bot.user]),
+ helpers.MockReaction(emoji=self.checkmark_emoji, users=[self.staff_member])
+ ]),
+ False
+ ),
+ (
+ "Green check mark reaction, with one from the bot",
+ helpers.MockMessage(reactions=[
+ helpers.MockReaction(emoji=self.unicode_duck_emoji, users=[self.bot.user]),
+ helpers.MockReaction(emoji=self.checkmark_emoji, users=[self.staff_member, self.bot.user])
+ ]),
+ True
+ )
+ )
+
+ for description, message, expected_return in test_cases:
+ actual_return = await self.cog.has_green_checkmark(message)
+ with self.subTest(
+ test_case=description,
+ expected_return=expected_return,
+ actual_return=actual_return
+ ):
+ self.assertEqual(expected_return, actual_return)
+
+ def test_send_webhook_correctly_passes_on_arguments(self):
+ """The `send_webhook` method should pass the arguments to the webhook correctly."""
+ self.cog.webhook = helpers.MockAsyncWebhook()
+
+ content = "fake content"
+ username = "fake username"
+ avatar_url = "fake avatar_url"
+ embed = "fake embed"
+
+ asyncio.run(self.cog.send_webhook(content, username, avatar_url, embed))
+
+ self.cog.webhook.send.assert_called_once_with(
+ content=content,
+ username=username,
+ avatar_url=avatar_url,
+ embed=embed
+ )
+
+ def test_send_webhook_logs_when_sending_message_fails(self):
+ """The `send_webhook` method should catch a `discord.HTTPException` and log accordingly."""
+ self.cog.webhook = helpers.MockAsyncWebhook()
+ self.cog.webhook.send.side_effect = discord.HTTPException(response=MagicMock(), message="Something failed.")
+
+ log = logging.getLogger('bot.cogs.duck_pond')
+ with self.assertLogs(logger=log, level=logging.ERROR) as log_watcher:
+ asyncio.run(self.cog.send_webhook())
+
+ self.assertEqual(len(log_watcher.records), 1)
+
+ record = log_watcher.records[0]
+ self.assertEqual(record.levelno, logging.ERROR)
+
+ def _get_reaction(
+ self,
+ emoji: typing.Union[str, helpers.MockEmoji],
+ staff: int = 0,
+ nonstaff: int = 0
+ ) -> helpers.MockReaction:
+ staffers = [helpers.MockMember(roles=[self.staff_role]) for _ in range(staff)]
+ nonstaffers = [helpers.MockMember() for _ in range(nonstaff)]
+ return helpers.MockReaction(emoji=emoji, users=staffers + nonstaffers)
+
+ @helpers.async_test
+ async def test_count_ducks_correctly_counts_the_number_of_eligible_duck_emojis(self):
+ """The `count_ducks` method should return the number of unique staffers who gave a duck."""
+ test_cases = (
+ # Simple test cases
+ # A message without reactions should return 0
+ (
+ "No reactions",
+ helpers.MockMessage(),
+ 0
+ ),
+ # A message with a non-duck reaction from a non-staffer should return 0
+ (
+ "Non-duck reaction from non-staffer",
+ helpers.MockMessage(reactions=[self._get_reaction(emoji=self.thumbs_up_emoji, nonstaff=1)]),
+ 0
+ ),
+ # A message with a non-duck reaction from a staffer should return 0
+ (
+ "Non-duck reaction from staffer",
+ helpers.MockMessage(reactions=[self._get_reaction(emoji=self.non_duck_custom_emoji, staff=1)]),
+ 0
+ ),
+ # A message with a non-duck reaction from a non-staffer and staffer should return 0
+ (
+ "Non-duck reaction from staffer + non-staffer",
+ helpers.MockMessage(reactions=[self._get_reaction(emoji=self.thumbs_up_emoji, staff=1, nonstaff=1)]),
+ 0
+ ),
+ # A message with a unicode duck reaction from a non-staffer should return 0
+ (
+ "Unicode Duck Reaction from non-staffer",
+ helpers.MockMessage(reactions=[self._get_reaction(emoji=self.unicode_duck_emoji, nonstaff=1)]),
+ 0
+ ),
+ # A message with a unicode duck reaction from a staffer should return 1
+ (
+ "Unicode Duck Reaction from staffer",
+ helpers.MockMessage(reactions=[self._get_reaction(emoji=self.unicode_duck_emoji, staff=1)]),
+ 1
+ ),
+ # A message with a unicode duck reaction from a non-staffer and staffer should return 1
+ (
+ "Unicode Duck Reaction from staffer + non-staffer",
+ helpers.MockMessage(reactions=[self._get_reaction(emoji=self.unicode_duck_emoji, staff=1, nonstaff=1)]),
+ 1
+ ),
+ # A message with a duckpond duck reaction from a non-staffer should return 0
+ (
+ "Duckpond Duck Reaction from non-staffer",
+ helpers.MockMessage(reactions=[self._get_reaction(emoji=self.duck_pond_emoji, nonstaff=1)]),
+ 0
+ ),
+ # A message with a duckpond duck reaction from a staffer should return 1
+ (
+ "Duckpond Duck Reaction from staffer",
+ helpers.MockMessage(reactions=[self._get_reaction(emoji=self.duck_pond_emoji, staff=1)]),
+ 1
+ ),
+ # A message with a duckpond duck reaction from a non-staffer and staffer should return 1
+ (
+ "Duckpond Duck Reaction from staffer + non-staffer",
+ helpers.MockMessage(reactions=[self._get_reaction(emoji=self.duck_pond_emoji, staff=1, nonstaff=1)]),
+ 1
+ ),
+
+ # Complex test cases
+ # A message with duckpond duck reactions from 3 staffers and 2 non-staffers returns 3
+ (
+ "Duckpond Duck Reaction from 3 staffers + 2 non-staffers",
+ helpers.MockMessage(reactions=[self._get_reaction(emoji=self.duck_pond_emoji, staff=3, nonstaff=2)]),
+ 3
+ ),
+ # A staffer with multiple duck reactions only counts once
+ (
+ "Two different duck reactions from the same staffer",
+ helpers.MockMessage(
+ reactions=[
+ helpers.MockReaction(emoji=self.duck_pond_emoji, users=[self.staff_member]),
+ helpers.MockReaction(emoji=self.unicode_duck_emoji, users=[self.staff_member]),
+ ]
+ ),
+ 1
+ ),
+ # A non-string emoji does not count (to test the `isinstance(reaction.emoji, str)` elif)
+ (
+                "Reaction with non-Emoji/str emoji from 3 staffers + 2 non-staffers",
+ helpers.MockMessage(reactions=[self._get_reaction(emoji=100, staff=3, nonstaff=2)]),
+ 0
+ ),
+ # We correctly sum when multiple reactions are provided.
+ (
+                "Duckpond and unicode duck reactions from multiple staffers + non-staffers",
+ helpers.MockMessage(
+ reactions=[
+ self._get_reaction(emoji=self.duck_pond_emoji, staff=3, nonstaff=2),
+ self._get_reaction(emoji=self.unicode_duck_emoji, staff=4, nonstaff=9),
+ ]
+ ),
+ 3 + 4
+ ),
+ )
+
+ for description, message, expected_count in test_cases:
+ actual_count = await self.cog.count_ducks(message)
+ with self.subTest(test_case=description, expected_count=expected_count, actual_count=actual_count):
+ self.assertEqual(expected_count, actual_count)
+
+ @helpers.async_test
+ async def test_relay_message_correctly_relays_content_and_attachments(self):
+ """The `relay_message` method should correctly relay message content and attachments."""
+ send_webhook_path = f"{MODULE_PATH}.DuckPond.send_webhook"
+ send_attachments_path = f"{MODULE_PATH}.send_attachments"
+
+ self.cog.webhook = helpers.MockAsyncWebhook()
+
+ test_values = (
+ (helpers.MockMessage(clean_content="", attachments=[]), False, False),
+ (helpers.MockMessage(clean_content="message", attachments=[]), True, False),
+ (helpers.MockMessage(clean_content="", attachments=["attachment"]), False, True),
+ (helpers.MockMessage(clean_content="message", attachments=["attachment"]), True, True),
+ )
+
+ for message, expect_webhook_call, expect_attachment_call in test_values:
+ with patch(send_webhook_path, new_callable=helpers.AsyncMock) as send_webhook:
+ with patch(send_attachments_path, new_callable=helpers.AsyncMock) as send_attachments:
+ with self.subTest(clean_content=message.clean_content, attachments=message.attachments):
+ await self.cog.relay_message(message)
+
+ self.assertEqual(expect_webhook_call, send_webhook.called)
+ self.assertEqual(expect_attachment_call, send_attachments.called)
+
+ message.add_reaction.assert_called_once_with(self.checkmark_emoji)
+
+ @patch(f"{MODULE_PATH}.send_attachments", new_callable=helpers.AsyncMock)
+ @helpers.async_test
+ async def test_relay_message_handles_irretrievable_attachment_exceptions(self, send_attachments):
+ """The `relay_message` method should handle irretrievable attachments."""
+ message = helpers.MockMessage(clean_content="message", attachments=["attachment"])
+ side_effects = (discord.errors.Forbidden(MagicMock(), ""), discord.errors.NotFound(MagicMock(), ""))
+
+ self.cog.webhook = helpers.MockAsyncWebhook()
+ log = logging.getLogger("bot.cogs.duck_pond")
+
+ for side_effect in side_effects:
+ send_attachments.side_effect = side_effect
+ with patch(f"{MODULE_PATH}.DuckPond.send_webhook", new_callable=helpers.AsyncMock) as send_webhook:
+ with self.subTest(side_effect=type(side_effect).__name__):
+ with self.assertNotLogs(logger=log, level=logging.ERROR):
+ await self.cog.relay_message(message)
+
+ self.assertEqual(send_webhook.call_count, 2)
+
+ @patch(f"{MODULE_PATH}.DuckPond.send_webhook", new_callable=helpers.AsyncMock)
+ @patch(f"{MODULE_PATH}.send_attachments", new_callable=helpers.AsyncMock)
+ @helpers.async_test
+ async def test_relay_message_handles_attachment_http_error(self, send_attachments, send_webhook):
+        """The `relay_message` method should log an error when fetching attachments raises an HTTPException."""
+ message = helpers.MockMessage(clean_content="message", attachments=["attachment"])
+
+ self.cog.webhook = helpers.MockAsyncWebhook()
+ log = logging.getLogger("bot.cogs.duck_pond")
+
+ side_effect = discord.HTTPException(MagicMock(), "")
+ send_attachments.side_effect = side_effect
+ with self.subTest(side_effect=type(side_effect).__name__):
+ with self.assertLogs(logger=log, level=logging.ERROR) as log_watcher:
+ await self.cog.relay_message(message)
+
+ send_webhook.assert_called_once_with(
+ content=message.clean_content,
+ username=message.author.display_name,
+ avatar_url=message.author.avatar_url
+ )
+
+ self.assertEqual(len(log_watcher.records), 1)
+
+ record = log_watcher.records[0]
+ self.assertEqual(record.levelno, logging.ERROR)
+
+ def _mock_payload(self, label: str, is_custom_emoji: bool, id_: int, emoji_name: str):
+ """Creates a mock `on_raw_reaction_add` payload with the specified emoji data."""
+ payload = MagicMock(name=label)
+ payload.emoji.is_custom_emoji.return_value = is_custom_emoji
+ payload.emoji.id = id_
+ payload.emoji.name = emoji_name
+ return payload
+
+ @helpers.async_test
+ async def test_payload_has_duckpond_emoji_correctly_detects_relevant_emojis(self):
+        """The `_payload_has_duckpond_emoji` helper should only return True for duck pond emojis."""
+ test_values = (
+ # Custom Emojis
+ (
+ self._mock_payload(
+ label="Custom Duckpond Emoji",
+ is_custom_emoji=True,
+ id_=constants.DuckPond.custom_emojis[0],
+ emoji_name=""
+ ),
+ True
+ ),
+ (
+ self._mock_payload(
+ label="Custom Non-Duckpond Emoji",
+ is_custom_emoji=True,
+ id_=123,
+ emoji_name=""
+ ),
+ False
+ ),
+ # Unicode Emojis
+ (
+ self._mock_payload(
+ label="Unicode Duck Emoji",
+ is_custom_emoji=False,
+ id_=1,
+ emoji_name=self.unicode_duck_emoji
+ ),
+ True
+ ),
+ (
+ self._mock_payload(
+ label="Unicode Non-Duck Emoji",
+ is_custom_emoji=False,
+ id_=1,
+ emoji_name=self.thumbs_up_emoji
+ ),
+ False
+ ),
+ )
+
+ for payload, expected_return in test_values:
+ actual_return = self.cog._payload_has_duckpond_emoji(payload)
+ with self.subTest(case=payload._mock_name, expected_return=expected_return, actual_return=actual_return):
+ self.assertEqual(expected_return, actual_return)
+
+ @patch(f"{MODULE_PATH}.discord.utils.get")
+ @patch(f"{MODULE_PATH}.DuckPond._payload_has_duckpond_emoji", new=MagicMock(return_value=False))
+ def test_on_raw_reaction_add_returns_early_with_payload_without_duck_emoji(self, utils_get):
+ """The `on_raw_reaction_add` method should return early if the payload does not contain a duck emoji."""
+ self.assertIsNone(asyncio.run(self.cog.on_raw_reaction_add(payload=MagicMock())))
+
+ # Ensure we've returned before making an unnecessary API call in the lines of code after the emoji check
+ utils_get.assert_not_called()
+
+ def _raw_reaction_mocks(self, channel_id, message_id, user_id):
+ """Sets up mocks for tests of the `on_raw_reaction_add` event listener."""
+ channel = helpers.MockTextChannel(id=channel_id)
+ self.bot.get_all_channels.return_value = (channel,)
+
+ message = helpers.MockMessage(id=message_id)
+
+ channel.fetch_message.return_value = message
+
+ member = helpers.MockMember(id=user_id, roles=[self.staff_role])
+ message.guild.members = (member,)
+
+ payload = MagicMock(channel_id=channel_id, message_id=message_id, user_id=user_id)
+
+ return channel, message, member, payload
+
+ @helpers.async_test
+ async def test_on_raw_reaction_add_returns_for_bot_and_non_staff_members(self):
+ """The `on_raw_reaction_add` event handler should return for bot users or non-staff members."""
+ channel_id = 1234
+ message_id = 2345
+ user_id = 3456
+
+ channel, message, _, payload = self._raw_reaction_mocks(channel_id, message_id, user_id)
+
+ test_cases = (
+ ("non-staff member", helpers.MockMember(id=user_id)),
+ ("bot staff member", helpers.MockMember(id=user_id, roles=[self.staff_role], bot=True)),
+ )
+
+ payload.emoji = self.duck_pond_emoji
+
+ for description, member in test_cases:
+ message.guild.members = (member, )
+ with self.subTest(test_case=description), patch(f"{MODULE_PATH}.DuckPond.has_green_checkmark") as checkmark:
+ checkmark.side_effect = AssertionError(
+ "Expected method to return before calling `self.has_green_checkmark`."
+ )
+ self.assertIsNone(await self.cog.on_raw_reaction_add(payload))
+
+ # Check that we did make it past the payload checks
+ channel.fetch_message.assert_called_once()
+ channel.fetch_message.reset_mock()
+
+ @patch(f"{MODULE_PATH}.DuckPond.is_staff")
+ @patch(f"{MODULE_PATH}.DuckPond.count_ducks", new_callable=helpers.AsyncMock)
+ def test_on_raw_reaction_add_returns_on_message_with_green_checkmark_placed_by_bot(self, count_ducks, is_staff):
+ """The `on_raw_reaction_add` event should return when the message has a green check mark placed by the bot."""
+ channel_id = 31415926535
+ message_id = 27182818284
+ user_id = 16180339887
+
+ channel, message, member, payload = self._raw_reaction_mocks(channel_id, message_id, user_id)
+
+ payload.emoji = helpers.MockPartialEmoji(name=self.unicode_duck_emoji)
+ payload.emoji.is_custom_emoji.return_value = False
+
+ message.reactions = [helpers.MockReaction(emoji=self.checkmark_emoji, users=[self.bot.user])]
+
+ is_staff.return_value = True
+ count_ducks.side_effect = AssertionError("Expected method to return before calling `self.count_ducks`")
+
+ self.assertIsNone(asyncio.run(self.cog.on_raw_reaction_add(payload)))
+
+ # Assert that we've made it past `self.is_staff`
+ is_staff.assert_called_once()
+
+ @helpers.async_test
+ async def test_on_raw_reaction_add_does_not_relay_below_duck_threshold(self):
+ """The `on_raw_reaction_add` listener should not relay messages or attachments below the duck threshold."""
+ test_cases = (
+ (constants.DuckPond.threshold - 1, False),
+ (constants.DuckPond.threshold, True),
+ (constants.DuckPond.threshold + 1, True),
+ )
+
+ channel, message, member, payload = self._raw_reaction_mocks(channel_id=3, message_id=4, user_id=5)
+
+ payload.emoji = self.duck_pond_emoji
+
+ for duck_count, should_relay in test_cases:
+ with patch(f"{MODULE_PATH}.DuckPond.relay_message", new_callable=helpers.AsyncMock) as relay_message:
+ with patch(f"{MODULE_PATH}.DuckPond.count_ducks", new_callable=helpers.AsyncMock) as count_ducks:
+ count_ducks.return_value = duck_count
+ with self.subTest(duck_count=duck_count, should_relay=should_relay):
+ await self.cog.on_raw_reaction_add(payload)
+
+ # Confirm that we've made it past counting
+ count_ducks.assert_called_once()
+
+ # Did we relay a message?
+ has_relayed = relay_message.called
+ self.assertEqual(has_relayed, should_relay)
+
+ if should_relay:
+ relay_message.assert_called_once_with(message)
+
+ @helpers.async_test
+ async def test_on_raw_reaction_remove_prevents_removal_of_green_checkmark_depending_on_the_duck_count(self):
+ """The `on_raw_reaction_remove` listener prevents removal of the check mark on messages with enough ducks."""
+ checkmark = helpers.MockPartialEmoji(name=self.checkmark_emoji)
+
+ message = helpers.MockMessage(id=1234)
+
+ channel = helpers.MockTextChannel(id=98765)
+ channel.fetch_message.return_value = message
+
+ self.bot.get_all_channels.return_value = (channel, )
+
+ payload = MagicMock(channel_id=channel.id, message_id=message.id, emoji=checkmark)
+
+ test_cases = (
+ (constants.DuckPond.threshold - 1, False),
+ (constants.DuckPond.threshold, True),
+ (constants.DuckPond.threshold + 1, True),
+ )
+ for duck_count, should_re_add_checkmark in test_cases:
+ with patch(f"{MODULE_PATH}.DuckPond.count_ducks", new_callable=helpers.AsyncMock) as count_ducks:
+ count_ducks.return_value = duck_count
+ with self.subTest(duck_count=duck_count, should_re_add_checkmark=should_re_add_checkmark):
+ await self.cog.on_raw_reaction_remove(payload)
+
+ # Check if we fetched the message
+ channel.fetch_message.assert_called_once_with(message.id)
+
+ # Check if we actually counted the number of ducks
+ count_ducks.assert_called_once_with(message)
+
+ has_re_added_checkmark = message.add_reaction.called
+ self.assertEqual(should_re_add_checkmark, has_re_added_checkmark)
+
+ if should_re_add_checkmark:
+ message.add_reaction.assert_called_once_with(self.checkmark_emoji)
+ message.add_reaction.reset_mock()
+
+ # reset mocks
+ channel.fetch_message.reset_mock()
+ message.reset_mock()
+
+ def test_on_raw_reaction_remove_ignores_removal_of_non_checkmark_reactions(self):
+ """The `on_raw_reaction_remove` listener should ignore the removal of non-check mark emojis."""
+ channel = helpers.MockTextChannel(id=98765)
+
+ channel.fetch_message.side_effect = AssertionError(
+ "Expected method to return before calling `channel.fetch_message`"
+ )
+
+ self.bot.get_all_channels.return_value = (channel, )
+
+ payload = MagicMock(emoji=helpers.MockPartialEmoji(name=self.thumbs_up_emoji), channel_id=channel.id)
+
+ self.assertIsNone(asyncio.run(self.cog.on_raw_reaction_remove(payload)))
+
+ channel.fetch_message.assert_not_called()
+
+
+class DuckPondSetupTests(unittest.TestCase):
+ """Tests setup of the `DuckPond` cog."""
+
+ def test_setup(self):
+ """Setup of the extension should call add_cog."""
+ bot = helpers.MockBot()
+ duck_pond.setup(bot)
+ bot.add_cog.assert_called_once()
diff --git a/tests/bot/cogs/test_information.py b/tests/bot/cogs/test_information.py
index 4496a2ae0..deae7ebad 100644
--- a/tests/bot/cogs/test_information.py
+++ b/tests/bot/cogs/test_information.py
@@ -125,10 +125,10 @@ class InformationCogTests(unittest.TestCase):
)
],
members=[
- *(helpers.MockMember(status='online') for _ in range(2)),
- *(helpers.MockMember(status='idle') for _ in range(1)),
- *(helpers.MockMember(status='dnd') for _ in range(4)),
- *(helpers.MockMember(status='offline') for _ in range(3)),
+ *(helpers.MockMember(status=discord.Status.online) for _ in range(2)),
+ *(helpers.MockMember(status=discord.Status.idle) for _ in range(1)),
+ *(helpers.MockMember(status=discord.Status.dnd) for _ in range(4)),
+ *(helpers.MockMember(status=discord.Status.offline) for _ in range(3)),
],
member_count=1_234,
icon_url='a-lemon.jpg',
@@ -153,9 +153,9 @@ class InformationCogTests(unittest.TestCase):
**Counts**
Members: {self.ctx.guild.member_count:,}
Roles: {len(self.ctx.guild.roles)}
- Text: 1
- Voice: 1
- Channel categories: 1
+ Category channels: 1
+ Text channels: 1
+ Voice channels: 1
**Members**
{constants.Emojis.status_online} 2
diff --git a/tests/bot/cogs/test_security.py b/tests/bot/cogs/test_security.py
index efa7a50b1..9d1a62f7e 100644
--- a/tests/bot/cogs/test_security.py
+++ b/tests/bot/cogs/test_security.py
@@ -1,4 +1,3 @@
-import logging
import unittest
from unittest.mock import MagicMock
@@ -49,11 +48,7 @@ class SecurityCogLoadTests(unittest.TestCase):
"""Tests loading the `Security` cog."""
def test_security_cog_load(self):
- """Cog loading logs a message at `INFO` level."""
+ """Setup of the extension should call add_cog."""
bot = MagicMock()
- with self.assertLogs(logger='bot.cogs.security', level=logging.INFO) as cm:
- security.setup(bot)
- bot.add_cog.assert_called_once()
-
- [line] = cm.output
- self.assertIn("Cog loaded: Security", line)
+ security.setup(bot)
+ bot.add_cog.assert_called_once()
diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py
index 3276cf5a5..a54b839d7 100644
--- a/tests/bot/cogs/test_token_remover.py
+++ b/tests/bot/cogs/test_token_remover.py
@@ -125,11 +125,7 @@ class TokenRemoverSetupTests(unittest.TestCase):
"""Tests setup of the `TokenRemover` cog."""
def test_setup(self):
- """Setup of the cog should log a message at `INFO` level."""
+ """Setup of the extension should call add_cog."""
bot = MockBot()
- with self.assertLogs(logger='bot.cogs.token_remover', level=logging.INFO) as cm:
- setup_cog(bot)
-
- [line] = cm.output
+ setup_cog(bot)
bot.add_cog.assert_called_once()
- self.assertIn("Cog loaded: TokenRemover", line)
diff --git a/tests/bot/rules/__init__.py b/tests/bot/rules/__init__.py
index e69de29bb..36c986fe1 100644
--- a/tests/bot/rules/__init__.py
+++ b/tests/bot/rules/__init__.py
@@ -0,0 +1,76 @@
+import unittest
+from abc import ABCMeta, abstractmethod
+from typing import Callable, Dict, Iterable, List, NamedTuple, Tuple
+
+from tests.helpers import MockMessage
+
+
+class DisallowedCase(NamedTuple):
+ """Encapsulation for test cases expected to fail."""
+ recent_messages: List[MockMessage]
+ culprits: Iterable[str]
+ n_violations: int
+
+
+class RuleTest(unittest.TestCase, metaclass=ABCMeta):
+ """
+ Abstract class for antispam rule test cases.
+
+ Tests for specific rules should inherit from `RuleTest` and implement
+ `relevant_messages` and `get_report`. Each instance should also set the
+ `apply` and `config` attributes as necessary.
+
+ The execution of test cases can then be delegated to the `run_allowed`
+ and `run_disallowed` methods.
+ """
+
+ apply: Callable # The tested rule's apply function
+ config: Dict[str, int]
+
+ async def run_allowed(self, cases: Tuple[List[MockMessage], ...]) -> None:
+ """Run all `cases` against `self.apply` expecting them to pass."""
+ for recent_messages in cases:
+ last_message = recent_messages[0]
+
+ with self.subTest(
+ last_message=last_message,
+ recent_messages=recent_messages,
+ config=self.config,
+ ):
+ self.assertIsNone(
+ await self.apply(last_message, recent_messages, self.config)
+ )
+
+ async def run_disallowed(self, cases: Tuple[DisallowedCase, ...]) -> None:
+ """Run all `cases` against `self.apply` expecting them to fail."""
+ for case in cases:
+ recent_messages, culprits, n_violations = case
+ last_message = recent_messages[0]
+ relevant_messages = self.relevant_messages(case)
+ desired_output = (
+ self.get_report(case),
+ culprits,
+ relevant_messages,
+ )
+
+ with self.subTest(
+ last_message=last_message,
+ recent_messages=recent_messages,
+ relevant_messages=relevant_messages,
+ n_violations=n_violations,
+ config=self.config,
+ ):
+ self.assertTupleEqual(
+ await self.apply(last_message, recent_messages, self.config),
+ desired_output,
+ )
+
+ @abstractmethod
+ def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]:
+ """Give expected relevant messages for `case`."""
+ raise NotImplementedError
+
+ @abstractmethod
+ def get_report(self, case: DisallowedCase) -> str:
+ """Give expected error report for `case`."""
+ raise NotImplementedError
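
For orientation, the concrete rule tests that follow all take the same shape: subclass RuleTest, point `apply` at the rule module's coroutine, fill in `config`, and implement the two abstract hooks. A minimal sketch of that pattern (the rule module used is the real `attachments` one, but the class and test names are illustrative placeholders, not part of this change):

# Minimal sketch of a concrete RuleTest subclass; class/test names are placeholders.
from typing import Iterable

from bot.rules import attachments
from tests.bot.rules import DisallowedCase, RuleTest
from tests.helpers import MockMessage, async_test


class ExampleRuleTests(RuleTest):
    """Illustrative subclass following the pattern used by the test modules below."""

    def setUp(self):
        self.apply = attachments.apply            # the rule coroutine under test
        self.config = {"max": 5, "interval": 10}  # passed straight through to apply()

    @async_test
    async def test_allows_quiet_messages(self):
        await self.run_allowed(([MockMessage(author="bob", attachments=[])],))

    def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]:
        # Which of the recent messages the rule should hand back for a violation.
        return tuple(case.recent_messages)

    def get_report(self, case: DisallowedCase) -> str:
        # The report string the rule is expected to produce for the case.
        return f"sent {case.n_violations} attachments in {self.config['interval']}s"
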
diff --git a/tests/bot/rules/test_attachments.py b/tests/bot/rules/test_attachments.py
index 4bb0acf7c..e54b4b5b8 100644
--- a/tests/bot/rules/test_attachments.py
+++ b/tests/bot/rules/test_attachments.py
@@ -1,52 +1,71 @@
-import asyncio
-import unittest
-from dataclasses import dataclass
-from typing import Any, List
+from typing import Iterable
from bot.rules import attachments
+from tests.bot.rules import DisallowedCase, RuleTest
+from tests.helpers import MockMessage, async_test
-# Using `MagicMock` sadly doesn't work for this usecase
-# since it's __eq__ compares the MagicMock's ID. We just
-# want to compare the actual attributes we set.
-@dataclass
-class FakeMessage:
- author: str
- attachments: List[Any]
+def make_msg(author: str, total_attachments: int) -> MockMessage:
+ """Builds a message with `total_attachments` attachments."""
+ return MockMessage(author=author, attachments=list(range(total_attachments)))
-def msg(total_attachments: int) -> FakeMessage:
- return FakeMessage(author='lemon', attachments=list(range(total_attachments)))
+class AttachmentRuleTests(RuleTest):
+ """Tests applying the `attachments` antispam rule."""
+ def setUp(self):
+ self.apply = attachments.apply
+ self.config = {"max": 5, "interval": 10}
-class AttachmentRuleTests(unittest.TestCase):
- """Tests applying the `attachment` antispam rule."""
-
- def test_allows_messages_without_too_many_attachments(self):
+ @async_test
+ async def test_allows_messages_without_too_many_attachments(self):
"""Messages without too many attachments are allowed as-is."""
cases = (
- (msg(0), msg(0), msg(0)),
- (msg(2), msg(2)),
- (msg(0),),
+ [make_msg("bob", 0), make_msg("bob", 0), make_msg("bob", 0)],
+ [make_msg("bob", 2), make_msg("bob", 2)],
+ [make_msg("bob", 2), make_msg("alice", 2), make_msg("bob", 2)],
)
- for last_message, *recent_messages in cases:
- with self.subTest(last_message=last_message, recent_messages=recent_messages):
- coro = attachments.apply(last_message, recent_messages, {'max': 5})
- self.assertIsNone(asyncio.run(coro))
+ await self.run_allowed(cases)
- def test_disallows_messages_with_too_many_attachments(self):
+ @async_test
+ async def test_disallows_messages_with_too_many_attachments(self):
"""Messages with too many attachments trigger the rule."""
cases = (
- ((msg(4), msg(0), msg(6)), [msg(4), msg(6)], 10),
- ((msg(6),), [msg(6)], 6),
- ((msg(1),) * 6, [msg(1)] * 6, 6),
+ DisallowedCase(
+ [make_msg("bob", 4), make_msg("bob", 0), make_msg("bob", 6)],
+ ("bob",),
+ 10,
+ ),
+ DisallowedCase(
+ [make_msg("bob", 4), make_msg("alice", 6), make_msg("bob", 2)],
+ ("bob",),
+ 6,
+ ),
+ DisallowedCase(
+ [make_msg("alice", 6)],
+ ("alice",),
+ 6,
+ ),
+ DisallowedCase(
+ [make_msg("alice", 1) for _ in range(6)],
+ ("alice",),
+ 6,
+ ),
)
- for messages, relevant_messages, total in cases:
- with self.subTest(messages=messages, relevant_messages=relevant_messages, total=total):
- last_message, *recent_messages = messages
- coro = attachments.apply(last_message, recent_messages, {'max': 5})
- self.assertEqual(
- asyncio.run(coro),
- (f"sent {total} attachments in 5s", ('lemon',), relevant_messages)
- )
+
+ await self.run_disallowed(cases)
+
+ def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]:
+ last_message = case.recent_messages[0]
+ return tuple(
+ msg
+ for msg in case.recent_messages
+ if (
+ msg.author == last_message.author
+ and len(msg.attachments) > 0
+ )
+ )
+
+ def get_report(self, case: DisallowedCase) -> str:
+ return f"sent {case.n_violations} attachments in {self.config['interval']}s"
diff --git a/tests/bot/rules/test_burst.py b/tests/bot/rules/test_burst.py
new file mode 100644
index 000000000..72f0be0c7
--- /dev/null
+++ b/tests/bot/rules/test_burst.py
@@ -0,0 +1,56 @@
+from typing import Iterable
+
+from bot.rules import burst
+from tests.bot.rules import DisallowedCase, RuleTest
+from tests.helpers import MockMessage, async_test
+
+
+def make_msg(author: str) -> MockMessage:
+ """
+ Init a MockMessage instance with author set to `author`.
+
+ This serves as a shorthand / alias to keep the test cases visually clean.
+ """
+ return MockMessage(author=author)
+
+
+class BurstRuleTests(RuleTest):
+ """Tests the `burst` antispam rule."""
+
+ def setUp(self):
+ self.apply = burst.apply
+ self.config = {"max": 2, "interval": 10}
+
+ @async_test
+ async def test_allows_messages_within_limit(self):
+ """Cases which do not violate the rule."""
+ cases = (
+ [make_msg("bob"), make_msg("bob")],
+ [make_msg("bob"), make_msg("alice"), make_msg("bob")],
+ )
+
+ await self.run_allowed(cases)
+
+ @async_test
+ async def test_disallows_messages_beyond_limit(self):
+ """Cases where the amount of messages exceeds the limit, triggering the rule."""
+ cases = (
+ DisallowedCase(
+ [make_msg("bob"), make_msg("bob"), make_msg("bob")],
+ ("bob",),
+ 3,
+ ),
+ DisallowedCase(
+ [make_msg("bob"), make_msg("bob"), make_msg("alice"), make_msg("bob")],
+ ("bob",),
+ 3,
+ ),
+ )
+
+ await self.run_disallowed(cases)
+
+ def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]:
+ return tuple(msg for msg in case.recent_messages if msg.author in case.culprits)
+
+ def get_report(self, case: DisallowedCase) -> str:
+ return f"sent {case.n_violations} messages in {self.config['interval']}s"
diff --git a/tests/bot/rules/test_burst_shared.py b/tests/bot/rules/test_burst_shared.py
new file mode 100644
index 000000000..47367a5f8
--- /dev/null
+++ b/tests/bot/rules/test_burst_shared.py
@@ -0,0 +1,59 @@
+from typing import Iterable
+
+from bot.rules import burst_shared
+from tests.bot.rules import DisallowedCase, RuleTest
+from tests.helpers import MockMessage, async_test
+
+
+def make_msg(author: str) -> MockMessage:
+ """
+    Init a MockMessage instance with author set to `author`.
+
+ This serves as a shorthand / alias to keep the test cases visually clean.
+ """
+ return MockMessage(author=author)
+
+
+class BurstSharedRuleTests(RuleTest):
+ """Tests the `burst_shared` antispam rule."""
+
+ def setUp(self):
+ self.apply = burst_shared.apply
+ self.config = {"max": 2, "interval": 10}
+
+ @async_test
+ async def test_allows_messages_within_limit(self):
+ """
+ Cases that do not violate the rule.
+
+ There really isn't more to test here than a single case.
+ """
+ cases = (
+ [make_msg("spongebob"), make_msg("patrick")],
+ )
+
+ await self.run_allowed(cases)
+
+ @async_test
+ async def test_disallows_messages_beyond_limit(self):
+ """Cases where the amount of messages exceeds the limit, triggering the rule."""
+ cases = (
+ DisallowedCase(
+ [make_msg("bob"), make_msg("bob"), make_msg("bob")],
+ {"bob"},
+ 3,
+ ),
+ DisallowedCase(
+ [make_msg("bob"), make_msg("bob"), make_msg("alice"), make_msg("bob")],
+ {"bob", "alice"},
+ 4,
+ ),
+ )
+
+ await self.run_disallowed(cases)
+
+ def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]:
+ return case.recent_messages
+
+ def get_report(self, case: DisallowedCase) -> str:
+ return f"sent {case.n_violations} messages in {self.config['interval']}s"
diff --git a/tests/bot/rules/test_chars.py b/tests/bot/rules/test_chars.py
new file mode 100644
index 000000000..7cc36f49e
--- /dev/null
+++ b/tests/bot/rules/test_chars.py
@@ -0,0 +1,66 @@
+from typing import Iterable
+
+from bot.rules import chars
+from tests.bot.rules import DisallowedCase, RuleTest
+from tests.helpers import MockMessage, async_test
+
+
+def make_msg(author: str, n_chars: int) -> MockMessage:
+ """Build a message with arbitrary content of `n_chars` length."""
+ return MockMessage(author=author, content="A" * n_chars)
+
+
+class CharsRuleTests(RuleTest):
+ """Tests the `chars` antispam rule."""
+
+ def setUp(self):
+ self.apply = chars.apply
+ self.config = {
+ "max": 20, # Max allowed sum of chars per user
+ "interval": 10,
+ }
+
+ @async_test
+ async def test_allows_messages_within_limit(self):
+ """Cases with a total amount of chars within limit."""
+ cases = (
+ [make_msg("bob", 0)],
+ [make_msg("bob", 20)],
+ [make_msg("bob", 15), make_msg("alice", 15)],
+ )
+
+ await self.run_allowed(cases)
+
+ @async_test
+ async def test_disallows_messages_beyond_limit(self):
+ """Cases where the total amount of chars exceeds the limit, triggering the rule."""
+ cases = (
+ DisallowedCase(
+ [make_msg("bob", 21)],
+ ("bob",),
+ 21,
+ ),
+ DisallowedCase(
+ [make_msg("bob", 15), make_msg("bob", 15)],
+ ("bob",),
+ 30,
+ ),
+ DisallowedCase(
+ [make_msg("alice", 15), make_msg("bob", 20), make_msg("alice", 15)],
+ ("alice",),
+ 30,
+ ),
+ )
+
+ await self.run_disallowed(cases)
+
+ def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]:
+ last_message = case.recent_messages[0]
+ return tuple(
+ msg
+ for msg in case.recent_messages
+ if msg.author == last_message.author
+ )
+
+ def get_report(self, case: DisallowedCase) -> str:
+ return f"sent {case.n_violations} characters in {self.config['interval']}s"
diff --git a/tests/bot/rules/test_discord_emojis.py b/tests/bot/rules/test_discord_emojis.py
new file mode 100644
index 000000000..0239b0b00
--- /dev/null
+++ b/tests/bot/rules/test_discord_emojis.py
@@ -0,0 +1,54 @@
+from typing import Iterable
+
+from bot.rules import discord_emojis
+from tests.bot.rules import DisallowedCase, RuleTest
+from tests.helpers import MockMessage, async_test
+
+discord_emoji = "<:abcd:1234>" # Discord emojis follow the format <:name:id>
+
+
+def make_msg(author: str, n_emojis: int) -> MockMessage:
+ """Build a MockMessage instance with content containing `n_emojis` arbitrary emojis."""
+ return MockMessage(author=author, content=discord_emoji * n_emojis)
+
+
+class DiscordEmojisRuleTests(RuleTest):
+ """Tests for the `discord_emojis` antispam rule."""
+
+ def setUp(self):
+ self.apply = discord_emojis.apply
+ self.config = {"max": 2, "interval": 10}
+
+ @async_test
+ async def test_allows_messages_within_limit(self):
+ """Cases with a total amount of discord emojis within limit."""
+ cases = (
+ [make_msg("bob", 2)],
+ [make_msg("alice", 1), make_msg("bob", 2), make_msg("alice", 1)],
+ )
+
+ await self.run_allowed(cases)
+
+ @async_test
+ async def test_disallows_messages_beyond_limit(self):
+ """Cases with more than the allowed amount of discord emojis."""
+ cases = (
+ DisallowedCase(
+ [make_msg("bob", 3)],
+ ("bob",),
+ 3,
+ ),
+ DisallowedCase(
+ [make_msg("alice", 2), make_msg("bob", 2), make_msg("alice", 2)],
+ ("alice",),
+ 4,
+ ),
+ )
+
+ await self.run_disallowed(cases)
+
+ def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]:
+ return tuple(msg for msg in case.recent_messages if msg.author in case.culprits)
+
+ def get_report(self, case: DisallowedCase) -> str:
+ return f"sent {case.n_violations} emojis in {self.config['interval']}s"
diff --git a/tests/bot/rules/test_duplicates.py b/tests/bot/rules/test_duplicates.py
new file mode 100644
index 000000000..59e0fb6ef
--- /dev/null
+++ b/tests/bot/rules/test_duplicates.py
@@ -0,0 +1,66 @@
+from typing import Iterable
+
+from bot.rules import duplicates
+from tests.bot.rules import DisallowedCase, RuleTest
+from tests.helpers import MockMessage, async_test
+
+
+def make_msg(author: str, content: str) -> MockMessage:
+ """Give a MockMessage instance with `author` and `content` attrs."""
+ return MockMessage(author=author, content=content)
+
+
+class DuplicatesRuleTests(RuleTest):
+ """Tests the `duplicates` antispam rule."""
+
+ def setUp(self):
+ self.apply = duplicates.apply
+ self.config = {"max": 2, "interval": 10}
+
+ @async_test
+ async def test_allows_messages_within_limit(self):
+ """Cases which do not violate the rule."""
+ cases = (
+ [make_msg("alice", "A"), make_msg("alice", "A")],
+ [make_msg("alice", "A"), make_msg("alice", "B"), make_msg("alice", "C")], # Non-duplicate
+ [make_msg("alice", "A"), make_msg("bob", "A"), make_msg("alice", "A")], # Different author
+ )
+
+ await self.run_allowed(cases)
+
+ @async_test
+ async def test_disallows_messages_beyond_limit(self):
+ """Cases with too many duplicate messages from the same author."""
+ cases = (
+ DisallowedCase(
+ [make_msg("alice", "A"), make_msg("alice", "A"), make_msg("alice", "A")],
+ ("alice",),
+ 3,
+ ),
+ DisallowedCase(
+ [make_msg("bob", "A"), make_msg("alice", "A"), make_msg("bob", "A"), make_msg("bob", "A")],
+ ("bob",),
+ 3, # 4 duplicate messages, but only 3 from bob
+ ),
+ DisallowedCase(
+ [make_msg("bob", "A"), make_msg("bob", "B"), make_msg("bob", "A"), make_msg("bob", "A")],
+ ("bob",),
+                3, # 4 messages from bob, but only 3 duplicates
+ ),
+ )
+
+ await self.run_disallowed(cases)
+
+ def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]:
+ last_message = case.recent_messages[0]
+ return tuple(
+ msg
+ for msg in case.recent_messages
+ if (
+ msg.author == last_message.author
+ and msg.content == last_message.content
+ )
+ )
+
+ def get_report(self, case: DisallowedCase) -> str:
+ return f"sent {case.n_violations} duplicated messages in {self.config['interval']}s"
diff --git a/tests/bot/rules/test_links.py b/tests/bot/rules/test_links.py
index be832843b..3c3f90e5f 100644
--- a/tests/bot/rules/test_links.py
+++ b/tests/bot/rules/test_links.py
@@ -1,32 +1,21 @@
-import unittest
-from typing import List, NamedTuple, Tuple
+from typing import Iterable
from bot.rules import links
-from tests.helpers import async_test
+from tests.bot.rules import DisallowedCase, RuleTest
+from tests.helpers import MockMessage, async_test
-class FakeMessage(NamedTuple):
- author: str
- content: str
-
-
-class Case(NamedTuple):
- recent_messages: List[FakeMessage]
- relevant_messages: Tuple[FakeMessage]
- culprit: Tuple[str]
- total_links: int
-
-
-def msg(author: str, total_links: int) -> FakeMessage:
- """Makes a message with *total_links* links."""
+def make_msg(author: str, total_links: int) -> MockMessage:
+ """Makes a message with `total_links` links."""
content = " ".join(["https://pydis.com"] * total_links)
- return FakeMessage(author=author, content=content)
+ return MockMessage(author=author, content=content)
-class LinksTests(unittest.TestCase):
+class LinksTests(RuleTest):
"""Tests applying the `links` rule."""
def setUp(self):
+ self.apply = links.apply
self.config = {
"max": 2,
"interval": 10
@@ -36,66 +25,45 @@ class LinksTests(unittest.TestCase):
async def test_links_within_limit(self):
"""Messages with an allowed amount of links."""
cases = (
- [msg("bob", 0)],
- [msg("bob", 2)],
- [msg("bob", 3)], # Filter only applies if len(messages_with_links) > 1
- [msg("bob", 1), msg("bob", 1)],
- [msg("bob", 2), msg("alice", 2)] # Only messages from latest author count
+ [make_msg("bob", 0)],
+ [make_msg("bob", 2)],
+ [make_msg("bob", 3)], # Filter only applies if len(messages_with_links) > 1
+ [make_msg("bob", 1), make_msg("bob", 1)],
+ [make_msg("bob", 2), make_msg("alice", 2)] # Only messages from latest author count
)
- for recent_messages in cases:
- last_message = recent_messages[0]
-
- with self.subTest(
- last_message=last_message,
- recent_messages=recent_messages,
- config=self.config
- ):
- self.assertIsNone(
- await links.apply(last_message, recent_messages, self.config)
- )
+ await self.run_allowed(cases)
@async_test
async def test_links_exceeding_limit(self):
"""Messages with a a higher than allowed amount of links."""
cases = (
- Case(
- [msg("bob", 1), msg("bob", 2)],
- (msg("bob", 1), msg("bob", 2)),
+ DisallowedCase(
+ [make_msg("bob", 1), make_msg("bob", 2)],
("bob",),
3
),
- Case(
- [msg("alice", 1), msg("alice", 1), msg("alice", 1)],
- (msg("alice", 1), msg("alice", 1), msg("alice", 1)),
+ DisallowedCase(
+ [make_msg("alice", 1), make_msg("alice", 1), make_msg("alice", 1)],
("alice",),
3
),
- Case(
- [msg("alice", 2), msg("bob", 3), msg("alice", 1)],
- (msg("alice", 2), msg("alice", 1)),
+ DisallowedCase(
+ [make_msg("alice", 2), make_msg("bob", 3), make_msg("alice", 1)],
("alice",),
3
)
)
- for recent_messages, relevant_messages, culprit, total_links in cases:
- last_message = recent_messages[0]
+ await self.run_disallowed(cases)
+
+ def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]:
+ last_message = case.recent_messages[0]
+ return tuple(
+ msg
+ for msg in case.recent_messages
+ if msg.author == last_message.author
+ )
- with self.subTest(
- last_message=last_message,
- recent_messages=recent_messages,
- relevant_messages=relevant_messages,
- culprit=culprit,
- total_links=total_links,
- config=self.config
- ):
- desired_output = (
- f"sent {total_links} links in {self.config['interval']}s",
- culprit,
- relevant_messages
- )
- self.assertTupleEqual(
- await links.apply(last_message, recent_messages, self.config),
- desired_output
- )
+ def get_report(self, case: DisallowedCase) -> str:
+ return f"sent {case.n_violations} links in {self.config['interval']}s"
diff --git a/tests/bot/rules/test_mentions.py b/tests/bot/rules/test_mentions.py
new file mode 100644
index 000000000..ebcdabac6
--- /dev/null
+++ b/tests/bot/rules/test_mentions.py
@@ -0,0 +1,67 @@
+from typing import Iterable
+
+from bot.rules import mentions
+from tests.bot.rules import DisallowedCase, RuleTest
+from tests.helpers import MockMessage, async_test
+
+
+def make_msg(author: str, total_mentions: int) -> MockMessage:
+ """Makes a message with `total_mentions` mentions."""
+ return MockMessage(author=author, mentions=list(range(total_mentions)))
+
+
+class TestMentions(RuleTest):
+ """Tests applying the `mentions` antispam rule."""
+
+ def setUp(self):
+ self.apply = mentions.apply
+ self.config = {
+ "max": 2,
+ "interval": 10,
+ }
+
+ @async_test
+ async def test_mentions_within_limit(self):
+ """Messages with an allowed amount of mentions."""
+ cases = (
+ [make_msg("bob", 0)],
+ [make_msg("bob", 2)],
+ [make_msg("bob", 1), make_msg("bob", 1)],
+ [make_msg("bob", 1), make_msg("alice", 2)],
+ )
+
+ await self.run_allowed(cases)
+
+ @async_test
+ async def test_mentions_exceeding_limit(self):
+ """Messages with a higher than allowed amount of mentions."""
+ cases = (
+ DisallowedCase(
+ [make_msg("bob", 3)],
+ ("bob",),
+ 3,
+ ),
+ DisallowedCase(
+ [make_msg("alice", 2), make_msg("alice", 0), make_msg("alice", 1)],
+ ("alice",),
+ 3,
+ ),
+ DisallowedCase(
+ [make_msg("bob", 2), make_msg("alice", 3), make_msg("bob", 2)],
+ ("bob",),
+ 4,
+ )
+ )
+
+ await self.run_disallowed(cases)
+
+ def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]:
+ last_message = case.recent_messages[0]
+ return tuple(
+ msg
+ for msg in case.recent_messages
+ if msg.author == last_message.author
+ )
+
+ def get_report(self, case: DisallowedCase) -> str:
+ return f"sent {case.n_violations} mentions in {self.config['interval']}s"
diff --git a/tests/bot/rules/test_newlines.py b/tests/bot/rules/test_newlines.py
new file mode 100644
index 000000000..d61c4609d
--- /dev/null
+++ b/tests/bot/rules/test_newlines.py
@@ -0,0 +1,105 @@
+from typing import Iterable, List
+
+from bot.rules import newlines
+from tests.bot.rules import DisallowedCase, RuleTest
+from tests.helpers import MockMessage, async_test
+
+
+def make_msg(author: str, newline_groups: List[int]) -> MockMessage:
+ """Init a MockMessage instance with `author` and content configured by `newline_groups".
+
+ Configure content by passing a list of ints, where each int `n` will generate
+ a separate group of `n` newlines.
+
+ Example:
+ newline_groups=[3, 1, 2] -> content="\n\n\n \n \n\n"
+ """
+ content = " ".join("\n" * n for n in newline_groups)
+ return MockMessage(author=author, content=content)
+
+
+class TotalNewlinesRuleTests(RuleTest):
+ """Tests the `newlines` antispam rule against allowed cases and total newline count violations."""
+
+ def setUp(self):
+ self.apply = newlines.apply
+ self.config = {
+ "max": 5, # Max sum of newlines in relevant messages
+ "max_consecutive": 3, # Max newlines in one group, in one message
+ "interval": 10,
+ }
+
+ @async_test
+ async def test_allows_messages_within_limit(self):
+ """Cases which do not violate the rule."""
+ cases = (
+ [make_msg("alice", [])], # Single message with no newlines
+ [make_msg("alice", [1, 2]), make_msg("alice", [1, 1])], # 5 newlines in 2 messages
+ [make_msg("alice", [2, 2, 1]), make_msg("bob", [2, 3])], # 5 newlines from each author
+ [make_msg("bob", [1]), make_msg("alice", [5])], # Alice breaks the rule, but only bob is relevant
+ )
+
+ await self.run_allowed(cases)
+
+ @async_test
+ async def test_disallows_messages_total(self):
+ """Cases which violate the rule by having too many newlines in total."""
+ cases = (
+ DisallowedCase( # Alice sends a total of 6 newlines (disallowed)
+ [make_msg("alice", [2, 2]), make_msg("alice", [2])],
+ ("alice",),
+ 6,
+ ),
+ DisallowedCase( # Here we test that only alice's newlines count in the sum
+ [make_msg("alice", [2, 2]), make_msg("bob", [3]), make_msg("alice", [3])],
+ ("alice",),
+ 7,
+ ),
+ )
+
+ await self.run_disallowed(cases)
+
+ def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]:
+ last_author = case.recent_messages[0].author
+ return tuple(msg for msg in case.recent_messages if msg.author == last_author)
+
+ def get_report(self, case: DisallowedCase) -> str:
+ return f"sent {case.n_violations} newlines in {self.config['interval']}s"
+
+
+class GroupNewlinesRuleTests(RuleTest):
+ """
+ Tests the `newlines` antispam rule against max consecutive newline violations.
+
+ As these violations yield a different error report, they require a different
+ `get_report` implementation.
+ """
+
+ def setUp(self):
+ self.apply = newlines.apply
+ self.config = {"max": 5, "max_consecutive": 3, "interval": 10}
+
+ @async_test
+ async def test_disallows_messages_consecutive(self):
+ """Cases which violate the rule due to having too many consecutive newlines."""
+ cases = (
+ DisallowedCase( # Bob sends a group of newlines too large
+ [make_msg("bob", [4])],
+ ("bob",),
+ 4,
+ ),
+ DisallowedCase( # Alice sends 5 in total (allowed), but 4 in one group (disallowed)
+ [make_msg("alice", [1]), make_msg("alice", [4])],
+ ("alice",),
+ 4,
+ ),
+ )
+
+ await self.run_disallowed(cases)
+
+ def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]:
+ last_author = case.recent_messages[0].author
+ return tuple(msg for msg in case.recent_messages if msg.author == last_author)
+
+ def get_report(self, case: DisallowedCase) -> str:
+ return f"sent {case.n_violations} consecutive newlines in {self.config['interval']}s"
diff --git a/tests/bot/rules/test_role_mentions.py b/tests/bot/rules/test_role_mentions.py
new file mode 100644
index 000000000..b339cccf7
--- /dev/null
+++ b/tests/bot/rules/test_role_mentions.py
@@ -0,0 +1,57 @@
+from typing import Iterable
+
+from bot.rules import role_mentions
+from tests.bot.rules import DisallowedCase, RuleTest
+from tests.helpers import MockMessage, async_test
+
+
+def make_msg(author: str, n_mentions: int) -> MockMessage:
+ """Build a MockMessage instance with `n_mentions` role mentions."""
+ return MockMessage(author=author, role_mentions=[None] * n_mentions)
+
+
+class RoleMentionsRuleTests(RuleTest):
+ """Tests for the `role_mentions` antispam rule."""
+
+ def setUp(self):
+ self.apply = role_mentions.apply
+ self.config = {"max": 2, "interval": 10}
+
+ @async_test
+ async def test_allows_messages_within_limit(self):
+ """Cases with a total amount of role mentions within limit."""
+ cases = (
+ [make_msg("bob", 2)],
+ [make_msg("bob", 1), make_msg("alice", 1), make_msg("bob", 1)],
+ )
+
+ await self.run_allowed(cases)
+
+ @async_test
+ async def test_disallows_messages_beyond_limit(self):
+ """Cases with more than the allowed amount of role mentions."""
+ cases = (
+ DisallowedCase(
+ [make_msg("bob", 3)],
+ ("bob",),
+ 3,
+ ),
+ DisallowedCase(
+ [make_msg("alice", 2), make_msg("bob", 2), make_msg("alice", 2)],
+ ("alice",),
+ 4,
+ ),
+ )
+
+ await self.run_disallowed(cases)
+
+ def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]:
+ last_message = case.recent_messages[0]
+ return tuple(
+ msg
+ for msg in case.recent_messages
+ if msg.author == last_message.author
+ )
+
+ def get_report(self, case: DisallowedCase) -> str:
+ return f"sent {case.n_violations} role mentions in {self.config['interval']}s"
diff --git a/tests/bot/test_api.py b/tests/bot/test_api.py
index 5a88adc5c..bdfcc73e4 100644
--- a/tests/bot/test_api.py
+++ b/tests/bot/test_api.py
@@ -1,9 +1,7 @@
-import logging
import unittest
-from unittest.mock import MagicMock, patch
+from unittest.mock import MagicMock
from bot import api
-from tests.base import LoggingTestCase
from tests.helpers import async_test
@@ -34,7 +32,7 @@ class APIClientTests(unittest.TestCase):
self.assertEqual(error.response_text, "")
self.assertIs(error.response, self.error_api_response)
- def test_responde_code_error_string_representation_default_initialization(self):
+ def test_response_code_error_string_representation_default_initialization(self):
"""Test the string representation of `ResponseCodeError` initialized without text or json."""
error = api.ResponseCodeError(response=self.error_api_response)
self.assertEqual(str(error), f"Status: {self.error_api_response.status} Response: ")
@@ -76,61 +74,3 @@ class APIClientTests(unittest.TestCase):
response_text=text_data
)
self.assertEqual(str(error), f"Status: {self.error_api_response.status} Response: {text_data}")
-
-
-class LoggingHandlerTests(LoggingTestCase):
- """Tests the bot's API Log Handler."""
-
- @classmethod
- def setUpClass(cls):
- cls.debug_log_record = logging.LogRecord(
- name='my.logger', level=logging.DEBUG,
- pathname='my/logger.py', lineno=666,
- msg="Lemon wins", args=(),
- exc_info=None
- )
-
- cls.trace_log_record = logging.LogRecord(
- name='my.logger', level=logging.TRACE,
- pathname='my/logger.py', lineno=666,
- msg="This will not be logged", args=(),
- exc_info=None
- )
-
- def setUp(self):
- self.log_handler = api.APILoggingHandler(None)
-
- def test_emit_appends_to_queue_with_stopped_event_loop(self):
- """Test if `APILoggingHandler.emit` appends to queue when the event loop is not running."""
- with patch("bot.api.APILoggingHandler.ship_off") as ship_off:
- # Patch `ship_off` to ease testing against the return value of this coroutine.
- ship_off.return_value = 42
- self.log_handler.emit(self.debug_log_record)
-
- self.assertListEqual(self.log_handler.queue, [42])
-
- def test_emit_ignores_less_than_debug(self):
- """`APILoggingHandler.emit` should not queue logs with a log level lower than DEBUG."""
- self.log_handler.emit(self.trace_log_record)
- self.assertListEqual(self.log_handler.queue, [])
-
- def test_schedule_queued_tasks_for_empty_queue(self):
- """`APILoggingHandler` should not schedule anything when the queue is empty."""
- with self.assertNotLogs(level=logging.DEBUG):
- self.log_handler.schedule_queued_tasks()
-
- def test_schedule_queued_tasks_for_nonempty_queue(self):
- """`APILoggingHandler` should schedule logs when the queue is not empty."""
- log = logging.getLogger("bot.api")
-
- with self.assertLogs(logger=log, level=logging.DEBUG) as logs, patch('asyncio.create_task') as create_task:
- self.log_handler.queue = [555]
- self.log_handler.schedule_queued_tasks()
- self.assertListEqual(self.log_handler.queue, [])
- create_task.assert_called_once_with(555)
-
- [record] = logs.records
- self.assertEqual(record.message, "Scheduled 1 pending logging tasks.")
- self.assertEqual(record.levelno, logging.DEBUG)
- self.assertEqual(record.name, 'bot.api')
- self.assertIn('via_handler', record.__dict__)
diff --git a/tests/bot/test_utils.py b/tests/bot/test_utils.py
index 58ae2a81a..d7bcc3ba6 100644
--- a/tests/bot/test_utils.py
+++ b/tests/bot/test_utils.py
@@ -35,18 +35,3 @@ class CaseInsensitiveDictTests(unittest.TestCase):
instance = utils.CaseInsensitiveDict()
instance.update({'FOO': 'bar'})
self.assertEqual(instance['foo'], 'bar')
-
-
-class ChunkTests(unittest.TestCase):
- """Tests the `chunk` method."""
-
- def test_empty_chunking(self):
- """Tests chunking on an empty iterable."""
- generator = utils.chunks(iterable=[], size=5)
- self.assertEqual(list(generator), [])
-
- def test_list_chunking(self):
- """Tests chunking a non-empty list."""
- iterable = [1, 2, 3, 4, 5]
- generator = utils.chunks(iterable=iterable, size=2)
- self.assertEqual(list(generator), [[1, 2], [3, 4], [5]])
diff --git a/tests/bot/utils/test_time.py b/tests/bot/utils/test_time.py
new file mode 100644
index 000000000..69f35f2f5
--- /dev/null
+++ b/tests/bot/utils/test_time.py
@@ -0,0 +1,162 @@
+import asyncio
+import unittest
+from datetime import datetime, timezone
+from unittest.mock import patch
+
+from dateutil.relativedelta import relativedelta
+
+from bot.utils import time
+from tests.helpers import AsyncMock
+
+
+class TimeTests(unittest.TestCase):
+ """Test helper functions in bot.utils.time."""
+
+ def test_humanize_delta_handle_unknown_units(self):
+ """humanize_delta should be able to handle unknown units, and will not abort."""
+ # Does not abort for unknown units, as the unit name is checked
+ # against the attribute of the relativedelta instance.
+ self.assertEqual(time.humanize_delta(relativedelta(days=2, hours=2), 'elephants', 2), '2 days and 2 hours')
+
+ def test_humanize_delta_handle_high_units(self):
+ """humanize_delta should be able to handle very high units."""
+ # Very high maximum units, but it only ever iterates over
+ # each value the relativedelta might have.
+ self.assertEqual(time.humanize_delta(relativedelta(days=2, hours=2), 'hours', 20), '2 days and 2 hours')
+
+ def test_humanize_delta_should_normal_usage(self):
+ """Testing humanize delta."""
+ test_cases = (
+ (relativedelta(days=2), 'seconds', 1, '2 days'),
+ (relativedelta(days=2, hours=2), 'seconds', 2, '2 days and 2 hours'),
+ (relativedelta(days=2, hours=2), 'seconds', 1, '2 days'),
+ (relativedelta(days=2, hours=2), 'days', 2, '2 days'),
+ )
+
+ for delta, precision, max_units, expected in test_cases:
+ with self.subTest(delta=delta, precision=precision, max_units=max_units, expected=expected):
+ self.assertEqual(time.humanize_delta(delta, precision, max_units), expected)
+
+ def test_humanize_delta_raises_for_invalid_max_units(self):
+ """humanize_delta should raises ValueError('max_units must be positive') for invalid max_units."""
+ test_cases = (-1, 0)
+
+ for max_units in test_cases:
+ with self.subTest(max_units=max_units), self.assertRaises(ValueError) as error:
+ time.humanize_delta(relativedelta(days=2, hours=2), 'hours', max_units)
+            self.assertEqual(str(error.exception), 'max_units must be positive')
+
+ def test_parse_rfc1123(self):
+ """Testing parse_rfc1123."""
+ self.assertEqual(
+ time.parse_rfc1123('Sun, 15 Sep 2019 12:00:00 GMT'),
+ datetime(2019, 9, 15, 12, 0, 0, tzinfo=timezone.utc)
+ )
+
+ def test_format_infraction(self):
+ """Testing format_infraction."""
+ self.assertEqual(time.format_infraction('2019-12-12T00:01:00Z'), '2019-12-12 00:01')
+
+ @patch('asyncio.sleep', new_callable=AsyncMock)
+ def test_wait_until(self, mock):
+ """Testing wait_until."""
+ start = datetime(2019, 1, 1, 0, 0)
+ then = datetime(2019, 1, 1, 0, 10)
+
+ # No return value
+ self.assertIs(asyncio.run(time.wait_until(then, start)), None)
+
+ mock.assert_called_once_with(10 * 60)
+
+ def test_format_infraction_with_duration_none_expiry(self):
+ """format_infraction_with_duration should work for None expiry."""
+ test_cases = (
+ (None, None, None, None),
+
+ # To make sure that date_from and max_units are not touched
+ (None, 'Why hello there!', None, None),
+ (None, None, float('inf'), None),
+ (None, 'Why hello there!', float('inf'), None),
+ )
+
+ for expiry, date_from, max_units, expected in test_cases:
+ with self.subTest(expiry=expiry, date_from=date_from, max_units=max_units, expected=expected):
+ self.assertEqual(time.format_infraction_with_duration(expiry, date_from, max_units), expected)
+
+ def test_format_infraction_with_duration_custom_units(self):
+ """format_infraction_with_duration should work for custom max_units."""
+ test_cases = (
+ ('2019-12-12T00:01:00Z', datetime(2019, 12, 11, 12, 5, 5), 6,
+ '2019-12-12 00:01 (11 hours, 55 minutes and 55 seconds)'),
+ ('2019-11-23T20:09:00Z', datetime(2019, 4, 25, 20, 15), 20,
+ '2019-11-23 20:09 (6 months, 28 days, 23 hours and 54 minutes)')
+ )
+
+ for expiry, date_from, max_units, expected in test_cases:
+ with self.subTest(expiry=expiry, date_from=date_from, max_units=max_units, expected=expected):
+ self.assertEqual(time.format_infraction_with_duration(expiry, date_from, max_units), expected)
+
+ def test_format_infraction_with_duration_normal_usage(self):
+ """format_infraction_with_duration should work for normal usage, across various durations."""
+ test_cases = (
+ ('2019-12-12T00:01:00Z', datetime(2019, 12, 11, 12, 0, 5), 2, '2019-12-12 00:01 (12 hours and 55 seconds)'),
+ ('2019-12-12T00:01:00Z', datetime(2019, 12, 11, 12, 0, 5), 1, '2019-12-12 00:01 (12 hours)'),
+ ('2019-12-12T00:00:00Z', datetime(2019, 12, 11, 23, 59), 2, '2019-12-12 00:00 (1 minute)'),
+ ('2019-11-23T20:09:00Z', datetime(2019, 11, 15, 20, 15), 2, '2019-11-23 20:09 (7 days and 23 hours)'),
+ ('2019-11-23T20:09:00Z', datetime(2019, 4, 25, 20, 15), 2, '2019-11-23 20:09 (6 months and 28 days)'),
+ ('2019-11-23T20:58:00Z', datetime(2019, 11, 23, 20, 53), 2, '2019-11-23 20:58 (5 minutes)'),
+ ('2019-11-24T00:00:00Z', datetime(2019, 11, 23, 23, 59, 0), 2, '2019-11-24 00:00 (1 minute)'),
+ ('2019-11-23T23:59:00Z', datetime(2017, 7, 21, 23, 0), 2, '2019-11-23 23:59 (2 years and 4 months)'),
+ ('2019-11-23T23:59:00Z', datetime(2019, 11, 23, 23, 49, 5), 2,
+ '2019-11-23 23:59 (9 minutes and 55 seconds)'),
+ (None, datetime(2019, 11, 23, 23, 49, 5), 2, None),
+ )
+
+ for expiry, date_from, max_units, expected in test_cases:
+ with self.subTest(expiry=expiry, date_from=date_from, max_units=max_units, expected=expected):
+ self.assertEqual(time.format_infraction_with_duration(expiry, date_from, max_units), expected)
+
+ def test_until_expiration_with_duration_none_expiry(self):
+ """until_expiration should work for None expiry."""
+ test_cases = (
+ (None, None, None, None),
+
+ # To make sure that now and max_units are not touched
+ (None, 'Why hello there!', None, None),
+ (None, None, float('inf'), None),
+ (None, 'Why hello there!', float('inf'), None),
+ )
+
+ for expiry, now, max_units, expected in test_cases:
+ with self.subTest(expiry=expiry, now=now, max_units=max_units, expected=expected):
+ self.assertEqual(time.until_expiration(expiry, now, max_units), expected)
+
+ def test_until_expiration_with_duration_custom_units(self):
+ """until_expiration should work for custom max_units."""
+ test_cases = (
+ ('2019-12-12T00:01:00Z', datetime(2019, 12, 11, 12, 5, 5), 6, '11 hours, 55 minutes and 55 seconds'),
+ ('2019-11-23T20:09:00Z', datetime(2019, 4, 25, 20, 15), 20, '6 months, 28 days, 23 hours and 54 minutes')
+ )
+
+ for expiry, now, max_units, expected in test_cases:
+ with self.subTest(expiry=expiry, now=now, max_units=max_units, expected=expected):
+ self.assertEqual(time.until_expiration(expiry, now, max_units), expected)
+
+ def test_until_expiration_normal_usage(self):
+ """until_expiration should work for normal usage, across various durations."""
+ test_cases = (
+ ('2019-12-12T00:01:00Z', datetime(2019, 12, 11, 12, 0, 5), 2, '12 hours and 55 seconds'),
+ ('2019-12-12T00:01:00Z', datetime(2019, 12, 11, 12, 0, 5), 1, '12 hours'),
+ ('2019-12-12T00:00:00Z', datetime(2019, 12, 11, 23, 59), 2, '1 minute'),
+ ('2019-11-23T20:09:00Z', datetime(2019, 11, 15, 20, 15), 2, '7 days and 23 hours'),
+ ('2019-11-23T20:09:00Z', datetime(2019, 4, 25, 20, 15), 2, '6 months and 28 days'),
+ ('2019-11-23T20:58:00Z', datetime(2019, 11, 23, 20, 53), 2, '5 minutes'),
+ ('2019-11-24T00:00:00Z', datetime(2019, 11, 23, 23, 59, 0), 2, '1 minute'),
+ ('2019-11-23T23:59:00Z', datetime(2017, 7, 21, 23, 0), 2, '2 years and 4 months'),
+ ('2019-11-23T23:59:00Z', datetime(2019, 11, 23, 23, 49, 5), 2, '9 minutes and 55 seconds'),
+ (None, datetime(2019, 11, 23, 23, 49, 5), 2, None),
+ )
+
+ for expiry, now, max_units, expected in test_cases:
+ with self.subTest(expiry=expiry, now=now, max_units=max_units, expected=expected):
+ self.assertEqual(time.until_expiration(expiry, now, max_units), expected)
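
Read outside of unittest, the assertions above also document the call signatures of the bot.utils.time helpers they cover. A quick sketch of the same calls, with the outputs the tests expect (values taken directly from the test cases above):

from datetime import datetime

from dateutil.relativedelta import relativedelta

from bot.utils import time

# Values mirror the assertions in the tests above.
print(time.humanize_delta(relativedelta(days=2, hours=2), 'seconds', 2))
# -> '2 days and 2 hours'

print(time.parse_rfc1123('Sun, 15 Sep 2019 12:00:00 GMT'))
# -> 2019-09-15 12:00:00+00:00

print(time.format_infraction('2019-12-12T00:01:00Z'))
# -> '2019-12-12 00:01'

print(time.until_expiration('2019-12-12T00:01:00Z', datetime(2019, 12, 11, 12, 0, 5), 2))
# -> '12 hours and 55 seconds'
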
diff --git a/tests/helpers.py b/tests/helpers.py
index 8a14aeef4..9d9dd5da6 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -10,7 +10,10 @@ import unittest.mock
from typing import Any, Iterable, Optional
import discord
-from discord.ext.commands import Bot, Context
+from discord.ext.commands import Context
+
+from bot.api import APIClient
+from bot.bot import Bot
for logger in logging.Logger.manager.loggerDict.values():
@@ -120,8 +123,80 @@ class AsyncMock(CustomMockMixin, unittest.mock.MagicMock):
Python 3.8 will introduce an AsyncMock class in the standard library that will have some more
features; this stand-in only overwrites the `__call__` method to an async version.
"""
+
async def __call__(self, *args, **kwargs):
- return super(AsyncMock, self).__call__(*args, **kwargs)
+ return super().__call__(*args, **kwargs)
+
+
+class AsyncIteratorMock:
+ """
+ A class to mock asynchronous iterators.
+
+ This allows async for, which is used in certain Discord.py objects. For example,
+ an async iterator is returned by the Reaction.users() method.
+ """
+
+ def __init__(self, iterable: Iterable = None):
+ if iterable is None:
+ iterable = []
+
+ self.iter = iter(iterable)
+ self.iterable = iterable
+
+ self.call_count = 0
+
+ def __aiter__(self):
+ return self
+
+ async def __anext__(self):
+ try:
+ return next(self.iter)
+ except StopIteration:
+ raise StopAsyncIteration
+
+ def __call__(self):
+ """
+ Keeps track of the number of times an instance has been called.
+
+ This is useful, since it typically shows that the iterator has actually been used somewhere after we have
+ instantiated the mock for an attribute that normally returns an iterator when called.
+ """
+ self.call_count += 1
+ return self
+
+ @property
+ def return_value(self):
+ """Makes `self.iterable` accessible as self.return_value."""
+ return self.iterable
+
+ @return_value.setter
+ def return_value(self, iterable):
+ """Stores the `return_value` as `self.iterable` and its iterator as `self.iter`."""
+ self.iter = iter(iterable)
+ self.iterable = iterable
+
+ def assert_called(self):
+ """Asserts if the AsyncIteratorMock instance has been called at least once."""
+ if self.call_count == 0:
+ raise AssertionError("Expected AsyncIteratorMock to have been called.")
+
+ def assert_called_once(self):
+ """Asserts if the AsyncIteratorMock instance has been called exactly once."""
+ if self.call_count != 1:
+ raise AssertionError(
+ f"Expected AsyncIteratorMock to have been called once. Called {self.call_count} times."
+ )
+
+ def assert_not_called(self):
+ """Asserts if the AsyncIteratorMock instance has not been called."""
+ if self.call_count != 0:
+ raise AssertionError(
+ f"Expected AsyncIteratorMock to not have been called once. Called {self.call_count} times."
+ )
+
+ def reset_mock(self):
+ """Resets the call count, but not the return value or iterator."""
+ self.call_count = 0
# Create a guild instance to get a realistic Mock of `discord.Guild`
@@ -195,9 +270,21 @@ class MockRole(CustomMockMixin, unittest.mock.Mock, ColourMixin, HashableMixin):
information, see the `MockGuild` docstring.
"""
def __init__(self, **kwargs) -> None:
- default_kwargs = {'id': next(self.discord_id), 'name': 'role', 'position': 1}
+ default_kwargs = {
+ 'id': next(self.discord_id),
+ 'name': 'role',
+ 'position': 1,
+ 'colour': discord.Colour(0xdeadbf),
+ 'permissions': discord.Permissions(),
+ }
super().__init__(spec_set=role_instance, **collections.ChainMap(kwargs, default_kwargs))
+ if isinstance(self.colour, int):
+ self.colour = discord.Colour(self.colour)
+
+ if isinstance(self.permissions, int):
+ self.permissions = discord.Permissions(self.permissions)
+
if 'mention' not in kwargs:
self.mention = f'&{self.name}'
@@ -220,7 +307,7 @@ class MockMember(CustomMockMixin, unittest.mock.Mock, ColourMixin, HashableMixin
information, see the `MockGuild` docstring.
"""
def __init__(self, roles: Optional[Iterable[MockRole]] = None, **kwargs) -> None:
- default_kwargs = {'name': 'member', 'id': next(self.discord_id)}
+ default_kwargs = {'name': 'member', 'id': next(self.discord_id), 'bot': False}
super().__init__(spec_set=member_instance, **collections.ChainMap(kwargs, default_kwargs))
self.roles = [MockRole(name="@everyone", position=1, id=0)]
@@ -231,6 +318,37 @@ class MockMember(CustomMockMixin, unittest.mock.Mock, ColourMixin, HashableMixin
self.mention = f"@{self.name}"
+# Create a User instance to get a realistic Mock of `discord.User`
+user_instance = discord.User(data=unittest.mock.MagicMock(), state=unittest.mock.MagicMock())
+
+
+class MockUser(CustomMockMixin, unittest.mock.Mock, ColourMixin, HashableMixin):
+ """
+ A Mock subclass to mock User objects.
+
+ Instances of this class will follow the specifications of `discord.User` instances. For more
+ information, see the `MockGuild` docstring.
+ """
+ def __init__(self, **kwargs) -> None:
+ default_kwargs = {'name': 'user', 'id': next(self.discord_id), 'bot': False}
+ super().__init__(spec_set=user_instance, **collections.ChainMap(kwargs, default_kwargs))
+
+ if 'mention' not in kwargs:
+ self.mention = f"@{self.name}"
+
+
+class MockAPIClient(CustomMockMixin, unittest.mock.MagicMock):
+ """
+ A MagicMock subclass to mock APIClient objects.
+
+ Instances of this class will follow the specifications of `bot.api.APIClient` instances.
+ For more information, see the `MockGuild` docstring.
+ """
+
+ def __init__(self, **kwargs) -> None:
+ super().__init__(spec_set=APIClient, **kwargs)
+
+
# Create a Bot instance to get a realistic MagicMock of `discord.ext.commands.Bot`
bot_instance = Bot(command_prefix=unittest.mock.MagicMock())
bot_instance.http_session = None
@@ -244,8 +362,10 @@ class MockBot(CustomMockMixin, unittest.mock.MagicMock):
Instances of this class will follow the specifications of `discord.ext.commands.Bot` instances.
For more information, see the `MockGuild` docstring.
"""
+
def __init__(self, **kwargs) -> None:
super().__init__(spec_set=bot_instance, **kwargs)
+ self.api_client = MockAPIClient()
# self.wait_for is *not* a coroutine function, but returns a coroutine nonetheless and
# and should therefore be awaited. (The documentation calls it a coroutine as well, which
@@ -281,6 +401,7 @@ class MockTextChannel(CustomMockMixin, unittest.mock.Mock, HashableMixin):
Instances of this class will follow the specifications of `discord.TextChannel` instances. For
more information, see the `MockGuild` docstring.
"""
+
def __init__(self, name: str = 'channel', channel_id: int = 1, **kwargs) -> None:
default_kwargs = {'id': next(self.discord_id), 'name': 'channel', 'guild': MockGuild()}
super().__init__(spec_set=channel_instance, **collections.ChainMap(kwargs, default_kwargs))
@@ -322,6 +443,7 @@ class MockContext(CustomMockMixin, unittest.mock.MagicMock):
Instances of this class will follow the specifications of `discord.ext.commands.Context`
instances. For more information, see the `MockGuild` docstring.
"""
+
def __init__(self, **kwargs) -> None:
super().__init__(spec_set=context_instance, **kwargs)
self.bot = kwargs.get('bot', MockBot())
@@ -330,6 +452,20 @@ class MockContext(CustomMockMixin, unittest.mock.MagicMock):
self.channel = kwargs.get('channel', MockTextChannel())
+attachment_instance = discord.Attachment(data=unittest.mock.MagicMock(id=1), state=unittest.mock.MagicMock())
+
+
+class MockAttachment(CustomMockMixin, unittest.mock.MagicMock):
+ """
+ A MagicMock subclass to mock Attachment objects.
+
+ Instances of this class will follow the specifications of `discord.Attachment` instances. For
+ more information, see the `MockGuild` docstring.
+ """
+ def __init__(self, **kwargs) -> None:
+ super().__init__(spec_set=attachment_instance, **kwargs)
+
+
class MockMessage(CustomMockMixin, unittest.mock.MagicMock):
"""
A MagicMock subclass to mock Message objects.
@@ -337,8 +473,10 @@ class MockMessage(CustomMockMixin, unittest.mock.MagicMock):
Instances of this class will follow the specifications of `discord.Message` instances. For more
information, see the `MockGuild` docstring.
"""
+
def __init__(self, **kwargs) -> None:
- super().__init__(spec_set=message_instance, **kwargs)
+ default_kwargs = {'attachments': []}
+ super().__init__(spec_set=message_instance, **collections.ChainMap(kwargs, default_kwargs))
self.author = kwargs.get('author', MockMember())
self.channel = kwargs.get('channel', MockTextChannel())
@@ -354,6 +492,7 @@ class MockEmoji(CustomMockMixin, unittest.mock.MagicMock):
Instances of this class will follow the specifications of `discord.Emoji` instances. For more
information, see the `MockGuild` docstring.
"""
+
def __init__(self, **kwargs) -> None:
super().__init__(spec_set=emoji_instance, **kwargs)
self.guild = kwargs.get('guild', MockGuild())
@@ -369,6 +508,7 @@ class MockPartialEmoji(CustomMockMixin, unittest.mock.MagicMock):
Instances of this class will follow the specifications of `discord.PartialEmoji` instances. For
more information, see the `MockGuild` docstring.
"""
+
def __init__(self, **kwargs) -> None:
super().__init__(spec_set=partial_emoji_instance, **kwargs)
@@ -383,7 +523,32 @@ class MockReaction(CustomMockMixin, unittest.mock.MagicMock):
Instances of this class will follow the specifications of `discord.Reaction` instances. For
more information, see the `MockGuild` docstring.
"""
+
def __init__(self, **kwargs) -> None:
super().__init__(spec_set=reaction_instance, **kwargs)
self.emoji = kwargs.get('emoji', MockEmoji())
self.message = kwargs.get('message', MockMessage())
+ self.users = AsyncIteratorMock(kwargs.get('users', []))
+ self.__str__.return_value = str(self.emoji)
+
+
+webhook_instance = discord.Webhook(data=unittest.mock.MagicMock(), adapter=unittest.mock.MagicMock())
+
+
+class MockAsyncWebhook(CustomMockMixin, unittest.mock.MagicMock):
+ """
+ A MagicMock subclass to mock Webhook objects using an AsyncWebhookAdapter.
+
+ Instances of this class will follow the specifications of `discord.Webhook` instances. For
+ more information, see the `MockGuild` docstring.
+ """
+
+ def __init__(self, **kwargs) -> None:
+ super().__init__(spec_set=webhook_instance, **kwargs)
+
+ # Because Webhooks can also use a synchronous "WebhookAdapter", the methods are not defined
+ # as coroutines. That's why we need to set the methods manually.
+ self.send = AsyncMock()
+ self.edit = AsyncMock()
+ self.delete = AsyncMock()
+ self.execute = AsyncMock()
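
As a usage sketch for the two new async helpers above (assuming tests.helpers can be imported outside a test run): MockReaction wires `users` up to an AsyncIteratorMock so it behaves like Discord.py's async iterator, and MockAsyncWebhook exposes awaitable methods that can be asserted against like any other mock.

import asyncio

from tests.helpers import MockAsyncWebhook, MockMember, MockReaction


async def demo() -> None:
    # `reaction.users` is an AsyncIteratorMock, so `async for` just works.
    reaction = MockReaction(users=[MockMember(name="bob"), MockMember(name="alice")])
    async for user in reaction.users():
        print(user.name)
    reaction.users.assert_called_once()

    # send/edit/delete/execute are set to AsyncMock, so they can be awaited and asserted.
    webhook = MockAsyncWebhook()
    await webhook.send("hello")
    webhook.send.assert_called_once_with("hello")


asyncio.run(demo())
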
diff --git a/tox.ini b/tox.ini
index d14819d57..b8293a3b6 100644
--- a/tox.ini
+++ b/tox.ini
@@ -3,7 +3,7 @@ max-line-length=120
docstring-convention=all
import-order-style=pycharm
application_import_names=bot,tests
-exclude=.cache,.venv,constants.py
+exclude=.cache,.venv,.git,constants.py
ignore=
B311,W503,E226,S311,T000
# Missing Docstrings
@@ -15,5 +15,5 @@ ignore=
# Docstring Content
D400,D401,D402,D404,D405,D406,D407,D408,D409,D410,D411,D412,D413,D414,D416,D417
# Type Annotations
- TYP002,TYP003,TYP101,TYP102,TYP204,TYP206
-per-file-ignores=tests/*:D,TYP
+ ANN002,ANN003,ANN101,ANN102,ANN204,ANN206
+per-file-ignores=tests/*:D,ANN