 Pipfile                                  |  26
 Pipfile.lock                             | 308
 azure-pipelines.yml                      |   1
 bot/bot.py                               |  45
 bot/cogs/antimalware.py                  |  55
 bot/cogs/antispam.py                     |   4
 bot/cogs/bot.py                          |   2
 bot/cogs/defcon.py                       |   2
 bot/cogs/duck_pond.py                    |   2
 bot/cogs/error_handler.py                |   6
 bot/cogs/filtering.py                    |  14
 bot/cogs/help_channels.py                |  81
 bot/cogs/information.py                  |   4
 bot/cogs/moderation/management.py        |  22
 bot/cogs/moderation/scheduler.py         |  10
 bot/cogs/moderation/silence.py           |   2
 bot/cogs/python_news.py                  |   7
 bot/cogs/snekbox.py                      |  18
 bot/cogs/stats.py                        |  29
 bot/cogs/tags.py                         |  59
 bot/cogs/utils.py                        |   4
 bot/cogs/verification.py                 |   4
 bot/cogs/watchchannels/talentpool.py     |   2
 bot/cogs/watchchannels/watchchannel.py   |  14
 bot/constants.py                         |  31
 bot/decorators.py                        |  55
 bot/pagination.py                        |   4
 bot/utils/__init__.py                    |   4
 bot/utils/checks.py                      |  94
 bot/utils/messages.py                    |   2
 bot/utils/redis_cache.py                 | 409
 config-default.yml                       |  14
 docker-compose.yml                       |   6
 tests/bot/cogs/test_antimalware.py       | 159
 tests/bot/cogs/test_duck_pond.py         |   2
 tests/bot/cogs/test_information.py       |   3
 tests/bot/cogs/test_snekbox.py           |  15
 tests/bot/test_constants.py              |  43
 tests/bot/test_decorators.py             |   4
 tests/bot/utils/test_checks.py           |  52
 tests/bot/utils/test_redis_cache.py      | 273
 tests/helpers.py                         |  30
 42 files changed, 1571 insertions(+), 350 deletions(-)
diff --git a/Pipfile b/Pipfile
index 14c9ef926..b42ca6d58 100644
--- a/Pipfile
+++ b/Pipfile
@@ -4,25 +4,27 @@ verify_ssl = true
name = "pypi"
[packages]
-discord-py = "~=1.3.2"
+aio-pika = "~=6.1"
aiodns = "~=2.0"
aiohttp = "~=3.5"
-sphinx = "~=2.2"
-markdownify = "~=0.4"
-lxml = "~=4.4"
-pyyaml = "~=5.1"
+aioredis = "~=1.3.1"
+beautifulsoup4 = "~=4.9"
+colorama = {version = "~=0.4.3",sys_platform = "== 'win32'"}
+coloredlogs = "~=14.0"
+deepdiff = "~=4.0"
+discord.py = "~=1.3.2"
+fakeredis = "~=1.4"
+feedparser = "~=5.2"
fuzzywuzzy = "~=0.17"
-aio-pika = "~=6.1"
+lxml = "~=4.4"
+markdownify = "~=0.4"
+more_itertools = "~=8.2"
python-dateutil = "~=2.8"
-deepdiff = "~=4.0"
+pyyaml = "~=5.1"
requests = "~=2.22"
-more_itertools = "~=8.2"
sentry-sdk = "~=0.14"
-coloredlogs = "~=14.0"
-colorama = {version = "~=0.4.3",sys_platform = "== 'win32'"}
+sphinx = "~=2.2"
statsd = "~=3.3"
-feedparser = "~=5.2"
-beautifulsoup4 = "~=4.9"
[dev-packages]
coverage = "~=5.0"
diff --git a/Pipfile.lock b/Pipfile.lock
index 4e7050a13..0e591710c 100644
--- a/Pipfile.lock
+++ b/Pipfile.lock
@@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
- "sha256": "64620e7e825c74fd3010821fb30843b19f5dafb2b5a1f6eafedc0a5febd99b69"
+ "sha256": "0297accc3d614d3da8080b89d56ef7fe489c28a0ada8102df396a604af7ee330"
},
"pipfile-spec": 6,
"requires": {
@@ -18,11 +18,11 @@
"default": {
"aio-pika": {
"hashes": [
- "sha256:9e4614636296e0040055bd6b304e97a38cc9796669ef391fc9b36649831d43ee",
- "sha256:c9d242b3c7142d64b185feb6c5cce4154962610e89ec2e9b52bd69ef01f89b2f"
+ "sha256:c4cbbeb85b3c7bf81bc127371846cd949e6231717ce1e6ac7ee1dd5ede21f866",
+ "sha256:ec7fef24f588d90314873463ab4f2c3debce0bd8830e49e3786586be96bc2e8e"
],
"index": "pypi",
- "version": "==6.6.0"
+ "version": "==6.6.1"
},
"aiodns": {
"hashes": [
@@ -50,12 +50,20 @@
"index": "pypi",
"version": "==3.6.2"
},
+ "aioredis": {
+ "hashes": [
+ "sha256:15f8af30b044c771aee6787e5ec24694c048184c7b9e54c3b60c750a4b93273a",
+ "sha256:b61808d7e97b7cd5a92ed574937a079c9387fdadd22bfbfa7ad2fd319ecc26e3"
+ ],
+ "index": "pypi",
+ "version": "==1.3.1"
+ },
"aiormq": {
"hashes": [
- "sha256:286e0b0772075580466e45f98f051b9728a9316b9c36f0c14c7bc1409be375b0",
- "sha256:7ed7d6df6b57af7f8bce7d1ebcbdfc32b676192e46703e81e9e217316e56b5bd"
+ "sha256:41a9d4eb17db805f30ed172f3f609fe0c2b16657fb15b1b67df19d251dd93c0d",
+ "sha256:7c19477a9450824cb79f9949fd238f4148e2c0dca67756a2868863c387209f04"
],
- "version": "==3.2.1"
+ "version": "==3.2.2"
},
"alabaster": {
"hashes": [
@@ -87,12 +95,12 @@
},
"beautifulsoup4": {
"hashes": [
- "sha256:594ca51a10d2b3443cbac41214e12dbb2a1cd57e1a7344659849e2e20ba6a8d8",
- "sha256:a4bbe77fd30670455c5296242967a123ec28c37e9702a8a81bd2f20a4baf0368",
- "sha256:d4e96ac9b0c3a6d3f0caae2e4124e6055c5dcafde8e2f831ff194c104f0775a0"
+ "sha256:73cc4d115b96f79c7d77c1c7f7a0a8d4c57860d1041df407dd1aae7f07a77fd7",
+ "sha256:a6237df3c32ccfaee4fd201c8f5f9d9df619b93121d01353a64a73ce8c6ef9a8",
+ "sha256:e718f2342e2e099b640a34ab782407b7b676f47ee272d6739e60b8ea23829f2c"
],
"index": "pypi",
- "version": "==4.9.0"
+ "version": "==4.9.1"
},
"certifi": {
"hashes": [
@@ -166,11 +174,19 @@
"index": "pypi",
"version": "==4.3.2"
},
- "discord-py": {
+ "discord": {
"hashes": [
- "sha256:406871b06d86c3dc49fba63238519f28628dac946fef8a0e22988ff58ec05580"
+ "sha256:9d4debb4a37845543bd4b92cb195bc53a302797333e768e70344222857ff1559",
+ "sha256:ff6653655e342e7721dfb3f10421345fd852c2a33f2cca912b1c39b3778a9429"
],
"index": "pypi",
+ "version": "==1.0.1"
+ },
+ "discord.py": {
+ "hashes": [
+ "sha256:406871b06d86c3dc49fba63238519f28628dac946fef8a0e22988ff58ec05580",
+ "sha256:ad00e34c72d2faa8db2157b651d05f3c415d7d05078e7e41dc9e8dc240051beb"
+ ],
"version": "==1.3.3"
},
"docutils": {
@@ -180,6 +196,14 @@
],
"version": "==0.16"
},
+ "fakeredis": {
+ "hashes": [
+ "sha256:4d170886865a91dbc8b7f8cbd4e5d488f4c5f2f25dfae127f001617bbe9e8f97",
+ "sha256:647b2593d349d9d4e566c8dadb2e4c71ba35be5bdc4f1f7ac2d565a12a965053"
+ ],
+ "index": "pypi",
+ "version": "==1.4.1"
+ },
"feedparser": {
"hashes": [
"sha256:bd030652c2d08532c034c27fcd7c85868e7fa3cb2b17f230a44a6bbc92519bf9",
@@ -197,6 +221,51 @@
"index": "pypi",
"version": "==0.18.0"
},
+ "hiredis": {
+ "hashes": [
+ "sha256:01b577f84c20ecc9c07fc4c184231b08e3c3942de096fa99978e053de231c423",
+ "sha256:01ff0900134166961c9e339df77c33b72f7edc5cb41739f0babcd9faa345926e",
+ "sha256:03ed34a13316d0c34213c4fd46e0fa3a5299073f4d4f08e93fed8c2108b399b3",
+ "sha256:040436e91df5143aff9e0debb49530d0b17a6bd52200ce568621c31ef581b10d",
+ "sha256:091eb38fbf968d1c5b703e412bbbd25f43a7967d8400842cee33a5a07b33c27b",
+ "sha256:102f9b9dc6ed57feb3a7c9bdf7e71cb7c278fe8df1edfcfe896bc3e0c2be9447",
+ "sha256:2b4b392c7e3082860c8371fab3ae762139090f9115819e12d9f56060f9ede05d",
+ "sha256:2c9cc0b986397b833073f466e6b9e9c70d1d4dc2c2c1b3e9cae3a23102ff296c",
+ "sha256:2fa65a9df683bca72073cd77709ddeb289ea2b114d3775d225fbbcc5faf808c5",
+ "sha256:38437a681f17c975fd22349e72c29bc643f8e7eb2d6dc5df419eac59afa4d7ce",
+ "sha256:3b3428fa3cf1ee178807b52c9bee8950ab94cd4eaa9bfae8c1bbae3c49501d34",
+ "sha256:3dd8c2fae7f5494978facb0e93297dd627b1a3f536f3b070cf0a7d9157a07dcb",
+ "sha256:4414a96c212e732723b5c3d7c04d386ebbb2ec359e1de646322cbc3f875cbd0d",
+ "sha256:48c627581ad4ef60adbac980981407939acf13a0e18f093502c7b542223c4f19",
+ "sha256:4a60e71625a2d78d8ab84dfb2fa2cfd9458c964b6e6c04fea76d9ade153fb371",
+ "sha256:585ace09f434e43d8a8dbeb366865b1a044d7c06319b3c7372a0a00e63b860f4",
+ "sha256:74b364b3f06c9cf0a53f7df611045bc9437ed972a283fa1f0b12537236d23ddc",
+ "sha256:75c65c3850e89e9daa68d1b9bedd5806f177d60aa5a7b0953b4829481cfc1f72",
+ "sha256:7f052de8bf744730a9120dbdc67bfeb7605a01f69fb8e7ba5c475af33c24e145",
+ "sha256:8113a7d5e87ecf57cd4ae263cc9e429adb9a3e59f5a7768da5d3312a8d0a051a",
+ "sha256:84857ce239eb8ed191ac78e77ff65d52902f00f30f4ee83bf80eb71da73b70e6",
+ "sha256:8644a48ddc4a40b3e3a6b9443f396c2ee353afb2d45656c4fc68d04a82e8e3f7",
+ "sha256:936aa565e673536e8a211e43ec43197406f24cd1f290138bd143765079c8ba00",
+ "sha256:9afeb88c67bbc663b9f27385c496da056d06ad87f55df6e393e1516cfecb0461",
+ "sha256:9d62cc7880110e4f83b0a51d218f465d3095e2751fbddd34e553dbd106a929ff",
+ "sha256:a1fadd062fc8d647ff39220c57ea2b48c99bb73f18223828ec97f88fc27e7898",
+ "sha256:a7754a783b1e5d6f627c19d099b178059c62f782ab62b4d8ba165b9fbc2ee34c",
+ "sha256:aa59dd63bb3f736de4fc2d080114429d5d369dfb3265f771778e8349d67a97a4",
+ "sha256:ae2ee0992f8de249715435942137843a93db204dd7db1e7cc9bdc5a8436443e8",
+ "sha256:b36842d7cf32929d568f37ec5b3173b72b2ec6572dec4d6be6ce774762215aee",
+ "sha256:bcbf9379c553b5facc6c04c1e5569b44b38ff16bcbf354676287698d61ee0c92",
+ "sha256:cbccbda6f1c62ab460449d9c85fdf24d0d32a6bf45176581151e53cc26a5d910",
+ "sha256:d0caf98dfb8af395d6732bd16561c0a2458851bea522e39f12f04802dbf6f502",
+ "sha256:d6456afeddba036def1a36d8a2758eca53202308d83db20ab5d0b66590919627",
+ "sha256:dbaef9a21a4f10bc281684ee4124f169e62bb533c2a92b55f8c06f64f9af7b8f",
+ "sha256:dce84916c09aaece006272b37234ae84a8ed13abb3a4d341a23933b8701abfb5",
+ "sha256:eb8c9c8b9869539d58d60ff4a28373a22514d40495911451343971cb4835b7a9",
+ "sha256:efc98b14ee3a8595e40b1425e8d42f5fd26f11a7b215a81ef9259068931754f4",
+ "sha256:fa2dc05b87d97acc1c6ae63f3e0f39eae5246565232484b08db6bf2dc1580678",
+ "sha256:fe7d6ce9f6a5fbe24f09d95ea93e9c7271abc4e1565da511e1449b107b4d7848"
+ ],
+ "version": "==1.0.1"
+ },
"humanfriendly": {
"hashes": [
"sha256:bf52ec91244819c780341a3438d5d7b09f431d3f113a475147ac9b7b167a3d12",
@@ -227,36 +296,36 @@
},
"lxml": {
"hashes": [
- "sha256:06d4e0bbb1d62e38ae6118406d7cdb4693a3fa34ee3762238bcb96c9e36a93cd",
- "sha256:0701f7965903a1c3f6f09328c1278ac0eee8f56f244e66af79cb224b7ef3801c",
- "sha256:1f2c4ec372bf1c4a2c7e4bb20845e8bcf8050365189d86806bad1e3ae473d081",
- "sha256:4235bc124fdcf611d02047d7034164897ade13046bda967768836629bc62784f",
- "sha256:5828c7f3e615f3975d48f40d4fe66e8a7b25f16b5e5705ffe1d22e43fb1f6261",
- "sha256:585c0869f75577ac7a8ff38d08f7aac9033da2c41c11352ebf86a04652758b7a",
- "sha256:5d467ce9c5d35b3bcc7172c06320dddb275fea6ac2037f72f0a4d7472035cea9",
- "sha256:63dbc21efd7e822c11d5ddbedbbb08cd11a41e0032e382a0fd59b0b08e405a3a",
- "sha256:7bc1b221e7867f2e7ff1933165c0cec7153dce93d0cdba6554b42a8beb687bdb",
- "sha256:8620ce80f50d023d414183bf90cc2576c2837b88e00bea3f33ad2630133bbb60",
- "sha256:8a0ebda56ebca1a83eb2d1ac266649b80af8dd4b4a3502b2c1e09ac2f88fe128",
- "sha256:90ed0e36455a81b25b7034038e40880189169c308a3df360861ad74da7b68c1a",
- "sha256:95e67224815ef86924fbc2b71a9dbd1f7262384bca4bc4793645794ac4200717",
- "sha256:afdb34b715daf814d1abea0317b6d672476b498472f1e5aacbadc34ebbc26e89",
- "sha256:b4b2c63cc7963aedd08a5f5a454c9f67251b1ac9e22fd9d72836206c42dc2a72",
- "sha256:d068f55bda3c2c3fcaec24bd083d9e2eede32c583faf084d6e4b9daaea77dde8",
- "sha256:d5b3c4b7edd2e770375a01139be11307f04341ec709cf724e0f26ebb1eef12c3",
- "sha256:deadf4df349d1dcd7b2853a2c8796593cc346600726eff680ed8ed11812382a7",
- "sha256:df533af6f88080419c5a604d0d63b2c33b1c0c4409aba7d0cb6de305147ea8c8",
- "sha256:e4aa948eb15018a657702fee0b9db47e908491c64d36b4a90f59a64741516e77",
- "sha256:e5d842c73e4ef6ed8c1bd77806bf84a7cb535f9c0cf9b2c74d02ebda310070e1",
- "sha256:ebec08091a22c2be870890913bdadd86fcd8e9f0f22bcb398abd3af914690c15",
- "sha256:edc15fcfd77395e24543be48871c251f38132bb834d9fdfdad756adb6ea37679",
- "sha256:f2b74784ed7e0bc2d02bd53e48ad6ba523c9b36c194260b7a5045071abbb1012",
- "sha256:fa071559f14bd1e92077b1b5f6c22cf09756c6de7139370249eb372854ce51e6",
- "sha256:fd52e796fee7171c4361d441796b64df1acfceb51f29e545e812f16d023c4bbc",
- "sha256:fe976a0f1ef09b3638778024ab9fb8cde3118f203364212c198f71341c0715ca"
- ],
- "index": "pypi",
- "version": "==4.5.0"
+ "sha256:06748c7192eab0f48e3d35a7adae609a329c6257495d5e53878003660dc0fec6",
+ "sha256:0790ddca3f825dd914978c94c2545dbea5f56f008b050e835403714babe62a5f",
+ "sha256:1aa7a6197c1cdd65d974f3e4953764eee3d9c7b67e3966616b41fab7f8f516b7",
+ "sha256:22c6d34fdb0e65d5f782a4d1a1edb52e0a8365858dafb1c08cb1d16546cf0786",
+ "sha256:2754d4406438c83144f9ffd3628bbe2dcc6d62b20dbc5c1ec4bc4385e5d44b42",
+ "sha256:27ee0faf8077c7c1a589573b1450743011117f1aa1a91d5ae776bbc5ca6070f2",
+ "sha256:2b02c106709466a93ed424454ce4c970791c486d5fcdf52b0d822a7e29789626",
+ "sha256:2d1ddce96cf15f1254a68dba6935e6e0f1fe39247de631c115e84dd404a6f031",
+ "sha256:4f282737d187ae723b2633856085c31ae5d4d432968b7f3f478a48a54835f5c4",
+ "sha256:51bb4edeb36d24ec97eb3e6a6007be128b720114f9a875d6b370317d62ac80b9",
+ "sha256:7eee37c1b9815e6505847aa5e68f192e8a1b730c5c7ead39ff317fde9ce29448",
+ "sha256:7fd88cb91a470b383aafad554c3fe1ccf6dfb2456ff0e84b95335d582a799804",
+ "sha256:9144ce36ca0824b29ebc2e02ca186e54040ebb224292072250467190fb613b96",
+ "sha256:925baf6ff1ef2c45169f548cc85204433e061360bfa7d01e1be7ae38bef73194",
+ "sha256:a636346c6c0e1092ffc202d97ec1843a75937d8c98aaf6771348ad6422e44bb0",
+ "sha256:a87dbee7ad9dce3aaefada2081843caf08a44a8f52e03e0a4cc5819f8398f2f4",
+ "sha256:a9e3b8011388e7e373565daa5e92f6c9cb844790dc18e43073212bb3e76f7007",
+ "sha256:afb53edf1046599991fb4a7d03e601ab5f5422a5435c47ee6ba91ec3b61416a6",
+ "sha256:b26719890c79a1dae7d53acac5f089d66fd8cc68a81f4e4bd355e45470dc25e1",
+ "sha256:b7462cdab6fffcda853338e1741ce99706cdf880d921b5a769202ea7b94e8528",
+ "sha256:b77975465234ff49fdad871c08aa747aae06f5e5be62866595057c43f8d2f62c",
+ "sha256:c47a8a5d00060122ca5908909478abce7bbf62d812e3fc35c6c802df8fb01fe7",
+ "sha256:c79e5debbe092e3c93ca4aee44c9a7631bdd407b2871cb541b979fd350bbbc29",
+ "sha256:d8d40e0121ca1606aa9e78c28a3a7d88a05c06b3ca61630242cded87d8ce55fa",
+ "sha256:ee2be8b8f72a2772e72ab926a3bccebf47bb727bda41ae070dc91d1fb759b726",
+ "sha256:f95d28193c3863132b1f55c1056036bf580b5a488d908f7d22a04ace8935a3a9",
+ "sha256:fadd2a63a2bfd7fb604508e553d1cf68eca250b2fbdbd81213b5f6f2fbf23529"
+ ],
+ "index": "pypi",
+ "version": "==4.5.1"
},
"markdownify": {
"hashes": [
@@ -305,46 +374,46 @@
},
"more-itertools": {
"hashes": [
- "sha256:5dd8bcf33e5f9513ffa06d5ad33d78f31e1931ac9a18f33d37e77a180d393a7c",
- "sha256:b1ddb932186d8a6ac451e1d95844b382f55e12686d51ca0c68b6f61f2ab7a507"
+ "sha256:558bb897a2232f5e4f8e2399089e35aecb746e1f9191b6584a151647e89267be",
+ "sha256:7818f596b1e87be009031c7653d01acc46ed422e6656b394b0f765ce66ed4982"
],
"index": "pypi",
- "version": "==8.2.0"
+ "version": "==8.3.0"
},
"multidict": {
"hashes": [
- "sha256:317f96bc0950d249e96d8d29ab556d01dd38888fbe68324f46fd834b430169f1",
- "sha256:42f56542166040b4474c0c608ed051732033cd821126493cf25b6c276df7dd35",
- "sha256:4b7df040fb5fe826d689204f9b544af469593fb3ff3a069a6ad3409f742f5928",
- "sha256:544fae9261232a97102e27a926019100a9db75bec7b37feedd74b3aa82f29969",
- "sha256:620b37c3fea181dab09267cd5a84b0f23fa043beb8bc50d8474dd9694de1fa6e",
- "sha256:6e6fef114741c4d7ca46da8449038ec8b1e880bbe68674c01ceeb1ac8a648e78",
- "sha256:7774e9f6c9af3f12f296131453f7b81dabb7ebdb948483362f5afcaac8a826f1",
- "sha256:85cb26c38c96f76b7ff38b86c9d560dea10cf3459bb5f4caf72fc1bb932c7136",
- "sha256:a326f4240123a2ac66bb163eeba99578e9d63a8654a59f4688a79198f9aa10f8",
- "sha256:ae402f43604e3b2bc41e8ea8b8526c7fa7139ed76b0d64fc48e28125925275b2",
- "sha256:aee283c49601fa4c13adc64c09c978838a7e812f85377ae130a24d7198c0331e",
- "sha256:b51249fdd2923739cd3efc95a3d6c363b67bbf779208e9f37fd5e68540d1a4d4",
- "sha256:bb519becc46275c594410c6c28a8a0adc66fe24fef154a9addea54c1adb006f5",
- "sha256:c2c37185fb0af79d5c117b8d2764f4321eeb12ba8c141a95d0aa8c2c1d0a11dd",
- "sha256:dc561313279f9d05a3d0ffa89cd15ae477528ea37aa9795c4654588a3287a9ab",
- "sha256:e439c9a10a95cb32abd708bb8be83b2134fa93790a4fb0535ca36db3dda94d20",
- "sha256:fc3b4adc2ee8474cb3cd2a155305d5f8eda0a9c91320f83e55748e1fcb68f8e3"
- ],
- "version": "==4.7.5"
+ "sha256:1ece5a3369835c20ed57adadc663400b5525904e53bae59ec854a5d36b39b21a",
+ "sha256:275ca32383bc5d1894b6975bb4ca6a7ff16ab76fa622967625baeebcf8079000",
+ "sha256:3750f2205b800aac4bb03b5ae48025a64e474d2c6cc79547988ba1d4122a09e2",
+ "sha256:4538273208e7294b2659b1602490f4ed3ab1c8cf9dbdd817e0e9db8e64be2507",
+ "sha256:5141c13374e6b25fe6bf092052ab55c0c03d21bd66c94a0e3ae371d3e4d865a5",
+ "sha256:51a4d210404ac61d32dada00a50ea7ba412e6ea945bbe992e4d7a595276d2ec7",
+ "sha256:5cf311a0f5ef80fe73e4f4c0f0998ec08f954a6ec72b746f3c179e37de1d210d",
+ "sha256:6513728873f4326999429a8b00fc7ceddb2509b01d5fd3f3be7881a257b8d463",
+ "sha256:7388d2ef3c55a8ba80da62ecfafa06a1c097c18032a501ffd4cabbc52d7f2b19",
+ "sha256:9456e90649005ad40558f4cf51dbb842e32807df75146c6d940b6f5abb4a78f3",
+ "sha256:c026fe9a05130e44157b98fea3ab12969e5b60691a276150db9eda71710cd10b",
+ "sha256:d14842362ed4cf63751648e7672f7174c9818459d169231d03c56e84daf90b7c",
+ "sha256:e0d072ae0f2a179c375f67e3da300b47e1a83293c554450b29c900e50afaae87",
+ "sha256:f07acae137b71af3bb548bd8da720956a3bc9f9a0b87733e0899226a2317aeb7",
+ "sha256:fbb77a75e529021e7c4a8d4e823d88ef4d23674a202be4f5addffc72cbb91430",
+ "sha256:fcfbb44c59af3f8ea984de67ec7c306f618a3ec771c2843804069917a8f2e255",
+ "sha256:feed85993dbdb1dbc29102f50bca65bdc68f2c0c8d352468c25b54874f23c39d"
+ ],
+ "version": "==4.7.6"
},
"ordered-set": {
"hashes": [
- "sha256:a7bfa858748c73b096e43db14eb23e2bc714a503f990c89fac8fab9b0ee79724"
+ "sha256:a31008c57f9c9776b12eb8841b1f61d1e4d70dfbbe8875ccfa2403c54af3d51b"
],
- "version": "==3.1.1"
+ "version": "==4.0.1"
},
"packaging": {
"hashes": [
- "sha256:3c292b474fda1671ec57d46d739d072bfd495a4f51ad01a055121d81e952b7a3",
- "sha256:82f77b9bee21c1bafbf35a84905d604d5d1223801d639cf3ed140bd651c08752"
+ "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8",
+ "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"
],
- "version": "==20.3"
+ "version": "==20.4"
},
"pamqp": {
"hashes": [
@@ -418,10 +487,10 @@
},
"pytz": {
"hashes": [
- "sha256:1c557d7d0e871de1f5ccd5833f60fb2550652da6be2693c1e02300743d21500d",
- "sha256:b02c06db6cf09c12dd25137e563b31700d3b80fcc4ad23abb7a315f2789819be"
+ "sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed",
+ "sha256:c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048"
],
- "version": "==2019.3"
+ "version": "==2020.1"
},
"pyyaml": {
"hashes": [
@@ -440,6 +509,13 @@
"index": "pypi",
"version": "==5.3.1"
},
+ "redis": {
+ "hashes": [
+ "sha256:2ef11f489003f151777c064c5dbc6653dfb9f3eade159bcadc524619fddc2242",
+ "sha256:6d65e84bc58091140081ee9d9c187aab0480097750fac44239307a3bdf0b1251"
+ ],
+ "version": "==3.5.2"
+ },
"requests": {
"hashes": [
"sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee",
@@ -450,18 +526,18 @@
},
"sentry-sdk": {
"hashes": [
- "sha256:23808d571d2461a4ce3784ec12bbee5bdb8c026c143fe79d36cef8a6d653e71f",
- "sha256:bb90a4e19c7233a580715fc986cc44be2c48fc10b31e71580a2037e1c94b6950"
+ "sha256:0e5e947d0f7a969314aa23669a94a9712be5a688ff069ff7b9fc36c66adc160c",
+ "sha256:799a8bf76b012e3030a881be00e97bc0b922ce35dde699c6537122b751d80e2c"
],
"index": "pypi",
- "version": "==0.14.3"
+ "version": "==0.14.4"
},
"six": {
"hashes": [
- "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a",
- "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c"
+ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",
+ "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"
],
- "version": "==1.14.0"
+ "version": "==1.15.0"
},
"snowballstemmer": {
"hashes": [
@@ -470,12 +546,19 @@
],
"version": "==2.0.0"
},
+ "sortedcontainers": {
+ "hashes": [
+ "sha256:974e9a32f56b17c1bac2aebd9dcf197f3eb9cd30553c5852a3187ad162e1a03a",
+ "sha256:d9e96492dd51fae31e60837736b38fe42a187b5404c16606ff7ee7cd582d4c60"
+ ],
+ "version": "==2.1.0"
+ },
"soupsieve": {
"hashes": [
- "sha256:e914534802d7ffd233242b785229d5ba0766a7f487385e3f714446a07bf540ae",
- "sha256:fcd71e08c0aee99aca1b73f45478549ee7e7fc006d51b37bec9e9def7dc22b69"
+ "sha256:1634eea42ab371d3d346309b93df7870a88610f0725d47528be902a0d95ecc55",
+ "sha256:a59dc181727e95d25f781f0eb4fd1825ff45590ec8ff49eadfd7f1a537cc0232"
],
- "version": "==2.0"
+ "version": "==2.0.1"
},
"sphinx": {
"hashes": [
@@ -595,10 +678,10 @@
"develop": {
"appdirs": {
"hashes": [
- "sha256:9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92",
- "sha256:d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e"
+ "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41",
+ "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"
],
- "version": "==1.4.3"
+ "version": "==1.4.4"
},
"attrs": {
"hashes": [
@@ -657,13 +740,6 @@
],
"version": "==0.3.0"
},
- "entrypoints": {
- "hashes": [
- "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19",
- "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"
- ],
- "version": "==0.3"
- },
"filelock": {
"hashes": [
"sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59",
@@ -673,11 +749,11 @@
},
"flake8": {
"hashes": [
- "sha256:45681a117ecc81e870cbf1262835ae4af5e7a8b08e40b944a8a6e6b895914cfb",
- "sha256:49356e766643ad15072a789a20915d3c91dc89fd313ccd71802303fd67e4deca"
+ "sha256:c69ac1668e434d37a2d2880b3ca9aafd54b3a10a3ac1ab101d22f29e29cf8634",
+ "sha256:ccaa799ef9893cebe69fdfefed76865aeaefbb94cb8545617b2298786a4de9a5"
],
"index": "pypi",
- "version": "==3.7.9"
+ "version": "==3.8.2"
},
"flake8-annotations": {
"hashes": [
@@ -743,10 +819,10 @@
},
"identify": {
"hashes": [
- "sha256:2bb8760d97d8df4408f4e805883dad26a2d076f04be92a10a3e43f09c6060742",
- "sha256:faffea0fd8ec86bb146ac538ac350ed0c73908326426d387eded0bcc9d077522"
+ "sha256:0f3c3aac62b51b86fea6ff52fe8ff9e06f57f10411502443809064d23e16f1c2",
+ "sha256:f9ad3d41f01e98eb066b6e05c5b184fd1e925fadec48eb165b4e01c72a1ef3a7"
],
- "version": "==1.4.14"
+ "version": "==1.4.16"
},
"mccabe": {
"hashes": [
@@ -771,18 +847,18 @@
},
"pre-commit": {
"hashes": [
- "sha256:487c675916e6f99d355ec5595ad77b325689d423ef4839db1ed2f02f639c9522",
- "sha256:c0aa11bce04a7b46c5544723aedf4e81a4d5f64ad1205a30a9ea12d5e81969e1"
+ "sha256:5559e09afcac7808933951ffaf4ff9aac524f31efbc3f24d021540b6c579813c",
+ "sha256:703e2e34cbe0eedb0d319eff9f7b83e2022bb5a3ab5289a6a8841441076514d0"
],
"index": "pypi",
- "version": "==2.2.0"
+ "version": "==2.4.0"
},
"pycodestyle": {
"hashes": [
- "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56",
- "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"
+ "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367",
+ "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e"
],
- "version": "==2.5.0"
+ "version": "==2.6.0"
},
"pydocstyle": {
"hashes": [
@@ -793,10 +869,10 @@
},
"pyflakes": {
"hashes": [
- "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0",
- "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"
+ "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92",
+ "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8"
],
- "version": "==2.1.1"
+ "version": "==2.2.0"
},
"pyyaml": {
"hashes": [
@@ -817,10 +893,10 @@
},
"six": {
"hashes": [
- "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a",
- "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c"
+ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",
+ "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"
],
- "version": "==1.14.0"
+ "version": "==1.15.0"
},
"snowballstemmer": {
"hashes": [
@@ -831,10 +907,10 @@
},
"toml": {
"hashes": [
- "sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c",
- "sha256:235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e"
+ "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f",
+ "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"
],
- "version": "==0.10.0"
+ "version": "==0.10.1"
},
"unittest-xml-reporting": {
"hashes": [
@@ -846,10 +922,10 @@
},
"virtualenv": {
"hashes": [
- "sha256:5021396e8f03d0d002a770da90e31e61159684db2859d0ba4850fbea752aa675",
- "sha256:ac53ade75ca189bc97b6c1d9ec0f1a50efe33cbf178ae09452dcd9fd309013c1"
+ "sha256:a116629d4e7f4d03433b8afa27f43deba09d48bc48f5ecefa4f015a178efb6cf",
+ "sha256:a730548b27366c5e6cbdf6f97406d861cccece2e22275e8e1a757aeff5e00c70"
],
- "version": "==20.0.18"
+ "version": "==20.0.21"
}
}
}
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index d56675029..4500cb6e8 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -22,6 +22,7 @@ jobs:
REDDIT_CLIENT_ID: spam
REDDIT_SECRET: ham
WOLFRAM_API_KEY: baz
+ REDIS_PASSWORD: ''
steps:
- task: UsePythonVersion@0
diff --git a/bot/bot.py b/bot/bot.py
index a85a22aa9..313652d11 100644
--- a/bot/bot.py
+++ b/bot/bot.py
@@ -5,7 +5,9 @@ import warnings
from typing import Optional
import aiohttp
+import aioredis
import discord
+import fakeredis.aioredis
from discord.ext import commands
from sentry_sdk import push_scope
@@ -28,6 +30,9 @@ class Bot(commands.Bot):
super().__init__(*args, **kwargs)
self.http_session: Optional[aiohttp.ClientSession] = None
+ self.redis_session: Optional[aioredis.Redis] = None
+ self.redis_ready = asyncio.Event()
+ self.redis_closed = False
self.api_client = api.APIClient(loop=self.loop)
self._connector = None
@@ -44,6 +49,30 @@ class Bot(commands.Bot):
self.stats = AsyncStatsClient(self.loop, statsd_url, 8125, prefix="bot")
+ async def _create_redis_session(self) -> None:
+ """
+ Create the Redis connection pool, and then open the redis event gate.
+
+ If constants.Redis.use_fakeredis is True, we'll set up a fake redis pool instead
+ of attempting to communicate with a real Redis server. This is useful because it
+ means contributors don't necessarily need to get Redis running locally just
+ to run the bot.
+
+ The fakeredis cache won't have persistence across restarts, but that
+ usually won't matter for local bot testing.
+ """
+ if constants.Redis.use_fakeredis:
+ log.info("Using fakeredis instead of communicating with a real Redis server.")
+ self.redis_session = await fakeredis.aioredis.create_redis_pool()
+ else:
+ self.redis_session = await aioredis.create_redis_pool(
+ address=(constants.Redis.host, constants.Redis.port),
+ password=constants.Redis.password,
+ )
+
+ self.redis_closed = False
+ self.redis_ready.set()
+
def add_cog(self, cog: commands.Cog) -> None:
"""Adds a "cog" to the bot and logs the operation."""
super().add_cog(cog)
@@ -78,6 +107,12 @@ class Bot(commands.Bot):
if self.stats._transport:
self.stats._transport.close()
+ if self.redis_session:
+ self.redis_closed = True
+ self.redis_session.close()
+ self.redis_ready.clear()
+ await self.redis_session.wait_closed()
+
async def login(self, *args, **kwargs) -> None:
"""Re-create the connector and set up sessions before logging into Discord."""
self._recreate()
@@ -85,7 +120,7 @@ class Bot(commands.Bot):
await super().login(*args, **kwargs)
def _recreate(self) -> None:
- """Re-create the connector, aiohttp session, and the APIClient."""
+ """Re-create the connector, aiohttp session, the APIClient and the Redis session."""
# Use asyncio for DNS resolution instead of threads so threads aren't spammed.
# Doesn't seem to have any state with regards to being closed, so no need to worry?
self._resolver = aiohttp.AsyncResolver()
@@ -96,6 +131,14 @@ class Bot(commands.Bot):
"The previous connector was not closed; it will remain open and be overwritten"
)
+ if self.redis_session and not self.redis_session.closed:
+ log.warning(
+ "The previous redis pool was not closed; it will remain open and be overwritten"
+ )
+
+ # Create the redis session
+ self.loop.create_task(self._create_redis_session())
+
# Use AF_INET as its socket family to prevent HTTPS related problems both locally
# and in production.
self._connector = aiohttp.TCPConnector(
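The bot.py changes above add a Redis connection pool next to the existing aiohttp session, falling back to fakeredis when constants.Redis.use_fakeredis is set so contributors don't need a local Redis server. A minimal standalone sketch of that pattern, assuming aioredis 1.3 and fakeredis 1.4 as pinned in the Pipfile, with placeholder connection details:

import asyncio

import aioredis
import fakeredis.aioredis

USE_FAKEREDIS = True  # stand-in for constants.Redis.use_fakeredis


async def create_redis_session() -> aioredis.Redis:
    """Create a fake or real Redis connection pool, mirroring Bot._create_redis_session."""
    if USE_FAKEREDIS:
        # No Redis server required; data is not persisted across restarts.
        return await fakeredis.aioredis.create_redis_pool()
    return await aioredis.create_redis_pool(
        address=("localhost", 6379),  # placeholder host/port
        password=None,                # placeholder password
    )


async def main() -> None:
    redis = await create_redis_session()
    await redis.set("greeting", "hello")
    print(await redis.get("greeting", encoding="utf-8"))
    redis.close()
    await redis.wait_closed()


asyncio.run(main())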
diff --git a/bot/cogs/antimalware.py b/bot/cogs/antimalware.py
index 66b5073e8..ea257442e 100644
--- a/bot/cogs/antimalware.py
+++ b/bot/cogs/antimalware.py
@@ -1,4 +1,5 @@
import logging
+import typing as t
from os.path import splitext
from discord import Embed, Message, NotFound
@@ -9,6 +10,27 @@ from bot.constants import AntiMalware as AntiMalwareConfig, Channels, STAFF_ROLE
log = logging.getLogger(__name__)
+PY_EMBED_DESCRIPTION = (
+ "It looks like you tried to attach a Python file - "
+ f"please use a code-pasting service such as {URLs.site_schema}{URLs.site_paste}"
+)
+
+TXT_EMBED_DESCRIPTION = (
+ "**Uh-oh!** It looks like your message got zapped by our spam filter. "
+ "We currently don't allow `.txt` attachments, so here are some tips to help you travel safely: \n\n"
+ "• If you attempted to send a message longer than 2000 characters, try shortening your message "
+ "to fit within the character limit or use a pasting service (see below) \n\n"
+ "• If you tried to show someone your code, you can use codeblocks \n(run `!code-blocks` in "
+ "{cmd_channel_mention} for more information) or use a pasting service like: "
+ f"\n\n{URLs.site_schema}{URLs.site_paste}"
+)
+
+DISALLOWED_EMBED_DESCRIPTION = (
+ "It looks like you tried to attach file type(s) that we do not allow ({blocked_extensions_str}). "
+ f"We currently allow the following file types: **{', '.join(AntiMalwareConfig.whitelist)}**.\n\n"
+ "Feel free to ask in {meta_channel_mention} if you think this is a mistake."
+)
+
class AntiMalware(Cog):
"""Delete messages which contain attachments with non-whitelisted file extensions."""
@@ -29,34 +51,20 @@ class AntiMalware(Cog):
return
embed = Embed()
- file_extensions = {splitext(attachment.filename.lower())[1] for attachment in message.attachments}
- extensions_blocked = file_extensions - set(AntiMalwareConfig.whitelist)
+ extensions_blocked = self.get_disallowed_extensions(message)
blocked_extensions_str = ', '.join(extensions_blocked)
if ".py" in extensions_blocked:
# Short-circuit on *.py files to provide a pastebin link
- embed.description = (
- "It looks like you tried to attach a Python file - "
- f"please use a code-pasting service such as {URLs.site_schema}{URLs.site_paste}"
- )
+ embed.description = PY_EMBED_DESCRIPTION
elif ".txt" in extensions_blocked:
# Work around Discord AutoConversion of messages longer than 2000 chars to .txt
cmd_channel = self.bot.get_channel(Channels.bot_commands)
- embed.description = (
- "**Uh-oh!** It looks like your message got zapped by our spam filter. "
- "We currently don't allow `.txt` attachments, so here are some tips to help you travel safely: \n\n"
- "• If you attempted to send a message longer than 2000 characters, try shortening your message "
- "to fit within the character limit or use a pasting service (see below) \n\n"
- "• If you tried to show someone your code, you can use codeblocks \n(run `!code-blocks` in "
- f"{cmd_channel.mention} for more information) or use a pasting service like: "
- f"\n\n{URLs.site_schema}{URLs.site_paste}"
- )
+ embed.description = TXT_EMBED_DESCRIPTION.format(cmd_channel_mention=cmd_channel.mention)
elif extensions_blocked:
- whitelisted_types = ', '.join(AntiMalwareConfig.whitelist)
meta_channel = self.bot.get_channel(Channels.meta)
- embed.description = (
- f"It looks like you tried to attach file type(s) that we do not allow ({blocked_extensions_str}). "
- f"We currently allow the following file types: **{whitelisted_types}**.\n\n"
- f"Feel free to ask in {meta_channel.mention} if you think this is a mistake."
+ embed.description = DISALLOWED_EMBED_DESCRIPTION.format(
+ blocked_extensions_str=blocked_extensions_str,
+ meta_channel_mention=meta_channel.mention,
)
if embed.description:
@@ -73,6 +81,13 @@ class AntiMalware(Cog):
except NotFound:
log.info(f"Tried to delete message `{message.id}`, but message could not be found.")
+ @classmethod
+ def get_disallowed_extensions(cls, message: Message) -> t.Iterable[str]:
+ """Get an iterable containing all the disallowed extensions of attachments."""
+ file_extensions = {splitext(attachment.filename.lower())[1] for attachment in message.attachments}
+ extensions_blocked = file_extensions - set(AntiMalwareConfig.whitelist)
+ return extensions_blocked
+
def setup(bot: Bot) -> None:
"""Load the AntiMalware cog."""
diff --git a/bot/cogs/antispam.py b/bot/cogs/antispam.py
index d63acbc4a..0bcca578d 100644
--- a/bot/cogs/antispam.py
+++ b/bot/cogs/antispam.py
@@ -94,7 +94,7 @@ class DeletionContext:
await modlog.send_log_message(
icon_url=Icons.filtering,
colour=Colour(Colours.soft_red),
- title=f"Spam detected!",
+ title="Spam detected!",
text=mod_alert_message,
thumbnail=last_message.author.avatar_url_as(static_format="png"),
channel_id=Channels.mod_alerts,
@@ -130,7 +130,7 @@ class AntiSpam(Cog):
body += "\n\n**The cog has been unloaded.**"
await self.mod_log.send_log_message(
- title=f"Error: AntiSpam configuration validation failed!",
+ title="Error: AntiSpam configuration validation failed!",
text=body,
ping_everyone=True,
icon_url=Icons.token_removed,
diff --git a/bot/cogs/bot.py b/bot/cogs/bot.py
index f6aea51c5..a79b37d25 100644
--- a/bot/cogs/bot.py
+++ b/bot/cogs/bot.py
@@ -326,6 +326,8 @@ class BotCog(Cog, name="Bot"):
log.trace("The code consists only of expressions, not sending instructions")
if howto != "":
+ # Increase amount of codeblock correction in stats
+ self.bot.stats.incr("codeblock_corrections")
howto_embed = Embed(description=howto)
bot_message = await msg.channel.send(f"Hey {msg.author.mention}!", embed=howto_embed)
self.codeblock_message_ids[msg.id] = bot_message.id
diff --git a/bot/cogs/defcon.py b/bot/cogs/defcon.py
index 25b0a6ad5..4c0ad5914 100644
--- a/bot/cogs/defcon.py
+++ b/bot/cogs/defcon.py
@@ -81,7 +81,7 @@ class Defcon(Cog):
else:
self.enabled = False
self.days = timedelta(days=0)
- log.info(f"DEFCON disabled")
+ log.info("DEFCON disabled")
await self.update_channel_topic()
diff --git a/bot/cogs/duck_pond.py b/bot/cogs/duck_pond.py
index 1f84a0609..37d1786a2 100644
--- a/bot/cogs/duck_pond.py
+++ b/bot/cogs/duck_pond.py
@@ -117,7 +117,7 @@ class DuckPond(Cog):
avatar_url=message.author.avatar_url
)
except discord.HTTPException:
- log.exception(f"Failed to send an attachment to the webhook")
+ log.exception("Failed to send an attachment to the webhook")
await message.add_reaction("✅")
diff --git a/bot/cogs/error_handler.py b/bot/cogs/error_handler.py
index 23d1eed82..5de961116 100644
--- a/bot/cogs/error_handler.py
+++ b/bot/cogs/error_handler.py
@@ -9,7 +9,7 @@ from bot.api import ResponseCodeError
from bot.bot import Bot
from bot.constants import Channels
from bot.converters import TagNameConverter
-from bot.decorators import InWhitelistCheckFailure
+from bot.utils.checks import InWhitelistCheckFailure
log = logging.getLogger(__name__)
@@ -166,7 +166,7 @@ class ErrorHandler(Cog):
await prepared_help_command
self.bot.stats.incr("errors.missing_required_argument")
elif isinstance(e, errors.TooManyArguments):
- await ctx.send(f"Too many arguments provided.")
+ await ctx.send("Too many arguments provided.")
await prepared_help_command
self.bot.stats.incr("errors.too_many_arguments")
elif isinstance(e, errors.BadArgument):
@@ -206,7 +206,7 @@ class ErrorHandler(Cog):
if isinstance(e, bot_missing_errors):
ctx.bot.stats.incr("errors.bot_permission_error")
await ctx.send(
- f"Sorry, it looks like I don't have the permissions or roles I need to do that."
+ "Sorry, it looks like I don't have the permissions or roles I need to do that."
)
elif isinstance(e, (InWhitelistCheckFailure, errors.NoPrivateMessage)):
ctx.bot.stats.incr("errors.wrong_channel_or_dm_error")
diff --git a/bot/cogs/filtering.py b/bot/cogs/filtering.py
index 6a703f5a1..1d9fddb12 100644
--- a/bot/cogs/filtering.py
+++ b/bot/cogs/filtering.py
@@ -4,7 +4,7 @@ from typing import Optional, Union
import discord.errors
from dateutil.relativedelta import relativedelta
-from discord import Colour, DMChannel, Member, Message, TextChannel
+from discord import Colour, Member, Message, TextChannel
from discord.ext.commands import Cog
from discord.utils import escape_markdown
@@ -161,8 +161,10 @@ class Filtering(Cog):
match = await _filter["function"](msg)
if match:
- # If this is a filter (not a watchlist), we should delete the message.
- if _filter["type"] == "filter":
+ is_private = msg.channel.type is discord.ChannelType.private
+
+ # If this is a filter (not a watchlist) and not in a DM, delete the message.
+ if _filter["type"] == "filter" and not is_private:
try:
# Embeds (can?) trigger both the `on_message` and `on_message_edit`
# event handlers, triggering filtering twice for the same message.
@@ -181,7 +183,7 @@ class Filtering(Cog):
if _filter["user_notification"]:
await self.notify_member(msg.author, _filter["notification_msg"], msg.channel)
- if isinstance(msg.channel, DMChannel):
+ if is_private:
channel_str = "via DM"
else:
channel_str = f"in {msg.channel.mention}"
@@ -212,7 +214,9 @@ class Filtering(Cog):
additional_embeds = None
additional_embeds_msg = None
- if filter_name == "filter_invites":
+ # The function returns True for invalid invites.
+ # They have no data so additional embeds can't be created for them.
+ if filter_name == "filter_invites" and match is not True:
additional_embeds = []
for invite, data in match.items():
embed = discord.Embed(description=(
diff --git a/bot/cogs/help_channels.py b/bot/cogs/help_channels.py
index 1bd1f9d68..70cef339a 100644
--- a/bot/cogs/help_channels.py
+++ b/bot/cogs/help_channels.py
@@ -24,18 +24,8 @@ ASKING_GUIDE_URL = "https://pythondiscord.com/pages/asking-good-questions/"
MAX_CHANNELS_PER_CATEGORY = 50
EXCLUDED_CHANNELS = (constants.Channels.how_to_get_help,)
-AVAILABLE_TOPIC = """
-This channel is available. Feel free to ask a question in order to claim this channel!
-"""
-
-IN_USE_TOPIC = """
-This channel is currently in use. If you'd like to discuss a different problem, please claim a new \
-channel from the Help: Available category.
-"""
-
-DORMANT_TOPIC = """
-This channel is temporarily archived. If you'd like to ask a question, please use one of the \
-channels in the Help: Available category.
+HELP_CHANNEL_TOPIC = """
+This is a Python help channel. You can claim your own help channel in the Python Help: Available category.
"""
AVAILABLE_MSG = f"""
@@ -64,11 +54,6 @@ question to maximize your chance of getting a good answer. If you're not sure ho
through our guide for [asking a good question]({ASKING_GUIDE_URL}).
"""
-AVAILABLE_EMOJI = "✅"
-IN_USE_ANSWERED_EMOJI = "⌛"
-IN_USE_UNANSWERED_EMOJI = "⏳"
-NAME_SEPARATOR = "|"
-
CoroutineFunc = t.Callable[..., t.Coroutine]
@@ -196,7 +181,7 @@ class HelpChannels(Scheduler, commands.Cog):
return None
log.debug(f"Creating a new dormant channel named {name}.")
- return await self.dormant_category.create_text_channel(name)
+ return await self.dormant_category.create_text_channel(name, topic=HELP_CHANNEL_TOPIC)
def create_name_queue(self) -> deque:
"""Return a queue of element names to use for creating new channels."""
@@ -391,7 +376,7 @@ class HelpChannels(Scheduler, commands.Cog):
self.in_use_category = await self.try_get_channel(constants.Categories.help_in_use)
self.dormant_category = await self.try_get_channel(constants.Categories.help_dormant)
except discord.HTTPException:
- log.exception(f"Failed to get a category; cog will be removed")
+ log.exception("Failed to get a category; cog will be removed")
self.bot.remove_cog(self.qualified_name)
async def init_cog(self) -> None:
@@ -438,13 +423,13 @@ class HelpChannels(Scheduler, commands.Cog):
"""Return True if `member` has the 'Help Cooldown' role."""
return any(constants.Roles.help_cooldown == role.id for role in member.roles)
- def is_dormant_message(self, message: t.Optional[discord.Message]) -> bool:
- """Return True if the contents of the `message` match `DORMANT_MSG`."""
+ def match_bot_embed(self, message: t.Optional[discord.Message], description: str) -> bool:
+ """Return `True` if the bot's `message`'s embed description matches `description`."""
if not message or not message.embeds:
return False
embed = message.embeds[0]
- return message.author == self.bot.user and embed.description.strip() == DORMANT_MSG.strip()
+ return message.author == self.bot.user and embed.description.strip() == description.strip()
@staticmethod
def is_in_category(channel: discord.TextChannel, category_id: int) -> bool:
@@ -461,7 +446,11 @@ class HelpChannels(Scheduler, commands.Cog):
"""
log.trace(f"Handling in-use channel #{channel} ({channel.id}).")
- idle_seconds = constants.HelpChannels.idle_minutes * 60
+ if not await self.is_empty(channel):
+ idle_seconds = constants.HelpChannels.idle_minutes * 60
+ else:
+ idle_seconds = constants.HelpChannels.deleted_idle_minutes * 60
+
time_elapsed = await self.get_idle_time(channel)
if time_elapsed is None or time_elapsed >= idle_seconds:
@@ -538,8 +527,6 @@ class HelpChannels(Scheduler, commands.Cog):
await self.move_to_bottom_position(
channel=channel,
category_id=constants.Categories.help_available,
- name=f"{AVAILABLE_EMOJI}{NAME_SEPARATOR}{self.get_clean_channel_name(channel)}",
- topic=AVAILABLE_TOPIC,
)
self.report_stats()
@@ -555,8 +542,6 @@ class HelpChannels(Scheduler, commands.Cog):
await self.move_to_bottom_position(
channel=channel,
category_id=constants.Categories.help_dormant,
- name=self.get_clean_channel_name(channel),
- topic=DORMANT_TOPIC,
)
self.bot.stats.incr(f"help.dormant_calls.{caller}")
@@ -589,8 +574,6 @@ class HelpChannels(Scheduler, commands.Cog):
await self.move_to_bottom_position(
channel=channel,
category_id=constants.Categories.help_in_use,
- name=f"{IN_USE_UNANSWERED_EMOJI}{NAME_SEPARATOR}{self.get_clean_channel_name(channel)}",
- topic=IN_USE_TOPIC,
)
timeout = constants.HelpChannels.idle_minutes * 60
@@ -656,18 +639,16 @@ class HelpChannels(Scheduler, commands.Cog):
# Check if there is an entry in unanswered (does not persist across restarts)
if channel.id in self.unanswered:
- claimant_id = self.help_channel_claimants[channel].id
+ claimant = self.help_channel_claimants.get(channel)
+ if not claimant:
+ # The mapping for this channel was lost, we can't do anything.
+ return
# Check the message did not come from the claimant
- if claimant_id != message.author.id:
+ if claimant.id != message.author.id:
# Mark the channel as answered
self.unanswered[channel.id] = False
- # Change the emoji in the channel name to signify activity
- log.trace(f"#{channel} ({channel.id}) has been answered; changing its emoji")
- name = self.get_clean_channel_name(channel)
- await channel.edit(name=f"{IN_USE_ANSWERED_EMOJI}{NAME_SEPARATOR}{name}")
-
@commands.Cog.listener()
async def on_message(self, message: discord.Message) -> None:
"""Move an available channel to the In Use category and replace it with a dormant one."""
@@ -713,6 +694,32 @@ class HelpChannels(Scheduler, commands.Cog):
# be put in the queue.
await self.move_to_available()
+ @commands.Cog.listener()
+ async def on_message_delete(self, msg: discord.Message) -> None:
+ """
+ Reschedule an in-use channel to become dormant sooner if the channel is empty.
+
+ The new time for the dormant task is configured with `HelpChannels.deleted_idle_minutes`.
+ """
+ if not self.is_in_category(msg.channel, constants.Categories.help_in_use):
+ return
+
+ if not await self.is_empty(msg.channel):
+ return
+
+ log.info(f"Claimant of #{msg.channel} ({msg.author}) deleted message, channel is empty now. Rescheduling task.")
+
+ # Cancel existing dormant task before scheduling new.
+ self.cancel_task(msg.channel.id)
+
+ task = TaskData(constants.HelpChannels.deleted_idle_minutes * 60, self.move_idle_channel(msg.channel))
+ self.schedule_task(msg.channel.id, task)
+
+ async def is_empty(self, channel: discord.TextChannel) -> bool:
+ """Return True if the most recent message in `channel` is the bot's `AVAILABLE_MSG`."""
+ msg = await self.get_last_message(channel)
+ return self.match_bot_embed(msg, AVAILABLE_MSG)
+
async def reset_send_permissions(self) -> None:
"""Reset send permissions in the Available category for claimants."""
log.trace("Resetting send permissions in the Available category.")
@@ -788,7 +795,7 @@ class HelpChannels(Scheduler, commands.Cog):
embed = discord.Embed(description=AVAILABLE_MSG)
msg = await self.get_last_message(channel)
- if self.is_dormant_message(msg):
+ if self.match_bot_embed(msg, DORMANT_MSG):
log.trace(f"Found dormant message {msg.id} in {channel_info}; editing it.")
await msg.edit(embed=embed)
else:
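The help_channels.py changes drop the emoji and topic renaming and instead treat a channel as empty when its newest message is still the bot's own AVAILABLE_MSG embed, rescheduling the dormant task with the shorter deleted_idle_minutes timeout in that case. A rough sketch of the emptiness check using plain discord.py calls, assuming the cog's get_last_message helper amounts to a history(limit=1) lookup:

import discord


async def is_empty(channel: discord.TextChannel, bot_user: discord.ClientUser, available_msg: str) -> bool:
    """Return True if the newest message in `channel` is the bot's availability embed."""
    last = None
    async for message in channel.history(limit=1):
        last = message

    if last is None or not last.embeds:
        return False

    embed = last.embeds[0]
    return last.author == bot_user and embed.description.strip() == available_msg.strip()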
diff --git a/bot/cogs/information.py b/bot/cogs/information.py
index ef2f308ca..f0eb3a1ea 100644
--- a/bot/cogs/information.py
+++ b/bot/cogs/information.py
@@ -12,9 +12,9 @@ from discord.utils import escape_markdown
from bot import constants
from bot.bot import Bot
-from bot.decorators import InWhitelistCheckFailure, in_whitelist, with_role
+from bot.decorators import in_whitelist, with_role
from bot.pagination import LinePaginator
-from bot.utils.checks import cooldown_with_role_bypass, with_role_check
+from bot.utils.checks import InWhitelistCheckFailure, cooldown_with_role_bypass, with_role_check
from bot.utils.time import time_since
log = logging.getLogger(__name__)
diff --git a/bot/cogs/moderation/management.py b/bot/cogs/moderation/management.py
index edfdfd9e2..c39c7f3bc 100644
--- a/bot/cogs/moderation/management.py
+++ b/bot/cogs/moderation/management.py
@@ -12,7 +12,7 @@ from bot.bot import Bot
from bot.converters import Expiry, InfractionSearchQuery, allowed_strings, proxy_user
from bot.pagination import LinePaginator
from bot.utils import time
-from bot.utils.checks import in_channel_check, with_role_check
+from bot.utils.checks import in_whitelist_check, with_role_check
from . import utils
from .infractions import Infractions
from .modlog import ModLog
@@ -49,8 +49,8 @@ class ModManagement(commands.Cog):
async def infraction_edit(
self,
ctx: Context,
- infraction_id: t.Union[int, allowed_strings("l", "last", "recent")],
- duration: t.Union[Expiry, allowed_strings("p", "permanent"), None],
+ infraction_id: t.Union[int, allowed_strings("l", "last", "recent")], # noqa: F821
+ duration: t.Union[Expiry, allowed_strings("p", "permanent"), None], # noqa: F821
*,
reason: str = None
) -> None:
@@ -83,14 +83,14 @@ class ModManagement(commands.Cog):
"actor__id": ctx.author.id,
"ordering": "-inserted_at"
}
- infractions = await self.bot.api_client.get(f"bot/infractions", params=params)
+ infractions = await self.bot.api_client.get("bot/infractions", params=params)
if infractions:
old_infraction = infractions[0]
infraction_id = old_infraction["id"]
else:
await ctx.send(
- f":x: Couldn't find most recent infraction; you have never given an infraction."
+ ":x: Couldn't find most recent infraction; you have never given an infraction."
)
return
else:
@@ -224,7 +224,7 @@ class ModManagement(commands.Cog):
) -> None:
"""Send a paginated embed of infractions for the specified user."""
if not infractions:
- await ctx.send(f":warning: No infractions could be found for that query.")
+ await ctx.send(":warning: No infractions could be found for that query.")
return
lines = tuple(
@@ -283,10 +283,16 @@ class ModManagement(commands.Cog):
# This cannot be static (must have a __func__ attribute).
def cog_check(self, ctx: Context) -> bool:
- """Only allow moderators from moderator channels to invoke the commands in this cog."""
+ """Only allow moderators inside moderator channels to invoke the commands in this cog."""
checks = [
with_role_check(ctx, *constants.MODERATION_ROLES),
- in_channel_check(ctx, *constants.MODERATION_CHANNELS)
+ in_whitelist_check(
+ ctx,
+ channels=constants.MODERATION_CHANNELS,
+ categories=[constants.Categories.modmail],
+ redirect=None,
+ fail_silently=True,
+ )
]
return all(checks)
diff --git a/bot/cogs/moderation/scheduler.py b/bot/cogs/moderation/scheduler.py
index 1c7786df4..f0a3ad1b1 100644
--- a/bot/cogs/moderation/scheduler.py
+++ b/bot/cogs/moderation/scheduler.py
@@ -91,7 +91,7 @@ class InfractionScheduler(Scheduler):
log.trace(f"Applying {infr_type} infraction #{id_} to {user}.")
# Default values for the confirmation message and mod log.
- confirm_msg = f":ok_hand: applied"
+ confirm_msg = ":ok_hand: applied"
# Specifying an expiry for a note or warning makes no sense.
if infr_type in ("note", "warning"):
@@ -138,7 +138,7 @@ class InfractionScheduler(Scheduler):
self.schedule_task(infraction["id"], infraction)
except discord.HTTPException as e:
# Accordingly display that applying the infraction failed.
- confirm_msg = f":x: failed to apply"
+ confirm_msg = ":x: failed to apply"
expiry_msg = ""
log_content = ctx.author.mention
log_title = "failed to apply"
@@ -295,7 +295,7 @@ class InfractionScheduler(Scheduler):
log.warning(f"Failed to pardon {infr_type} infraction #{id_} for {user}.")
else:
- confirm_msg = f":ok_hand: pardoned"
+ confirm_msg = ":ok_hand: pardoned"
log_title = "pardoned"
log.info(f"Pardoned {infr_type} infraction #{id_} for {user}.")
@@ -370,7 +370,7 @@ class InfractionScheduler(Scheduler):
)
except discord.Forbidden:
log.warning(f"Failed to deactivate infraction #{id_} ({type_}): bot lacks permissions.")
- log_text["Failure"] = f"The bot lacks permissions to do this (role hierarchy?)"
+ log_text["Failure"] = "The bot lacks permissions to do this (role hierarchy?)"
log_content = mod_role.mention
except discord.HTTPException as e:
log.exception(f"Failed to deactivate infraction #{id_} ({type_})")
@@ -419,7 +419,7 @@ class InfractionScheduler(Scheduler):
# Send a log message to the mod log.
if send_log:
- log_title = f"expiration failed" if "Failure" in log_text else "expired"
+ log_title = "expiration failed" if "Failure" in log_text else "expired"
user = self.bot.get_user(user_id)
avatar = user.avatar_url_as(static_format="png") if user else None
diff --git a/bot/cogs/moderation/silence.py b/bot/cogs/moderation/silence.py
index 1ef3967a9..25febfa51 100644
--- a/bot/cogs/moderation/silence.py
+++ b/bot/cogs/moderation/silence.py
@@ -91,7 +91,7 @@ class Silence(commands.Cog):
await ctx.send(f"{Emojis.check_mark} silenced current channel for {duration} minute(s).")
await asyncio.sleep(duration*60)
- log.info(f"Unsilencing channel after set delay.")
+ log.info("Unsilencing channel after set delay.")
await ctx.invoke(self.unsilence)
@commands.command(aliases=("unhush",))
diff --git a/bot/cogs/python_news.py b/bot/cogs/python_news.py
index 57ce61638..d15d0371e 100644
--- a/bot/cogs/python_news.py
+++ b/bot/cogs/python_news.py
@@ -109,6 +109,9 @@ class PythonNews(Cog):
)
payload["data"]["pep"].append(pep_nr)
+ # Increase overall PEP new stat
+ self.bot.stats.incr("python_news.posted.pep")
+
if msg.channel.is_news():
log.trace("Publishing PEP annnouncement because it was in a news channel")
await msg.publish()
@@ -150,6 +153,7 @@ class PythonNews(Cog):
if (
thread_information["thread_id"] in existing_news["data"][maillist]
+ or 'Re: ' in thread_information["subject"]
or new_date.date() < date.today()
):
continue
@@ -168,6 +172,9 @@ class PythonNews(Cog):
)
payload["data"][maillist].append(thread_information["thread_id"])
+ # Increase this specific maillist counter in stats
+ self.bot.stats.incr(f"python_news.posted.{maillist.replace('-', '_')}")
+
if msg.channel.is_news():
log.trace("Publishing mailing list message because it was in a news channel")
await msg.publish()
diff --git a/bot/cogs/snekbox.py b/bot/cogs/snekbox.py
index 6ef659f28..a2a7574d4 100644
--- a/bot/cogs/snekbox.py
+++ b/bot/cogs/snekbox.py
@@ -206,6 +206,12 @@ class Snekbox(Cog):
if paste_link:
msg = f"{msg}\nFull output: {paste_link}"
+ # Collect stats of eval fails + successes
+ if icon == ":x:":
+ self.bot.stats.incr("snekbox.python.fail")
+ else:
+ self.bot.stats.incr("snekbox.python.success")
+
response = await ctx.send(msg)
self.bot.loop.create_task(
wait_for_deletion(response, user_ids=(ctx.author.id,), client=ctx.bot)
@@ -293,6 +299,18 @@ class Snekbox(Cog):
await ctx.send_help(ctx.command)
return
+ if Roles.helpers in (role.id for role in ctx.author.roles):
+ self.bot.stats.incr("snekbox_usages.roles.helpers")
+ else:
+ self.bot.stats.incr("snekbox_usages.roles.developers")
+
+ if ctx.channel.category_id == Categories.help_in_use:
+ self.bot.stats.incr("snekbox_usages.channels.help")
+ elif ctx.channel.id == Channels.bot_commands:
+ self.bot.stats.incr("snekbox_usages.channels.bot_commands")
+ else:
+ self.bot.stats.incr("snekbox_usages.channels.topical")
+
log.info(f"Received code from {ctx.author} for evaluation:\n{code}")
while True:
diff --git a/bot/cogs/stats.py b/bot/cogs/stats.py
index d253db913..4ebb6423c 100644
--- a/bot/cogs/stats.py
+++ b/bot/cogs/stats.py
@@ -2,9 +2,11 @@ import string
from datetime import datetime
from discord import Member, Message, Status
-from discord.ext.commands import Bot, Cog, Context
+from discord.ext.commands import Cog, Context
+from discord.ext.tasks import loop
-from bot.constants import Channels, Guild, Stats as StatConf
+from bot.bot import Bot
+from bot.constants import Categories, Channels, Guild, Stats as StatConf
CHANNEL_NAME_OVERRIDES = {
@@ -23,6 +25,7 @@ class Stats(Cog):
def __init__(self, bot: Bot):
self.bot = bot
self.last_presence_update = None
+ self.update_guild_boost.start()
@Cog.listener()
async def on_message(self, message: Message) -> None:
@@ -33,6 +36,12 @@ class Stats(Cog):
if message.guild.id != Guild.id:
return
+ if message.channel.category.id == Categories.modmail:
+ if message.channel.id != Channels.incidents:
+ # Do not report modmail channels to stats, there are too many
+ # of them for interesting statistics to be drawn out of this.
+ return
+
reformatted_name = message.channel.name.replace('-', '_')
if CHANNEL_NAME_OVERRIDES.get(message.channel.id):
@@ -59,7 +68,7 @@ class Stats(Cog):
if member.guild.id != Guild.id:
return
- self.bot.stats.gauge(f"guild.total_members", len(member.guild.members))
+ self.bot.stats.gauge("guild.total_members", len(member.guild.members))
@Cog.listener()
async def on_member_leave(self, member: Member) -> None:
@@ -67,7 +76,7 @@ class Stats(Cog):
if member.guild.id != Guild.id:
return
- self.bot.stats.gauge(f"guild.total_members", len(member.guild.members))
+ self.bot.stats.gauge("guild.total_members", len(member.guild.members))
@Cog.listener()
async def on_member_update(self, _before: Member, after: Member) -> None:
@@ -101,6 +110,18 @@ class Stats(Cog):
self.bot.stats.gauge("guild.status.do_not_disturb", dnd)
self.bot.stats.gauge("guild.status.offline", offline)
+ @loop(hours=1)
+ async def update_guild_boost(self) -> None:
+ """Post the server boost level and tier every hour."""
+ await self.bot.wait_until_guild_available()
+ g = self.bot.get_guild(Guild.id)
+ self.bot.stats.gauge("boost.amount", g.premium_subscription_count)
+ self.bot.stats.gauge("boost.tier", g.premium_tier)
+
+ def cog_unload(self) -> None:
+ """Stop the boost statistic task on unload of the Cog."""
+ self.update_guild_boost.stop()
+
def setup(bot: Bot) -> None:
"""Load the stats cog."""
diff --git a/bot/cogs/tags.py b/bot/cogs/tags.py
index a813ffff5..6f03a3475 100644
--- a/bot/cogs/tags.py
+++ b/bot/cogs/tags.py
@@ -4,7 +4,7 @@ import time
from pathlib import Path
from typing import Callable, Dict, Iterable, List, Optional
-from discord import Colour, Embed
+from discord import Colour, Embed, Member
from discord.ext.commands import Cog, Context, group
from bot import constants
@@ -35,21 +35,36 @@ class Tags(Cog):
@staticmethod
def get_tags() -> dict:
"""Get all tags."""
- # Save all tags in memory.
cache = {}
- tag_files = Path("bot", "resources", "tags").iterdir()
- for file in tag_files:
- tag_title = file.stem
- tag = {
- "title": tag_title,
- "embed": {
- "description": file.read_text(encoding="utf-8")
+
+ base_path = Path("bot", "resources", "tags")
+ for file in base_path.glob("**/*"):
+ if file.is_file():
+ tag_title = file.stem
+ tag = {
+ "title": tag_title,
+ "embed": {
+ "description": file.read_text(encoding="utf8"),
+ },
+ "restricted_to": "developers",
}
- }
- cache[tag_title] = tag
+
+ # Convert to a list to allow negative indexing.
+ parents = list(file.relative_to(base_path).parents)
+ if len(parents) > 1:
+ # -1 would be '.' hence -2 is used as the index.
+ tag["restricted_to"] = parents[-2].name
+
+ cache[tag_title] = tag
+
return cache
@staticmethod
+ def check_accessibility(user: Member, tag: dict) -> bool:
+ """Check if user can access a tag."""
+ return tag["restricted_to"].lower() in [role.name.lower() for role in user.roles]
+
+ @staticmethod
def _fuzzy_search(search: str, target: str) -> float:
"""A simple scoring algorithm based on how many letters are found / total, with order in mind."""
current, index = 0, 0
@@ -93,7 +108,7 @@ class Tags(Cog):
return self._get_suggestions(tag_name)
return found
- def _get_tags_via_content(self, check: Callable[[Iterable], bool], keywords: str) -> list:
+ def _get_tags_via_content(self, check: Callable[[Iterable], bool], keywords: str, user: Member) -> list:
"""
Search for tags via contents.
@@ -114,7 +129,8 @@ class Tags(Cog):
matching_tags = []
for tag in self._cache.values():
- if check(query in tag['embed']['description'].casefold() for query in keywords_processed):
+ matches = (query in tag['embed']['description'].casefold() for query in keywords_processed)
+ if self.check_accessibility(user, tag) and check(matches):
matching_tags.append(tag)
return matching_tags
@@ -152,7 +168,7 @@ class Tags(Cog):
Only search for tags that has ALL the keywords.
"""
- matching_tags = self._get_tags_via_content(all, keywords)
+ matching_tags = self._get_tags_via_content(all, keywords, ctx.author)
await self._send_matching_tags(ctx, keywords, matching_tags)
@search_tag_content.command(name='any')
@@ -162,7 +178,7 @@ class Tags(Cog):
Search for tags that has ANY of the keywords.
"""
- matching_tags = self._get_tags_via_content(any, keywords or 'any')
+ matching_tags = self._get_tags_via_content(any, keywords or 'any', ctx.author)
await self._send_matching_tags(ctx, keywords, matching_tags)
@tags_group.command(name='get', aliases=('show', 'g'))
@@ -198,7 +214,13 @@ class Tags(Cog):
return
if tag_name is not None:
- founds = self._get_tag(tag_name)
+ temp_founds = self._get_tag(tag_name)
+
+ founds = []
+
+ for found_tag in temp_founds:
+ if self.check_accessibility(ctx.author, found_tag):
+ founds.append(found_tag)
if len(founds) == 1:
tag = founds[0]
@@ -237,7 +259,10 @@ class Tags(Cog):
else:
embed: Embed = Embed(title="**Current tags**")
await LinePaginator.paginate(
- sorted(f"**»** {tag['title']}" for tag in tags),
+ sorted(
+ f"**»** {tag['title']}" for tag in tags
+ if self.check_accessibility(ctx.author, tag)
+ ),
ctx,
embed,
footer_text=FOOTER_TEXT,
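
A tag's restriction now comes entirely from where its file sits under bot/resources/tags: a file inside a subdirectory is restricted to the role named after that subdirectory, while top-level files fall back to "developers". A small standalone illustration of the parents[-2] logic used above (the file names are made up):

    from pathlib import Path

    base_path = Path("bot", "resources", "tags")

    for file in (
        base_path / "ask.md",                 # hypothetical top-level tag
        base_path / "moderators" / "ban.md",  # hypothetical tag restricted to "moderators"
    ):
        restricted_to = "developers"  # default used by the cog
        # Convert to a list to allow negative indexing; parents[-1] is always '.'.
        parents = list(file.relative_to(base_path).parents)
        if len(parents) > 1:
            restricted_to = parents[-2].name
        print(f"{file.name} -> {restricted_to}")

    # Output:
    # ask.md -> developers
    # ban.md -> moderators

check_accessibility then simply compares that string, case-insensitively, against the names of the invoking member's roles.
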
diff --git a/bot/cogs/utils.py b/bot/cogs/utils.py
index 6b59d37c8..73b4a1c0a 100644
--- a/bot/cogs/utils.py
+++ b/bot/cogs/utils.py
@@ -253,8 +253,8 @@ class Utils(Cog):
async def send_pep_zero(self, ctx: Context) -> None:
"""Send information about PEP 0."""
pep_embed = Embed(
- title=f"**PEP 0 - Index of Python Enhancement Proposals (PEPs)**",
- description=f"[Link](https://www.python.org/dev/peps/)"
+ title="**PEP 0 - Index of Python Enhancement Proposals (PEPs)**",
+ description="[Link](https://www.python.org/dev/peps/)"
)
pep_embed.set_thumbnail(url=ICON_URL)
pep_embed.add_field(name="Status", value="Active")
diff --git a/bot/cogs/verification.py b/bot/cogs/verification.py
index 77e8b5706..99be3cdaa 100644
--- a/bot/cogs/verification.py
+++ b/bot/cogs/verification.py
@@ -9,8 +9,8 @@ from discord.ext.commands import Cog, Context, command
from bot import constants
from bot.bot import Bot
from bot.cogs.moderation import ModLog
-from bot.decorators import InWhitelistCheckFailure, in_whitelist, without_role
-from bot.utils.checks import without_role_check
+from bot.decorators import in_whitelist, without_role
+from bot.utils.checks import InWhitelistCheckFailure, without_role_check
log = logging.getLogger(__name__)
diff --git a/bot/cogs/watchchannels/talentpool.py b/bot/cogs/watchchannels/talentpool.py
index 896a7b404..14547105f 100644
--- a/bot/cogs/watchchannels/talentpool.py
+++ b/bot/cogs/watchchannels/talentpool.py
@@ -61,7 +61,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):
return
if isinstance(user, Member) and any(role.id in STAFF_ROLES for role in user.roles):
- await ctx.send(f":x: Nominating staff members, eh? Here's a cookie :cookie:")
+ await ctx.send(":x: Nominating staff members, eh? Here's a cookie :cookie:")
return
if not await self.fetch_user_cache():
diff --git a/bot/cogs/watchchannels/watchchannel.py b/bot/cogs/watchchannels/watchchannel.py
index ac1aa38ee..436778c46 100644
--- a/bot/cogs/watchchannels/watchchannel.py
+++ b/bot/cogs/watchchannels/watchchannel.py
@@ -82,7 +82,7 @@ class WatchChannel(metaclass=CogABCMeta):
exc = self._consume_task.exception()
if exc:
self.log.exception(
- f"The message queue consume task has failed with:",
+ "The message queue consume task has failed with:",
exc_info=exc
)
return False
@@ -146,7 +146,7 @@ class WatchChannel(metaclass=CogABCMeta):
try:
data = await self.bot.api_client.get(self.api_endpoint, params=self.api_default_params)
except ResponseCodeError as err:
- self.log.exception(f"Failed to fetch the watched users from the API", exc_info=err)
+ self.log.exception("Failed to fetch the watched users from the API", exc_info=err)
return False
self.watched_users = defaultdict(dict)
@@ -173,7 +173,7 @@ class WatchChannel(metaclass=CogABCMeta):
self.log.trace(f"Sleeping {BigBrotherConfig.log_delay} seconds before consuming message queue")
await asyncio.sleep(BigBrotherConfig.log_delay)
- self.log.trace(f"Started consuming the message queue")
+ self.log.trace("Started consuming the message queue")
# If the previous consumption Task failed, first consume the existing comsumption_queue
if not self.consumption_queue:
@@ -208,7 +208,7 @@ class WatchChannel(metaclass=CogABCMeta):
await self.webhook.send(content=content, username=username, avatar_url=avatar_url, embed=embed)
except discord.HTTPException as exc:
self.log.exception(
- f"Failed to send a message to the webhook",
+ "Failed to send a message to the webhook",
exc_info=exc
)
@@ -254,7 +254,7 @@ class WatchChannel(metaclass=CogABCMeta):
)
except discord.HTTPException as exc:
self.log.exception(
- f"Failed to send an attachment to the webhook",
+ "Failed to send an attachment to the webhook",
exc_info=exc
)
@@ -327,13 +327,13 @@ class WatchChannel(metaclass=CogABCMeta):
def cog_unload(self) -> None:
"""Takes care of unloading the cog and canceling the consumption task."""
- self.log.trace(f"Unloading the cog")
+ self.log.trace("Unloading the cog")
if self._consume_task and not self._consume_task.done():
self._consume_task.cancel()
try:
self._consume_task.result()
except asyncio.CancelledError as e:
self.log.exception(
- f"The consume task was canceled. Messages may be lost.",
+ "The consume task was canceled. Messages may be lost.",
exc_info=e
)
diff --git a/bot/constants.py b/bot/constants.py
index fd280e9de..b31a9c99e 100644
--- a/bot/constants.py
+++ b/bot/constants.py
@@ -15,7 +15,7 @@ import os
from collections.abc import Mapping
from enum import Enum
from pathlib import Path
-from typing import Dict, List
+from typing import Dict, List, Optional
import yaml
@@ -198,7 +198,18 @@ class Bot(metaclass=YAMLGetter):
prefix: str
token: str
- sentry_dsn: str
+ sentry_dsn: Optional[str]
+
+
+class Redis(metaclass=YAMLGetter):
+ section = "bot"
+ subsection = "redis"
+
+ host: str
+ port: int
+ password: Optional[str]
+ use_fakeredis: bool # If this is True, Bot will use fakeredis.aioredis
+
class Filter(metaclass=YAMLGetter):
section = "filter"
@@ -365,6 +376,7 @@ class Categories(metaclass=YAMLGetter):
help_available: int
help_in_use: int
help_dormant: int
+ modmail: int
class Channels(metaclass=YAMLGetter):
@@ -384,6 +396,7 @@ class Channels(metaclass=YAMLGetter):
esoteric: int
helpers: int
how_to_get_help: int
+ incidents: int
message_log: int
meta: int
mod_alerts: int
@@ -448,7 +461,7 @@ class Guild(metaclass=YAMLGetter):
class Keys(metaclass=YAMLGetter):
section = "keys"
- site_api: str
+ site_api: Optional[str]
class URLs(metaclass=YAMLGetter):
@@ -491,8 +504,8 @@ class Reddit(metaclass=YAMLGetter):
section = "reddit"
subreddits: list
- client_id: str
- secret: str
+ client_id: Optional[str]
+ secret: Optional[str]
class Wolfram(metaclass=YAMLGetter):
@@ -500,7 +513,7 @@ class Wolfram(metaclass=YAMLGetter):
user_limit_day: int
guild_limit_day: int
- key: str
+ key: Optional[str]
class AntiSpam(metaclass=YAMLGetter):
@@ -541,6 +554,7 @@ class HelpChannels(metaclass=YAMLGetter):
claim_minutes: int
cmd_whitelist: List[int]
idle_minutes: int
+ deleted_idle_minutes: int
max_available: int
max_total_channels: int
name_prefix: str
@@ -609,13 +623,10 @@ PROJECT_ROOT = os.path.abspath(os.path.join(BOT_DIR, os.pardir))
MODERATION_ROLES = Guild.moderation_roles
STAFF_ROLES = Guild.staff_roles
-# Roles combinations
+# Channel combinations
STAFF_CHANNELS = Guild.staff_channels
-
-# Default Channel combinations
MODERATION_CHANNELS = Guild.moderation_channels
-
# Bot replies
NEGATIVE_REPLIES = [
"Noooooo!!",
diff --git a/bot/decorators.py b/bot/decorators.py
index 306f0830c..500197c89 100644
--- a/bot/decorators.py
+++ b/bot/decorators.py
@@ -9,37 +9,21 @@ from weakref import WeakValueDictionary
from discord import Colour, Embed, Member
from discord.errors import NotFound
from discord.ext import commands
-from discord.ext.commands import CheckFailure, Cog, Context
+from discord.ext.commands import Cog, Context
from bot.constants import Channels, ERROR_REPLIES, RedirectOutput
-from bot.utils.checks import with_role_check, without_role_check
+from bot.utils.checks import in_whitelist_check, with_role_check, without_role_check
log = logging.getLogger(__name__)
-class InWhitelistCheckFailure(CheckFailure):
- """Raised when the `in_whitelist` check fails."""
-
- def __init__(self, redirect_channel: Optional[int]) -> None:
- self.redirect_channel = redirect_channel
-
- if redirect_channel:
- redirect_message = f" here. Please use the <#{redirect_channel}> channel instead"
- else:
- redirect_message = ""
-
- error_message = f"You are not allowed to use that command{redirect_message}."
-
- super().__init__(error_message)
-
-
def in_whitelist(
*,
channels: Container[int] = (),
categories: Container[int] = (),
roles: Container[int] = (),
redirect: Optional[int] = Channels.bot_commands,
-
+ fail_silently: bool = False,
) -> Callable:
"""
Check if a command was issued in a whitelisted context.
@@ -54,36 +38,9 @@ def in_whitelist(
redirected to the `redirect` channel that was passed (default: #bot-commands) or simply
told that they're not allowed to use this particular command (if `None` was passed).
"""
- if redirect and redirect not in channels:
- # It does not make sense for the channel whitelist to not contain the redirection
- # channel (if applicable). That's why we add the redirection channel to the `channels`
- # container if it's not already in it. As we allow any container type to be passed,
- # we first create a tuple in order to safely add the redirection channel.
- #
- # Note: It's possible for the redirect channel to be in a whitelisted category, but
- # there's no easy way to check that and as a channel can easily be moved in and out of
- # categories, it's probably not wise to rely on its category in any case.
- channels = tuple(channels) + (redirect,)
-
def predicate(ctx: Context) -> bool:
- """Check if a command was issued in a whitelisted context."""
- if channels and ctx.channel.id in channels:
- log.trace(f"{ctx.author} may use the `{ctx.command.name}` command as they are in a whitelisted channel.")
- return True
-
- # Only check the category id if we have a category whitelist and the channel has a `category_id`
- if categories and hasattr(ctx.channel, "category_id") and ctx.channel.category_id in categories:
- log.trace(f"{ctx.author} may use the `{ctx.command.name}` command as they are in a whitelisted category.")
- return True
-
- # Only check the roles whitelist if we have one and ensure the author's roles attribute returns
- # an iterable to prevent breakage in DM channels (for if we ever decide to enable commands there).
- if roles and any(r.id in roles for r in getattr(ctx.author, "roles", ())):
- log.trace(f"{ctx.author} may use the `{ctx.command.name}` command as they have a whitelisted role.")
- return True
-
- log.trace(f"{ctx.author} may not use the `{ctx.command.name}` command within this context.")
- raise InWhitelistCheckFailure(redirect)
+ """Check if command was issued in a whitelisted context."""
+ return in_whitelist_check(ctx, channels, categories, roles, redirect, fail_silently)
return commands.check(predicate)
@@ -121,7 +78,7 @@ def locked() -> Callable:
embed = Embed()
embed.colour = Colour.red()
- log.debug(f"User tried to invoke a locked command.")
+ log.debug("User tried to invoke a locked command.")
embed.description = (
"You're already using this command. Please wait until it is done before you use it again."
)
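
With the predicate reduced to a call into bot.utils.checks, the decorator keeps its old interface plus the new fail_silently flag. A hypothetical usage follows (the cog, command name and ids are placeholders; only the decorator and its keyword arguments come from this diff). When fail_silently is True the check simply returns False, so no InWhitelistCheckFailure is raised and no redirect message is produced; discord.py still blocks the command with its generic CheckFailure, which the "secret commands" comment in checks.py implies is meant to go unanswered:

    from discord.ext import commands

    from bot.decorators import in_whitelist


    class ExampleCog(commands.Cog):
        @commands.command(name="example")
        @in_whitelist(channels=(1234567890,), roles=(9876543210,), fail_silently=True)
        async def example(self, ctx: commands.Context) -> None:
            """Placeholder command; a failed check here produces no user-facing feedback."""
            await ctx.send("You may use this command here.")
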
diff --git a/bot/pagination.py b/bot/pagination.py
index b0c4b70e2..2aa3590ba 100644
--- a/bot/pagination.py
+++ b/bot/pagination.py
@@ -147,7 +147,7 @@ class LinePaginator(Paginator):
if not lines:
if exception_on_empty_embed:
- log.exception(f"Pagination asked for empty lines iterable")
+ log.exception("Pagination asked for empty lines iterable")
raise EmptyPaginatorEmbed("No lines to paginate")
log.debug("No lines to add to paginator, adding '(nothing to display)' message")
@@ -357,7 +357,7 @@ class ImagePaginator(Paginator):
if not pages:
if exception_on_empty_embed:
- log.exception(f"Pagination asked for empty image list")
+ log.exception("Pagination asked for empty image list")
raise EmptyPaginatorEmbed("No images to paginate")
log.debug("No images to add to paginator, adding '(no images to display)' message")
diff --git a/bot/utils/__init__.py b/bot/utils/__init__.py
index 9b32e515d..c5a12d5e3 100644
--- a/bot/utils/__init__.py
+++ b/bot/utils/__init__.py
@@ -2,6 +2,10 @@ from abc import ABCMeta
from discord.ext.commands import CogMeta
+from bot.utils.redis_cache import RedisCache
+
+__all__ = ['RedisCache', 'CogABCMeta']
+
class CogABCMeta(CogMeta, ABCMeta):
"""Metaclass for ABCs meant to be implemented as Cogs."""
diff --git a/bot/utils/checks.py b/bot/utils/checks.py
index db56c347c..f0ef36302 100644
--- a/bot/utils/checks.py
+++ b/bot/utils/checks.py
@@ -1,12 +1,94 @@
import datetime
import logging
-from typing import Callable, Iterable
+from typing import Callable, Container, Iterable, Optional
-from discord.ext.commands import BucketType, Cog, Command, CommandOnCooldown, Context, Cooldown, CooldownMapping
+from discord.ext.commands import (
+ BucketType,
+ CheckFailure,
+ Cog,
+ Command,
+ CommandOnCooldown,
+ Context,
+ Cooldown,
+ CooldownMapping,
+)
+
+from bot import constants
log = logging.getLogger(__name__)
+class InWhitelistCheckFailure(CheckFailure):
+ """Raised when the `in_whitelist` check fails."""
+
+ def __init__(self, redirect_channel: Optional[int]) -> None:
+ self.redirect_channel = redirect_channel
+
+ if redirect_channel:
+ redirect_message = f" here. Please use the <#{redirect_channel}> channel instead"
+ else:
+ redirect_message = ""
+
+ error_message = f"You are not allowed to use that command{redirect_message}."
+
+ super().__init__(error_message)
+
+
+def in_whitelist_check(
+ ctx: Context,
+ channels: Container[int] = (),
+ categories: Container[int] = (),
+ roles: Container[int] = (),
+ redirect: Optional[int] = constants.Channels.bot_commands,
+ fail_silently: bool = False,
+) -> bool:
+ """
+ Check if a command was issued in a whitelisted context.
+
+ The whitelists that can be provided are:
+
+ - `channels`: a container with channel ids for whitelisted channels
+ - `categories`: a container with category ids for whitelisted categories
+ - `roles`: a container with role ids for whitelisted roles
+
+ If the command was invoked in a context that was not whitelisted, the member is either
+ redirected to the `redirect` channel that was passed (default: #bot-commands) or simply
+ told that they're not allowed to use this particular command (if `None` was passed).
+ """
+ if redirect and redirect not in channels:
+ # It does not make sense for the channel whitelist to not contain the redirection
+ # channel (if applicable). That's why we add the redirection channel to the `channels`
+ # container if it's not already in it. As we allow any container type to be passed,
+ # we first create a tuple in order to safely add the redirection channel.
+ #
+ # Note: It's possible for the redirect channel to be in a whitelisted category, but
+ # there's no easy way to check that and as a channel can easily be moved in and out of
+ # categories, it's probably not wise to rely on its category in any case.
+ channels = tuple(channels) + (redirect,)
+
+ if channels and ctx.channel.id in channels:
+ log.trace(f"{ctx.author} may use the `{ctx.command.name}` command as they are in a whitelisted channel.")
+ return True
+
+ # Only check the category id if we have a category whitelist and the channel has a `category_id`
+ if categories and hasattr(ctx.channel, "category_id") and ctx.channel.category_id in categories:
+ log.trace(f"{ctx.author} may use the `{ctx.command.name}` command as they are in a whitelisted category.")
+ return True
+
+ # Only check the roles whitelist if we have one and ensure the author's roles attribute returns
+ # an iterable to prevent breakage in DM channels (for if we ever decide to enable commands there).
+ if roles and any(r.id in roles for r in getattr(ctx.author, "roles", ())):
+ log.trace(f"{ctx.author} may use the `{ctx.command.name}` command as they have a whitelisted role.")
+ return True
+
+ log.trace(f"{ctx.author} may not use the `{ctx.command.name}` command within this context.")
+
+ # Some commands are secret, and should produce no feedback at all.
+ if not fail_silently:
+ raise InWhitelistCheckFailure(redirect)
+ return False
+
+
def with_role_check(ctx: Context, *role_ids: int) -> bool:
"""Returns True if the user has any one of the roles in role_ids."""
if not ctx.guild: # Return False in a DM
@@ -38,14 +120,6 @@ def without_role_check(ctx: Context, *role_ids: int) -> bool:
return check
-def in_channel_check(ctx: Context, *channel_ids: int) -> bool:
- """Checks if the command was executed inside the list of specified channels."""
- check = ctx.channel.id in channel_ids
- log.trace(f"{ctx.author} tried to call the '{ctx.command.name}' command. "
- f"The result of the in_channel check was {check}.")
- return check
-
-
def cooldown_with_role_bypass(rate: int, per: float, type: BucketType = BucketType.default, *,
bypass_roles: Iterable[int]) -> Callable:
"""
diff --git a/bot/utils/messages.py b/bot/utils/messages.py
index e969ee590..de8e186f3 100644
--- a/bot/utils/messages.py
+++ b/bot/utils/messages.py
@@ -100,7 +100,7 @@ async def send_attachments(
log.warning(f"{failure_msg} with status {e.status}.")
if link_large and large:
- desc = f"\n".join(f"[{attachment.filename}]({attachment.url})" for attachment in large)
+ desc = "\n".join(f"[{attachment.filename}]({attachment.url})" for attachment in large)
embed = Embed(description=desc)
embed.set_footer(text="Attachments exceed upload size limit.")
diff --git a/bot/utils/redis_cache.py b/bot/utils/redis_cache.py
new file mode 100644
index 000000000..de80cee84
--- /dev/null
+++ b/bot/utils/redis_cache.py
@@ -0,0 +1,409 @@
+from __future__ import annotations
+
+import asyncio
+import logging
+from functools import partialmethod
+from typing import Any, Dict, ItemsView, Optional, Tuple, Union
+
+from bot.bot import Bot
+
+log = logging.getLogger(__name__)
+
+# Type aliases
+RedisKeyType = Union[str, int]
+RedisValueType = Union[str, int, float]
+RedisKeyOrValue = Union[RedisKeyType, RedisValueType]
+
+# Prefix tuples
+_PrefixTuple = Tuple[Tuple[str, Any], ...]
+_VALUE_PREFIXES = (
+ ("f|", float),
+ ("i|", int),
+ ("s|", str),
+)
+_KEY_PREFIXES = (
+ ("i|", int),
+ ("s|", str),
+)
+
+
+class NoBotInstanceError(RuntimeError):
+ """Raised when RedisCache is created without an available bot instance on the owner class."""
+
+
+class NoNamespaceError(RuntimeError):
+ """Raised when RedisCache has no namespace, for example if it is not assigned to a class attribute."""
+
+
+class NoParentInstanceError(RuntimeError):
+ """Raised when the parent instance is available, for example if called by accessing the parent class directly."""
+
+
+class RedisCache:
+ """
+ A simplified interface for a Redis connection.
+
+ We implement several convenience methods that are fairly similar to how a dict
+ behaves, and should be familiar to Python users. The biggest difference is that
+ all the public methods in this class are coroutines, and must be awaited.
+
+ Because of limitations in Redis, this cache will only accept strings and integers
+ for keys, and strings, integers and floats for values.
+
+ Please note that this class MUST be created as a class attribute, and that that class
+ must also contain an attribute with an instance of our Bot. See `__get__` and `__set_name__`
+ for more information about how this works.
+
+ Simple example for how to use this:
+
+ class SomeCog(Cog):
+ # To initialize a valid RedisCache, just add it as a class attribute here.
+ # Do not add it to the __init__ method or anywhere else, it MUST be a class
+ # attribute. Do not pass any parameters.
+ cache = RedisCache()
+
+ async def my_method(self):
+
+ # Now we're ready to use the RedisCache.
+ # One thing to note here is that this will not work unless
+ # we access self.cache through an _instance_ of this class.
+ #
+ # For example, attempting to use SomeCog.cache will _not_ work,
+ # you _must_ instantiate the class first and use that instance.
+ #
+ # Now we can store some stuff in the cache just by doing this.
+ # This data will persist through restarts!
+ await self.cache.set("key", "value")
+
+ # To get the data, simply do this.
+ value = await self.cache.get("key")
+
+ # Other methods work more or less like a dictionary.
+ # Checking if something is in the cache
+ await self.cache.contains("key")
+
+ # Iterating the cache: .items() is a coroutine that returns an ItemsView,
+ # so await it and iterate normally.
+ for key, value in await self.cache.items():
+ print(value)
+
+ # We can even iterate in a comprehension!
+ consumed = [value for key, value in await self.cache.items()]
+ """
+
+ _namespaces = []
+
+ def __init__(self) -> None:
+ """Initialize the RedisCache."""
+ self._namespace = None
+ self.bot = None
+ self._increment_lock = None
+
+ def _set_namespace(self, namespace: str) -> None:
+ """Try to set the namespace, but do not permit collisions."""
+ # We need a unique namespace, to prevent collisions. This loop
+ # will try appending underscores to the end of the namespace until
+ # it finds one that is unique.
+ #
+ # For example, if `john` and `john_` are both taken, the namespace will
+ # be `john__` at the end of this loop.
+ while namespace in self._namespaces:
+ namespace += "_"
+
+ log.trace(f"RedisCache setting namespace to {self._namespace}")
+ self._namespaces.append(namespace)
+ self._namespace = namespace
+
+ @staticmethod
+ def _to_typestring(key_or_value: RedisKeyOrValue, prefixes: _PrefixTuple) -> str:
+ """Turn a valid Redis type into a typestring."""
+ for prefix, _type in prefixes:
+ if isinstance(key_or_value, _type):
+ return f"{prefix}{key_or_value}"
+ raise TypeError(f"RedisCache._to_typestring only supports the following: {prefixes}.")
+
+ @staticmethod
+ def _from_typestring(key_or_value: Union[bytes, str], prefixes: _PrefixTuple) -> RedisKeyOrValue:
+ """Deserialize a typestring into a valid Redis type."""
+ # Stuff that comes out of Redis will be bytestrings, so let's decode those.
+ if isinstance(key_or_value, bytes):
+ key_or_value = key_or_value.decode('utf-8')
+
+ # Now we convert our unicode string back into the type it originally was.
+ for prefix, _type in prefixes:
+ if key_or_value.startswith(prefix):
+ return _type(key_or_value[len(prefix):])
+ raise TypeError(f"RedisCache._from_typestring only supports the following: {prefixes}.")
+
+ # Add some nice partials to call our generic typestring converters.
+ # These are basically methods that will fill in some of the parameters for you, so that
+ # any call to _key_to_typestring will be like calling _to_typestring with the two parameters
+ # at `prefixes` and `types_string` pre-filled.
+ #
+ # See https://docs.python.org/3/library/functools.html#functools.partialmethod
+ _key_to_typestring = partialmethod(_to_typestring, prefixes=_KEY_PREFIXES)
+ _value_to_typestring = partialmethod(_to_typestring, prefixes=_VALUE_PREFIXES)
+ _key_from_typestring = partialmethod(_from_typestring, prefixes=_KEY_PREFIXES)
+ _value_from_typestring = partialmethod(_from_typestring, prefixes=_VALUE_PREFIXES)
+
+ def _dict_from_typestring(self, dictionary: Dict) -> Dict:
+ """Turns all contents of a dict into valid Redis types."""
+ return {self._key_from_typestring(key): self._value_from_typestring(value) for key, value in dictionary.items()}
+
+ def _dict_to_typestring(self, dictionary: Dict) -> Dict:
+ """Turns all contents of a dict into typestrings."""
+ return {self._key_to_typestring(key): self._value_to_typestring(value) for key, value in dictionary.items()}
+
+ async def _validate_cache(self) -> None:
+ """Validate that the RedisCache is ready to be used."""
+ if self._namespace is None:
+ error_message = (
+ "Critical error: RedisCache has no namespace. "
+ "This object must be initialized as a class attribute."
+ )
+ log.error(error_message)
+ raise NoNamespaceError(error_message)
+
+ if self.bot is None:
+ error_message = (
+ "Critical error: RedisCache has no `Bot` instance. "
+ "This happens when the class RedisCache was created in doesn't "
+ "have a Bot instance. Please make sure that you're instantiating "
+ "the RedisCache inside a class that has a Bot instance attribute."
+ )
+ log.error(error_message)
+ raise NoBotInstanceError(error_message)
+
+ if not self.bot.redis_closed:
+ await self.bot.redis_ready.wait()
+
+ def __set_name__(self, owner: Any, attribute_name: str) -> None:
+ """
+ Set the namespace to Class.attribute_name.
+
+ Called automatically when this class is constructed inside a class as an attribute.
+
+ This class MUST be created as a class attribute in a class, otherwise it will raise
+ exceptions whenever a method is used. This is because it uses this method to create
+ a namespace like `MyCog.my_class_attribute` which is used as a hash name when we store
+ stuff in Redis, to prevent collisions.
+ """
+ self._set_namespace(f"{owner.__name__}.{attribute_name}")
+
+ def __get__(self, instance: RedisCache, owner: Any) -> RedisCache:
+ """
+ This is called whenever the RedisCache class attribute is accessed.
+
+ The class this object is instantiated in must contain an attribute with an
+ instance of Bot. This is because Bot contains our redis_session, which is
+ the mechanism by which we will communicate with the Redis server.
+
+ Any attempt to use RedisCache in a class that does not have a Bot instance
+ will fail. It is mostly intended to be used inside of a Cog, although theoretically
+ it should work in any class that has a Bot instance.
+ """
+ if self.bot:
+ return self
+
+ if self._namespace is None:
+ error_message = "RedisCache must be a class attribute."
+ log.error(error_message)
+ raise NoNamespaceError(error_message)
+
+ if instance is None:
+ error_message = (
+ "You must access the RedisCache instance through the cog instance "
+ "before accessing it using the cog's class object."
+ )
+ log.error(error_message)
+ raise NoParentInstanceError(error_message)
+
+ for attribute in vars(instance).values():
+ if isinstance(attribute, Bot):
+ self.bot = attribute
+ self._redis = self.bot.redis_session
+ return self
+ else:
+ error_message = (
+ "Critical error: RedisCache has no `Bot` instance. "
+ "This happens when the class RedisCache was created in doesn't "
+ "have a Bot instance. Please make sure that you're instantiating "
+ "the RedisCache inside a class that has a Bot instance attribute."
+ )
+ log.error(error_message)
+ raise NoBotInstanceError(error_message)
+
+ def __repr__(self) -> str:
+ """Return a beautiful representation of this object instance."""
+ return f"RedisCache(namespace={self._namespace!r})"
+
+ async def set(self, key: RedisKeyType, value: RedisValueType) -> None:
+ """Store an item in the Redis cache."""
+ await self._validate_cache()
+
+ # Convert to a typestring and then set it
+ key = self._key_to_typestring(key)
+ value = self._value_to_typestring(value)
+
+ log.trace(f"Setting {key} to {value}.")
+ await self._redis.hset(self._namespace, key, value)
+
+ async def get(self, key: RedisKeyType, default: Optional[RedisValueType] = None) -> Optional[RedisValueType]:
+ """Get an item from the Redis cache."""
+ await self._validate_cache()
+ key = self._key_to_typestring(key)
+
+ log.trace(f"Attempting to retrieve {key}.")
+ value = await self._redis.hget(self._namespace, key)
+
+ if value is None:
+ log.trace(f"Value not found, returning default value {default}")
+ return default
+ else:
+ value = self._value_from_typestring(value)
+ log.trace(f"Value found, returning value {value}")
+ return value
+
+ async def delete(self, key: RedisKeyType) -> None:
+ """
+ Delete an item from the Redis cache.
+
+ If we try to delete a key that does not exist, it will simply be ignored.
+
+ See https://redis.io/commands/hdel for more info on how this works.
+ """
+ await self._validate_cache()
+ key = self._key_to_typestring(key)
+
+ log.trace(f"Attempting to delete {key}.")
+ return await self._redis.hdel(self._namespace, key)
+
+ async def contains(self, key: RedisKeyType) -> bool:
+ """
+ Check if a key exists in the Redis cache.
+
+ Return True if the key exists, otherwise False.
+ """
+ await self._validate_cache()
+ key = self._key_to_typestring(key)
+ exists = await self._redis.hexists(self._namespace, key)
+
+ log.trace(f"Testing if {key} exists in the RedisCache - Result is {exists}")
+ return exists
+
+ async def items(self) -> ItemsView:
+ """
+ Fetch all the key/value pairs in the cache.
+
+ Returns a normal ItemsView, like you would get from dict.items().
+
+ Keep in mind that these items are just a _copy_ of the data in the
+ RedisCache - any changes you make to them will not be reflected
+ in the RedisCache itself. If you want to change these, you need
+ to make a .set call.
+
+ Example:
+ items = await my_cache.items()
+ for key, value in items:
+ # Iterate like a normal dictionary
+ """
+ await self._validate_cache()
+ items = self._dict_from_typestring(
+ await self._redis.hgetall(self._namespace)
+ ).items()
+
+ log.trace(f"Retrieving all key/value pairs from cache, total of {len(items)} items.")
+ return items
+
+ async def length(self) -> int:
+ """Return the number of items in the Redis cache."""
+ await self._validate_cache()
+ number_of_items = await self._redis.hlen(self._namespace)
+ log.trace(f"Returning length. Result is {number_of_items}.")
+ return number_of_items
+
+ async def to_dict(self) -> Dict:
+ """Convert to dict and return."""
+ return {key: value for key, value in await self.items()}
+
+ async def clear(self) -> None:
+ """Deletes the entire hash from the Redis cache."""
+ await self._validate_cache()
+ log.trace("Clearing the cache of all key/value pairs.")
+ await self._redis.delete(self._namespace)
+
+ async def pop(self, key: RedisKeyType, default: Optional[RedisValueType] = None) -> RedisValueType:
+ """Get the item, remove it from the cache, and provide a default if not found."""
+ log.trace(f"Attempting to pop {key}.")
+ value = await self.get(key, default)
+
+ log.trace(
+ f"Attempting to delete item with key '{key}' from the cache. "
+ "If this key doesn't exist, nothing will happen."
+ )
+ await self.delete(key)
+
+ return value
+
+ async def update(self, items: Dict[RedisKeyType, RedisValueType]) -> None:
+ """
+ Update the Redis cache with multiple values.
+
+ This works exactly like dict.update from a normal dictionary. You pass
+ a dictionary with one or more key/value pairs into this method. If the keys
+ do not exist in the RedisCache, they are created. If they do exist, the values
+ are updated with the new ones from `items`.
+
+ Please note that keys and the values in the `items` dictionary
+ must consist of valid RedisKeyTypes and RedisValueTypes.
+ """
+ await self._validate_cache()
+ log.trace(f"Updating the cache with the following items:\n{items}")
+ await self._redis.hmset_dict(self._namespace, self._dict_to_typestring(items))
+
+ async def increment(self, key: RedisKeyType, amount: Union[int, float] = 1) -> None:
+ """
+ Increment the value by `amount`.
+
+ This works for both floats and ints, but will raise a TypeError
+ if you try to do it for any other type of value.
+
+ This also supports negative amounts, although it would provide better
+ readability to use .decrement() for that.
+ """
+ log.trace(f"Attempting to increment/decrement the value with the key {key} by {amount}.")
+
+ # We initialize the lock here, because we need to ensure we get it
+ # running on the same loop as the calling coroutine.
+ #
+ # If we initialized the lock in the __init__, the loop that the coroutine this method
+ # would be called from might not exist yet, and so the lock would be on a different
+ # loop, which would raise RuntimeErrors.
+ if self._increment_lock is None:
+ self._increment_lock = asyncio.Lock()
+
+ # Since this has several API calls, we need a lock to prevent race conditions
+ async with self._increment_lock:
+ value = await self.get(key)
+
+ # Can't increment a non-existing value
+ if value is None:
+ error_message = "The provided key does not exist!"
+ log.error(error_message)
+ raise KeyError(error_message)
+
+ # If it does exist, and it's an int or a float, increment and set it.
+ if isinstance(value, int) or isinstance(value, float):
+ value += amount
+ await self.set(key, value)
+ else:
+ error_message = "You may only increment or decrement values that are integers or floats."
+ log.error(error_message)
+ raise TypeError(error_message)
+
+ async def decrement(self, key: RedisKeyType, amount: Union[int, float] = 1) -> None:
+ """
+ Decrement the value by `amount`.
+
+ Basically just does the opposite of .increment.
+ """
+ await self.increment(key, -amount)
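
The typestring prefixes defined at the top of this module are how the cache squeezes typed keys and values through Redis, which only stores byte strings inside a hash. A quick round trip through the private helpers, assuming the module exactly as added above (these helpers need neither a namespace nor a Bot instance):

    from bot.utils.redis_cache import RedisCache

    cache = RedisCache()

    print(cache._value_to_typestring(42))       # i|42
    print(cache._value_to_typestring(3.14))     # f|3.14
    print(cache._value_to_typestring("hello"))  # s|hello

    # Redis hands back bytes; _from_typestring decodes them and restores the original type.
    print(cache._value_from_typestring(b"f|3.14"))  # 3.14 (a float again)
    print(cache._key_from_typestring("i|42"))       # 42 (an int; keys only allow str and int)

Anything that does not match a known prefix raises TypeError, which is what test_typestring_conversion in tests/bot/utils/test_redis_cache.py exercises further down in this diff.
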
diff --git a/config-default.yml b/config-default.yml
index c0b5b062f..2c85f5ef3 100644
--- a/config-default.yml
+++ b/config-default.yml
@@ -3,6 +3,12 @@ bot:
token: !ENV "BOT_TOKEN"
sentry_dsn: !ENV "BOT_SENTRY_DSN"
+ redis:
+ host: "redis"
+ port: 6379
+ password: !ENV "REDIS_PASSWORD"
+ use_fakeredis: false
+
stats:
statsd_host: "graphite"
presence_update_timeout: 300
@@ -118,6 +124,7 @@ guild:
help_available: 691405807388196926
help_in_use: 696958401460043776
help_dormant: 691405908919451718
+ modmail: 714494672835444826
channels:
announcements: 354619224620138496
@@ -164,6 +171,7 @@ guild:
mod_spam: &MOD_SPAM 620607373828030464
organisation: &ORGANISATION 551789653284356126
staff_lounge: &STAFF_LOUNGE 464905259261755392
+ incidents: 714214212200562749
# Voice
admins_voice: &ADMINS_VOICE 500734494840717332
@@ -288,6 +296,7 @@ filter:
- 81384788765712384 # Discord API
- 613425648685547541 # Discord Developers
- 185590609631903755 # Blender Hub
+ - 420324994703163402 # /r/FlutterDev
domain_blacklist:
- pornhub.com
@@ -318,6 +327,7 @@ filter:
- poweredbysecurity.online
- ssteam.site
- steamwalletgift.com
+ - discord.gift
word_watchlist:
- goo+ks*
@@ -531,6 +541,10 @@ help_channels:
# Allowed duration of inactivity before making a channel dormant
idle_minutes: 30
+ # Allowed duration of inactivity, when the question message has been deleted
+ # and no one else has sent a message, before making the channel dormant.
+ deleted_idle_minutes: 5
+
# Maximum number of channels to put in the available category
max_available: 2
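
How the new redis block above turns into an actual connection happens in bot/bot.py, which this part of the diff does not show. The sketch below is only a guess at the wiring implied by the use_fakeredis flag, the aioredis/fakeredis pins in the Pipfile, and the fakeredis.aioredis.create_redis_pool() call used by the tests later in this diff:

    import aioredis
    import fakeredis.aioredis

    from bot import constants


    async def create_redis_session():
        """Open either a real or an in-memory Redis pool, depending on config (illustrative)."""
        if constants.Redis.use_fakeredis:
            # Local development and tests: no Redis server required.
            return await fakeredis.aioredis.create_redis_pool()

        return await aioredis.create_redis_pool(
            (constants.Redis.host, constants.Redis.port),
            password=constants.Redis.password,
        )
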
diff --git a/docker-compose.yml b/docker-compose.yml
index 11deceae8..9884e35f0 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -12,6 +12,11 @@ services:
POSTGRES_PASSWORD: pysite
POSTGRES_USER: pysite
+ redis:
+ image: redis:5.0.9
+ ports:
+ - "127.0.0.1:6379:6379"
+
web:
image: pythondiscord/site:latest
command: ["run", "--debug"]
@@ -41,6 +46,7 @@ services:
tty: true
depends_on:
- web
+ - redis
environment:
BOT_TOKEN: ${BOT_TOKEN}
BOT_API_KEY: badbot13m0n8f570f942013fc818f234916ca531
diff --git a/tests/bot/cogs/test_antimalware.py b/tests/bot/cogs/test_antimalware.py
new file mode 100644
index 000000000..f219fc1ba
--- /dev/null
+++ b/tests/bot/cogs/test_antimalware.py
@@ -0,0 +1,159 @@
+import unittest
+from unittest.mock import AsyncMock, Mock, patch
+
+from discord import NotFound
+
+from bot.cogs import antimalware
+from bot.constants import AntiMalware as AntiMalwareConfig, Channels, STAFF_ROLES
+from tests.helpers import MockAttachment, MockBot, MockMessage, MockRole
+
+MODULE = "bot.cogs.antimalware"
+
+
+@patch(f"{MODULE}.AntiMalwareConfig.whitelist", new=[".first", ".second", ".third"])
+class AntiMalwareCogTests(unittest.IsolatedAsyncioTestCase):
+ """Test the AntiMalware cog."""
+
+ def setUp(self):
+ """Sets up fresh objects for each test."""
+ self.bot = MockBot()
+ self.cog = antimalware.AntiMalware(self.bot)
+ self.message = MockMessage()
+
+ async def test_message_with_allowed_attachment(self):
+ """Messages with allowed extensions should not be deleted"""
+ attachment = MockAttachment(filename=f"python{AntiMalwareConfig.whitelist[0]}")
+ self.message.attachments = [attachment]
+
+ await self.cog.on_message(self.message)
+ self.message.delete.assert_not_called()
+
+ async def test_message_without_attachment(self):
+ """Messages without attachments should result in no action."""
+ await self.cog.on_message(self.message)
+ self.message.delete.assert_not_called()
+
+ async def test_direct_message_with_attachment(self):
+ """Direct messages should have no action taken."""
+ attachment = MockAttachment(filename="python.disallowed")
+ self.message.attachments = [attachment]
+ self.message.guild = None
+
+ await self.cog.on_message(self.message)
+
+ self.message.delete.assert_not_called()
+
+ async def test_message_with_illegal_extension_gets_deleted(self):
+ """A message containing an illegal extension should send an embed."""
+ attachment = MockAttachment(filename="python.disallowed")
+ self.message.attachments = [attachment]
+
+ await self.cog.on_message(self.message)
+
+ self.message.delete.assert_called_once()
+
+ async def test_message_sent_by_staff(self):
+ """A message sent by a member of staff should be ignored."""
+ staff_role = MockRole(id=STAFF_ROLES[0])
+ self.message.author.roles.append(staff_role)
+ attachment = MockAttachment(filename="python.disallowed")
+ self.message.attachments = [attachment]
+
+ await self.cog.on_message(self.message)
+
+ self.message.delete.assert_not_called()
+
+ async def test_python_file_redirect_embed_description(self):
+ """A message containing a .py file should result in an embed redirecting the user to our paste site"""
+ attachment = MockAttachment(filename="python.py")
+ self.message.attachments = [attachment]
+ self.message.channel.send = AsyncMock()
+
+ await self.cog.on_message(self.message)
+ self.message.channel.send.assert_called_once()
+ args, kwargs = self.message.channel.send.call_args
+ embed = kwargs.pop("embed")
+
+ self.assertEqual(embed.description, antimalware.PY_EMBED_DESCRIPTION)
+
+ async def test_txt_file_redirect_embed_description(self):
+ """A message containing a .txt file should result in the correct embed."""
+ attachment = MockAttachment(filename="python.txt")
+ self.message.attachments = [attachment]
+ self.message.channel.send = AsyncMock()
+ antimalware.TXT_EMBED_DESCRIPTION = Mock()
+ antimalware.TXT_EMBED_DESCRIPTION.format.return_value = "test"
+
+ await self.cog.on_message(self.message)
+ self.message.channel.send.assert_called_once()
+ args, kwargs = self.message.channel.send.call_args
+ embed = kwargs.pop("embed")
+ cmd_channel = self.bot.get_channel(Channels.bot_commands)
+
+ self.assertEqual(embed.description, antimalware.TXT_EMBED_DESCRIPTION.format.return_value)
+ antimalware.TXT_EMBED_DESCRIPTION.format.assert_called_with(cmd_channel_mention=cmd_channel.mention)
+
+ async def test_other_disallowed_extension_embed_description(self):
+ """Test the description for a non .py/.txt disallowed extension."""
+ attachment = MockAttachment(filename="python.disallowed")
+ self.message.attachments = [attachment]
+ self.message.channel.send = AsyncMock()
+ antimalware.DISALLOWED_EMBED_DESCRIPTION = Mock()
+ antimalware.DISALLOWED_EMBED_DESCRIPTION.format.return_value = "test"
+
+ await self.cog.on_message(self.message)
+ self.message.channel.send.assert_called_once()
+ args, kwargs = self.message.channel.send.call_args
+ embed = kwargs.pop("embed")
+ meta_channel = self.bot.get_channel(Channels.meta)
+
+ self.assertEqual(embed.description, antimalware.DISALLOWED_EMBED_DESCRIPTION.format.return_value)
+ antimalware.DISALLOWED_EMBED_DESCRIPTION.format.assert_called_with(
+ blocked_extensions_str=".disallowed",
+ meta_channel_mention=meta_channel.mention
+ )
+
+ async def test_removing_deleted_message_logs(self):
+ """Removing an already deleted message logs the correct message"""
+ attachment = MockAttachment(filename="python.disallowed")
+ self.message.attachments = [attachment]
+ self.message.delete = AsyncMock(side_effect=NotFound(response=Mock(status=""), message=""))
+
+ with self.assertLogs(logger=antimalware.log, level="INFO"):
+ await self.cog.on_message(self.message)
+ self.message.delete.assert_called_once()
+
+ async def test_message_with_illegal_attachment_logs(self):
+ """Deleting a message with an illegal attachment should result in a log."""
+ attachment = MockAttachment(filename="python.disallowed")
+ self.message.attachments = [attachment]
+
+ with self.assertLogs(logger=antimalware.log, level="INFO"):
+ await self.cog.on_message(self.message)
+
+ async def test_get_disallowed_extensions(self):
+ """The return value should include all non-whitelisted extensions."""
+ test_values = (
+ ([], []),
+ (AntiMalwareConfig.whitelist, []),
+ ([".first"], []),
+ ([".first", ".disallowed"], [".disallowed"]),
+ ([".disallowed"], [".disallowed"]),
+ ([".disallowed", ".illegal"], [".disallowed", ".illegal"]),
+ )
+
+ for extensions, expected_disallowed_extensions in test_values:
+ with self.subTest(extensions=extensions, expected_disallowed_extensions=expected_disallowed_extensions):
+ self.message.attachments = [MockAttachment(filename=f"filename{extension}") for extension in extensions]
+ disallowed_extensions = self.cog.get_disallowed_extensions(self.message)
+ self.assertCountEqual(disallowed_extensions, expected_disallowed_extensions)
+
+
+class AntiMalwareSetupTests(unittest.TestCase):
+ """Tests setup of the `AntiMalware` cog."""
+
+ def test_setup(self):
+ """Setup of the extension should call add_cog."""
+ bot = MockBot()
+ antimalware.setup(bot)
+ bot.add_cog.assert_called_once()
diff --git a/tests/bot/cogs/test_duck_pond.py b/tests/bot/cogs/test_duck_pond.py
index 7e6bfc748..a8c0107c6 100644
--- a/tests/bot/cogs/test_duck_pond.py
+++ b/tests/bot/cogs/test_duck_pond.py
@@ -45,7 +45,7 @@ class DuckPondTests(base.LoggingTestsMixin, unittest.IsolatedAsyncioTestCase):
self.assertEqual(cog.bot, bot)
self.assertEqual(cog.webhook_id, constants.Webhooks.duck_pond)
- bot.loop.create_loop.called_once_with(cog.fetch_webhook())
+ bot.loop.create_task.assert_called_once_with(cog.fetch_webhook())
def test_fetch_webhook_succeeds_without_connectivity_issues(self):
"""The `fetch_webhook` method waits until `READY` event and sets the `webhook` attribute."""
diff --git a/tests/bot/cogs/test_information.py b/tests/bot/cogs/test_information.py
index b5f928dd6..aca6b594f 100644
--- a/tests/bot/cogs/test_information.py
+++ b/tests/bot/cogs/test_information.py
@@ -7,10 +7,9 @@ import discord
from bot import constants
from bot.cogs import information
-from bot.decorators import InWhitelistCheckFailure
+from bot.utils.checks import InWhitelistCheckFailure
from tests import helpers
-
COG_PATH = "bot.cogs.information.Information"
diff --git a/tests/bot/cogs/test_snekbox.py b/tests/bot/cogs/test_snekbox.py
index 14299e766..cf9adbee0 100644
--- a/tests/bot/cogs/test_snekbox.py
+++ b/tests/bot/cogs/test_snekbox.py
@@ -21,7 +21,10 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
"""Post the eval code to the URLs.snekbox_eval_api endpoint."""
resp = MagicMock()
resp.json = AsyncMock(return_value="return")
- self.bot.http_session.post().__aenter__.return_value = resp
+
+ context_manager = MagicMock()
+ context_manager.__aenter__.return_value = resp
+ self.bot.http_session.post.return_value = context_manager
self.assertEqual(await self.cog.post_eval("import random"), "return")
self.bot.http_session.post.assert_called_with(
@@ -41,7 +44,10 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
key = "MarkDiamond"
resp = MagicMock()
resp.json = AsyncMock(return_value={"key": key})
- self.bot.http_session.post().__aenter__.return_value = resp
+
+ context_manager = MagicMock()
+ context_manager.__aenter__.return_value = resp
+ self.bot.http_session.post.return_value = context_manager
self.assertEqual(
await self.cog.upload_output("My awesome output"),
@@ -57,7 +63,10 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
"""Output upload gracefully fallback if the upload fail."""
resp = MagicMock()
resp.json = AsyncMock(side_effect=Exception)
- self.bot.http_session.post().__aenter__.return_value = resp
+
+ context_manager = MagicMock()
+ context_manager.__aenter__.return_value = resp
+ self.bot.http_session.post.return_value = context_manager
log = logging.getLogger("bot.cogs.snekbox")
with self.assertLogs(logger=log, level='ERROR'):
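
The likely motivation for building the context manager explicitly rather than configuring http_session.post().__aenter__ directly: configuring a MagicMock by calling it records that call, which then skews any call-count assertion made later in the test. A standalone illustration of the difference:

    from unittest.mock import MagicMock

    session = MagicMock()
    session.post().__aenter__.return_value = "resp"  # configures the mock, but also *calls* post()
    print(session.post.call_count)                   # 1 -- "called" before the code under test ran

    session = MagicMock()
    context_manager = MagicMock()
    context_manager.__aenter__.return_value = "resp"
    session.post.return_value = context_manager      # same behaviour, no call recorded
    print(session.post.call_count)                   # 0
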
diff --git a/tests/bot/test_constants.py b/tests/bot/test_constants.py
index dae7c066c..f10d6fbe8 100644
--- a/tests/bot/test_constants.py
+++ b/tests/bot/test_constants.py
@@ -1,14 +1,40 @@
import inspect
+import typing
import unittest
from bot import constants
+def is_annotation_instance(value: typing.Any, annotation: typing.Any) -> bool:
+ """
+ Return True if `value` is an instance of the type represented by `annotation`.
+
+ This doesn't account for things like Unions or checking for homogeneous types in collections.
+ """
+ origin = typing.get_origin(annotation)
+
+ # This is done in case a bare e.g. `typing.List` is used.
+ # In such case, for the assertion to pass, the type needs to be normalised to e.g. `list`.
+ # `get_origin()` does this normalisation for us.
+ type_ = annotation if origin is None else origin
+
+ return isinstance(value, type_)
+
+
+def is_any_instance(value: typing.Any, types: typing.Collection) -> bool:
+ """Return True if `value` is an instance of any type in `types`."""
+ for type_ in types:
+ if is_annotation_instance(value, type_):
+ return True
+
+ return False
+
+
class ConstantsTests(unittest.TestCase):
"""Tests for our constants."""
def test_section_configuration_matches_type_specification(self):
- """The section annotations should match the actual types of the sections."""
+ """"The section annotations should match the actual types of the sections."""
sections = (
cls
@@ -17,10 +43,15 @@ class ConstantsTests(unittest.TestCase):
)
for section in sections:
for name, annotation in section.__annotations__.items():
- with self.subTest(section=section, name=name, annotation=annotation):
+ with self.subTest(section=section.__name__, name=name, annotation=annotation):
value = getattr(section, name)
+ origin = typing.get_origin(annotation)
+ annotation_args = typing.get_args(annotation)
+ failure_msg = f"{value} is not an instance of {annotation}"
- if getattr(annotation, '_name', None) in ('Dict', 'List'):
- self.skipTest("Cannot validate containers yet.")
-
- self.assertIsInstance(value, annotation)
+ if origin is typing.Union:
+ is_instance = is_any_instance(value, annotation_args)
+ self.assertTrue(is_instance, failure_msg)
+ else:
+ is_instance = is_annotation_instance(value, annotation)
+ self.assertTrue(is_instance, failure_msg)
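
The two helpers above lean on typing.get_origin() and typing.get_args(); their behaviour for the cases this test cares about looks like this on Python 3.8+:

    import typing

    print(typing.get_origin(typing.List[int]))      # <class 'list'>
    print(typing.get_origin(list))                  # None -> fall back to the annotation itself
    print(typing.get_origin(typing.Optional[str]))  # typing.Union
    print(typing.get_args(typing.Optional[str]))    # (<class 'str'>, <class 'NoneType'>)

So a plain annotation is checked with isinstance directly, a subscripted generic is normalised to its origin, and a Union/Optional is accepted if the value matches any of its arguments.
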
diff --git a/tests/bot/test_decorators.py b/tests/bot/test_decorators.py
index a17dd3e16..3d450caa0 100644
--- a/tests/bot/test_decorators.py
+++ b/tests/bot/test_decorators.py
@@ -3,10 +3,10 @@ import unittest
import unittest.mock
from bot import constants
-from bot.decorators import InWhitelistCheckFailure, in_whitelist
+from bot.decorators import in_whitelist
+from bot.utils.checks import InWhitelistCheckFailure
from tests import helpers
-
InWhitelistTestCase = collections.namedtuple("WhitelistedContextTestCase", ("kwargs", "ctx", "description"))
diff --git a/tests/bot/utils/test_checks.py b/tests/bot/utils/test_checks.py
index 9610771e5..de72e5748 100644
--- a/tests/bot/utils/test_checks.py
+++ b/tests/bot/utils/test_checks.py
@@ -1,6 +1,8 @@
import unittest
+from unittest.mock import MagicMock
from bot.utils import checks
+from bot.utils.checks import InWhitelistCheckFailure
from tests.helpers import MockContext, MockRole
@@ -42,10 +44,48 @@ class ChecksTests(unittest.TestCase):
self.ctx.author.roles.append(MockRole(id=role_id))
self.assertTrue(checks.without_role_check(self.ctx, role_id + 10))
- def test_in_channel_check_for_correct_channel(self):
- self.ctx.channel.id = 42
- self.assertTrue(checks.in_channel_check(self.ctx, *[42]))
+ def test_in_whitelist_check_correct_channel(self):
+ """`in_whitelist_check` returns `True` if `Context.channel.id` is in the channel list."""
+ channel_id = 3
+ self.ctx.channel.id = channel_id
+ self.assertTrue(checks.in_whitelist_check(self.ctx, [channel_id]))
- def test_in_channel_check_for_incorrect_channel(self):
- self.ctx.channel.id = 42 + 10
- self.assertFalse(checks.in_channel_check(self.ctx, *[42]))
+ def test_in_whitelist_check_incorrect_channel(self):
+ """`in_whitelist_check` raises InWhitelistCheckFailure if there's no channel match."""
+ self.ctx.channel.id = 3
+ with self.assertRaises(InWhitelistCheckFailure):
+ checks.in_whitelist_check(self.ctx, [4])
+
+ def test_in_whitelist_check_correct_category(self):
+ """`in_whitelist_check` returns `True` if `Context.channel.category_id` is in the category list."""
+ category_id = 3
+ self.ctx.channel.category_id = category_id
+ self.assertTrue(checks.in_whitelist_check(self.ctx, categories=[category_id]))
+
+ def test_in_whitelist_check_incorrect_category(self):
+ """`in_whitelist_check` raises InWhitelistCheckFailure if there's no category match."""
+ self.ctx.channel.category_id = 3
+ with self.assertRaises(InWhitelistCheckFailure):
+ checks.in_whitelist_check(self.ctx, categories=[4])
+
+ def test_in_whitelist_check_correct_role(self):
+ """`in_whitelist_check` returns `True` if any of the `Context.author.roles` are in the roles list."""
+ self.ctx.author.roles = (MagicMock(id=1), MagicMock(id=2))
+ self.assertTrue(checks.in_whitelist_check(self.ctx, roles=[2, 6]))
+
+ def test_in_whitelist_check_incorrect_role(self):
+ """`in_whitelist_check` raises InWhitelistCheckFailure if there's no role match."""
+ self.ctx.author.roles = (MagicMock(id=1), MagicMock(id=2))
+ with self.assertRaises(InWhitelistCheckFailure):
+ checks.in_whitelist_check(self.ctx, roles=[4])
+
+ def test_in_whitelist_check_fail_silently(self):
+ """`in_whitelist_check` test no exception raised if `fail_silently` is `True`"""
+ self.assertFalse(checks.in_whitelist_check(self.ctx, roles=[2, 6], fail_silently=True))
+
+ def test_in_whitelist_check_complex(self):
+ """`in_whitelist_check` test with multiple parameters"""
+ self.ctx.author.roles = (MagicMock(id=1), MagicMock(id=2))
+ self.ctx.channel.category_id = 3
+ self.ctx.channel.id = 5
+ self.assertTrue(checks.in_whitelist_check(self.ctx, channels=[1], categories=[8], roles=[2]))
diff --git a/tests/bot/utils/test_redis_cache.py b/tests/bot/utils/test_redis_cache.py
new file mode 100644
index 000000000..8c1a40640
--- /dev/null
+++ b/tests/bot/utils/test_redis_cache.py
@@ -0,0 +1,273 @@
+import asyncio
+import unittest
+
+import fakeredis.aioredis
+
+from bot.utils import RedisCache
+from bot.utils.redis_cache import NoBotInstanceError, NoNamespaceError, NoParentInstanceError
+from tests import helpers
+
+
+class RedisCacheTests(unittest.IsolatedAsyncioTestCase):
+ """Tests the RedisCache class from utils.redis_dict.py."""
+
+ async def asyncSetUp(self): # noqa: N802
+ """Sets up the objects that only have to be initialized once."""
+ self.bot = helpers.MockBot()
+ self.bot.redis_session = await fakeredis.aioredis.create_redis_pool()
+
+ # Okay, so this is necessary so that we can create a clean new
+ # class for every test method, and we want that because it will
+ # ensure we get a fresh loop, which is necessary for test_increment_lock
+ # to be able to pass.
+ class DummyCog:
+ """A dummy cog, for dummies."""
+
+ redis = RedisCache()
+
+ def __init__(self, bot: helpers.MockBot):
+ self.bot = bot
+
+ self.cog = DummyCog(self.bot)
+
+ await self.cog.redis.clear()
+
+ def test_class_attribute_namespace(self):
+ """Test that RedisDict creates a namespace automatically for class attributes."""
+ self.assertEqual(self.cog.redis._namespace, "DummyCog.redis")
+
+ async def test_class_attribute_required(self):
+ """Test that errors are raised when not assigned as a class attribute."""
+ bad_cache = RedisCache()
+ self.assertIs(bad_cache._namespace, None)
+
+ with self.assertRaises(RuntimeError):
+ await bad_cache.set("test", "me_up_deadman")
+
+ def test_namespace_collision(self):
+ """Test that we prevent colliding namespaces."""
+ bob_cache_1 = RedisCache()
+ bob_cache_1._set_namespace("BobRoss")
+ self.assertEqual(bob_cache_1._namespace, "BobRoss")
+
+ bob_cache_2 = RedisCache()
+ bob_cache_2._set_namespace("BobRoss")
+ self.assertEqual(bob_cache_2._namespace, "BobRoss_")
+
+ async def test_set_get_item(self):
+ """Test that users can set and get items from the RedisDict."""
+ test_cases = (
+ ('favorite_fruit', 'melon'),
+ ('favorite_number', 86),
+ ('favorite_fraction', 86.54)
+ )
+
+ # Test that we can get and set different types.
+ for test in test_cases:
+ await self.cog.redis.set(*test)
+ self.assertEqual(await self.cog.redis.get(test[0]), test[1])
+
+ # Test that .get allows a default value
+ self.assertEqual(await self.cog.redis.get('favorite_nothing', "bearclaw"), "bearclaw")
+
+ async def test_set_item_type(self):
+ """Test that .set rejects keys and values that are not permitted."""
+ fruits = ["lemon", "melon", "apple"]
+
+ with self.assertRaises(TypeError):
+ await self.cog.redis.set(fruits, "nice")
+
+ with self.assertRaises(TypeError):
+ await self.cog.redis.set(4.23, "nice")
+
+ async def test_delete_item(self):
+ """Test that .delete allows us to delete stuff from the RedisCache."""
+ # Add an item and verify that it gets added
+ await self.cog.redis.set("internet", "firetruck")
+ self.assertEqual(await self.cog.redis.get("internet"), "firetruck")
+
+ # Delete that item and verify that it gets deleted
+ await self.cog.redis.delete("internet")
+ self.assertIs(await self.cog.redis.get("internet"), None)
+
+ async def test_contains(self):
+ """Test that we can check membership with .contains."""
+ await self.cog.redis.set('favorite_country', "Burkina Faso")
+
+ self.assertIs(await self.cog.redis.contains('favorite_country'), True)
+ self.assertIs(await self.cog.redis.contains('favorite_dentist'), False)
+
+ async def test_items(self):
+ """Test that the RedisDict can be iterated."""
+ # Set up our test cases in the Redis cache
+ test_cases = [
+ ('favorite_turtle', 'Donatello'),
+ ('second_favorite_turtle', 'Leonardo'),
+ ('third_favorite_turtle', 'Raphael'),
+ ]
+ for key, value in test_cases:
+ await self.cog.redis.set(key, value)
+
+ # Copy the awaited ItemsView into a regular list, easier to compare that way.
+ redis_items = [item for item in await self.cog.redis.items()]
+
+ # These sequences are probably in the same order now, but probably
+ # isn't good enough for tests. Let's not rely on .hgetall always
+ # returning things in sequence, and just sort both lists to be safe.
+ redis_items = sorted(redis_items)
+ test_cases = sorted(test_cases)
+
+ # If these are equal now, everything works fine.
+ self.assertSequenceEqual(test_cases, redis_items)
+
+ async def test_length(self):
+ """Test that we can get the correct .length from the RedisDict."""
+ await self.cog.redis.set('one', 1)
+ await self.cog.redis.set('two', 2)
+ await self.cog.redis.set('three', 3)
+ self.assertEqual(await self.cog.redis.length(), 3)
+
+ await self.cog.redis.set('four', 4)
+ self.assertEqual(await self.cog.redis.length(), 4)
+
+ async def test_to_dict(self):
+ """Test that the .to_dict method returns a workable dictionary copy."""
+ copy = await self.cog.redis.to_dict()
+ local_copy = {key: value for key, value in await self.cog.redis.items()}
+ self.assertIs(type(copy), dict)
+ self.assertDictEqual(copy, local_copy)
+
+ async def test_clear(self):
+ """Test that the .clear method removes the entire hash."""
+ await self.cog.redis.set('teddy', 'with me')
+ await self.cog.redis.set('in my dreams', 'you have a weird hat')
+ self.assertEqual(await self.cog.redis.length(), 2)
+
+ await self.cog.redis.clear()
+ self.assertEqual(await self.cog.redis.length(), 0)
+
+ async def test_pop(self):
+ """Test that we can .pop an item from the RedisDict."""
+ await self.cog.redis.set('john', 'was afraid')
+
+ self.assertEqual(await self.cog.redis.pop('john'), 'was afraid')
+ self.assertEqual(await self.cog.redis.pop('pete', 'breakneck'), 'breakneck')
+ self.assertEqual(await self.cog.redis.length(), 0)
+
+ async def test_update(self):
+ """Test that we can .update the RedisDict with multiple items."""
+ await self.cog.redis.set("reckfried", "lona")
+ await self.cog.redis.set("bel air", "prince")
+ await self.cog.redis.update({
+ "reckfried": "jona",
+ "mega": "hungry, though",
+ })
+
+ result = {
+ "reckfried": "jona",
+ "bel air": "prince",
+ "mega": "hungry, though",
+ }
+ self.assertDictEqual(await self.cog.redis.to_dict(), result)
+
+ def test_typestring_conversion(self):
+ """Test the typestring-related helper functions."""
+ conversion_tests = (
+ (12, "i|12"),
+ (12.4, "f|12.4"),
+ ("cowabunga", "s|cowabunga"),
+ )
+
+ # Test conversion to typestring
+ for _input, expected in conversion_tests:
+ self.assertEqual(self.cog.redis._value_to_typestring(_input), expected)
+
+ # Test conversion from typestrings
+ for _input, expected in conversion_tests:
+ self.assertEqual(self.cog.redis._value_from_typestring(expected), _input)
+
+ # Test that exceptions are raised on invalid input.
+ # Each call needs its own assertRaises block, otherwise the second call
+ # would never run because the first one already raises.
+ with self.assertRaises(TypeError):
+ self.cog.redis._value_to_typestring(["internet"])
+ with self.assertRaises(TypeError):
+ self.cog.redis._value_from_typestring("o|firedog")
+
+ async def test_increment_decrement(self):
+ """Test .increment and .decrement methods."""
+ await self.cog.redis.set("entropic", 5)
+ await self.cog.redis.set("disentropic", 12.5)
+
+ # Test default increment
+ await self.cog.redis.increment("entropic")
+ self.assertEqual(await self.cog.redis.get("entropic"), 6)
+
+ # Test default decrement
+ await self.cog.redis.decrement("entropic")
+ self.assertEqual(await self.cog.redis.get("entropic"), 5)
+
+ # Test float increment with float
+ await self.cog.redis.increment("disentropic", 2.0)
+ self.assertEqual(await self.cog.redis.get("disentropic"), 14.5)
+
+ # Test float increment with int
+ await self.cog.redis.increment("disentropic", 2)
+ self.assertEqual(await self.cog.redis.get("disentropic"), 16.5)
+
+ # Test negative increments, because why not.
+ await self.cog.redis.increment("entropic", -5)
+ self.assertEqual(await self.cog.redis.get("entropic"), 0)
+
+ # Negative decrements? Sure.
+ await self.cog.redis.decrement("entropic", -5)
+ self.assertEqual(await self.cog.redis.get("entropic"), 5)
+
+ # What if we use a negative float to decrement an int?
+ # The stored value should be converted to a float.
+ await self.cog.redis.decrement("entropic", -2.5)
+ self.assertEqual(await self.cog.redis.get("entropic"), 7.5)
+
+ # Let's test that they raise the right errors
+ with self.assertRaises(KeyError):
+ await self.cog.redis.increment("doesn't_exist!")
+
+ await self.cog.redis.set("stringthing", "stringthing")
+ with self.assertRaises(TypeError):
+ await self.cog.redis.increment("stringthing")
+
+ async def test_increment_lock(self):
+ """Test that we can't produce a race condition in .increment."""
+ await self.cog.redis.set("test_key", 0)
+ tasks = []
+
+ # Increment this a lot in different tasks
+ for _ in range(100):
+ task = asyncio.create_task(
+ self.cog.redis.increment("test_key", 1)
+ )
+ tasks.append(task)
+ await asyncio.gather(*tasks)
+
+ # Confirm that the value has been incremented the exact right number of times.
+ value = await self.cog.redis.get("test_key")
+ self.assertEqual(value, 100)
+
+ async def test_exceptions_raised(self):
+ """Testing that the various RuntimeErrors are reachable."""
+ class MyCog:
+ cache = RedisCache()
+
+ def __init__(self):
+ self.other_cache = RedisCache()
+
+ cog = MyCog()
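+ # Presumably: the class-level cache has no Bot instance attached, the cache created in
+ # __init__ never received a namespace via __set_name__, and accessing the cache through
+ # the class gives the descriptor no parent cog instance to bind to.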
+
+ # Raises "No Bot instance"
+ with self.assertRaises(NoBotInstanceError):
+ await cog.cache.get("john")
+
+ # Raises "RedisCache has no namespace"
+ with self.assertRaises(NoNamespaceError):
+ await cog.other_cache.get("was")
+
+ # Raises "You must access the RedisCache instance through the cog instance"
+ with self.assertRaises(NoParentInstanceError):
+ await MyCog.cache.get("afraid")
diff --git a/tests/helpers.py b/tests/helpers.py
index 91d814b3a..faa839370 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -4,12 +4,15 @@ import collections
import itertools
import logging
import unittest.mock
+from asyncio import AbstractEventLoop
from typing import Iterable, Optional
import discord
+from aiohttp import ClientSession
from discord.ext.commands import Context
from bot.api import APIClient
+from bot.async_stats import AsyncStatsClient
from bot.bot import Bot
@@ -268,10 +271,16 @@ class MockAPIClient(CustomMockMixin, unittest.mock.MagicMock):
spec_set = APIClient
-# Create a Bot instance to get a realistic MagicMock of `discord.ext.commands.Bot`
-bot_instance = Bot(command_prefix=unittest.mock.MagicMock())
-bot_instance.http_session = None
-bot_instance.api_client = None
+def _get_mock_loop() -> unittest.mock.Mock:
+ """Return a mocked asyncio.AbstractEventLoop."""
+ loop = unittest.mock.create_autospec(spec=AbstractEventLoop, spec_set=True)
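+ # spec_set=True also prevents tests from setting attributes that don't exist on a real loop.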
+
+ # Since calling `create_task` on the mocked loop does not actually schedule the coroutine
+ # object as a task in a real event loop, this `side_effect` calls `close()` on the coroutine
+ # object to prevent "was never awaited" warnings.
+ loop.create_task.side_effect = lambda coroutine: coroutine.close()
+
+ return loop
class MockBot(CustomMockMixin, unittest.mock.MagicMock):
@@ -281,17 +290,16 @@ class MockBot(CustomMockMixin, unittest.mock.MagicMock):
Instances of this class will follow the specifications of `discord.ext.commands.Bot` instances.
For more information, see the `MockGuild` docstring.
"""
- spec_set = bot_instance
- additional_spec_asyncs = ("wait_for",)
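+ # Create a Bot instance with a mocked event loop to get a realistic spec of `discord.ext.commands.Bot`.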
+ spec_set = Bot(command_prefix=unittest.mock.MagicMock(), loop=_get_mock_loop())
+ additional_spec_asyncs = ("wait_for", "redis_ready")
def __init__(self, **kwargs) -> None:
super().__init__(**kwargs)
- self.api_client = MockAPIClient()
- # Since calling `create_task` on our MockBot does not actually schedule the coroutine object
- # as a task in the asyncio loop, this `side_effect` calls `close()` on the coroutine object
- # to prevent "has not been awaited"-warnings.
- self.loop.create_task.side_effect = lambda coroutine: coroutine.close()
+ self.loop = _get_mock_loop()
+ self.api_client = MockAPIClient(loop=self.loop)
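+ # Autospec the HTTP session and stats client so only attributes of the real classes are available.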
+ self.http_session = unittest.mock.create_autospec(spec=ClientSession, spec_set=True)
+ self.stats = unittest.mock.create_autospec(spec=AsyncStatsClient, spec_set=True)
# Create a TextChannel instance to get a realistic MagicMock of `discord.TextChannel`