author Sebastiaan Zeeff <[email protected]> 2020-05-28 12:49:01 +0200
committer GitHub <[email protected]> 2020-05-28 12:49:01 +0200
commit aa5aa241cfb7e98df300436ffa923edc30b728ea (patch)
tree f5ffb73b24fec7d08f3dd4718208f459555d5571
parent Merge pull request #961 from python-discord/moderation_commands_in_modmail_ca... (diff)
parent Move the `self.redis_closed` into session create. (diff)
Merge pull request #947 from python-discord/redis_persistence
RedisCache - Data Persistence
-rw-r--r--  Pipfile                                    26
-rw-r--r--  Pipfile.lock                              308
-rw-r--r--  azure-pipelines.yml                         1
-rw-r--r--  bot/bot.py                                 45
-rw-r--r--  bot/cogs/antispam.py                        4
-rw-r--r--  bot/cogs/defcon.py                          2
-rw-r--r--  bot/cogs/duck_pond.py                       2
-rw-r--r--  bot/cogs/help_channels.py                   2
-rw-r--r--  bot/cogs/moderation/scheduler.py           10
-rw-r--r--  bot/cogs/moderation/silence.py              2
-rw-r--r--  bot/cogs/stats.py                           4
-rw-r--r--  bot/cogs/utils.py                           4
-rw-r--r--  bot/cogs/watchchannels/talentpool.py        2
-rw-r--r--  bot/cogs/watchchannels/watchchannel.py     14
-rw-r--r--  bot/constants.py                           23
-rw-r--r--  bot/pagination.py                           4
-rw-r--r--  bot/utils/__init__.py                       4
-rw-r--r--  bot/utils/messages.py                       2
-rw-r--r--  bot/utils/redis_cache.py                  409
-rw-r--r--  config-default.yml                          6
-rw-r--r--  docker-compose.yml                          6
-rw-r--r--  tests/bot/cogs/test_duck_pond.py            2
-rw-r--r--  tests/bot/cogs/test_snekbox.py             15
-rw-r--r--  tests/bot/test_constants.py                43
-rw-r--r--  tests/bot/utils/test_redis_cache.py       273
-rw-r--r--  tests/helpers.py                           30
26 files changed, 1061 insertions, 182 deletions
diff --git a/Pipfile b/Pipfile
index 14c9ef926..b42ca6d58 100644
--- a/Pipfile
+++ b/Pipfile
@@ -4,25 +4,27 @@ verify_ssl = true
name = "pypi"
[packages]
-discord-py = "~=1.3.2"
+aio-pika = "~=6.1"
aiodns = "~=2.0"
aiohttp = "~=3.5"
-sphinx = "~=2.2"
-markdownify = "~=0.4"
-lxml = "~=4.4"
-pyyaml = "~=5.1"
+aioredis = "~=1.3.1"
+beautifulsoup4 = "~=4.9"
+colorama = {version = "~=0.4.3",sys_platform = "== 'win32'"}
+coloredlogs = "~=14.0"
+deepdiff = "~=4.0"
+discord.py = "~=1.3.2"
+fakeredis = "~=1.4"
+feedparser = "~=5.2"
fuzzywuzzy = "~=0.17"
-aio-pika = "~=6.1"
+lxml = "~=4.4"
+markdownify = "~=0.4"
+more_itertools = "~=8.2"
python-dateutil = "~=2.8"
-deepdiff = "~=4.0"
+pyyaml = "~=5.1"
requests = "~=2.22"
-more_itertools = "~=8.2"
sentry-sdk = "~=0.14"
-coloredlogs = "~=14.0"
-colorama = {version = "~=0.4.3",sys_platform = "== 'win32'"}
+sphinx = "~=2.2"
statsd = "~=3.3"
-feedparser = "~=5.2"
-beautifulsoup4 = "~=4.9"
[dev-packages]
coverage = "~=5.0"
diff --git a/Pipfile.lock b/Pipfile.lock
index 4e7050a13..0e591710c 100644
--- a/Pipfile.lock
+++ b/Pipfile.lock
@@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
- "sha256": "64620e7e825c74fd3010821fb30843b19f5dafb2b5a1f6eafedc0a5febd99b69"
+ "sha256": "0297accc3d614d3da8080b89d56ef7fe489c28a0ada8102df396a604af7ee330"
},
"pipfile-spec": 6,
"requires": {
@@ -18,11 +18,11 @@
"default": {
"aio-pika": {
"hashes": [
- "sha256:9e4614636296e0040055bd6b304e97a38cc9796669ef391fc9b36649831d43ee",
- "sha256:c9d242b3c7142d64b185feb6c5cce4154962610e89ec2e9b52bd69ef01f89b2f"
+ "sha256:c4cbbeb85b3c7bf81bc127371846cd949e6231717ce1e6ac7ee1dd5ede21f866",
+ "sha256:ec7fef24f588d90314873463ab4f2c3debce0bd8830e49e3786586be96bc2e8e"
],
"index": "pypi",
- "version": "==6.6.0"
+ "version": "==6.6.1"
},
"aiodns": {
"hashes": [
@@ -50,12 +50,20 @@
"index": "pypi",
"version": "==3.6.2"
},
+ "aioredis": {
+ "hashes": [
+ "sha256:15f8af30b044c771aee6787e5ec24694c048184c7b9e54c3b60c750a4b93273a",
+ "sha256:b61808d7e97b7cd5a92ed574937a079c9387fdadd22bfbfa7ad2fd319ecc26e3"
+ ],
+ "index": "pypi",
+ "version": "==1.3.1"
+ },
"aiormq": {
"hashes": [
- "sha256:286e0b0772075580466e45f98f051b9728a9316b9c36f0c14c7bc1409be375b0",
- "sha256:7ed7d6df6b57af7f8bce7d1ebcbdfc32b676192e46703e81e9e217316e56b5bd"
+ "sha256:41a9d4eb17db805f30ed172f3f609fe0c2b16657fb15b1b67df19d251dd93c0d",
+ "sha256:7c19477a9450824cb79f9949fd238f4148e2c0dca67756a2868863c387209f04"
],
- "version": "==3.2.1"
+ "version": "==3.2.2"
},
"alabaster": {
"hashes": [
@@ -87,12 +95,12 @@
},
"beautifulsoup4": {
"hashes": [
- "sha256:594ca51a10d2b3443cbac41214e12dbb2a1cd57e1a7344659849e2e20ba6a8d8",
- "sha256:a4bbe77fd30670455c5296242967a123ec28c37e9702a8a81bd2f20a4baf0368",
- "sha256:d4e96ac9b0c3a6d3f0caae2e4124e6055c5dcafde8e2f831ff194c104f0775a0"
+ "sha256:73cc4d115b96f79c7d77c1c7f7a0a8d4c57860d1041df407dd1aae7f07a77fd7",
+ "sha256:a6237df3c32ccfaee4fd201c8f5f9d9df619b93121d01353a64a73ce8c6ef9a8",
+ "sha256:e718f2342e2e099b640a34ab782407b7b676f47ee272d6739e60b8ea23829f2c"
],
"index": "pypi",
- "version": "==4.9.0"
+ "version": "==4.9.1"
},
"certifi": {
"hashes": [
@@ -166,11 +174,19 @@
"index": "pypi",
"version": "==4.3.2"
},
- "discord-py": {
+ "discord": {
"hashes": [
- "sha256:406871b06d86c3dc49fba63238519f28628dac946fef8a0e22988ff58ec05580"
+ "sha256:9d4debb4a37845543bd4b92cb195bc53a302797333e768e70344222857ff1559",
+ "sha256:ff6653655e342e7721dfb3f10421345fd852c2a33f2cca912b1c39b3778a9429"
],
"index": "pypi",
+ "version": "==1.0.1"
+ },
+ "discord.py": {
+ "hashes": [
+ "sha256:406871b06d86c3dc49fba63238519f28628dac946fef8a0e22988ff58ec05580",
+ "sha256:ad00e34c72d2faa8db2157b651d05f3c415d7d05078e7e41dc9e8dc240051beb"
+ ],
"version": "==1.3.3"
},
"docutils": {
@@ -180,6 +196,14 @@
],
"version": "==0.16"
},
+ "fakeredis": {
+ "hashes": [
+ "sha256:4d170886865a91dbc8b7f8cbd4e5d488f4c5f2f25dfae127f001617bbe9e8f97",
+ "sha256:647b2593d349d9d4e566c8dadb2e4c71ba35be5bdc4f1f7ac2d565a12a965053"
+ ],
+ "index": "pypi",
+ "version": "==1.4.1"
+ },
"feedparser": {
"hashes": [
"sha256:bd030652c2d08532c034c27fcd7c85868e7fa3cb2b17f230a44a6bbc92519bf9",
@@ -197,6 +221,51 @@
"index": "pypi",
"version": "==0.18.0"
},
+ "hiredis": {
+ "hashes": [
+ "sha256:01b577f84c20ecc9c07fc4c184231b08e3c3942de096fa99978e053de231c423",
+ "sha256:01ff0900134166961c9e339df77c33b72f7edc5cb41739f0babcd9faa345926e",
+ "sha256:03ed34a13316d0c34213c4fd46e0fa3a5299073f4d4f08e93fed8c2108b399b3",
+ "sha256:040436e91df5143aff9e0debb49530d0b17a6bd52200ce568621c31ef581b10d",
+ "sha256:091eb38fbf968d1c5b703e412bbbd25f43a7967d8400842cee33a5a07b33c27b",
+ "sha256:102f9b9dc6ed57feb3a7c9bdf7e71cb7c278fe8df1edfcfe896bc3e0c2be9447",
+ "sha256:2b4b392c7e3082860c8371fab3ae762139090f9115819e12d9f56060f9ede05d",
+ "sha256:2c9cc0b986397b833073f466e6b9e9c70d1d4dc2c2c1b3e9cae3a23102ff296c",
+ "sha256:2fa65a9df683bca72073cd77709ddeb289ea2b114d3775d225fbbcc5faf808c5",
+ "sha256:38437a681f17c975fd22349e72c29bc643f8e7eb2d6dc5df419eac59afa4d7ce",
+ "sha256:3b3428fa3cf1ee178807b52c9bee8950ab94cd4eaa9bfae8c1bbae3c49501d34",
+ "sha256:3dd8c2fae7f5494978facb0e93297dd627b1a3f536f3b070cf0a7d9157a07dcb",
+ "sha256:4414a96c212e732723b5c3d7c04d386ebbb2ec359e1de646322cbc3f875cbd0d",
+ "sha256:48c627581ad4ef60adbac980981407939acf13a0e18f093502c7b542223c4f19",
+ "sha256:4a60e71625a2d78d8ab84dfb2fa2cfd9458c964b6e6c04fea76d9ade153fb371",
+ "sha256:585ace09f434e43d8a8dbeb366865b1a044d7c06319b3c7372a0a00e63b860f4",
+ "sha256:74b364b3f06c9cf0a53f7df611045bc9437ed972a283fa1f0b12537236d23ddc",
+ "sha256:75c65c3850e89e9daa68d1b9bedd5806f177d60aa5a7b0953b4829481cfc1f72",
+ "sha256:7f052de8bf744730a9120dbdc67bfeb7605a01f69fb8e7ba5c475af33c24e145",
+ "sha256:8113a7d5e87ecf57cd4ae263cc9e429adb9a3e59f5a7768da5d3312a8d0a051a",
+ "sha256:84857ce239eb8ed191ac78e77ff65d52902f00f30f4ee83bf80eb71da73b70e6",
+ "sha256:8644a48ddc4a40b3e3a6b9443f396c2ee353afb2d45656c4fc68d04a82e8e3f7",
+ "sha256:936aa565e673536e8a211e43ec43197406f24cd1f290138bd143765079c8ba00",
+ "sha256:9afeb88c67bbc663b9f27385c496da056d06ad87f55df6e393e1516cfecb0461",
+ "sha256:9d62cc7880110e4f83b0a51d218f465d3095e2751fbddd34e553dbd106a929ff",
+ "sha256:a1fadd062fc8d647ff39220c57ea2b48c99bb73f18223828ec97f88fc27e7898",
+ "sha256:a7754a783b1e5d6f627c19d099b178059c62f782ab62b4d8ba165b9fbc2ee34c",
+ "sha256:aa59dd63bb3f736de4fc2d080114429d5d369dfb3265f771778e8349d67a97a4",
+ "sha256:ae2ee0992f8de249715435942137843a93db204dd7db1e7cc9bdc5a8436443e8",
+ "sha256:b36842d7cf32929d568f37ec5b3173b72b2ec6572dec4d6be6ce774762215aee",
+ "sha256:bcbf9379c553b5facc6c04c1e5569b44b38ff16bcbf354676287698d61ee0c92",
+ "sha256:cbccbda6f1c62ab460449d9c85fdf24d0d32a6bf45176581151e53cc26a5d910",
+ "sha256:d0caf98dfb8af395d6732bd16561c0a2458851bea522e39f12f04802dbf6f502",
+ "sha256:d6456afeddba036def1a36d8a2758eca53202308d83db20ab5d0b66590919627",
+ "sha256:dbaef9a21a4f10bc281684ee4124f169e62bb533c2a92b55f8c06f64f9af7b8f",
+ "sha256:dce84916c09aaece006272b37234ae84a8ed13abb3a4d341a23933b8701abfb5",
+ "sha256:eb8c9c8b9869539d58d60ff4a28373a22514d40495911451343971cb4835b7a9",
+ "sha256:efc98b14ee3a8595e40b1425e8d42f5fd26f11a7b215a81ef9259068931754f4",
+ "sha256:fa2dc05b87d97acc1c6ae63f3e0f39eae5246565232484b08db6bf2dc1580678",
+ "sha256:fe7d6ce9f6a5fbe24f09d95ea93e9c7271abc4e1565da511e1449b107b4d7848"
+ ],
+ "version": "==1.0.1"
+ },
"humanfriendly": {
"hashes": [
"sha256:bf52ec91244819c780341a3438d5d7b09f431d3f113a475147ac9b7b167a3d12",
@@ -227,36 +296,36 @@
},
"lxml": {
"hashes": [
- "sha256:06d4e0bbb1d62e38ae6118406d7cdb4693a3fa34ee3762238bcb96c9e36a93cd",
- "sha256:0701f7965903a1c3f6f09328c1278ac0eee8f56f244e66af79cb224b7ef3801c",
- "sha256:1f2c4ec372bf1c4a2c7e4bb20845e8bcf8050365189d86806bad1e3ae473d081",
- "sha256:4235bc124fdcf611d02047d7034164897ade13046bda967768836629bc62784f",
- "sha256:5828c7f3e615f3975d48f40d4fe66e8a7b25f16b5e5705ffe1d22e43fb1f6261",
- "sha256:585c0869f75577ac7a8ff38d08f7aac9033da2c41c11352ebf86a04652758b7a",
- "sha256:5d467ce9c5d35b3bcc7172c06320dddb275fea6ac2037f72f0a4d7472035cea9",
- "sha256:63dbc21efd7e822c11d5ddbedbbb08cd11a41e0032e382a0fd59b0b08e405a3a",
- "sha256:7bc1b221e7867f2e7ff1933165c0cec7153dce93d0cdba6554b42a8beb687bdb",
- "sha256:8620ce80f50d023d414183bf90cc2576c2837b88e00bea3f33ad2630133bbb60",
- "sha256:8a0ebda56ebca1a83eb2d1ac266649b80af8dd4b4a3502b2c1e09ac2f88fe128",
- "sha256:90ed0e36455a81b25b7034038e40880189169c308a3df360861ad74da7b68c1a",
- "sha256:95e67224815ef86924fbc2b71a9dbd1f7262384bca4bc4793645794ac4200717",
- "sha256:afdb34b715daf814d1abea0317b6d672476b498472f1e5aacbadc34ebbc26e89",
- "sha256:b4b2c63cc7963aedd08a5f5a454c9f67251b1ac9e22fd9d72836206c42dc2a72",
- "sha256:d068f55bda3c2c3fcaec24bd083d9e2eede32c583faf084d6e4b9daaea77dde8",
- "sha256:d5b3c4b7edd2e770375a01139be11307f04341ec709cf724e0f26ebb1eef12c3",
- "sha256:deadf4df349d1dcd7b2853a2c8796593cc346600726eff680ed8ed11812382a7",
- "sha256:df533af6f88080419c5a604d0d63b2c33b1c0c4409aba7d0cb6de305147ea8c8",
- "sha256:e4aa948eb15018a657702fee0b9db47e908491c64d36b4a90f59a64741516e77",
- "sha256:e5d842c73e4ef6ed8c1bd77806bf84a7cb535f9c0cf9b2c74d02ebda310070e1",
- "sha256:ebec08091a22c2be870890913bdadd86fcd8e9f0f22bcb398abd3af914690c15",
- "sha256:edc15fcfd77395e24543be48871c251f38132bb834d9fdfdad756adb6ea37679",
- "sha256:f2b74784ed7e0bc2d02bd53e48ad6ba523c9b36c194260b7a5045071abbb1012",
- "sha256:fa071559f14bd1e92077b1b5f6c22cf09756c6de7139370249eb372854ce51e6",
- "sha256:fd52e796fee7171c4361d441796b64df1acfceb51f29e545e812f16d023c4bbc",
- "sha256:fe976a0f1ef09b3638778024ab9fb8cde3118f203364212c198f71341c0715ca"
- ],
- "index": "pypi",
- "version": "==4.5.0"
+ "sha256:06748c7192eab0f48e3d35a7adae609a329c6257495d5e53878003660dc0fec6",
+ "sha256:0790ddca3f825dd914978c94c2545dbea5f56f008b050e835403714babe62a5f",
+ "sha256:1aa7a6197c1cdd65d974f3e4953764eee3d9c7b67e3966616b41fab7f8f516b7",
+ "sha256:22c6d34fdb0e65d5f782a4d1a1edb52e0a8365858dafb1c08cb1d16546cf0786",
+ "sha256:2754d4406438c83144f9ffd3628bbe2dcc6d62b20dbc5c1ec4bc4385e5d44b42",
+ "sha256:27ee0faf8077c7c1a589573b1450743011117f1aa1a91d5ae776bbc5ca6070f2",
+ "sha256:2b02c106709466a93ed424454ce4c970791c486d5fcdf52b0d822a7e29789626",
+ "sha256:2d1ddce96cf15f1254a68dba6935e6e0f1fe39247de631c115e84dd404a6f031",
+ "sha256:4f282737d187ae723b2633856085c31ae5d4d432968b7f3f478a48a54835f5c4",
+ "sha256:51bb4edeb36d24ec97eb3e6a6007be128b720114f9a875d6b370317d62ac80b9",
+ "sha256:7eee37c1b9815e6505847aa5e68f192e8a1b730c5c7ead39ff317fde9ce29448",
+ "sha256:7fd88cb91a470b383aafad554c3fe1ccf6dfb2456ff0e84b95335d582a799804",
+ "sha256:9144ce36ca0824b29ebc2e02ca186e54040ebb224292072250467190fb613b96",
+ "sha256:925baf6ff1ef2c45169f548cc85204433e061360bfa7d01e1be7ae38bef73194",
+ "sha256:a636346c6c0e1092ffc202d97ec1843a75937d8c98aaf6771348ad6422e44bb0",
+ "sha256:a87dbee7ad9dce3aaefada2081843caf08a44a8f52e03e0a4cc5819f8398f2f4",
+ "sha256:a9e3b8011388e7e373565daa5e92f6c9cb844790dc18e43073212bb3e76f7007",
+ "sha256:afb53edf1046599991fb4a7d03e601ab5f5422a5435c47ee6ba91ec3b61416a6",
+ "sha256:b26719890c79a1dae7d53acac5f089d66fd8cc68a81f4e4bd355e45470dc25e1",
+ "sha256:b7462cdab6fffcda853338e1741ce99706cdf880d921b5a769202ea7b94e8528",
+ "sha256:b77975465234ff49fdad871c08aa747aae06f5e5be62866595057c43f8d2f62c",
+ "sha256:c47a8a5d00060122ca5908909478abce7bbf62d812e3fc35c6c802df8fb01fe7",
+ "sha256:c79e5debbe092e3c93ca4aee44c9a7631bdd407b2871cb541b979fd350bbbc29",
+ "sha256:d8d40e0121ca1606aa9e78c28a3a7d88a05c06b3ca61630242cded87d8ce55fa",
+ "sha256:ee2be8b8f72a2772e72ab926a3bccebf47bb727bda41ae070dc91d1fb759b726",
+ "sha256:f95d28193c3863132b1f55c1056036bf580b5a488d908f7d22a04ace8935a3a9",
+ "sha256:fadd2a63a2bfd7fb604508e553d1cf68eca250b2fbdbd81213b5f6f2fbf23529"
+ ],
+ "index": "pypi",
+ "version": "==4.5.1"
},
"markdownify": {
"hashes": [
@@ -305,46 +374,46 @@
},
"more-itertools": {
"hashes": [
- "sha256:5dd8bcf33e5f9513ffa06d5ad33d78f31e1931ac9a18f33d37e77a180d393a7c",
- "sha256:b1ddb932186d8a6ac451e1d95844b382f55e12686d51ca0c68b6f61f2ab7a507"
+ "sha256:558bb897a2232f5e4f8e2399089e35aecb746e1f9191b6584a151647e89267be",
+ "sha256:7818f596b1e87be009031c7653d01acc46ed422e6656b394b0f765ce66ed4982"
],
"index": "pypi",
- "version": "==8.2.0"
+ "version": "==8.3.0"
},
"multidict": {
"hashes": [
- "sha256:317f96bc0950d249e96d8d29ab556d01dd38888fbe68324f46fd834b430169f1",
- "sha256:42f56542166040b4474c0c608ed051732033cd821126493cf25b6c276df7dd35",
- "sha256:4b7df040fb5fe826d689204f9b544af469593fb3ff3a069a6ad3409f742f5928",
- "sha256:544fae9261232a97102e27a926019100a9db75bec7b37feedd74b3aa82f29969",
- "sha256:620b37c3fea181dab09267cd5a84b0f23fa043beb8bc50d8474dd9694de1fa6e",
- "sha256:6e6fef114741c4d7ca46da8449038ec8b1e880bbe68674c01ceeb1ac8a648e78",
- "sha256:7774e9f6c9af3f12f296131453f7b81dabb7ebdb948483362f5afcaac8a826f1",
- "sha256:85cb26c38c96f76b7ff38b86c9d560dea10cf3459bb5f4caf72fc1bb932c7136",
- "sha256:a326f4240123a2ac66bb163eeba99578e9d63a8654a59f4688a79198f9aa10f8",
- "sha256:ae402f43604e3b2bc41e8ea8b8526c7fa7139ed76b0d64fc48e28125925275b2",
- "sha256:aee283c49601fa4c13adc64c09c978838a7e812f85377ae130a24d7198c0331e",
- "sha256:b51249fdd2923739cd3efc95a3d6c363b67bbf779208e9f37fd5e68540d1a4d4",
- "sha256:bb519becc46275c594410c6c28a8a0adc66fe24fef154a9addea54c1adb006f5",
- "sha256:c2c37185fb0af79d5c117b8d2764f4321eeb12ba8c141a95d0aa8c2c1d0a11dd",
- "sha256:dc561313279f9d05a3d0ffa89cd15ae477528ea37aa9795c4654588a3287a9ab",
- "sha256:e439c9a10a95cb32abd708bb8be83b2134fa93790a4fb0535ca36db3dda94d20",
- "sha256:fc3b4adc2ee8474cb3cd2a155305d5f8eda0a9c91320f83e55748e1fcb68f8e3"
- ],
- "version": "==4.7.5"
+ "sha256:1ece5a3369835c20ed57adadc663400b5525904e53bae59ec854a5d36b39b21a",
+ "sha256:275ca32383bc5d1894b6975bb4ca6a7ff16ab76fa622967625baeebcf8079000",
+ "sha256:3750f2205b800aac4bb03b5ae48025a64e474d2c6cc79547988ba1d4122a09e2",
+ "sha256:4538273208e7294b2659b1602490f4ed3ab1c8cf9dbdd817e0e9db8e64be2507",
+ "sha256:5141c13374e6b25fe6bf092052ab55c0c03d21bd66c94a0e3ae371d3e4d865a5",
+ "sha256:51a4d210404ac61d32dada00a50ea7ba412e6ea945bbe992e4d7a595276d2ec7",
+ "sha256:5cf311a0f5ef80fe73e4f4c0f0998ec08f954a6ec72b746f3c179e37de1d210d",
+ "sha256:6513728873f4326999429a8b00fc7ceddb2509b01d5fd3f3be7881a257b8d463",
+ "sha256:7388d2ef3c55a8ba80da62ecfafa06a1c097c18032a501ffd4cabbc52d7f2b19",
+ "sha256:9456e90649005ad40558f4cf51dbb842e32807df75146c6d940b6f5abb4a78f3",
+ "sha256:c026fe9a05130e44157b98fea3ab12969e5b60691a276150db9eda71710cd10b",
+ "sha256:d14842362ed4cf63751648e7672f7174c9818459d169231d03c56e84daf90b7c",
+ "sha256:e0d072ae0f2a179c375f67e3da300b47e1a83293c554450b29c900e50afaae87",
+ "sha256:f07acae137b71af3bb548bd8da720956a3bc9f9a0b87733e0899226a2317aeb7",
+ "sha256:fbb77a75e529021e7c4a8d4e823d88ef4d23674a202be4f5addffc72cbb91430",
+ "sha256:fcfbb44c59af3f8ea984de67ec7c306f618a3ec771c2843804069917a8f2e255",
+ "sha256:feed85993dbdb1dbc29102f50bca65bdc68f2c0c8d352468c25b54874f23c39d"
+ ],
+ "version": "==4.7.6"
},
"ordered-set": {
"hashes": [
- "sha256:a7bfa858748c73b096e43db14eb23e2bc714a503f990c89fac8fab9b0ee79724"
+ "sha256:a31008c57f9c9776b12eb8841b1f61d1e4d70dfbbe8875ccfa2403c54af3d51b"
],
- "version": "==3.1.1"
+ "version": "==4.0.1"
},
"packaging": {
"hashes": [
- "sha256:3c292b474fda1671ec57d46d739d072bfd495a4f51ad01a055121d81e952b7a3",
- "sha256:82f77b9bee21c1bafbf35a84905d604d5d1223801d639cf3ed140bd651c08752"
+ "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8",
+ "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"
],
- "version": "==20.3"
+ "version": "==20.4"
},
"pamqp": {
"hashes": [
@@ -418,10 +487,10 @@
},
"pytz": {
"hashes": [
- "sha256:1c557d7d0e871de1f5ccd5833f60fb2550652da6be2693c1e02300743d21500d",
- "sha256:b02c06db6cf09c12dd25137e563b31700d3b80fcc4ad23abb7a315f2789819be"
+ "sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed",
+ "sha256:c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048"
],
- "version": "==2019.3"
+ "version": "==2020.1"
},
"pyyaml": {
"hashes": [
@@ -440,6 +509,13 @@
"index": "pypi",
"version": "==5.3.1"
},
+ "redis": {
+ "hashes": [
+ "sha256:2ef11f489003f151777c064c5dbc6653dfb9f3eade159bcadc524619fddc2242",
+ "sha256:6d65e84bc58091140081ee9d9c187aab0480097750fac44239307a3bdf0b1251"
+ ],
+ "version": "==3.5.2"
+ },
"requests": {
"hashes": [
"sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee",
@@ -450,18 +526,18 @@
},
"sentry-sdk": {
"hashes": [
- "sha256:23808d571d2461a4ce3784ec12bbee5bdb8c026c143fe79d36cef8a6d653e71f",
- "sha256:bb90a4e19c7233a580715fc986cc44be2c48fc10b31e71580a2037e1c94b6950"
+ "sha256:0e5e947d0f7a969314aa23669a94a9712be5a688ff069ff7b9fc36c66adc160c",
+ "sha256:799a8bf76b012e3030a881be00e97bc0b922ce35dde699c6537122b751d80e2c"
],
"index": "pypi",
- "version": "==0.14.3"
+ "version": "==0.14.4"
},
"six": {
"hashes": [
- "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a",
- "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c"
+ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",
+ "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"
],
- "version": "==1.14.0"
+ "version": "==1.15.0"
},
"snowballstemmer": {
"hashes": [
@@ -470,12 +546,19 @@
],
"version": "==2.0.0"
},
+ "sortedcontainers": {
+ "hashes": [
+ "sha256:974e9a32f56b17c1bac2aebd9dcf197f3eb9cd30553c5852a3187ad162e1a03a",
+ "sha256:d9e96492dd51fae31e60837736b38fe42a187b5404c16606ff7ee7cd582d4c60"
+ ],
+ "version": "==2.1.0"
+ },
"soupsieve": {
"hashes": [
- "sha256:e914534802d7ffd233242b785229d5ba0766a7f487385e3f714446a07bf540ae",
- "sha256:fcd71e08c0aee99aca1b73f45478549ee7e7fc006d51b37bec9e9def7dc22b69"
+ "sha256:1634eea42ab371d3d346309b93df7870a88610f0725d47528be902a0d95ecc55",
+ "sha256:a59dc181727e95d25f781f0eb4fd1825ff45590ec8ff49eadfd7f1a537cc0232"
],
- "version": "==2.0"
+ "version": "==2.0.1"
},
"sphinx": {
"hashes": [
@@ -595,10 +678,10 @@
"develop": {
"appdirs": {
"hashes": [
- "sha256:9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92",
- "sha256:d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e"
+ "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41",
+ "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"
],
- "version": "==1.4.3"
+ "version": "==1.4.4"
},
"attrs": {
"hashes": [
@@ -657,13 +740,6 @@
],
"version": "==0.3.0"
},
- "entrypoints": {
- "hashes": [
- "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19",
- "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"
- ],
- "version": "==0.3"
- },
"filelock": {
"hashes": [
"sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59",
@@ -673,11 +749,11 @@
},
"flake8": {
"hashes": [
- "sha256:45681a117ecc81e870cbf1262835ae4af5e7a8b08e40b944a8a6e6b895914cfb",
- "sha256:49356e766643ad15072a789a20915d3c91dc89fd313ccd71802303fd67e4deca"
+ "sha256:c69ac1668e434d37a2d2880b3ca9aafd54b3a10a3ac1ab101d22f29e29cf8634",
+ "sha256:ccaa799ef9893cebe69fdfefed76865aeaefbb94cb8545617b2298786a4de9a5"
],
"index": "pypi",
- "version": "==3.7.9"
+ "version": "==3.8.2"
},
"flake8-annotations": {
"hashes": [
@@ -743,10 +819,10 @@
},
"identify": {
"hashes": [
- "sha256:2bb8760d97d8df4408f4e805883dad26a2d076f04be92a10a3e43f09c6060742",
- "sha256:faffea0fd8ec86bb146ac538ac350ed0c73908326426d387eded0bcc9d077522"
+ "sha256:0f3c3aac62b51b86fea6ff52fe8ff9e06f57f10411502443809064d23e16f1c2",
+ "sha256:f9ad3d41f01e98eb066b6e05c5b184fd1e925fadec48eb165b4e01c72a1ef3a7"
],
- "version": "==1.4.14"
+ "version": "==1.4.16"
},
"mccabe": {
"hashes": [
@@ -771,18 +847,18 @@
},
"pre-commit": {
"hashes": [
- "sha256:487c675916e6f99d355ec5595ad77b325689d423ef4839db1ed2f02f639c9522",
- "sha256:c0aa11bce04a7b46c5544723aedf4e81a4d5f64ad1205a30a9ea12d5e81969e1"
+ "sha256:5559e09afcac7808933951ffaf4ff9aac524f31efbc3f24d021540b6c579813c",
+ "sha256:703e2e34cbe0eedb0d319eff9f7b83e2022bb5a3ab5289a6a8841441076514d0"
],
"index": "pypi",
- "version": "==2.2.0"
+ "version": "==2.4.0"
},
"pycodestyle": {
"hashes": [
- "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56",
- "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"
+ "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367",
+ "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e"
],
- "version": "==2.5.0"
+ "version": "==2.6.0"
},
"pydocstyle": {
"hashes": [
@@ -793,10 +869,10 @@
},
"pyflakes": {
"hashes": [
- "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0",
- "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"
+ "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92",
+ "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8"
],
- "version": "==2.1.1"
+ "version": "==2.2.0"
},
"pyyaml": {
"hashes": [
@@ -817,10 +893,10 @@
},
"six": {
"hashes": [
- "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a",
- "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c"
+ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",
+ "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"
],
- "version": "==1.14.0"
+ "version": "==1.15.0"
},
"snowballstemmer": {
"hashes": [
@@ -831,10 +907,10 @@
},
"toml": {
"hashes": [
- "sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c",
- "sha256:235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e"
+ "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f",
+ "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"
],
- "version": "==0.10.0"
+ "version": "==0.10.1"
},
"unittest-xml-reporting": {
"hashes": [
@@ -846,10 +922,10 @@
},
"virtualenv": {
"hashes": [
- "sha256:5021396e8f03d0d002a770da90e31e61159684db2859d0ba4850fbea752aa675",
- "sha256:ac53ade75ca189bc97b6c1d9ec0f1a50efe33cbf178ae09452dcd9fd309013c1"
+ "sha256:a116629d4e7f4d03433b8afa27f43deba09d48bc48f5ecefa4f015a178efb6cf",
+ "sha256:a730548b27366c5e6cbdf6f97406d861cccece2e22275e8e1a757aeff5e00c70"
],
- "version": "==20.0.18"
+ "version": "==20.0.21"
}
}
}
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index d56675029..4500cb6e8 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -22,6 +22,7 @@ jobs:
REDDIT_CLIENT_ID: spam
REDDIT_SECRET: ham
WOLFRAM_API_KEY: baz
+ REDIS_PASSWORD: ''
steps:
- task: UsePythonVersion@0
diff --git a/bot/bot.py b/bot/bot.py
index a85a22aa9..313652d11 100644
--- a/bot/bot.py
+++ b/bot/bot.py
@@ -5,7 +5,9 @@ import warnings
from typing import Optional
import aiohttp
+import aioredis
import discord
+import fakeredis.aioredis
from discord.ext import commands
from sentry_sdk import push_scope
@@ -28,6 +30,9 @@ class Bot(commands.Bot):
super().__init__(*args, **kwargs)
self.http_session: Optional[aiohttp.ClientSession] = None
+ self.redis_session: Optional[aioredis.Redis] = None
+ self.redis_ready = asyncio.Event()
+ self.redis_closed = False
self.api_client = api.APIClient(loop=self.loop)
self._connector = None
@@ -44,6 +49,30 @@ class Bot(commands.Bot):
self.stats = AsyncStatsClient(self.loop, statsd_url, 8125, prefix="bot")
+ async def _create_redis_session(self) -> None:
+ """
+ Create the Redis connection pool, and then open the redis event gate.
+
+ If constants.Redis.use_fakeredis is True, we'll set up a fake redis pool instead
+ of attempting to communicate with a real Redis server. This is useful because it
+ means contributors don't necessarily need to get Redis running locally just
+ to run the bot.
+
+ The fakeredis cache won't have persistence across restarts, but that
+ usually won't matter for local bot testing.
+ """
+ if constants.Redis.use_fakeredis:
+ log.info("Using fakeredis instead of communicating with a real Redis server.")
+ self.redis_session = await fakeredis.aioredis.create_redis_pool()
+ else:
+ self.redis_session = await aioredis.create_redis_pool(
+ address=(constants.Redis.host, constants.Redis.port),
+ password=constants.Redis.password,
+ )
+
+ self.redis_closed = False
+ self.redis_ready.set()
+
def add_cog(self, cog: commands.Cog) -> None:
"""Adds a "cog" to the bot and logs the operation."""
super().add_cog(cog)
@@ -78,6 +107,12 @@ class Bot(commands.Bot):
if self.stats._transport:
self.stats._transport.close()
+ if self.redis_session:
+ self.redis_closed = True
+ self.redis_session.close()
+ self.redis_ready.clear()
+ await self.redis_session.wait_closed()
+
async def login(self, *args, **kwargs) -> None:
"""Re-create the connector and set up sessions before logging into Discord."""
self._recreate()
@@ -85,7 +120,7 @@ class Bot(commands.Bot):
await super().login(*args, **kwargs)
def _recreate(self) -> None:
- """Re-create the connector, aiohttp session, and the APIClient."""
+ """Re-create the connector, aiohttp session, the APIClient and the Redis session."""
# Use asyncio for DNS resolution instead of threads so threads aren't spammed.
# Doesn't seem to have any state with regards to being closed, so no need to worry?
self._resolver = aiohttp.AsyncResolver()
@@ -96,6 +131,14 @@ class Bot(commands.Bot):
"The previous connector was not closed; it will remain open and be overwritten"
)
+ if self.redis_session and not self.redis_session.closed:
+ log.warning(
+ "The previous redis pool was not closed; it will remain open and be overwritten"
+ )
+
+ # Create the redis session
+ self.loop.create_task(self._create_redis_session())
+
# Use AF_INET as its socket family to prevent HTTPS related problems both locally
# and in production.
self._connector = aiohttp.TCPConnector(
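
For orientation, the session flow that `_recreate` schedules in bot.py above boils down to the following. This is a minimal, self-contained sketch assuming the aioredis 1.3 / fakeredis 1.4 pins from the Pipfile; the function names and the hard-coded address are illustrative, not part of the diff.

import asyncio

import aioredis
import fakeredis.aioredis


async def create_redis_session(use_fakeredis: bool) -> aioredis.Redis:
    """Create either a fake or a real Redis connection pool (sketch)."""
    if use_fakeredis:
        # No external server needed; state does not persist across restarts.
        return await fakeredis.aioredis.create_redis_pool()
    return await aioredis.create_redis_pool(
        address=("redis", 6379),  # constants.Redis.host / .port in the bot
        password=None,            # constants.Redis.password in the bot
    )


async def main() -> None:
    redis_ready = asyncio.Event()
    session = await create_redis_session(use_fakeredis=True)
    redis_ready.set()  # consumers such as RedisCache wait on this event

    await session.hset("demo", "key", "value")
    print(await session.hget("demo", "key"))  # b'value'

    session.close()
    await session.wait_closed()


asyncio.run(main())
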
diff --git a/bot/cogs/antispam.py b/bot/cogs/antispam.py
index d63acbc4a..0bcca578d 100644
--- a/bot/cogs/antispam.py
+++ b/bot/cogs/antispam.py
@@ -94,7 +94,7 @@ class DeletionContext:
await modlog.send_log_message(
icon_url=Icons.filtering,
colour=Colour(Colours.soft_red),
- title=f"Spam detected!",
+ title="Spam detected!",
text=mod_alert_message,
thumbnail=last_message.author.avatar_url_as(static_format="png"),
channel_id=Channels.mod_alerts,
@@ -130,7 +130,7 @@ class AntiSpam(Cog):
body += "\n\n**The cog has been unloaded.**"
await self.mod_log.send_log_message(
- title=f"Error: AntiSpam configuration validation failed!",
+ title="Error: AntiSpam configuration validation failed!",
text=body,
ping_everyone=True,
icon_url=Icons.token_removed,
diff --git a/bot/cogs/defcon.py b/bot/cogs/defcon.py
index 25b0a6ad5..4c0ad5914 100644
--- a/bot/cogs/defcon.py
+++ b/bot/cogs/defcon.py
@@ -81,7 +81,7 @@ class Defcon(Cog):
else:
self.enabled = False
self.days = timedelta(days=0)
- log.info(f"DEFCON disabled")
+ log.info("DEFCON disabled")
await self.update_channel_topic()
diff --git a/bot/cogs/duck_pond.py b/bot/cogs/duck_pond.py
index 1f84a0609..37d1786a2 100644
--- a/bot/cogs/duck_pond.py
+++ b/bot/cogs/duck_pond.py
@@ -117,7 +117,7 @@ class DuckPond(Cog):
avatar_url=message.author.avatar_url
)
except discord.HTTPException:
- log.exception(f"Failed to send an attachment to the webhook")
+ log.exception("Failed to send an attachment to the webhook")
await message.add_reaction("✅")
diff --git a/bot/cogs/help_channels.py b/bot/cogs/help_channels.py
index f0e6746f0..d2a55fba6 100644
--- a/bot/cogs/help_channels.py
+++ b/bot/cogs/help_channels.py
@@ -391,7 +391,7 @@ class HelpChannels(Scheduler, commands.Cog):
self.in_use_category = await self.try_get_channel(constants.Categories.help_in_use)
self.dormant_category = await self.try_get_channel(constants.Categories.help_dormant)
except discord.HTTPException:
- log.exception(f"Failed to get a category; cog will be removed")
+ log.exception("Failed to get a category; cog will be removed")
self.bot.remove_cog(self.qualified_name)
async def init_cog(self) -> None:
diff --git a/bot/cogs/moderation/scheduler.py b/bot/cogs/moderation/scheduler.py
index dc42bee2e..012432e60 100644
--- a/bot/cogs/moderation/scheduler.py
+++ b/bot/cogs/moderation/scheduler.py
@@ -91,7 +91,7 @@ class InfractionScheduler(Scheduler):
log.trace(f"Applying {infr_type} infraction #{id_} to {user}.")
# Default values for the confirmation message and mod log.
- confirm_msg = f":ok_hand: applied"
+ confirm_msg = ":ok_hand: applied"
# Specifying an expiry for a note or warning makes no sense.
if infr_type in ("note", "warning"):
@@ -154,7 +154,7 @@ class InfractionScheduler(Scheduler):
self.schedule_task(infraction["id"], infraction)
except discord.HTTPException as e:
# Accordingly display that applying the infraction failed.
- confirm_msg = f":x: failed to apply"
+ confirm_msg = ":x: failed to apply"
expiry_msg = ""
log_content = ctx.author.mention
log_title = "failed to apply"
@@ -281,7 +281,7 @@ class InfractionScheduler(Scheduler):
log.warning(f"Failed to pardon {infr_type} infraction #{id_} for {user}.")
else:
- confirm_msg = f":ok_hand: pardoned"
+ confirm_msg = ":ok_hand: pardoned"
log_title = "pardoned"
log.info(f"Pardoned {infr_type} infraction #{id_} for {user}.")
@@ -353,7 +353,7 @@ class InfractionScheduler(Scheduler):
)
except discord.Forbidden:
log.warning(f"Failed to deactivate infraction #{id_} ({type_}): bot lacks permissions.")
- log_text["Failure"] = f"The bot lacks permissions to do this (role hierarchy?)"
+ log_text["Failure"] = "The bot lacks permissions to do this (role hierarchy?)"
log_content = mod_role.mention
except discord.HTTPException as e:
log.exception(f"Failed to deactivate infraction #{id_} ({type_})")
@@ -402,7 +402,7 @@ class InfractionScheduler(Scheduler):
# Send a log message to the mod log.
if send_log:
- log_title = f"expiration failed" if "Failure" in log_text else "expired"
+ log_title = "expiration failed" if "Failure" in log_text else "expired"
user = self.bot.get_user(user_id)
avatar = user.avatar_url_as(static_format="png") if user else None
diff --git a/bot/cogs/moderation/silence.py b/bot/cogs/moderation/silence.py
index 1ef3967a9..25febfa51 100644
--- a/bot/cogs/moderation/silence.py
+++ b/bot/cogs/moderation/silence.py
@@ -91,7 +91,7 @@ class Silence(commands.Cog):
await ctx.send(f"{Emojis.check_mark} silenced current channel for {duration} minute(s).")
await asyncio.sleep(duration*60)
- log.info(f"Unsilencing channel after set delay.")
+ log.info("Unsilencing channel after set delay.")
await ctx.invoke(self.unsilence)
@commands.command(aliases=("unhush",))
diff --git a/bot/cogs/stats.py b/bot/cogs/stats.py
index 14409ecb0..4ebb6423c 100644
--- a/bot/cogs/stats.py
+++ b/bot/cogs/stats.py
@@ -68,7 +68,7 @@ class Stats(Cog):
if member.guild.id != Guild.id:
return
- self.bot.stats.gauge(f"guild.total_members", len(member.guild.members))
+ self.bot.stats.gauge("guild.total_members", len(member.guild.members))
@Cog.listener()
async def on_member_leave(self, member: Member) -> None:
@@ -76,7 +76,7 @@ class Stats(Cog):
if member.guild.id != Guild.id:
return
- self.bot.stats.gauge(f"guild.total_members", len(member.guild.members))
+ self.bot.stats.gauge("guild.total_members", len(member.guild.members))
@Cog.listener()
async def on_member_update(self, _before: Member, after: Member) -> None:
diff --git a/bot/cogs/utils.py b/bot/cogs/utils.py
index 6b59d37c8..73b4a1c0a 100644
--- a/bot/cogs/utils.py
+++ b/bot/cogs/utils.py
@@ -253,8 +253,8 @@ class Utils(Cog):
async def send_pep_zero(self, ctx: Context) -> None:
"""Send information about PEP 0."""
pep_embed = Embed(
- title=f"**PEP 0 - Index of Python Enhancement Proposals (PEPs)**",
- description=f"[Link](https://www.python.org/dev/peps/)"
+ title="**PEP 0 - Index of Python Enhancement Proposals (PEPs)**",
+ description="[Link](https://www.python.org/dev/peps/)"
)
pep_embed.set_thumbnail(url=ICON_URL)
pep_embed.add_field(name="Status", value="Active")
diff --git a/bot/cogs/watchchannels/talentpool.py b/bot/cogs/watchchannels/talentpool.py
index 9a85c68c2..cd9c7e555 100644
--- a/bot/cogs/watchchannels/talentpool.py
+++ b/bot/cogs/watchchannels/talentpool.py
@@ -61,7 +61,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):
return
if isinstance(user, Member) and any(role.id in STAFF_ROLES for role in user.roles):
- await ctx.send(f":x: Nominating staff members, eh? Here's a cookie :cookie:")
+ await ctx.send(":x: Nominating staff members, eh? Here's a cookie :cookie:")
return
if not await self.fetch_user_cache():
diff --git a/bot/cogs/watchchannels/watchchannel.py b/bot/cogs/watchchannels/watchchannel.py
index 479820444..643cd46e4 100644
--- a/bot/cogs/watchchannels/watchchannel.py
+++ b/bot/cogs/watchchannels/watchchannel.py
@@ -82,7 +82,7 @@ class WatchChannel(metaclass=CogABCMeta):
exc = self._consume_task.exception()
if exc:
self.log.exception(
- f"The message queue consume task has failed with:",
+ "The message queue consume task has failed with:",
exc_info=exc
)
return False
@@ -146,7 +146,7 @@ class WatchChannel(metaclass=CogABCMeta):
try:
data = await self.bot.api_client.get(self.api_endpoint, params=self.api_default_params)
except ResponseCodeError as err:
- self.log.exception(f"Failed to fetch the watched users from the API", exc_info=err)
+ self.log.exception("Failed to fetch the watched users from the API", exc_info=err)
return False
self.watched_users = defaultdict(dict)
@@ -173,7 +173,7 @@ class WatchChannel(metaclass=CogABCMeta):
self.log.trace(f"Sleeping {BigBrotherConfig.log_delay} seconds before consuming message queue")
await asyncio.sleep(BigBrotherConfig.log_delay)
- self.log.trace(f"Started consuming the message queue")
+ self.log.trace("Started consuming the message queue")
# If the previous consumption Task failed, first consume the existing consumption_queue
if not self.consumption_queue:
@@ -208,7 +208,7 @@ class WatchChannel(metaclass=CogABCMeta):
await self.webhook.send(content=content, username=username, avatar_url=avatar_url, embed=embed)
except discord.HTTPException as exc:
self.log.exception(
- f"Failed to send a message to the webhook",
+ "Failed to send a message to the webhook",
exc_info=exc
)
@@ -254,7 +254,7 @@ class WatchChannel(metaclass=CogABCMeta):
)
except discord.HTTPException as exc:
self.log.exception(
- f"Failed to send an attachment to the webhook",
+ "Failed to send an attachment to the webhook",
exc_info=exc
)
@@ -326,13 +326,13 @@ class WatchChannel(metaclass=CogABCMeta):
def cog_unload(self) -> None:
"""Takes care of unloading the cog and canceling the consumption task."""
- self.log.trace(f"Unloading the cog")
+ self.log.trace("Unloading the cog")
if self._consume_task and not self._consume_task.done():
self._consume_task.cancel()
try:
self._consume_task.result()
except asyncio.CancelledError as e:
self.log.exception(
- f"The consume task was canceled. Messages may be lost.",
+ "The consume task was canceled. Messages may be lost.",
exc_info=e
)
diff --git a/bot/constants.py b/bot/constants.py
index 2ce5355be..b31a9c99e 100644
--- a/bot/constants.py
+++ b/bot/constants.py
@@ -15,7 +15,7 @@ import os
from collections.abc import Mapping
from enum import Enum
from pathlib import Path
-from typing import Dict, List
+from typing import Dict, List, Optional
import yaml
@@ -198,7 +198,18 @@ class Bot(metaclass=YAMLGetter):
prefix: str
token: str
- sentry_dsn: str
+ sentry_dsn: Optional[str]
+
+
+class Redis(metaclass=YAMLGetter):
+ section = "bot"
+ subsection = "redis"
+
+ host: str
+ port: int
+ password: Optional[str]
+ use_fakeredis: bool # If this is True, Bot will use fakeredis.aioredis
+
class Filter(metaclass=YAMLGetter):
section = "filter"
@@ -450,7 +461,7 @@ class Guild(metaclass=YAMLGetter):
class Keys(metaclass=YAMLGetter):
section = "keys"
- site_api: str
+ site_api: Optional[str]
class URLs(metaclass=YAMLGetter):
@@ -493,8 +504,8 @@ class Reddit(metaclass=YAMLGetter):
section = "reddit"
subreddits: list
- client_id: str
- secret: str
+ client_id: Optional[str]
+ secret: Optional[str]
class Wolfram(metaclass=YAMLGetter):
@@ -502,7 +513,7 @@ class Wolfram(metaclass=YAMLGetter):
user_limit_day: int
guild_limit_day: int
- key: str
+ key: Optional[str]
class AntiSpam(metaclass=YAMLGetter):
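
The new `Redis` section relies on the existing `YAMLGetter` metaclass, which is defined earlier in bot/constants.py and is not part of this diff. Roughly, such a metaclass resolves missing class attributes from the parsed YAML at lookup time; the following is a simplified, hypothetical reimplementation for illustration only, and the real one is more robust.

import yaml

_CONFIG = yaml.safe_load("""\
bot:
  redis:
    host: "redis"
    port: 6379
""")


class YAMLGetterSketch(type):
    """Resolve missing class attributes from the parsed config (sketch)."""

    def __getattr__(cls, name):
        node = _CONFIG[cls.section]
        subsection = cls.__dict__.get("subsection")
        if subsection:
            node = node[subsection]
        return node[name]


class Redis(metaclass=YAMLGetterSketch):
    section = "bot"
    subsection = "redis"


assert Redis.host == "redis"
assert Redis.port == 6379
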
diff --git a/bot/pagination.py b/bot/pagination.py
index b0c4b70e2..2aa3590ba 100644
--- a/bot/pagination.py
+++ b/bot/pagination.py
@@ -147,7 +147,7 @@ class LinePaginator(Paginator):
if not lines:
if exception_on_empty_embed:
- log.exception(f"Pagination asked for empty lines iterable")
+ log.exception("Pagination asked for empty lines iterable")
raise EmptyPaginatorEmbed("No lines to paginate")
log.debug("No lines to add to paginator, adding '(nothing to display)' message")
@@ -357,7 +357,7 @@ class ImagePaginator(Paginator):
if not pages:
if exception_on_empty_embed:
- log.exception(f"Pagination asked for empty image list")
+ log.exception("Pagination asked for empty image list")
raise EmptyPaginatorEmbed("No images to paginate")
log.debug("No images to add to paginator, adding '(no images to display)' message")
diff --git a/bot/utils/__init__.py b/bot/utils/__init__.py
index 9b32e515d..c5a12d5e3 100644
--- a/bot/utils/__init__.py
+++ b/bot/utils/__init__.py
@@ -2,6 +2,10 @@ from abc import ABCMeta
from discord.ext.commands import CogMeta
+from bot.utils.redis_cache import RedisCache
+
+__all__ = ['RedisCache', 'CogABCMeta']
+
class CogABCMeta(CogMeta, ABCMeta):
"""Metaclass for ABCs meant to be implemented as Cogs."""
diff --git a/bot/utils/messages.py b/bot/utils/messages.py
index e969ee590..de8e186f3 100644
--- a/bot/utils/messages.py
+++ b/bot/utils/messages.py
@@ -100,7 +100,7 @@ async def send_attachments(
log.warning(f"{failure_msg} with status {e.status}.")
if link_large and large:
- desc = f"\n".join(f"[{attachment.filename}]({attachment.url})" for attachment in large)
+ desc = "\n".join(f"[{attachment.filename}]({attachment.url})" for attachment in large)
embed = Embed(description=desc)
embed.set_footer(text="Attachments exceed upload size limit.")
diff --git a/bot/utils/redis_cache.py b/bot/utils/redis_cache.py
new file mode 100644
index 000000000..de80cee84
--- /dev/null
+++ b/bot/utils/redis_cache.py
@@ -0,0 +1,409 @@
+from __future__ import annotations
+
+import asyncio
+import logging
+from functools import partialmethod
+from typing import Any, Dict, ItemsView, Optional, Tuple, Union
+
+from bot.bot import Bot
+
+log = logging.getLogger(__name__)
+
+# Type aliases
+RedisKeyType = Union[str, int]
+RedisValueType = Union[str, int, float]
+RedisKeyOrValue = Union[RedisKeyType, RedisValueType]
+
+# Prefix tuples
+_PrefixTuple = Tuple[Tuple[str, Any], ...]
+_VALUE_PREFIXES = (
+ ("f|", float),
+ ("i|", int),
+ ("s|", str),
+)
+_KEY_PREFIXES = (
+ ("i|", int),
+ ("s|", str),
+)
+
+
+class NoBotInstanceError(RuntimeError):
+ """Raised when RedisCache is created without an available bot instance on the owner class."""
+
+
+class NoNamespaceError(RuntimeError):
+ """Raised when RedisCache has no namespace, for example if it is not assigned to a class attribute."""
+
+
+class NoParentInstanceError(RuntimeError):
+ """Raised when the parent instance is available, for example if called by accessing the parent class directly."""
+
+
+class RedisCache:
+ """
+ A simplified interface for a Redis connection.
+
+ We implement several convenience methods that behave much like their dict
+ counterparts, and should be familiar to Python users. The biggest difference is
+ that all the public methods in this class are coroutines and must be awaited.
+
+ Because of limitations in Redis, this cache will only accept strings, integers and
+ floats both for keys and values.
+
+ Please note that this class MUST be created as a class attribute, and that that class
+ must also contain an attribute with an instance of our Bot. See `__get__` and `__set_name__`
+ for more information about how this works.
+
+ Simple example for how to use this:
+
+ class SomeCog(Cog):
+ # To initialize a valid RedisCache, just add it as a class attribute here.
+ # Do not add it to the __init__ method or anywhere else, it MUST be a class
+ # attribute. Do not pass any parameters.
+ cache = RedisCache()
+
+ async def my_method(self):
+
+ # Now we're ready to use the RedisCache.
+ # One thing to note here is that this will not work unless
+ # we access self.cache through an _instance_ of this class.
+ #
+ # For example, attempting to use SomeCog.cache will _not_ work,
+ # you _must_ instantiate the class first and use that instance.
+ #
+ # Now we can store some stuff in the cache just by doing this.
+ # This data will persist through restarts!
+ await self.cache.set("key", "value")
+
+ # To get the data, simply do this.
+ value = await self.cache.get("key")
+
+ # Other methods work more or less like a dictionary.
+ # Checking if something is in the cache
+ await self.cache.contains("key")
+
+ # Iterating the cache: .items() is a coroutine returning an
+ # ItemsView, so await it first and iterate over the result.
+ for key, value in await self.cache.items():
+ print(value)
+
+ # We can even iterate in a comprehension!
+ consumed = [value for key, value in await self.cache.items()]
+ """
+
+ _namespaces = []
+
+ def __init__(self) -> None:
+ """Initialize the RedisCache."""
+ self._namespace = None
+ self.bot = None
+ self._increment_lock = None
+
+ def _set_namespace(self, namespace: str) -> None:
+ """Try to set the namespace, but do not permit collisions."""
+ # We need a unique namespace, to prevent collisions. This loop
+ # will try appending underscores to the end of the namespace until
+ # it finds one that is unique.
+ #
+ # For example, if `john` and `john_` are both taken, the namespace will
+ # be `john__` at the end of this loop.
+ while namespace in self._namespaces:
+ namespace += "_"
+
+ log.trace(f"RedisCache setting namespace to {self._namespace}")
+ self._namespaces.append(namespace)
+ self._namespace = namespace
+
+ @staticmethod
+ def _to_typestring(key_or_value: RedisKeyOrValue, prefixes: _PrefixTuple) -> str:
+ """Turn a valid Redis type into a typestring."""
+ for prefix, _type in prefixes:
+ if isinstance(key_or_value, _type):
+ return f"{prefix}{key_or_value}"
+ raise TypeError(f"RedisCache._to_typestring only supports the following: {prefixes}.")
+
+ @staticmethod
+ def _from_typestring(key_or_value: Union[bytes, str], prefixes: _PrefixTuple) -> RedisKeyOrValue:
+ """Deserialize a typestring into a valid Redis type."""
+ # Stuff that comes out of Redis will be bytestrings, so let's decode those.
+ if isinstance(key_or_value, bytes):
+ key_or_value = key_or_value.decode('utf-8')
+
+ # Now we convert our unicode string back into the type it originally was.
+ for prefix, _type in prefixes:
+ if key_or_value.startswith(prefix):
+ return _type(key_or_value[len(prefix):])
+ raise TypeError(f"RedisCache._from_typestring only supports the following: {prefixes}.")
+
+ # Add some nice partials to call our generic typestring converters.
+ # These are basically methods that will fill in some of the parameters for you, so that
+ # any call to _key_to_typestring will be like calling _to_typestring with the two parameters
+ # at `prefixes` and `types_string` pre-filled.
+ #
+ # See https://docs.python.org/3/library/functools.html#functools.partialmethod
+ _key_to_typestring = partialmethod(_to_typestring, prefixes=_KEY_PREFIXES)
+ _value_to_typestring = partialmethod(_to_typestring, prefixes=_VALUE_PREFIXES)
+ _key_from_typestring = partialmethod(_from_typestring, prefixes=_KEY_PREFIXES)
+ _value_from_typestring = partialmethod(_from_typestring, prefixes=_VALUE_PREFIXES)
+
+ def _dict_from_typestring(self, dictionary: Dict) -> Dict:
+ """Turns all contents of a dict into valid Redis types."""
+ return {self._key_from_typestring(key): self._value_from_typestring(value) for key, value in dictionary.items()}
+
+ def _dict_to_typestring(self, dictionary: Dict) -> Dict:
+ """Turns all contents of a dict into typestrings."""
+ return {self._key_to_typestring(key): self._value_to_typestring(value) for key, value in dictionary.items()}
+
+ async def _validate_cache(self) -> None:
+ """Validate that the RedisCache is ready to be used."""
+ if self._namespace is None:
+ error_message = (
+ "Critical error: RedisCache has no namespace. "
+ "This object must be initialized as a class attribute."
+ )
+ log.error(error_message)
+ raise NoNamespaceError(error_message)
+
+ if self.bot is None:
+ error_message = (
+ "Critical error: RedisCache has no `Bot` instance. "
+ "This happens when the class RedisCache was created in doesn't "
+ "have a Bot instance. Please make sure that you're instantiating "
+ "the RedisCache inside a class that has a Bot instance attribute."
+ )
+ log.error(error_message)
+ raise NoBotInstanceError(error_message)
+
+ if not self.bot.redis_closed:
+ await self.bot.redis_ready.wait()
+
+ def __set_name__(self, owner: Any, attribute_name: str) -> None:
+ """
+ Set the namespace to Class.attribute_name.
+
+ Called automatically when this class is constructed inside a class as an attribute.
+
+ This class MUST be created as a class attribute in a class, otherwise it will raise
+ exceptions whenever a method is used. This is because it uses this method to create
+ a namespace like `MyCog.my_class_attribute` which is used as a hash name when we store
+ stuff in Redis, to prevent collisions.
+ """
+ self._set_namespace(f"{owner.__name__}.{attribute_name}")
+
+ def __get__(self, instance: RedisCache, owner: Any) -> RedisCache:
+ """
+ Called whenever this RedisCache class attribute is accessed.
+
+ The class this object is instantiated in must contain an attribute with an
+ instance of Bot. This is because Bot contains our redis_session, which is
+ the mechanism by which we will communicate with the Redis server.
+
+ Any attempt to use RedisCache in a class that does not have a Bot instance
+ will fail. It is mostly intended to be used inside of a Cog, although theoretically
+ it should work in any class that has a Bot instance.
+ """
+ if self.bot:
+ return self
+
+ if self._namespace is None:
+ error_message = "RedisCache must be a class attribute."
+ log.error(error_message)
+ raise NoNamespaceError(error_message)
+
+ if instance is None:
+ error_message = (
+ "You must access the RedisCache instance through the cog instance "
+ "before accessing it using the cog's class object."
+ )
+ log.error(error_message)
+ raise NoParentInstanceError(error_message)
+
+ for attribute in vars(instance).values():
+ if isinstance(attribute, Bot):
+ self.bot = attribute
+ self._redis = self.bot.redis_session
+ return self
+ else:
+ error_message = (
+ "Critical error: RedisCache has no `Bot` instance. "
+ "This happens when the class RedisCache was created in doesn't "
+ "have a Bot instance. Please make sure that you're instantiating "
+ "the RedisCache inside a class that has a Bot instance attribute."
+ )
+ log.error(error_message)
+ raise NoBotInstanceError(error_message)
+
+ def __repr__(self) -> str:
+ """Return a beautiful representation of this object instance."""
+ return f"RedisCache(namespace={self._namespace!r})"
+
+ async def set(self, key: RedisKeyType, value: RedisValueType) -> None:
+ """Store an item in the Redis cache."""
+ await self._validate_cache()
+
+ # Convert to a typestring and then set it
+ key = self._key_to_typestring(key)
+ value = self._value_to_typestring(value)
+
+ log.trace(f"Setting {key} to {value}.")
+ await self._redis.hset(self._namespace, key, value)
+
+ async def get(self, key: RedisKeyType, default: Optional[RedisValueType] = None) -> Optional[RedisValueType]:
+ """Get an item from the Redis cache."""
+ await self._validate_cache()
+ key = self._key_to_typestring(key)
+
+ log.trace(f"Attempting to retrieve {key}.")
+ value = await self._redis.hget(self._namespace, key)
+
+ if value is None:
+ log.trace(f"Value not found, returning default value {default}")
+ return default
+ else:
+ value = self._value_from_typestring(value)
+ log.trace(f"Value found, returning value {value}")
+ return value
+
+ async def delete(self, key: RedisKeyType) -> None:
+ """
+ Delete an item from the Redis cache.
+
+ If we try to delete a key that does not exist, it will simply be ignored.
+
+ See https://redis.io/commands/hdel for more info on how this works.
+ """
+ await self._validate_cache()
+ key = self._key_to_typestring(key)
+
+ log.trace(f"Attempting to delete {key}.")
+ return await self._redis.hdel(self._namespace, key)
+
+ async def contains(self, key: RedisKeyType) -> bool:
+ """
+ Check if a key exists in the Redis cache.
+
+ Return True if the key exists, otherwise False.
+ """
+ await self._validate_cache()
+ key = self._key_to_typestring(key)
+ exists = await self._redis.hexists(self._namespace, key)
+
+ log.trace(f"Testing if {key} exists in the RedisCache - Result is {exists}")
+ return exists
+
+ async def items(self) -> ItemsView:
+ """
+ Fetch all the key/value pairs in the cache.
+
+ Returns a normal ItemsView, like you would get from dict.items().
+
+ Keep in mind that these items are just a _copy_ of the data in the
+ RedisCache - any changes you make to them will not be reflected
+ in the RedisCache itself. If you want to change these, you need
+ to make a .set call.
+
+ Example:
+ items = await my_cache.items()
+ for key, value in items:
+ # Iterate like a normal dictionary
+ """
+ await self._validate_cache()
+ items = self._dict_from_typestring(
+ await self._redis.hgetall(self._namespace)
+ ).items()
+
+ log.trace(f"Retrieving all key/value pairs from cache, total of {len(items)} items.")
+ return items
+
+ async def length(self) -> int:
+ """Return the number of items in the Redis cache."""
+ await self._validate_cache()
+ number_of_items = await self._redis.hlen(self._namespace)
+ log.trace(f"Returning length. Result is {number_of_items}.")
+ return number_of_items
+
+ async def to_dict(self) -> Dict:
+ """Convert to dict and return."""
+ return {key: value for key, value in await self.items()}
+
+ async def clear(self) -> None:
+ """Deletes the entire hash from the Redis cache."""
+ await self._validate_cache()
+ log.trace("Clearing the cache of all key/value pairs.")
+ await self._redis.delete(self._namespace)
+
+ async def pop(self, key: RedisKeyType, default: Optional[RedisValueType] = None) -> RedisValueType:
+ """Get the item, remove it from the cache, and provide a default if not found."""
+ log.trace(f"Attempting to pop {key}.")
+ value = await self.get(key, default)
+
+ log.trace(
+ f"Attempting to delete item with key '{key}' from the cache. "
+ "If this key doesn't exist, nothing will happen."
+ )
+ await self.delete(key)
+
+ return value
+
+ async def update(self, items: Dict[RedisKeyType, RedisValueType]) -> None:
+ """
+ Update the Redis cache with multiple values.
+
+ This works exactly like dict.update from a normal dictionary. You pass
+ a dictionary with one or more key/value pairs into this method. If the keys
+ do not exist in the RedisCache, they are created. If they do exist, the values
+ are updated with the new ones from `items`.
+
+ Please note that keys and the values in the `items` dictionary
+ must consist of valid RedisKeyTypes and RedisValueTypes.
+ """
+ await self._validate_cache()
+ log.trace(f"Updating the cache with the following items:\n{items}")
+ await self._redis.hmset_dict(self._namespace, self._dict_to_typestring(items))
+
+ async def increment(self, key: RedisKeyType, amount: Union[int, float] = 1) -> None:
+ """
+ Increment the value by `amount`.
+
+ This works for both floats and ints, but will raise a TypeError
+ if you try to do it for any other type of value.
+
+ This also supports negative amounts, although it would provide better
+ readability to use .decrement() for that.
+ """
+ log.trace(f"Attempting to increment/decrement the value with the key {key} by {amount}.")
+
+ # We initialize the lock here, because we need to ensure we get it
+ # running on the same loop as the calling coroutine.
+ #
+ # If we initialized the lock in the __init__, the loop that the coroutine this method
+ # would be called from might not exist yet, and so the lock would be on a different
+ # loop, which would raise RuntimeErrors.
+ if self._increment_lock is None:
+ self._increment_lock = asyncio.Lock()
+
+ # Since this has several API calls, we need a lock to prevent race conditions
+ async with self._increment_lock:
+ value = await self.get(key)
+
+ # Can't increment a non-existing value
+ if value is None:
+ error_message = "The provided key does not exist!"
+ log.error(error_message)
+ raise KeyError(error_message)
+
+ # If it does exist, and it's an int or a float, increment and set it.
+ if isinstance(value, int) or isinstance(value, float):
+ value += amount
+ await self.set(key, value)
+ else:
+ error_message = "You may only increment or decrement values that are integers or floats."
+ log.error(error_message)
+ raise TypeError(error_message)
+
+ async def decrement(self, key: RedisKeyType, amount: Union[int, float] = 1) -> None:
+ """
+ Decrement the value by `amount`.
+
+ Basically just does the opposite of .increment.
+ """
+ await self.increment(key, -amount)
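
The typestring prefixes near the top of redis_cache.py are the core of the serialization scheme: Redis only stores strings and bytes, so every key and value carries a short type tag ("f|", "i|" or "s|") that lets the original Python type be rebuilt on the way out. A standalone round-trip sketch follows; the names are illustrative, and the real class wires these helpers through functools.partialmethod.

from typing import Union

VALUE_PREFIXES = (("f|", float), ("i|", int), ("s|", str))


def to_typestring(value: Union[str, int, float]) -> str:
    """Tag a value with a prefix identifying its type."""
    for prefix, type_ in VALUE_PREFIXES:
        if isinstance(value, type_):
            return f"{prefix}{value}"
    raise TypeError(f"Unsupported type: {type(value)!r}")


def from_typestring(raw: Union[bytes, str]) -> Union[str, int, float]:
    """Rebuild the original value from its tagged form."""
    if isinstance(raw, bytes):  # Redis returns bytestrings
        raw = raw.decode("utf-8")
    for prefix, type_ in VALUE_PREFIXES:
        if raw.startswith(prefix):
            return type_(raw[len(prefix):])
    raise TypeError(f"Invalid typestring: {raw!r}")


assert from_typestring(to_typestring(3.5)) == 3.5
assert from_typestring(b"i|42") == 42
assert from_typestring(to_typestring("10")) == "10"  # stays a str, not an int
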
diff --git a/config-default.yml b/config-default.yml
index 7edfb131f..2c85f5ef3 100644
--- a/config-default.yml
+++ b/config-default.yml
@@ -3,6 +3,12 @@ bot:
token: !ENV "BOT_TOKEN"
sentry_dsn: !ENV "BOT_SENTRY_DSN"
+ redis:
+ host: "redis"
+ port: 6379
+ password: !ENV "REDIS_PASSWORD"
+ use_fakeredis: false
+
stats:
statsd_host: "graphite"
presence_update_timeout: 300
diff --git a/docker-compose.yml b/docker-compose.yml
index 11deceae8..9884e35f0 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -12,6 +12,11 @@ services:
POSTGRES_PASSWORD: pysite
POSTGRES_USER: pysite
+ redis:
+ image: redis:5.0.9
+ ports:
+ - "127.0.0.1:6379:6379"
+
web:
image: pythondiscord/site:latest
command: ["run", "--debug"]
@@ -41,6 +46,7 @@ services:
tty: true
depends_on:
- web
+ - redis
environment:
BOT_TOKEN: ${BOT_TOKEN}
BOT_API_KEY: badbot13m0n8f570f942013fc818f234916ca531
diff --git a/tests/bot/cogs/test_duck_pond.py b/tests/bot/cogs/test_duck_pond.py
index 7e6bfc748..a8c0107c6 100644
--- a/tests/bot/cogs/test_duck_pond.py
+++ b/tests/bot/cogs/test_duck_pond.py
@@ -45,7 +45,7 @@ class DuckPondTests(base.LoggingTestsMixin, unittest.IsolatedAsyncioTestCase):
self.assertEqual(cog.bot, bot)
self.assertEqual(cog.webhook_id, constants.Webhooks.duck_pond)
- bot.loop.create_loop.called_once_with(cog.fetch_webhook())
+ bot.loop.create_task.assert_called_once_with(cog.fetch_webhook())
def test_fetch_webhook_succeeds_without_connectivity_issues(self):
"""The `fetch_webhook` method waits until `READY` event and sets the `webhook` attribute."""
diff --git a/tests/bot/cogs/test_snekbox.py b/tests/bot/cogs/test_snekbox.py
index 14299e766..cf9adbee0 100644
--- a/tests/bot/cogs/test_snekbox.py
+++ b/tests/bot/cogs/test_snekbox.py
@@ -21,7 +21,10 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
"""Post the eval code to the URLs.snekbox_eval_api endpoint."""
resp = MagicMock()
resp.json = AsyncMock(return_value="return")
- self.bot.http_session.post().__aenter__.return_value = resp
+
+ context_manager = MagicMock()
+ context_manager.__aenter__.return_value = resp
+ self.bot.http_session.post.return_value = context_manager
self.assertEqual(await self.cog.post_eval("import random"), "return")
self.bot.http_session.post.assert_called_with(
@@ -41,7 +44,10 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
key = "MarkDiamond"
resp = MagicMock()
resp.json = AsyncMock(return_value={"key": key})
- self.bot.http_session.post().__aenter__.return_value = resp
+
+ context_manager = MagicMock()
+ context_manager.__aenter__.return_value = resp
+ self.bot.http_session.post.return_value = context_manager
self.assertEqual(
await self.cog.upload_output("My awesome output"),
@@ -57,7 +63,10 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):
"""Output upload gracefully fallback if the upload fail."""
resp = MagicMock()
resp.json = AsyncMock(side_effect=Exception)
- self.bot.http_session.post().__aenter__.return_value = resp
+
+ context_manager = MagicMock()
+ context_manager.__aenter__.return_value = resp
+ self.bot.http_session.post.return_value = context_manager
log = logging.getLogger("bot.cogs.snekbox")
with self.assertLogs(logger=log, level='ERROR'):
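All three snekbox changes follow the same pattern: configure `post.return_value` instead of calling `post()` during setup, since the latter records a spurious call that skews `assert_called_with`. A condensed sketch with illustrative names:

    from unittest.mock import AsyncMock, MagicMock

    session = MagicMock()
    resp = MagicMock()
    resp.json = AsyncMock(return_value={"key": "value"})

    # On Python 3.8+, MagicMock configures __aenter__ as an AsyncMock automatically,
    # so `async with session.post(...)` yields `resp`.
    context_manager = MagicMock()
    context_manager.__aenter__.return_value = resp
    session.post.return_value = context_manager

    # At this point `session.post` has zero recorded calls, so a later
    # `session.post.assert_called_once_with(...)` reflects only the code under test.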
diff --git a/tests/bot/test_constants.py b/tests/bot/test_constants.py
index dae7c066c..f10d6fbe8 100644
--- a/tests/bot/test_constants.py
+++ b/tests/bot/test_constants.py
@@ -1,14 +1,40 @@
import inspect
+import typing
import unittest
from bot import constants
+def is_annotation_instance(value: typing.Any, annotation: typing.Any) -> bool:
+ """
+ Return True if `value` is an instance of the type represented by `annotation`.
+
+ This doesn't account for things like Unions or checking for homogeneous types in collections.
+ """
+ origin = typing.get_origin(annotation)
+
+ # A bare alias like `typing.List` may be used; in that case, for the
+ # assertion to pass, the type needs to be normalised to the runtime type, e.g. `list`.
+ # `get_origin()` does this normalisation for us.
+ type_ = annotation if origin is None else origin
+
+ return isinstance(value, type_)
+
+
+def is_any_instance(value: typing.Any, types: typing.Collection) -> bool:
+ """Return True if `value` is an instance of any type in `types`."""
+ for type_ in types:
+ if is_annotation_instance(value, type_):
+ return True
+
+ return False
+
+
class ConstantsTests(unittest.TestCase):
"""Tests for our constants."""
def test_section_configuration_matches_type_specification(self):
- """The section annotations should match the actual types of the sections."""
+ """"The section annotations should match the actual types of the sections."""
sections = (
cls
@@ -17,10 +43,15 @@ class ConstantsTests(unittest.TestCase):
)
for section in sections:
for name, annotation in section.__annotations__.items():
- with self.subTest(section=section, name=name, annotation=annotation):
+ with self.subTest(section=section.__name__, name=name, annotation=annotation):
value = getattr(section, name)
+ origin = typing.get_origin(annotation)
+ annotation_args = typing.get_args(annotation)
+ failure_msg = f"{value} is not an instance of {annotation}"
- if getattr(annotation, '_name', None) in ('Dict', 'List'):
- self.skipTest("Cannot validate containers yet.")
-
- self.assertIsInstance(value, annotation)
+ if origin is typing.Union:
+ is_instance = is_any_instance(value, annotation_args)
+ self.assertTrue(is_instance, failure_msg)
+ else:
+ is_instance = is_annotation_instance(value, annotation)
+ self.assertTrue(is_instance, failure_msg)
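For reference, the `typing.get_origin`/`typing.get_args` behaviour (Python 3.8+) that the new helpers rely on:

    import typing

    assert typing.get_origin(typing.List[int]) is list
    assert typing.get_origin(typing.List) is list      # bare aliases normalise too
    assert typing.get_origin(int) is None              # plain classes have no origin
    assert typing.get_origin(typing.Union[int, str]) is typing.Union
    assert typing.get_args(typing.Union[int, str]) == (int, str)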
diff --git a/tests/bot/utils/test_redis_cache.py b/tests/bot/utils/test_redis_cache.py
new file mode 100644
index 000000000..8c1a40640
--- /dev/null
+++ b/tests/bot/utils/test_redis_cache.py
@@ -0,0 +1,273 @@
+import asyncio
+import unittest
+
+import fakeredis.aioredis
+
+from bot.utils import RedisCache
+from bot.utils.redis_cache import NoBotInstanceError, NoNamespaceError, NoParentInstanceError
+from tests import helpers
+
+
+class RedisCacheTests(unittest.IsolatedAsyncioTestCase):
+ """Tests the RedisCache class from utils.redis_dict.py."""
+
+ async def asyncSetUp(self): # noqa: N802
+ """Sets up the objects that only have to be initialized once."""
+ self.bot = helpers.MockBot()
+ self.bot.redis_session = await fakeredis.aioredis.create_redis_pool()
+
+ # Defining the cog class inside asyncSetUp gives every test method a
+ # clean class and, with it, a fresh `_increment_lock` for the new
+ # event loop; test_increment_lock relies on this.
+ class DummyCog:
+ """A dummy cog, for dummies."""
+
+ redis = RedisCache()
+
+ def __init__(self, bot: helpers.MockBot):
+ self.bot = bot
+
+ self.cog = DummyCog(self.bot)
+
+ await self.cog.redis.clear()
+
+ def test_class_attribute_namespace(self):
+ """Test that RedisDict creates a namespace automatically for class attributes."""
+ self.assertEqual(self.cog.redis._namespace, "DummyCog.redis")
+
+ async def test_class_attribute_required(self):
+ """Test that errors are raised when not assigned as a class attribute."""
+ bad_cache = RedisCache()
+ self.assertIs(bad_cache._namespace, None)
+
+ with self.assertRaises(RuntimeError):
+ await bad_cache.set("test", "me_up_deadman")
+
+ def test_namespace_collision(self):
+ """Test that we prevent colliding namespaces."""
+ bob_cache_1 = RedisCache()
+ bob_cache_1._set_namespace("BobRoss")
+ self.assertEqual(bob_cache_1._namespace, "BobRoss")
+
+ bob_cache_2 = RedisCache()
+ bob_cache_2._set_namespace("BobRoss")
+ self.assertEqual(bob_cache_2._namespace, "BobRoss_")
+
+ async def test_set_get_item(self):
+ """Test that users can set and get items from the RedisDict."""
+ test_cases = (
+ ('favorite_fruit', 'melon'),
+ ('favorite_number', 86),
+ ('favorite_fraction', 86.54)
+ )
+
+ # Test that we can get and set different types.
+ for test in test_cases:
+ await self.cog.redis.set(*test)
+ self.assertEqual(await self.cog.redis.get(test[0]), test[1])
+
+ # Test that .get allows a default value
+ self.assertEqual(await self.cog.redis.get('favorite_nothing', "bearclaw"), "bearclaw")
+
+ async def test_set_item_type(self):
+ """Test that .set rejects keys and values that are not permitted."""
+ fruits = ["lemon", "melon", "apple"]
+
+ with self.assertRaises(TypeError):
+ await self.cog.redis.set(fruits, "nice")
+
+ with self.assertRaises(TypeError):
+ await self.cog.redis.set(4.23, "nice")
+
+ async def test_delete_item(self):
+ """Test that .delete allows us to delete stuff from the RedisCache."""
+ # Add an item and verify that it gets added
+ await self.cog.redis.set("internet", "firetruck")
+ self.assertEqual(await self.cog.redis.get("internet"), "firetruck")
+
+ # Delete that item and verify that it gets deleted
+ await self.cog.redis.delete("internet")
+ self.assertIs(await self.cog.redis.get("internet"), None)
+
+ async def test_contains(self):
+ """Test that we can check membership with .contains."""
+ await self.cog.redis.set('favorite_country', "Burkina Faso")
+
+ self.assertIs(await self.cog.redis.contains('favorite_country'), True)
+ self.assertIs(await self.cog.redis.contains('favorite_dentist'), False)
+
+ async def test_items(self):
+ """Test that the RedisDict can be iterated."""
+ # Set up our test cases in the Redis cache
+ test_cases = [
+ ('favorite_turtle', 'Donatello'),
+ ('second_favorite_turtle', 'Leonardo'),
+ ('third_favorite_turtle', 'Raphael'),
+ ]
+ for key, value in test_cases:
+ await self.cog.redis.set(key, value)
+
+ # Copy the items into a regular list, which is easier to compare.
+ redis_items = list(await self.cog.redis.items())
+
+ # These sequences are probably in the same order now, but probably
+ # isn't good enough for tests. Let's not rely on .hgetall always
+ # returning things in sequence, and just sort both lists to be safe.
+ redis_items = sorted(redis_items)
+ test_cases = sorted(test_cases)
+
+ # If these are equal now, everything works fine.
+ self.assertSequenceEqual(test_cases, redis_items)
+
+ async def test_length(self):
+ """Test that we can get the correct .length from the RedisDict."""
+ await self.cog.redis.set('one', 1)
+ await self.cog.redis.set('two', 2)
+ await self.cog.redis.set('three', 3)
+ self.assertEqual(await self.cog.redis.length(), 3)
+
+ await self.cog.redis.set('four', 4)
+ self.assertEqual(await self.cog.redis.length(), 4)
+
+ async def test_to_dict(self):
+ """Test that the .to_dict method returns a workable dictionary copy."""
+ copy = await self.cog.redis.to_dict()
+ local_copy = {key: value for key, value in await self.cog.redis.items()}
+ self.assertIs(type(copy), dict)
+ self.assertDictEqual(copy, local_copy)
+
+ async def test_clear(self):
+ """Test that the .clear method removes the entire hash."""
+ await self.cog.redis.set('teddy', 'with me')
+ await self.cog.redis.set('in my dreams', 'you have a weird hat')
+ self.assertEqual(await self.cog.redis.length(), 2)
+
+ await self.cog.redis.clear()
+ self.assertEqual(await self.cog.redis.length(), 0)
+
+ async def test_pop(self):
+ """Test that we can .pop an item from the RedisDict."""
+ await self.cog.redis.set('john', 'was afraid')
+
+ self.assertEqual(await self.cog.redis.pop('john'), 'was afraid')
+ self.assertEqual(await self.cog.redis.pop('pete', 'breakneck'), 'breakneck')
+ self.assertEqual(await self.cog.redis.length(), 0)
+
+ async def test_update(self):
+ """Test that we can .update the RedisDict with multiple items."""
+ await self.cog.redis.set("reckfried", "lona")
+ await self.cog.redis.set("bel air", "prince")
+ await self.cog.redis.update({
+ "reckfried": "jona",
+ "mega": "hungry, though",
+ })
+
+ result = {
+ "reckfried": "jona",
+ "bel air": "prince",
+ "mega": "hungry, though",
+ }
+ self.assertDictEqual(await self.cog.redis.to_dict(), result)
+
+ def test_typestring_conversion(self):
+ """Test the typestring-related helper functions."""
+ conversion_tests = (
+ (12, "i|12"),
+ (12.4, "f|12.4"),
+ ("cowabunga", "s|cowabunga"),
+ )
+
+ # Test conversion to typestring
+ for _input, expected in conversion_tests:
+ self.assertEqual(self.cog.redis._value_to_typestring(_input), expected)
+
+ # Test conversion from typestrings
+ for _input, expected in conversion_tests:
+ self.assertEqual(self.cog.redis._value_from_typestring(expected), _input)
+
+ # Test that exceptions are raised on invalid input.
+ # Each call needs its own context manager; inside a single `with`,
+ # the second statement would never run once the first raises.
+ with self.assertRaises(TypeError):
+ self.cog.redis._value_to_typestring(["internet"])
+ with self.assertRaises(TypeError):
+ self.cog.redis._value_from_typestring("o|firedog")
+
+ async def test_increment_decrement(self):
+ """Test .increment and .decrement methods."""
+ await self.cog.redis.set("entropic", 5)
+ await self.cog.redis.set("disentropic", 12.5)
+
+ # Test default increment
+ await self.cog.redis.increment("entropic")
+ self.assertEqual(await self.cog.redis.get("entropic"), 6)
+
+ # Test default decrement
+ await self.cog.redis.decrement("entropic")
+ self.assertEqual(await self.cog.redis.get("entropic"), 5)
+
+ # Test float increment with float
+ await self.cog.redis.increment("disentropic", 2.0)
+ self.assertEqual(await self.cog.redis.get("disentropic"), 14.5)
+
+ # Test float increment with int
+ await self.cog.redis.increment("disentropic", 2)
+ self.assertEqual(await self.cog.redis.get("disentropic"), 16.5)
+
+ # Test negative increments, because why not.
+ await self.cog.redis.increment("entropic", -5)
+ self.assertEqual(await self.cog.redis.get("entropic"), 0)
+
+ # Negative decrements? Sure.
+ await self.cog.redis.decrement("entropic", -5)
+ self.assertEqual(await self.cog.redis.get("entropic"), 5)
+
+ # What if we use a negative float to decrement an int?
+ # This should convert the value to a float.
+ await self.cog.redis.decrement("entropic", -2.5)
+ self.assertEqual(await self.cog.redis.get("entropic"), 7.5)
+
+ # Let's test that they raise the right errors
+ with self.assertRaises(KeyError):
+ await self.cog.redis.increment("doesn't_exist!")
+
+ await self.cog.redis.set("stringthing", "stringthing")
+ with self.assertRaises(TypeError):
+ await self.cog.redis.increment("stringthing")
+
+ async def test_increment_lock(self):
+ """Test that we can't produce a race condition in .increment."""
+ await self.cog.redis.set("test_key", 0)
+ tasks = []
+
+ # Increment this a lot in different tasks
+ for _ in range(100):
+ task = asyncio.create_task(
+ self.cog.redis.increment("test_key", 1)
+ )
+ tasks.append(task)
+ await asyncio.gather(*tasks)
+
+ # Confirm that the value has been incremented the exact right number of times.
+ value = await self.cog.redis.get("test_key")
+ self.assertEqual(value, 100)
+
+ async def test_exceptions_raised(self):
+ """Testing that the various RuntimeErrors are reachable."""
+ class MyCog:
+ cache = RedisCache()
+
+ def __init__(self):
+ self.other_cache = RedisCache()
+
+ cog = MyCog()
+
+ # Raises "No Bot instance"
+ with self.assertRaises(NoBotInstanceError):
+ await cog.cache.get("john")
+
+ # Raises "RedisCache has no namespace"
+ with self.assertRaises(NoNamespaceError):
+ await cog.other_cache.get("was")
+
+ # Raises "You must access the RedisCache instance through the cog instance"
+ with self.assertRaises(NoParentInstanceError):
+ await MyCog.cache.get("afraid")
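These tests run against fakeredis, an in-memory aioredis-compatible stand-in, so real command round-trips (including the typestring encoding checked above) are exercised without a Redis server. A standalone sketch; the hash name mirrors the DummyCog namespace:

    import asyncio

    import fakeredis.aioredis


    async def demo() -> None:
        redis = await fakeredis.aioredis.create_redis_pool()
        await redis.hset("DummyCog.redis", "favorite_fruit", "s|melon")
        print(await redis.hgetall("DummyCog.redis", encoding="utf-8"))
        # {'favorite_fruit': 's|melon'}
        redis.close()
        await redis.wait_closed()

    asyncio.run(demo())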
diff --git a/tests/helpers.py b/tests/helpers.py
index 2b79a6c2a..13283339b 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -4,12 +4,15 @@ import collections
import itertools
import logging
import unittest.mock
+from asyncio import AbstractEventLoop
from typing import Iterable, Optional
import discord
+from aiohttp import ClientSession
from discord.ext.commands import Context
from bot.api import APIClient
+from bot.async_stats import AsyncStatsClient
from bot.bot import Bot
@@ -264,10 +267,16 @@ class MockAPIClient(CustomMockMixin, unittest.mock.MagicMock):
spec_set = APIClient
-# Create a Bot instance to get a realistic MagicMock of `discord.ext.commands.Bot`
-bot_instance = Bot(command_prefix=unittest.mock.MagicMock())
-bot_instance.http_session = None
-bot_instance.api_client = None
+def _get_mock_loop() -> unittest.mock.Mock:
+ """Return a mocked asyncio.AbstractEventLoop."""
+ loop = unittest.mock.create_autospec(spec=AbstractEventLoop, spec_set=True)
+
+ # Since calling `create_task` on our MockBot does not actually schedule the coroutine object
+ # as a task in the asyncio loop, this `side_effect` calls `close()` on the coroutine object
+ # to prevent "has not been awaited"-warnings.
+ loop.create_task.side_effect = lambda coroutine: coroutine.close()
+
+ return loop
class MockBot(CustomMockMixin, unittest.mock.MagicMock):
@@ -277,17 +286,16 @@ class MockBot(CustomMockMixin, unittest.mock.MagicMock):
Instances of this class will follow the specifications of `discord.ext.commands.Bot` instances.
For more information, see the `MockGuild` docstring.
"""
- spec_set = bot_instance
- additional_spec_asyncs = ("wait_for",)
+ spec_set = Bot(command_prefix=unittest.mock.MagicMock(), loop=_get_mock_loop())
+ additional_spec_asyncs = ("wait_for", "redis_ready")
def __init__(self, **kwargs) -> None:
super().__init__(**kwargs)
- self.api_client = MockAPIClient()
- # Since calling `create_task` on our MockBot does not actually schedule the coroutine object
- # as a task in the asyncio loop, this `side_effect` calls `close()` on the coroutine object
- # to prevent "has not been awaited"-warnings.
- self.loop.create_task.side_effect = lambda coroutine: coroutine.close()
+ self.loop = _get_mock_loop()
+ self.api_client = MockAPIClient(loop=self.loop)
+ self.http_session = unittest.mock.create_autospec(spec=ClientSession, spec_set=True)
+ self.stats = unittest.mock.create_autospec(spec=AsyncStatsClient, spec_set=True)
# Create a TextChannel instance to get a realistic MagicMock of `discord.TextChannel`