author     Karlis S <[email protected]>    2021-01-03 07:57:49 +0000
committer  Karlis S <[email protected]>    2021-01-03 07:57:49 +0000
commit     3f52698dbf9bd8277b8ec5c0c25c1d56bd159a8e (patch)
tree       34cb166f6b98562443a79d42f987989293ec5da2
parent     Re-lock Pipfile (diff)
parent     Merge pull request #1334 from python-discord/bug/precommit-pycharm (diff)
Merge branch 'master' into branding-manager
-rw-r--r--  .github/workflows/build.yml                            2
-rw-r--r--  .github/workflows/sentry_release.yml                  24
-rw-r--r--  .pre-commit-config.yaml                                2
-rw-r--r--  Dockerfile                                            11
-rw-r--r--  Pipfile                                                6
-rw-r--r--  Pipfile.lock                                         299
-rw-r--r--  bot/api.py                                            73
-rw-r--r--  bot/bot.py                                           102
-rw-r--r--  bot/constants.py                                       5
-rw-r--r--  bot/exts/info/information.py                          20
-rw-r--r--  bot/exts/info/pep.py                                 164
-rw-r--r--  bot/exts/info/reddit.py                                2
-rw-r--r--  bot/exts/moderation/infraction/_scheduler.py          34
-rw-r--r--  bot/exts/moderation/silence.py                        14
-rw-r--r--  bot/exts/moderation/verification.py                   62
-rw-r--r--  bot/exts/moderation/voice_gate.py                     11
-rw-r--r--  bot/exts/moderation/watchchannels/_watchchannel.py    17
-rw-r--r--  bot/exts/utils/bot.py                                  4
-rw-r--r--  bot/exts/utils/clean.py                                2
-rw-r--r--  bot/exts/utils/utils.py                              142
-rw-r--r--  bot/log.py                                             5
-rw-r--r--  config-default.yml                                     4
-rw-r--r--  tests/bot/exts/info/test_information.py                2
-rw-r--r--  tests/bot/exts/moderation/test_silence.py             15
-rw-r--r--  tests/bot/test_api.py                                  8
-rw-r--r--  tests/helpers.py                                       2
26 files changed, 549 insertions, 483 deletions
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 6152f1543..6c97e8784 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -55,3 +55,5 @@ jobs:
tags: |
ghcr.io/python-discord/bot:latest
ghcr.io/python-discord/bot:${{ steps.sha_tag.outputs.tag }}
+ build-args: |
+ git_sha=${{ github.sha }}
diff --git a/.github/workflows/sentry_release.yml b/.github/workflows/sentry_release.yml
new file mode 100644
index 000000000..b8d92e90a
--- /dev/null
+++ b/.github/workflows/sentry_release.yml
@@ -0,0 +1,24 @@
+name: Create Sentry release
+
+on:
+ push:
+ branches:
+ - master
+
+jobs:
+ create_sentry_release:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@master
+
+ - name: Create a Sentry.io release
+ uses: tclindner/[email protected]
+ env:
+ SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
+ SENTRY_ORG: python-discord
+ SENTRY_PROJECT: bot
+ with:
+ tagName: ${{ github.sha }}
+ environment: production
+ releaseNamePrefix: bot@
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 876d32b15..1597592ca 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -21,6 +21,6 @@ repos:
name: Flake8
description: This hook runs flake8 within our project's pipenv environment.
entry: pipenv run flake8
- language: python
+ language: system
types: [python]
require_serial: true
diff --git a/Dockerfile b/Dockerfile
index 06a538b2a..5d0380b44 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,10 +1,19 @@
FROM python:3.8-slim
+# Define Git SHA build argument
+ARG git_sha="development"
+
# Set pip to have cleaner logs and no saved cache
ENV PIP_NO_CACHE_DIR=false \
PIPENV_HIDE_EMOJIS=1 \
PIPENV_IGNORE_VIRTUALENVS=1 \
- PIPENV_NOSPIN=1
+ PIPENV_NOSPIN=1 \
+ GIT_SHA=$git_sha
+
+RUN apt-get -y update \
+ && apt-get install -y \
+ git \
+ && rm -rf /var/lib/apt/lists/*
# Install pipenv
RUN pip install -U pipenv
diff --git a/Pipfile b/Pipfile
index b691a7862..94df8e65d 100644
--- a/Pipfile
+++ b/Pipfile
@@ -14,16 +14,16 @@ beautifulsoup4 = "~=4.9"
colorama = {version = "~=0.4.3",sys_platform = "== 'win32'"}
coloredlogs = "~=14.0"
deepdiff = "~=4.0"
-"discord.py" = "~=1.5.0"
+"discord.py" = {git = "https://github.com/Rapptz/discord.py.git", ref = "93f102ca907af6722ee03638766afd53dfe93a7f"}
feedparser = "~=5.2"
fuzzywuzzy = "~=0.17"
lxml = "~=4.4"
-markdownify = "~=0.4"
+markdownify = "==0.5.3"
more_itertools = "~=8.2"
python-dateutil = "~=2.8"
pyyaml = "~=5.1"
requests = "~=2.22"
-sentry-sdk = "~=0.14"
+sentry-sdk = "~=0.19"
sphinx = "~=2.2"
statsd = "~=3.3"
arrow = "~=0.17"
diff --git a/Pipfile.lock b/Pipfile.lock
index 2fbaf8803..a4b7ed5b5 100644
--- a/Pipfile.lock
+++ b/Pipfile.lock
@@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
- "sha256": "66cc236b0ecc19515967049cf42fd8cde76db4f5803f3ab492a0558a9de447f4"
+ "sha256": "f8392181c82292d0a30a7b128bac4a5ed0ea517610e940743e612118b5da94b5"
},
"pipfile-spec": 6,
"requires": {
@@ -34,22 +34,46 @@
},
"aiohttp": {
"hashes": [
- "sha256:1a4160579ffbc1b69e88cb6ca8bb0fbd4947dfcbf9fb1e2a4fc4c7a4a986c1fe",
- "sha256:206c0ccfcea46e1bddc91162449c20c72f308aebdcef4977420ef329c8fcc599",
- "sha256:2ad493de47a8f926386fa6d256832de3095ba285f325db917c7deae0b54a9fc8",
- "sha256:319b490a5e2beaf06891f6711856ea10591cfe84fe9f3e71a721aa8f20a0872a",
- "sha256:470e4c90da36b601676fe50c49a60d34eb8c6593780930b1aa4eea6f508dfa37",
- "sha256:60f4caa3b7f7a477f66ccdd158e06901e1d235d572283906276e3803f6b098f5",
- "sha256:66d64486172b032db19ea8522328b19cfb78a3e1e5b62ab6a0567f93f073dea0",
- "sha256:687461cd974722110d1763b45c5db4d2cdee8d50f57b00c43c7590d1dd77fc5c",
- "sha256:698cd7bc3c7d1b82bb728bae835724a486a8c376647aec336aa21a60113c3645",
- "sha256:797456399ffeef73172945708810f3277f794965eb6ec9bd3a0c007c0476be98",
- "sha256:a885432d3cabc1287bcf88ea94e1826d3aec57fd5da4a586afae4591b061d40d",
- "sha256:c506853ba52e516b264b106321c424d03f3ddef2813246432fa9d1cefd361c81",
- "sha256:fb83326d8295e8840e4ba774edf346e87eca78ba8a89c55d2690352842c15ba5"
+ "sha256:0b795072bb1bf87b8620120a6373a3c61bfcb8da7e5c2377f4bb23ff4f0b62c9",
+ "sha256:0d438c8ca703b1b714e82ed5b7a4412c82577040dadff479c08405e2a715564f",
+ "sha256:16a3cb5df5c56f696234ea9e65e227d1ebe9c18aa774d36ff42f532139066a5f",
+ "sha256:1edfd82a98c5161497bbb111b2b70c0813102ad7e0aa81cbeb34e64c93863005",
+ "sha256:2406dc1dda01c7f6060ab586e4601f18affb7a6b965c50a8c90ff07569cf782a",
+ "sha256:2858b2504c8697beb9357be01dc47ef86438cc1cb36ecb6991796d19475faa3e",
+ "sha256:2a7b7640167ab536c3cb90cfc3977c7094f1c5890d7eeede8b273c175c3910fd",
+ "sha256:3228b7a51e3ed533f5472f54f70fd0b0a64c48dc1649a0f0e809bec312934d7a",
+ "sha256:328b552513d4f95b0a2eea4c8573e112866107227661834652a8984766aa7656",
+ "sha256:39f4b0a6ae22a1c567cb0630c30dd082481f95c13ca528dc501a7766b9c718c0",
+ "sha256:3b0036c978cbcc4a4512278e98e3e6d9e6b834dc973206162eddf98b586ef1c6",
+ "sha256:3ea8c252d8df5e9166bcf3d9edced2af132f4ead8ac422eac723c5781063709a",
+ "sha256:41608c0acbe0899c852281978492f9ce2c6fbfaf60aff0cefc54a7c4516b822c",
+ "sha256:59d11674964b74a81b149d4ceaff2b674b3b0e4d0f10f0be1533e49c4a28408b",
+ "sha256:5e479df4b2d0f8f02133b7e4430098699450e1b2a826438af6bec9a400530957",
+ "sha256:684850fb1e3e55c9220aad007f8386d8e3e477c4ec9211ae54d968ecdca8c6f9",
+ "sha256:6ccc43d68b81c424e46192a778f97da94ee0630337c9bbe5b2ecc9b0c1c59001",
+ "sha256:6d42debaf55450643146fabe4b6817bb2a55b23698b0434107e892a43117285e",
+ "sha256:710376bf67d8ff4500a31d0c207b8941ff4fba5de6890a701d71680474fe2a60",
+ "sha256:756ae7efddd68d4ea7d89c636b703e14a0c686688d42f588b90778a3c2fc0564",
+ "sha256:77149002d9386fae303a4a162e6bce75cc2161347ad2ba06c2f0182561875d45",
+ "sha256:78e2f18a82b88cbc37d22365cf8d2b879a492faedb3f2975adb4ed8dfe994d3a",
+ "sha256:7d9b42127a6c0bdcc25c3dcf252bb3ddc70454fac593b1b6933ae091396deb13",
+ "sha256:8389d6044ee4e2037dca83e3f6994738550f6ee8cfb746762283fad9b932868f",
+ "sha256:9c1a81af067e72261c9cbe33ea792893e83bc6aa987bfbd6fdc1e5e7b22777c4",
+ "sha256:c1e0920909d916d3375c7a1fdb0b1c78e46170e8bb42792312b6eb6676b2f87f",
+ "sha256:c68fdf21c6f3573ae19c7ee65f9ff185649a060c9a06535e9c3a0ee0bbac9235",
+ "sha256:c733ef3bdcfe52a1a75564389bad4064352274036e7e234730526d155f04d914",
+ "sha256:c9c58b0b84055d8bc27b7df5a9d141df4ee6ff59821f922dd73155861282f6a3",
+ "sha256:d03abec50df423b026a5aa09656bd9d37f1e6a49271f123f31f9b8aed5dc3ea3",
+ "sha256:d2cfac21e31e841d60dc28c0ec7d4ec47a35c608cb8906435d47ef83ffb22150",
+ "sha256:dcc119db14757b0c7bce64042158307b9b1c76471e655751a61b57f5a0e4d78e",
+ "sha256:df3a7b258cc230a65245167a202dd07320a5af05f3d41da1488ba0fa05bc9347",
+ "sha256:df48a623c58180874d7407b4d9ec06a19b84ed47f60a3884345b1a5099c1818b",
+ "sha256:e1b95972a0ae3f248a899cdbac92ba2e01d731225f566569311043ce2226f5e7",
+ "sha256:f326b3c1bbfda5b9308252ee0dcb30b612ee92b0e105d4abec70335fab5b1245",
+ "sha256:f411cb22115cb15452d099fec0ee636b06cf81bfb40ed9c02d30c8dc2bc2e3d1"
],
"index": "pypi",
- "version": "==3.6.3"
+ "version": "==3.7.3"
},
"aioping": {
"hashes": [
@@ -214,13 +238,13 @@
"index": "pypi",
"version": "==4.3.2"
},
+ "discord-py": {
+ "git": "https://github.com/Rapptz/discord.py.git",
+ "ref": "93f102ca907af6722ee03638766afd53dfe93a7f"
+ },
"discord.py": {
- "hashes": [
- "sha256:2367359e31f6527f8a936751fc20b09d7495dd6a76b28c8fb13d4ca6c55b7563",
- "sha256:def00dc50cf36d21346d71bc89f0cad8f18f9a3522978dc18c7796287d47de8b"
- ],
- "index": "pypi",
- "version": "==1.5.1"
+ "git": "https://github.com/Rapptz/discord.py.git",
+ "ref": "93f102ca907af6722ee03638766afd53dfe93a7f"
},
"docutils": {
"hashes": [
@@ -390,11 +414,11 @@
},
"markdownify": {
"hashes": [
- "sha256:901c6106533f4a0b79cfe7c700c4df6b15cf782aa6236fd13161bf2608e2c591",
- "sha256:f40874e3113a170697f0e74ea7aeee2d66eb9973201a5fbcc68ef8ce6bfbcf8a"
+ "sha256:30be8340724e706c9e811c27fe8c1542cf74a15b46827924fff5c54b40dd9b0d",
+ "sha256:a69588194fd76634f0139d6801b820fd652dc5eeba9530e90d323dfdc0155252"
],
"index": "pypi",
- "version": "==0.6.0"
+ "version": "==0.5.3"
},
"markupsafe": {
"hashes": [
@@ -445,26 +469,46 @@
},
"multidict": {
"hashes": [
- "sha256:1ece5a3369835c20ed57adadc663400b5525904e53bae59ec854a5d36b39b21a",
- "sha256:275ca32383bc5d1894b6975bb4ca6a7ff16ab76fa622967625baeebcf8079000",
- "sha256:3750f2205b800aac4bb03b5ae48025a64e474d2c6cc79547988ba1d4122a09e2",
- "sha256:4538273208e7294b2659b1602490f4ed3ab1c8cf9dbdd817e0e9db8e64be2507",
- "sha256:5141c13374e6b25fe6bf092052ab55c0c03d21bd66c94a0e3ae371d3e4d865a5",
- "sha256:51a4d210404ac61d32dada00a50ea7ba412e6ea945bbe992e4d7a595276d2ec7",
- "sha256:5cf311a0f5ef80fe73e4f4c0f0998ec08f954a6ec72b746f3c179e37de1d210d",
- "sha256:6513728873f4326999429a8b00fc7ceddb2509b01d5fd3f3be7881a257b8d463",
- "sha256:7388d2ef3c55a8ba80da62ecfafa06a1c097c18032a501ffd4cabbc52d7f2b19",
- "sha256:9456e90649005ad40558f4cf51dbb842e32807df75146c6d940b6f5abb4a78f3",
- "sha256:c026fe9a05130e44157b98fea3ab12969e5b60691a276150db9eda71710cd10b",
- "sha256:d14842362ed4cf63751648e7672f7174c9818459d169231d03c56e84daf90b7c",
- "sha256:e0d072ae0f2a179c375f67e3da300b47e1a83293c554450b29c900e50afaae87",
- "sha256:f07acae137b71af3bb548bd8da720956a3bc9f9a0b87733e0899226a2317aeb7",
- "sha256:fbb77a75e529021e7c4a8d4e823d88ef4d23674a202be4f5addffc72cbb91430",
- "sha256:fcfbb44c59af3f8ea984de67ec7c306f618a3ec771c2843804069917a8f2e255",
- "sha256:feed85993dbdb1dbc29102f50bca65bdc68f2c0c8d352468c25b54874f23c39d"
+ "sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a",
+ "sha256:051012ccee979b2b06be928a6150d237aec75dd6bf2d1eeeb190baf2b05abc93",
+ "sha256:05c20b68e512166fddba59a918773ba002fdd77800cad9f55b59790030bab632",
+ "sha256:07b42215124aedecc6083f1ce6b7e5ec5b50047afa701f3442054373a6deb656",
+ "sha256:0e3c84e6c67eba89c2dbcee08504ba8644ab4284863452450520dad8f1e89b79",
+ "sha256:0e929169f9c090dae0646a011c8b058e5e5fb391466016b39d21745b48817fd7",
+ "sha256:1ab820665e67373de5802acae069a6a05567ae234ddb129f31d290fc3d1aa56d",
+ "sha256:25b4e5f22d3a37ddf3effc0710ba692cfc792c2b9edfb9c05aefe823256e84d5",
+ "sha256:2e68965192c4ea61fff1b81c14ff712fc7dc15d2bd120602e4a3494ea6584224",
+ "sha256:2f1a132f1c88724674271d636e6b7351477c27722f2ed789f719f9e3545a3d26",
+ "sha256:37e5438e1c78931df5d3c0c78ae049092877e5e9c02dd1ff5abb9cf27a5914ea",
+ "sha256:3a041b76d13706b7fff23b9fc83117c7b8fe8d5fe9e6be45eee72b9baa75f348",
+ "sha256:3a4f32116f8f72ecf2a29dabfb27b23ab7cdc0ba807e8459e59a93a9be9506f6",
+ "sha256:46c73e09ad374a6d876c599f2328161bcd95e280f84d2060cf57991dec5cfe76",
+ "sha256:46dd362c2f045095c920162e9307de5ffd0a1bfbba0a6e990b344366f55a30c1",
+ "sha256:4b186eb7d6ae7c06eb4392411189469e6a820da81447f46c0072a41c748ab73f",
+ "sha256:54fd1e83a184e19c598d5e70ba508196fd0bbdd676ce159feb412a4a6664f952",
+ "sha256:585fd452dd7782130d112f7ddf3473ffdd521414674c33876187e101b588738a",
+ "sha256:5cf3443199b83ed9e955f511b5b241fd3ae004e3cb81c58ec10f4fe47c7dce37",
+ "sha256:6a4d5ce640e37b0efcc8441caeea8f43a06addace2335bd11151bc02d2ee31f9",
+ "sha256:7df80d07818b385f3129180369079bd6934cf70469f99daaebfac89dca288359",
+ "sha256:806068d4f86cb06af37cd65821554f98240a19ce646d3cd24e1c33587f313eb8",
+ "sha256:830f57206cc96ed0ccf68304141fec9481a096c4d2e2831f311bde1c404401da",
+ "sha256:929006d3c2d923788ba153ad0de8ed2e5ed39fdbe8e7be21e2f22ed06c6783d3",
+ "sha256:9436dc58c123f07b230383083855593550c4d301d2532045a17ccf6eca505f6d",
+ "sha256:9dd6e9b1a913d096ac95d0399bd737e00f2af1e1594a787e00f7975778c8b2bf",
+ "sha256:ace010325c787c378afd7f7c1ac66b26313b3344628652eacd149bdd23c68841",
+ "sha256:b47a43177a5e65b771b80db71e7be76c0ba23cc8aa73eeeb089ed5219cdbe27d",
+ "sha256:b797515be8743b771aa868f83563f789bbd4b236659ba52243b735d80b29ed93",
+ "sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f",
+ "sha256:d5c65bdf4484872c4af3150aeebe101ba560dcfb34488d9a8ff8dbcd21079647",
+ "sha256:d81eddcb12d608cc08081fa88d046c78afb1bf8107e6feab5d43503fea74a635",
+ "sha256:dc862056f76443a0db4509116c5cd480fe1b6a2d45512a653f9a855cc0517456",
+ "sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda",
+ "sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5",
+ "sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281",
+ "sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80"
],
- "markers": "python_version >= '3.5'",
- "version": "==4.7.6"
+ "markers": "python_version >= '3.6'",
+ "version": "==5.1.0"
},
"ordered-set": {
"hashes": [
@@ -556,10 +600,10 @@
},
"pytz": {
"hashes": [
- "sha256:3e6b7dd2d1e0a59084bcee14a17af60c5c562cdc16d828e8eba2e683d3a7e268",
- "sha256:5c55e189b682d420be27c6995ba6edce0c0a77dd67bfbe2ae6607134d5851ffd"
+ "sha256:16962c5fb8db4a8f63a26646d8886e9d769b6c511543557bc84e9569fb9a9cb4",
+ "sha256:180befebb1927b16f6b57101720075a984c019ac16b1b7575673bea42c6c3da5"
],
- "version": "==2020.4"
+ "version": "==2020.5"
},
"pyyaml": {
"hashes": [
@@ -698,6 +742,14 @@
"index": "pypi",
"version": "==3.3.0"
},
+ "typing-extensions": {
+ "hashes": [
+ "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918",
+ "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c",
+ "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"
+ ],
+ "version": "==3.7.4.3"
+ },
"urllib3": {
"hashes": [
"sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08",
@@ -708,26 +760,46 @@
},
"yarl": {
"hashes": [
- "sha256:040b237f58ff7d800e6e0fd89c8439b841f777dd99b4a9cca04d6935564b9409",
- "sha256:17668ec6722b1b7a3a05cc0167659f6c95b436d25a36c2d52db0eca7d3f72593",
- "sha256:3a584b28086bc93c888a6c2aa5c92ed1ae20932f078c46509a66dce9ea5533f2",
- "sha256:4439be27e4eee76c7632c2427ca5e73703151b22cae23e64adb243a9c2f565d8",
- "sha256:48e918b05850fffb070a496d2b5f97fc31d15d94ca33d3d08a4f86e26d4e7c5d",
- "sha256:9102b59e8337f9874638fcfc9ac3734a0cfadb100e47d55c20d0dc6087fb4692",
- "sha256:9b930776c0ae0c691776f4d2891ebc5362af86f152dd0da463a6614074cb1b02",
- "sha256:b3b9ad80f8b68519cc3372a6ca85ae02cc5a8807723ac366b53c0f089db19e4a",
- "sha256:bc2f976c0e918659f723401c4f834deb8a8e7798a71be4382e024bcc3f7e23a8",
- "sha256:c22c75b5f394f3d47105045ea551e08a3e804dc7e01b37800ca35b58f856c3d6",
- "sha256:c52ce2883dc193824989a9b97a76ca86ecd1fa7955b14f87bf367a61b6232511",
- "sha256:ce584af5de8830d8701b8979b18fcf450cef9a382b1a3c8ef189bedc408faf1e",
- "sha256:da456eeec17fa8aa4594d9a9f27c0b1060b6a75f2419fe0c00609587b2695f4a",
- "sha256:db6db0f45d2c63ddb1a9d18d1b9b22f308e52c83638c26b422d520a815c4b3fb",
- "sha256:df89642981b94e7db5596818499c4b2219028f2a528c9c37cc1de45bf2fd3a3f",
- "sha256:f18d68f2be6bf0e89f1521af2b1bb46e66ab0018faafa81d70f358153170a317",
- "sha256:f379b7f83f23fe12823085cd6b906edc49df969eb99757f58ff382349a3303c6"
+ "sha256:00d7ad91b6583602eb9c1d085a2cf281ada267e9a197e8b7cae487dadbfa293e",
+ "sha256:0355a701b3998dcd832d0dc47cc5dedf3874f966ac7f870e0f3a6788d802d434",
+ "sha256:15263c3b0b47968c1d90daa89f21fcc889bb4b1aac5555580d74565de6836366",
+ "sha256:2ce4c621d21326a4a5500c25031e102af589edb50c09b321049e388b3934eec3",
+ "sha256:31ede6e8c4329fb81c86706ba8f6bf661a924b53ba191b27aa5fcee5714d18ec",
+ "sha256:324ba3d3c6fee56e2e0b0d09bf5c73824b9f08234339d2b788af65e60040c959",
+ "sha256:329412812ecfc94a57cd37c9d547579510a9e83c516bc069470db5f75684629e",
+ "sha256:4736eaee5626db8d9cda9eb5282028cc834e2aeb194e0d8b50217d707e98bb5c",
+ "sha256:4953fb0b4fdb7e08b2f3b3be80a00d28c5c8a2056bb066169de00e6501b986b6",
+ "sha256:4c5bcfc3ed226bf6419f7a33982fb4b8ec2e45785a0561eb99274ebbf09fdd6a",
+ "sha256:547f7665ad50fa8563150ed079f8e805e63dd85def6674c97efd78eed6c224a6",
+ "sha256:5b883e458058f8d6099e4420f0cc2567989032b5f34b271c0827de9f1079a424",
+ "sha256:63f90b20ca654b3ecc7a8d62c03ffa46999595f0167d6450fa8383bab252987e",
+ "sha256:68dc568889b1c13f1e4745c96b931cc94fdd0defe92a72c2b8ce01091b22e35f",
+ "sha256:69ee97c71fee1f63d04c945f56d5d726483c4762845400a6795a3b75d56b6c50",
+ "sha256:6d6283d8e0631b617edf0fd726353cb76630b83a089a40933043894e7f6721e2",
+ "sha256:72a660bdd24497e3e84f5519e57a9ee9220b6f3ac4d45056961bf22838ce20cc",
+ "sha256:73494d5b71099ae8cb8754f1df131c11d433b387efab7b51849e7e1e851f07a4",
+ "sha256:7356644cbed76119d0b6bd32ffba704d30d747e0c217109d7979a7bc36c4d970",
+ "sha256:8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10",
+ "sha256:8aa3decd5e0e852dc68335abf5478a518b41bf2ab2f330fe44916399efedfae0",
+ "sha256:97b5bdc450d63c3ba30a127d018b866ea94e65655efaf889ebeabc20f7d12406",
+ "sha256:9ede61b0854e267fd565e7527e2f2eb3ef8858b301319be0604177690e1a3896",
+ "sha256:b2e9a456c121e26d13c29251f8267541bd75e6a1ccf9e859179701c36a078643",
+ "sha256:b5dfc9a40c198334f4f3f55880ecf910adebdcb2a0b9a9c23c9345faa9185721",
+ "sha256:bafb450deef6861815ed579c7a6113a879a6ef58aed4c3a4be54400ae8871478",
+ "sha256:c49ff66d479d38ab863c50f7bb27dee97c6627c5fe60697de15529da9c3de724",
+ "sha256:ce3beb46a72d9f2190f9e1027886bfc513702d748047b548b05dab7dfb584d2e",
+ "sha256:d26608cf178efb8faa5ff0f2d2e77c208f471c5a3709e577a7b3fd0445703ac8",
+ "sha256:d597767fcd2c3dc49d6eea360c458b65643d1e4dbed91361cf5e36e53c1f8c96",
+ "sha256:d5c32c82990e4ac4d8150fd7652b972216b204de4e83a122546dce571c1bdf25",
+ "sha256:d8d07d102f17b68966e2de0e07bfd6e139c7c02ef06d3a0f8d2f0f055e13bb76",
+ "sha256:e46fba844f4895b36f4c398c5af062a9808d1f26b2999c58909517384d5deda2",
+ "sha256:e6b5460dc5ad42ad2b36cca524491dfcaffbfd9c8df50508bddc354e787b8dc2",
+ "sha256:f040bcc6725c821a4c0665f3aa96a4d0805a7aaf2caf266d256b8ed71b9f041c",
+ "sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a",
+ "sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71"
],
- "markers": "python_version >= '3.5'",
- "version": "==1.5.1"
+ "markers": "python_version >= '3.6'",
+ "version": "==1.6.3"
}
},
"develop": {
@@ -770,43 +842,58 @@
},
"coverage": {
"hashes": [
- "sha256:0203acd33d2298e19b57451ebb0bed0ab0c602e5cf5a818591b4918b1f97d516",
- "sha256:0f313707cdecd5cd3e217fc68c78a960b616604b559e9ea60cc16795c4304259",
- "sha256:1c6703094c81fa55b816f5ae542c6ffc625fec769f22b053adb42ad712d086c9",
- "sha256:1d44bb3a652fed01f1f2c10d5477956116e9b391320c94d36c6bf13b088a1097",
- "sha256:280baa8ec489c4f542f8940f9c4c2181f0306a8ee1a54eceba071a449fb870a0",
- "sha256:29a6272fec10623fcbe158fdf9abc7a5fa032048ac1d8631f14b50fbfc10d17f",
- "sha256:2b31f46bf7b31e6aa690d4c7a3d51bb262438c6dcb0d528adde446531d0d3bb7",
- "sha256:2d43af2be93ffbad25dd959899b5b809618a496926146ce98ee0b23683f8c51c",
- "sha256:381ead10b9b9af5f64646cd27107fb27b614ee7040bb1226f9c07ba96625cbb5",
- "sha256:47a11bdbd8ada9b7ee628596f9d97fbd3851bd9999d398e9436bd67376dbece7",
- "sha256:4d6a42744139a7fa5b46a264874a781e8694bb32f1d76d8137b68138686f1729",
- "sha256:50691e744714856f03a86df3e2bff847c2acede4c191f9a1da38f088df342978",
- "sha256:530cc8aaf11cc2ac7430f3614b04645662ef20c348dce4167c22d99bec3480e9",
- "sha256:582ddfbe712025448206a5bc45855d16c2e491c2dd102ee9a2841418ac1c629f",
- "sha256:63808c30b41f3bbf65e29f7280bf793c79f54fb807057de7e5238ffc7cc4d7b9",
- "sha256:71b69bd716698fa62cd97137d6f2fdf49f534decb23a2c6fc80813e8b7be6822",
- "sha256:7858847f2d84bf6e64c7f66498e851c54de8ea06a6f96a32a1d192d846734418",
- "sha256:78e93cc3571fd928a39c0b26767c986188a4118edc67bc0695bc7a284da22e82",
- "sha256:7f43286f13d91a34fadf61ae252a51a130223c52bfefb50310d5b2deb062cf0f",
- "sha256:86e9f8cd4b0cdd57b4ae71a9c186717daa4c5a99f3238a8723f416256e0b064d",
- "sha256:8f264ba2701b8c9f815b272ad568d555ef98dfe1576802ab3149c3629a9f2221",
- "sha256:9342dd70a1e151684727c9c91ea003b2fb33523bf19385d4554f7897ca0141d4",
- "sha256:9361de40701666b034c59ad9e317bae95c973b9ff92513dd0eced11c6adf2e21",
- "sha256:9669179786254a2e7e57f0ecf224e978471491d660aaca833f845b72a2df3709",
- "sha256:aac1ba0a253e17889550ddb1b60a2063f7474155465577caa2a3b131224cfd54",
- "sha256:aef72eae10b5e3116bac6957de1df4d75909fc76d1499a53fb6387434b6bcd8d",
- "sha256:bd3166bb3b111e76a4f8e2980fa1addf2920a4ca9b2b8ca36a3bc3dedc618270",
- "sha256:c1b78fb9700fc961f53386ad2fd86d87091e06ede5d118b8a50dea285a071c24",
- "sha256:c3888a051226e676e383de03bf49eb633cd39fc829516e5334e69b8d81aae751",
- "sha256:c5f17ad25d2c1286436761b462e22b5020d83316f8e8fcb5deb2b3151f8f1d3a",
- "sha256:c851b35fc078389bc16b915a0a7c1d5923e12e2c5aeec58c52f4aa8085ac8237",
- "sha256:cb7df71de0af56000115eafd000b867d1261f786b5eebd88a0ca6360cccfaca7",
- "sha256:cedb2f9e1f990918ea061f28a0f0077a07702e3819602d3507e2ff98c8d20636",
- "sha256:e8caf961e1b1a945db76f1b5fa9c91498d15f545ac0ababbe575cfab185d3bd8"
+ "sha256:08b3ba72bd981531fd557f67beee376d6700fba183b167857038997ba30dd297",
+ "sha256:2757fa64e11ec12220968f65d086b7a29b6583d16e9a544c889b22ba98555ef1",
+ "sha256:3102bb2c206700a7d28181dbe04d66b30780cde1d1c02c5f3c165cf3d2489497",
+ "sha256:3498b27d8236057def41de3585f317abae235dd3a11d33e01736ffedb2ef8606",
+ "sha256:378ac77af41350a8c6b8801a66021b52da8a05fd77e578b7380e876c0ce4f528",
+ "sha256:38f16b1317b8dd82df67ed5daa5f5e7c959e46579840d77a67a4ceb9cef0a50b",
+ "sha256:3911c2ef96e5ddc748a3c8b4702c61986628bb719b8378bf1e4a6184bbd48fe4",
+ "sha256:3a3c3f8863255f3c31db3889f8055989527173ef6192a283eb6f4db3c579d830",
+ "sha256:3b14b1da110ea50c8bcbadc3b82c3933974dbeea1832e814aab93ca1163cd4c1",
+ "sha256:535dc1e6e68fad5355f9984d5637c33badbdc987b0c0d303ee95a6c979c9516f",
+ "sha256:6f61319e33222591f885c598e3e24f6a4be3533c1d70c19e0dc59e83a71ce27d",
+ "sha256:723d22d324e7997a651478e9c5a3120a0ecbc9a7e94071f7e1954562a8806cf3",
+ "sha256:76b2775dda7e78680d688daabcb485dc87cf5e3184a0b3e012e1d40e38527cc8",
+ "sha256:782a5c7df9f91979a7a21792e09b34a658058896628217ae6362088b123c8500",
+ "sha256:7e4d159021c2029b958b2363abec4a11db0ce8cd43abb0d9ce44284cb97217e7",
+ "sha256:8dacc4073c359f40fcf73aede8428c35f84639baad7e1b46fce5ab7a8a7be4bb",
+ "sha256:8f33d1156241c43755137288dea619105477961cfa7e47f48dbf96bc2c30720b",
+ "sha256:8ffd4b204d7de77b5dd558cdff986a8274796a1e57813ed005b33fd97e29f059",
+ "sha256:93a280c9eb736a0dcca19296f3c30c720cb41a71b1f9e617f341f0a8e791a69b",
+ "sha256:9a4f66259bdd6964d8cf26142733c81fb562252db74ea367d9beb4f815478e72",
+ "sha256:9a9d4ff06804920388aab69c5ea8a77525cf165356db70131616acd269e19b36",
+ "sha256:a2070c5affdb3a5e751f24208c5c4f3d5f008fa04d28731416e023c93b275277",
+ "sha256:a4857f7e2bc6921dbd487c5c88b84f5633de3e7d416c4dc0bb70256775551a6c",
+ "sha256:a607ae05b6c96057ba86c811d9c43423f35e03874ffb03fbdcd45e0637e8b631",
+ "sha256:a66ca3bdf21c653e47f726ca57f46ba7fc1f260ad99ba783acc3e58e3ebdb9ff",
+ "sha256:ab110c48bc3d97b4d19af41865e14531f300b482da21783fdaacd159251890e8",
+ "sha256:b239711e774c8eb910e9b1ac719f02f5ae4bf35fa0420f438cdc3a7e4e7dd6ec",
+ "sha256:be0416074d7f253865bb67630cf7210cbc14eb05f4099cc0f82430135aaa7a3b",
+ "sha256:c46643970dff9f5c976c6512fd35768c4a3819f01f61169d8cdac3f9290903b7",
+ "sha256:c5ec71fd4a43b6d84ddb88c1df94572479d9a26ef3f150cef3dacefecf888105",
+ "sha256:c6e5174f8ca585755988bc278c8bb5d02d9dc2e971591ef4a1baabdf2d99589b",
+ "sha256:c89b558f8a9a5a6f2cfc923c304d49f0ce629c3bd85cb442ca258ec20366394c",
+ "sha256:cc44e3545d908ecf3e5773266c487ad1877be718d9dc65fc7eb6e7d14960985b",
+ "sha256:cc6f8246e74dd210d7e2b56c76ceaba1cc52b025cd75dbe96eb48791e0250e98",
+ "sha256:cd556c79ad665faeae28020a0ab3bda6cd47d94bec48e36970719b0b86e4dcf4",
+ "sha256:ce6f3a147b4b1a8b09aae48517ae91139b1b010c5f36423fa2b866a8b23df879",
+ "sha256:ceb499d2b3d1d7b7ba23abe8bf26df5f06ba8c71127f188333dddcf356b4b63f",
+ "sha256:cef06fb382557f66d81d804230c11ab292d94b840b3cb7bf4450778377b592f4",
+ "sha256:e448f56cfeae7b1b3b5bcd99bb377cde7c4eb1970a525c770720a352bc4c8044",
+ "sha256:e52d3d95df81c8f6b2a1685aabffadf2d2d9ad97203a40f8d61e51b70f191e4e",
+ "sha256:ee2f1d1c223c3d2c24e3afbb2dd38be3f03b1a8d6a83ee3d9eb8c36a52bee899",
+ "sha256:f2c6888eada180814b8583c3e793f3f343a692fc802546eed45f40a001b1169f",
+ "sha256:f51dbba78d68a44e99d484ca8c8f604f17e957c1ca09c3ebc2c7e3bbd9ba0448",
+ "sha256:f54de00baf200b4539a5a092a759f000b5f45fd226d6d25a76b0dff71177a714",
+ "sha256:fa10fee7e32213f5c7b0d6428ea92e3a3fdd6d725590238a3f92c0de1c78b9d2",
+ "sha256:fabeeb121735d47d8eab8671b6b031ce08514c86b7ad8f7d5490a7b6dcd6267d",
+ "sha256:fac3c432851038b3e6afe086f777732bcf7f6ebbfd90951fa04ee53db6d0bcdd",
+ "sha256:fda29412a66099af6d6de0baa6bd7c52674de177ec2ad2630ca264142d69c6c7",
+ "sha256:ff1330e8bc996570221b450e2d539134baa9465f5cb98aff0e0f73f34172e0ae"
],
"index": "pypi",
- "version": "==5.3"
+ "version": "==5.3.1"
},
"coveralls": {
"hashes": [
@@ -893,11 +980,11 @@
},
"flake8-tidy-imports": {
"hashes": [
- "sha256:2821c79e83c656652d5ac6d3650ca370ed3c9752edb5383b1d50dee5bd8a383f",
- "sha256:6cdd51e0d2f221e43ff4d5ac6331b1d95bbf4a5408906e36da913acaaed890e0"
+ "sha256:52e5f2f987d3d5597538d5941153409ebcab571635835b78f522c7bf03ca23bc",
+ "sha256:76e36fbbfdc8e3c5017f9a216c2855a298be85bc0631e66777f4e6a07a859dc4"
],
"index": "pypi",
- "version": "==4.2.0"
+ "version": "==4.2.1"
},
"flake8-todo": {
"hashes": [
@@ -908,11 +995,11 @@
},
"identify": {
"hashes": [
- "sha256:943cd299ac7f5715fcb3f684e2fc1594c1e0f22a90d15398e5888143bd4144b5",
- "sha256:cc86e6a9a390879dcc2976cef169dd9cc48843ed70b7380f321d1b118163c60e"
+ "sha256:7aef7a5104d6254c162990e54a203cdc0fd202046b6c415bd5d636472f6565c4",
+ "sha256:b2c71bf9f5c482c389cef816f3a15f1c9d7429ad70f497d4a2e522442d80c6de"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
- "version": "==1.5.10"
+ "version": "==1.5.11"
},
"idna": {
"hashes": [
diff --git a/bot/api.py b/bot/api.py
index 4b8520582..d93f9f2ba 100644
--- a/bot/api.py
+++ b/bot/api.py
@@ -37,64 +37,27 @@ class APIClient:
session: Optional[aiohttp.ClientSession] = None
loop: asyncio.AbstractEventLoop = None
- def __init__(self, loop: asyncio.AbstractEventLoop, **kwargs):
+ def __init__(self, **session_kwargs):
auth_headers = {
'Authorization': f"Token {Keys.site_api}"
}
- if 'headers' in kwargs:
- kwargs['headers'].update(auth_headers)
+ if 'headers' in session_kwargs:
+ session_kwargs['headers'].update(auth_headers)
else:
- kwargs['headers'] = auth_headers
+ session_kwargs['headers'] = auth_headers
- self.session = None
- self.loop = loop
-
- self._ready = asyncio.Event(loop=loop)
- self._creation_task = None
- self._default_session_kwargs = kwargs
-
- self.recreate()
+ # aiohttp will complain if APIClient gets instantiated outside a coroutine. Thankfully, we
+ # don't and shouldn't need to do that, so we can avoid scheduling a task to create it.
+ self.session = aiohttp.ClientSession(**session_kwargs)
@staticmethod
def _url_for(endpoint: str) -> str:
return f"{URLs.site_schema}{URLs.site_api}/{quote_url(endpoint)}"
- async def _create_session(self, **session_kwargs) -> None:
- """
- Create the aiohttp session with `session_kwargs` and set the ready event.
-
- `session_kwargs` is merged with `_default_session_kwargs` and overwrites its values.
- If an open session already exists, it will first be closed.
- """
- await self.close()
- self.session = aiohttp.ClientSession(**{**self._default_session_kwargs, **session_kwargs})
- self._ready.set()
-
async def close(self) -> None:
- """Close the aiohttp session and unset the ready event."""
- if self.session:
- await self.session.close()
-
- self._ready.clear()
-
- def recreate(self, force: bool = False, **session_kwargs) -> None:
- """
- Schedule the aiohttp session to be created with `session_kwargs` if it's been closed.
-
- If `force` is True, the session will be recreated even if an open one exists. If a task to
- create the session is pending, it will be cancelled.
-
- `session_kwargs` is merged with the kwargs given when the `APIClient` was created and
- overwrites those default kwargs.
- """
- if force or self.session is None or self.session.closed:
- if force and self._creation_task:
- self._creation_task.cancel()
-
- # Don't schedule a task if one is already in progress.
- if force or self._creation_task is None or self._creation_task.done():
- self._creation_task = self.loop.create_task(self._create_session(**session_kwargs))
+ """Close the aiohttp session."""
+ await self.session.close()
async def maybe_raise_for_status(self, response: aiohttp.ClientResponse, should_raise: bool) -> None:
"""Raise ResponseCodeError for non-OK response if an exception should be raised."""
@@ -108,8 +71,6 @@ class APIClient:
async def request(self, method: str, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> dict:
"""Send an HTTP request to the site API and return the JSON response."""
- await self._ready.wait()
-
async with self.session.request(method.upper(), self._url_for(endpoint), **kwargs) as resp:
await self.maybe_raise_for_status(resp, raise_for_status)
return await resp.json()
@@ -132,25 +93,9 @@ class APIClient:
async def delete(self, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> Optional[dict]:
"""Site API DELETE."""
- await self._ready.wait()
-
async with self.session.delete(self._url_for(endpoint), **kwargs) as resp:
if resp.status == 204:
return None
await self.maybe_raise_for_status(resp, raise_for_status)
return await resp.json()
-
-
-def loop_is_running() -> bool:
- """
- Determine if there is a running asyncio event loop.
-
- This helps enable "call this when event loop is running" logic (see: Twisted's `callWhenRunning`),
- which is currently not provided by asyncio.
- """
- try:
- asyncio.get_running_loop()
- except RuntimeError:
- return False
- return True
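The removal above reduces the client to a single aiohttp session created in __init__ and closed explicitly; the only remaining requirement is that construction happens inside a running event loop. A minimal sketch of that lifecycle, using a stand-in class rather than the real bot.api.APIClient:

    # Sketch only: a stand-in with the same shape as the simplified APIClient.
    import asyncio

    import aiohttp


    class MiniAPIClient:
        def __init__(self, **session_kwargs):
            # Safe because this is only constructed inside a coroutine.
            self.session = aiohttp.ClientSession(**session_kwargs)

        async def close(self) -> None:
            await self.session.close()


    async def main() -> None:
        client = MiniAPIClient(headers={"Authorization": "Token <redacted>"})
        try:
            ...  # client.session.request(...) calls would go here
        finally:
            await client.close()


    asyncio.run(main())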
diff --git a/bot/bot.py b/bot/bot.py
index f71f5d1fb..d5f108575 100644
--- a/bot/bot.py
+++ b/bot/bot.py
@@ -3,7 +3,8 @@ import logging
import socket
import warnings
from collections import defaultdict
-from typing import Dict, Optional
+from contextlib import suppress
+from typing import Dict, List, Optional
import aiohttp
import discord
@@ -32,7 +33,7 @@ class Bot(commands.Bot):
self.http_session: Optional[aiohttp.ClientSession] = None
self.redis_session = redis_session
- self.api_client = api.APIClient(loop=self.loop)
+ self.api_client: Optional[api.APIClient] = None
self.filter_list_cache = defaultdict(dict)
self._connector = None
@@ -70,6 +71,9 @@ class Bot(commands.Bot):
attempt + 1
)
+ # All tasks that need to block closing until finished
+ self.closing_tasks: List[asyncio.Task] = []
+
async def cache_filter_list_data(self) -> None:
"""Cache all the data in the FilterList on the site."""
full_cache = await self.api_client.get('bot/filter-lists')
@@ -77,46 +81,6 @@ class Bot(commands.Bot):
for item in full_cache:
self.insert_item_into_filter_list_cache(item)
- def _recreate(self) -> None:
- """Re-create the connector, aiohttp session, the APIClient and the Redis session."""
- # Use asyncio for DNS resolution instead of threads so threads aren't spammed.
- # Doesn't seem to have any state with regards to being closed, so no need to worry?
- self._resolver = aiohttp.AsyncResolver()
-
- # Its __del__ does send a warning but it doesn't always show up for some reason.
- if self._connector and not self._connector._closed:
- log.warning(
- "The previous connector was not closed; it will remain open and be overwritten"
- )
-
- if self.redis_session.closed:
- # If the RedisSession was somehow closed, we try to reconnect it
- # here. Normally, this shouldn't happen.
- self.loop.create_task(self.redis_session.connect())
-
- # Use AF_INET as its socket family to prevent HTTPS related problems both locally
- # and in production.
- self._connector = aiohttp.TCPConnector(
- resolver=self._resolver,
- family=socket.AF_INET,
- )
-
- # Client.login() will call HTTPClient.static_login() which will create a session using
- # this connector attribute.
- self.http.connector = self._connector
-
- # Its __del__ does send a warning but it doesn't always show up for some reason.
- if self.http_session and not self.http_session.closed:
- log.warning(
- "The previous session was not closed; it will remain open and be overwritten"
- )
-
- self.http_session = aiohttp.ClientSession(connector=self._connector)
- self.api_client.recreate(force=True, connector=self._connector)
-
- # Build the FilterList cache
- self.loop.create_task(self.cache_filter_list_data())
-
@classmethod
def create(cls) -> "Bot":
"""Create and return an instance of a Bot."""
@@ -180,21 +144,29 @@ class Bot(commands.Bot):
return command
def clear(self) -> None:
- """
- Clears the internal state of the bot and recreates the connector and sessions.
-
- Will cause a DeprecationWarning if called outside a coroutine.
- """
- # Because discord.py recreates the HTTPClient session, may as well follow suit and recreate
- # our own stuff here too.
- self._recreate()
- super().clear()
+ """Not implemented! Re-instantiate the bot instead of attempting to re-use a closed one."""
+ raise NotImplementedError("Re-using a Bot object after closing it is not supported.")
async def close(self) -> None:
"""Close the Discord connection and the aiohttp session, connector, statsd client, and resolver."""
+ # Done before super().close() to allow tasks to finish before the HTTP session closes.
+ for ext in list(self.extensions):
+ with suppress(Exception):
+ self.unload_extension(ext)
+
+ for cog in list(self.cogs):
+ with suppress(Exception):
+ self.remove_cog(cog)
+
+ # Wait until all tasks that must complete before the bot closes are done
+ log.trace("Waiting for tasks before closing.")
+ await asyncio.gather(*self.closing_tasks)
+
+ # Now actually do the full close of the bot
await super().close()
- await self.api_client.close()
+ if self.api_client:
+ await self.api_client.close()
if self.http_session:
await self.http_session.close()
@@ -229,7 +201,31 @@ class Bot(commands.Bot):
async def login(self, *args, **kwargs) -> None:
"""Re-create the connector and set up sessions before logging into Discord."""
- self._recreate()
+ # Use asyncio for DNS resolution instead of threads so threads aren't spammed.
+ self._resolver = aiohttp.AsyncResolver()
+
+ # Use AF_INET as its socket family to prevent HTTPS related problems both locally
+ # and in production.
+ self._connector = aiohttp.TCPConnector(
+ resolver=self._resolver,
+ family=socket.AF_INET,
+ )
+
+ # Client.login() will call HTTPClient.static_login() which will create a session using
+ # this connector attribute.
+ self.http.connector = self._connector
+
+ self.http_session = aiohttp.ClientSession(connector=self._connector)
+ self.api_client = api.APIClient(connector=self._connector)
+
+ if self.redis_session.closed:
+ # If the RedisSession was somehow closed, we try to reconnect it
+ # here. Normally, this shouldn't happen.
+ await self.redis_session.connect()
+
+ # Build the FilterList cache
+ await self.cache_filter_list_data()
+
await self.stats.create_socket()
await super().login(*args, **kwargs)
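The new closing_tasks list gives cogs a place to register cleanup work (for example the Reddit token revocation further down) that must finish before the shared sessions are torn down. A stripped-down sketch of the pattern, independent of discord.py; the class below is a stand-in, not the real Bot:

    # Sketch of the shutdown ordering only; names mirror the diff but the
    # objects here are stand-ins.
    import asyncio
    from typing import List


    class FakeBot:
        def __init__(self) -> None:
            self.closing_tasks: List[asyncio.Task] = []

        async def close(self) -> None:
            # Wait for registered cleanup work before closing shared resources.
            await asyncio.gather(*self.closing_tasks)
            print("sessions closed")


    async def revoke_access_token() -> None:
        await asyncio.sleep(0)  # pretend network call
        print("token revoked")


    async def main() -> None:
        bot = FakeBot()
        # What a cog_unload hook does: schedule cleanup and register it.
        bot.closing_tasks.append(asyncio.create_task(revoke_access_token()))
        await bot.close()


    asyncio.run(main())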
diff --git a/bot/constants.py b/bot/constants.py
index 3475ee4e3..ded6e386d 100644
--- a/bot/constants.py
+++ b/bot/constants.py
@@ -612,7 +612,7 @@ class Verification(metaclass=YAMLGetter):
class VoiceGate(metaclass=YAMLGetter):
section = "voice_gate"
- minimum_days_verified: int
+ minimum_days_member: int
minimum_messages: int
bot_message_delete_delay: int
minimum_activity_blocks: int
@@ -697,6 +697,9 @@ MODERATION_CHANNELS = Guild.moderation_channels
# Category combinations
MODERATION_CATEGORIES = Guild.moderation_categories
+# Git SHA for Sentry
+GIT_SHA = os.environ.get("GIT_SHA", "development")
+
# Bot replies
NEGATIVE_REPLIES = [
"Noooooo!!",
diff --git a/bot/exts/info/information.py b/bot/exts/info/information.py
index 187950689..b2138b03f 100644
--- a/bot/exts/info/information.py
+++ b/bot/exts/info/information.py
@@ -6,7 +6,6 @@ from collections import Counter, defaultdict
from string import Template
from typing import Any, Mapping, Optional, Tuple, Union
-from dateutil import parser
from discord import ChannelType, Colour, Embed, Guild, Message, Role, Status, utils
from discord.abc import GuildChannel
from discord.ext.commands import BucketType, Cog, Context, Paginator, command, group, has_any_role
@@ -225,12 +224,12 @@ class Information(Cog):
if is_set and (emoji := getattr(constants.Emojis, f"badge_{badge}", None)):
badges.append(emoji)
- verified_at, activity = await self.user_verification_and_messages(user)
+ activity = await self.user_messages(user)
if on_server:
joined = time_since(user.joined_at, max_units=3)
roles = ", ".join(role.mention for role in user.roles[1:])
- membership = {"Joined": joined, "Verified": verified_at or "False", "Roles": roles or None}
+ membership = {"Joined": joined, "Verified": not user.pending, "Roles": roles or None}
if not is_mod_channel(ctx.channel):
membership.pop("Verified")
@@ -360,30 +359,21 @@ class Information(Cog):
return "Nominations", "\n".join(output)
- async def user_verification_and_messages(self, user: FetchedMember) -> Tuple[Union[bool, str], Tuple[str, str]]:
+ async def user_messages(self, user: FetchedMember) -> Tuple[str, str]:
"""
- Gets the time of verification and amount of messages for `member`.
+ Gets the number of messages for `member`.
Fetches information from the metricity database that's hosted by the site.
If the database returns a code besides a 404, then many parts of the bot are broken including this one.
"""
activity_output = []
- verified_at = False
try:
user_activity = await self.bot.api_client.get(f"bot/users/{user.id}/metricity_data")
except ResponseCodeError as e:
if e.status == 404:
activity_output = "No activity"
-
else:
- try:
- if (verified_at := user_activity["verified_at"]) is not None:
- verified_at = time_since(parser.isoparse(verified_at), max_units=3)
- except ValueError:
- log.warning(f"Could not parse ISO string correctly for user {user.id} verification date.")
- verified_at = None
-
activity_output.append(user_activity["total_messages"] or "No messages")
activity_output.append(user_activity["activity_blocks"] or "No activity")
@@ -391,7 +381,7 @@ class Information(Cog):
f"{name}: {metric}" for name, metric in zip(["Messages", "Activity blocks"], activity_output)
)
- return verified_at, ("Activity", activity_output)
+ return ("Activity", activity_output)
def format_fields(self, mapping: Mapping[str, Any], field_width: Optional[int] = None) -> str:
"""Format a mapping to be readable to a human."""
diff --git a/bot/exts/info/pep.py b/bot/exts/info/pep.py
new file mode 100644
index 000000000..8ac96bbdb
--- /dev/null
+++ b/bot/exts/info/pep.py
@@ -0,0 +1,164 @@
+import logging
+from datetime import datetime, timedelta
+from email.parser import HeaderParser
+from io import StringIO
+from typing import Dict, Optional, Tuple
+
+from discord import Colour, Embed
+from discord.ext.commands import Cog, Context, command
+
+from bot.bot import Bot
+from bot.constants import Keys
+from bot.utils.cache import AsyncCache
+
+log = logging.getLogger(__name__)
+
+ICON_URL = "https://www.python.org/static/opengraph-icon-200x200.png"
+BASE_PEP_URL = "http://www.python.org/dev/peps/pep-"
+PEPS_LISTING_API_URL = "https://api.github.com/repos/python/peps/contents?ref=master"
+
+pep_cache = AsyncCache()
+
+GITHUB_API_HEADERS = {}
+if Keys.github:
+ GITHUB_API_HEADERS["Authorization"] = f"token {Keys.github}"
+
+
+class PythonEnhancementProposals(Cog):
+ """Cog for displaying information about PEPs."""
+
+ def __init__(self, bot: Bot):
+ self.bot = bot
+ self.peps: Dict[int, str] = {}
+ # To avoid situations where we don't have a last refresh datetime, set this to now.
+ self.last_refreshed_peps: datetime = datetime.now()
+ self.bot.loop.create_task(self.refresh_peps_urls())
+
+ async def refresh_peps_urls(self) -> None:
+ """Refresh PEP URLs listing in every 3 hours."""
+ # Wait until HTTP client is available
+ await self.bot.wait_until_ready()
+ log.trace("Started refreshing PEP URLs.")
+ self.last_refreshed_peps = datetime.now()
+
+ async with self.bot.http_session.get(
+ PEPS_LISTING_API_URL,
+ headers=GITHUB_API_HEADERS
+ ) as resp:
+ if resp.status != 200:
+ log.warning(f"Fetching PEP URLs from GitHub API failed with code {resp.status}")
+ return
+
+ listing = await resp.json()
+
+ log.trace("Got PEP URLs listing from GitHub API")
+
+ for file in listing:
+ name = file["name"]
+ if name.startswith("pep-") and name.endswith((".rst", ".txt")):
+ pep_number = name.replace("pep-", "").split(".")[0]
+ self.peps[int(pep_number)] = file["download_url"]
+
+ log.info("Successfully refreshed PEP URLs listing.")
+
+ @staticmethod
+ def get_pep_zero_embed() -> Embed:
+ """Get information embed about PEP 0."""
+ pep_embed = Embed(
+ title="**PEP 0 - Index of Python Enhancement Proposals (PEPs)**",
+ url="https://www.python.org/dev/peps/"
+ )
+ pep_embed.set_thumbnail(url=ICON_URL)
+ pep_embed.add_field(name="Status", value="Active")
+ pep_embed.add_field(name="Created", value="13-Jul-2000")
+ pep_embed.add_field(name="Type", value="Informational")
+
+ return pep_embed
+
+ async def validate_pep_number(self, pep_nr: int) -> Optional[Embed]:
+ """Validate is PEP number valid. When it isn't, return error embed, otherwise None."""
+ if (
+ pep_nr not in self.peps
+ and (self.last_refreshed_peps + timedelta(minutes=30)) <= datetime.now()
+ and len(str(pep_nr)) < 5
+ ):
+ await self.refresh_peps_urls()
+
+ if pep_nr not in self.peps:
+ log.trace(f"PEP {pep_nr} was not found")
+ return Embed(
+ title="PEP not found",
+ description=f"PEP {pep_nr} does not exist.",
+ colour=Colour.red()
+ )
+
+ return None
+
+ def generate_pep_embed(self, pep_header: Dict, pep_nr: int) -> Embed:
+ """Generate PEP embed based on PEP headers data."""
+ # Assemble the embed
+ pep_embed = Embed(
+ title=f"**PEP {pep_nr} - {pep_header['Title']}**",
+ description=f"[Link]({BASE_PEP_URL}{pep_nr:04})",
+ )
+
+ pep_embed.set_thumbnail(url=ICON_URL)
+
+ # Add the interesting information
+ fields_to_check = ("Status", "Python-Version", "Created", "Type")
+ for field in fields_to_check:
+ # Check for a PEP metadata field that is present but has an empty value
+ # embed field values can't contain an empty string
+ if pep_header.get(field, ""):
+ pep_embed.add_field(name=field, value=pep_header[field])
+
+ return pep_embed
+
+ @pep_cache(arg_offset=1)
+ async def get_pep_embed(self, pep_nr: int) -> Tuple[Embed, bool]:
+ """Fetch, generate and return PEP embed. Second item of return tuple show does getting success."""
+ response = await self.bot.http_session.get(self.peps[pep_nr])
+
+ if response.status == 200:
+ log.trace(f"PEP {pep_nr} found")
+ pep_content = await response.text()
+
+ # Taken from https://github.com/python/peps/blob/master/pep0/pep.py#L179
+ pep_header = HeaderParser().parse(StringIO(pep_content))
+ return self.generate_pep_embed(pep_header, pep_nr), True
+ else:
+ log.trace(
+ f"The user requested PEP {pep_nr}, but the response had an unexpected status code: {response.status}."
+ )
+ return Embed(
+ title="Unexpected error",
+ description="Unexpected HTTP error during PEP search. Please let us know.",
+ colour=Colour.red()
+ ), False
+
+ @command(name='pep', aliases=('get_pep', 'p'))
+ async def pep_command(self, ctx: Context, pep_number: int) -> None:
+ """Fetches information about a PEP and sends it to the channel."""
+ # Trigger typing in chat to show users that the bot is responding
+ await ctx.trigger_typing()
+
+ # Handle PEP 0 directly because it's not in .rst or .txt so it can't be accessed like other PEPs.
+ if pep_number == 0:
+ pep_embed = self.get_pep_zero_embed()
+ success = True
+ else:
+ success = False
+ if not (pep_embed := await self.validate_pep_number(pep_number)):
+ pep_embed, success = await self.get_pep_embed(pep_number)
+
+ await ctx.send(embed=pep_embed)
+ if success:
+ log.trace(f"PEP {pep_number} getting and sending finished successfully. Increasing stat.")
+ self.bot.stats.incr(f"pep_fetches.{pep_number}")
+ else:
+ log.trace(f"Getting PEP {pep_number} failed. Error embed sent.")
+
+
+def setup(bot: Bot) -> None:
+ """Load the PEP cog."""
+ bot.add_cog(PythonEnhancementProposals(bot))
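The cog relies on bot.utils.cache.AsyncCache, whose implementation is not part of this diff; judging from @pep_cache(arg_offset=1), it memoises the coroutine's result keyed on the argument after self. A hypothetical minimal version, only to illustrate how the decorator is used (the real class may well differ):

    # Hypothetical stand-in for bot.utils.cache.AsyncCache, inferred from its
    # usage above; not the project's actual implementation.
    import functools
    from typing import Any, Callable, Dict


    class AsyncCache:
        def __init__(self) -> None:
            self._cache: Dict[Any, Any] = {}

        def __call__(self, arg_offset: int = 0) -> Callable:
            def decorator(func: Callable) -> Callable:
                @functools.wraps(func)
                async def wrapper(*args) -> Any:
                    # Key on the positional argument at arg_offset (1 skips self).
                    key = args[arg_offset]
                    if key not in self._cache:
                        self._cache[key] = await func(*args)
                    return self._cache[key]
                return wrapper
            return decorator

With something like this in place, repeated !pep lookups for the same number reuse the cached embed instead of re-downloading the PEP source.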
diff --git a/bot/exts/info/reddit.py b/bot/exts/info/reddit.py
index bad4c504d..6790be762 100644
--- a/bot/exts/info/reddit.py
+++ b/bot/exts/info/reddit.py
@@ -45,7 +45,7 @@ class Reddit(Cog):
"""Stop the loop task and revoke the access token when the cog is unloaded."""
self.auto_poster_loop.cancel()
if self.access_token and self.access_token.expires_at > datetime.utcnow():
- asyncio.create_task(self.revoke_access_token())
+ self.bot.closing_tasks.append(asyncio.create_task(self.revoke_access_token()))
async def init_reddit_ready(self) -> None:
"""Sets the reddit webhook when the cog is loaded."""
diff --git a/bot/exts/moderation/infraction/_scheduler.py b/bot/exts/moderation/infraction/_scheduler.py
index c062ae7f8..242b2d30f 100644
--- a/bot/exts/moderation/infraction/_scheduler.py
+++ b/bot/exts/moderation/infraction/_scheduler.py
@@ -74,8 +74,21 @@ class InfractionScheduler:
return
# Allowing mod log since this is a passive action that should be logged.
- await apply_coro
- log.info(f"Re-applied {infraction['type']} to user {infraction['user']} upon rejoining.")
+ try:
+ await apply_coro
+ except discord.HTTPException as e:
+ # If the user left again right after rejoining, before the action completed, the infraction can't be reapplied
+ if e.code == 10007 or e.status == 404:
+ log.info(
+ f"Can't reapply {infraction['type']} to user {infraction['user']} because user left the guild."
+ )
+ else:
+ log.exception(
+ f"Got unexpected HTTPException (HTTP {e.status}, Discord code {e.code})"
+ f"when awaiting {infraction['type']} coroutine for {infraction['user']}."
+ )
+ else:
+ log.info(f"Re-applied {infraction['type']} to user {infraction['user']} upon rejoining.")
async def apply_infraction(
self,
@@ -178,6 +191,10 @@ class InfractionScheduler:
log_msg = f"Failed to apply {' '.join(infr_type.split('_'))} infraction #{id_} to {user}"
if isinstance(e, discord.Forbidden):
log.warning(f"{log_msg}: bot lacks permissions.")
+ elif e.code == 10007 or e.status == 404:
+ log.info(
+ f"Can't apply {infraction['type']} to user {infraction['user']} because user left from guild."
+ )
else:
log.exception(log_msg)
failed = True
@@ -352,9 +369,16 @@ class InfractionScheduler:
log_text["Failure"] = "The bot lacks permissions to do this (role hierarchy?)"
log_content = mod_role.mention
except discord.HTTPException as e:
- log.exception(f"Failed to deactivate infraction #{id_} ({type_})")
- log_text["Failure"] = f"HTTPException with status {e.status} and code {e.code}."
- log_content = mod_role.mention
+ if e.code == 10007 or e.status == 404:
+ log.info(
+ f"Can't pardon {infraction['type']} for user {infraction['user']} because user left the guild."
+ )
+ log_text["Failure"] = "User left the guild."
+ log_content = mod_role.mention
+ else:
+ log.exception(f"Failed to deactivate infraction #{id_} ({type_})")
+ log_text["Failure"] = f"HTTPException with status {e.status} and code {e.code}."
+ log_content = mod_role.mention
# Check if the user is currently being watched by Big Brother.
try:
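The same e.code == 10007 or e.status == 404 test now appears in three places in this file to mean "the member already left the guild" (10007 is Discord's Unknown Member error code). A small sketch of that check as a standalone predicate, just to make the intent explicit; the cog itself keeps the checks inline:

    # discord.HTTPException exposes the HTTP status and the Discord error code.
    import discord


    def is_unknown_member_error(error: discord.HTTPException) -> bool:
        """Return True if the failure only means the member is no longer in the guild."""
        return error.code == 10007 or error.status == 404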
diff --git a/bot/exts/moderation/silence.py b/bot/exts/moderation/silence.py
index e6712b3b6..a942d5294 100644
--- a/bot/exts/moderation/silence.py
+++ b/bot/exts/moderation/silence.py
@@ -93,7 +93,7 @@ class Silence(commands.Cog):
await self.bot.wait_until_guild_available()
guild = self.bot.get_guild(Guild.id)
- self._verified_role = guild.get_role(Roles.verified)
+ self._everyone_role = guild.default_role
self._mod_alerts_channel = self.bot.get_channel(Channels.mod_alerts)
self.notifier = SilenceNotifier(self.bot.get_channel(Channels.mod_log))
await self._reschedule()
@@ -142,7 +142,7 @@ class Silence(commands.Cog):
async def _unsilence_wrapper(self, channel: TextChannel) -> None:
"""Unsilence `channel` and send a success/failure message."""
if not await self._unsilence(channel):
- overwrite = channel.overwrites_for(self._verified_role)
+ overwrite = channel.overwrites_for(self._everyone_role)
if overwrite.send_messages is False or overwrite.add_reactions is False:
await channel.send(MSG_UNSILENCE_MANUAL)
else:
@@ -152,14 +152,14 @@ class Silence(commands.Cog):
async def _set_silence_overwrites(self, channel: TextChannel) -> bool:
"""Set silence permission overwrites for `channel` and return True if successful."""
- overwrite = channel.overwrites_for(self._verified_role)
+ overwrite = channel.overwrites_for(self._everyone_role)
prev_overwrites = dict(send_messages=overwrite.send_messages, add_reactions=overwrite.add_reactions)
if channel.id in self.scheduler or all(val is False for val in prev_overwrites.values()):
return False
overwrite.update(send_messages=False, add_reactions=False)
- await channel.set_permissions(self._verified_role, overwrite=overwrite)
+ await channel.set_permissions(self._everyone_role, overwrite=overwrite)
await self.previous_overwrites.set(channel.id, json.dumps(prev_overwrites))
return True
@@ -188,14 +188,14 @@ class Silence(commands.Cog):
log.info(f"Tried to unsilence channel #{channel} ({channel.id}) but the channel was not silenced.")
return False
- overwrite = channel.overwrites_for(self._verified_role)
+ overwrite = channel.overwrites_for(self._everyone_role)
if prev_overwrites is None:
log.info(f"Missing previous overwrites for #{channel} ({channel.id}); defaulting to None.")
overwrite.update(send_messages=None, add_reactions=None)
else:
overwrite.update(**json.loads(prev_overwrites))
- await channel.set_permissions(self._verified_role, overwrite=overwrite)
+ await channel.set_permissions(self._everyone_role, overwrite=overwrite)
log.info(f"Unsilenced channel #{channel} ({channel.id}).")
self.scheduler.cancel(channel.id)
@@ -207,7 +207,7 @@ class Silence(commands.Cog):
await self._mod_alerts_channel.send(
f"<@&{Roles.admins}> Restored overwrites with default values after unsilencing "
f"{channel.mention}. Please check that the `Send Messages` and `Add Reactions` "
- f"overwrites for {self._verified_role.mention} are at their desired values."
+ f"overwrites for {self._everyone_role.mention} are at their desired values."
)
return True
diff --git a/bot/exts/moderation/verification.py b/bot/exts/moderation/verification.py
index 7aa559617..ce91dcb15 100644
--- a/bot/exts/moderation/verification.py
+++ b/bot/exts/moderation/verification.py
@@ -55,7 +55,7 @@ If you'd like to unsubscribe from the announcement notifications, simply send `!
"""
ALTERNATE_VERIFIED_MESSAGE = f"""
-Thanks for accepting our rules!
+You are now verified!
You can find a copy of our rules for reference at <https://pythondiscord.com/pages/rules>.
@@ -174,14 +174,13 @@ class Verification(Cog):
# ]
task_cache = RedisCache()
- # Create a cache for storing recipients of the alternate welcome DM.
- member_gating_cache = RedisCache()
-
def __init__(self, bot: Bot) -> None:
"""Start internal tasks."""
self.bot = bot
self.bot.loop.create_task(self._maybe_start_tasks())
+ self.pending_members = set()
+
def cog_unload(self) -> None:
"""
Cancel internal tasks.
@@ -568,19 +567,8 @@ class Verification(Cog):
# If the user has the pending flag set, they will be using the alternate
# gate and will not need a welcome DM with verification instructions.
# We will send them an alternate DM once they verify with the welcome
- # video.
+ # video when they pass the gate.
if raw_member.get("pending"):
- await self.member_gating_cache.set(member.id, True)
-
- # TODO: Temporary, remove soon after asking joe.
- await self.mod_log.send_log_message(
- icon_url=self.bot.user.avatar_url,
- colour=discord.Colour.blurple(),
- title="New native gated user",
- channel_id=constants.Channels.user_log,
- text=f"<@{member.id}> ({member.id})",
- )
-
return
log.trace(f"Sending on join message to new member: {member.id}")
@@ -592,19 +580,15 @@ class Verification(Cog):
@Cog.listener()
async def on_member_update(self, before: discord.Member, after: discord.Member) -> None:
"""Check if we need to send a verification DM to a gated user."""
- before_roles = [role.id for role in before.roles]
- after_roles = [role.id for role in after.roles]
-
- if constants.Roles.verified not in before_roles and constants.Roles.verified in after_roles:
- if await self.member_gating_cache.pop(after.id):
- try:
- # If the member has not received a DM from our !accept command
- # and has gone through the alternate gating system we should send
- # our alternate welcome DM which includes info such as our welcome
- # video.
- await safe_dm(after.send(ALTERNATE_VERIFIED_MESSAGE))
- except discord.HTTPException:
- log.exception("DM dispatch failed on unexpected error code")
+ if before.pending is True and after.pending is False:
+ try:
+ # If the member has not received a DM from our !accept command
+ # and has gone through the alternate gating system we should send
+ # our alternate welcome DM which includes info such as our welcome
+ # video.
+ await safe_dm(after.send(ALTERNATE_VERIFIED_MESSAGE))
+ except discord.HTTPException:
+ log.exception("DM dispatch failed on unexpected error code")
@Cog.listener()
async def on_message(self, message: discord.Message) -> None:
@@ -850,19 +834,21 @@ class Verification(Cog):
@command(name='verify')
@has_any_role(*constants.MODERATION_ROLES)
- async def apply_developer_role(self, ctx: Context, user: discord.Member) -> None:
- """Command for moderators to apply the Developer role to any user."""
+ async def perform_manual_verification(self, ctx: Context, user: discord.Member) -> None:
+ """Command for moderators to verify any user."""
log.trace(f'verify command called by {ctx.author} for {user.id}.')
- developer_role = self.bot.get_guild(constants.Guild.id).get_role(constants.Roles.verified)
- if developer_role in user.roles:
- log.trace(f'{user.id} is already a developer, aborting.')
- await ctx.send(f'{constants.Emojis.cross_mark} {user} is already a developer.')
+ if not user.pending:
+ log.trace(f'{user.id} is already verified, aborting.')
+ await ctx.send(f'{constants.Emojis.cross_mark} {user.mention} is already verified.')
return
- await user.add_roles(developer_role)
- log.trace(f'Developer role successfully applied to {user.id}')
- await ctx.send(f'{constants.Emojis.check_mark} Developer role applied to {user}.')
+ # Adding a role automatically verifies the user, so we add and remove the Announcements role.
+ temporary_role = self.bot.get_guild(constants.Guild.id).get_role(constants.Roles.announcements)
+ await user.add_roles(temporary_role)
+ await user.remove_roles(temporary_role)
+ log.trace(f'{user.id} manually verified.')
+ await ctx.send(f'{constants.Emojis.check_mark} {user.mention} is now verified.')
# endregion
diff --git a/bot/exts/moderation/voice_gate.py b/bot/exts/moderation/voice_gate.py
index 4d48d2c1b..0cbce6a51 100644
--- a/bot/exts/moderation/voice_gate.py
+++ b/bot/exts/moderation/voice_gate.py
@@ -5,7 +5,6 @@ from datetime import datetime, timedelta
import discord
from async_rediscache import RedisCache
-from dateutil import parser
from discord import Colour, Member, VoiceState
from discord.ext.commands import Cog, Context, command
@@ -29,7 +28,7 @@ FAILED_MESSAGE = (
)
MESSAGE_FIELD_MAP = {
- "verified_at": f"have been verified for less than {GateConf.minimum_days_verified} days",
+ "joined_at": f"have been on the server for less than {GateConf.minimum_days_member} days",
"voice_banned": "have an active voice ban infraction",
"total_messages": f"have sent less than {GateConf.minimum_messages} messages",
"activity_blocks": f"have been active for fewer than {GateConf.minimum_activity_blocks} ten-minute blocks",
@@ -149,14 +148,8 @@ class VoiceGate(Cog):
await ctx.author.send(embed=embed)
return
- # Pre-parse this for better code style
- if data["verified_at"] is not None:
- data["verified_at"] = parser.isoparse(data["verified_at"])
- else:
- data["verified_at"] = datetime.utcnow() - timedelta(days=3)
-
checks = {
- "verified_at": data["verified_at"] > datetime.utcnow() - timedelta(days=GateConf.minimum_days_verified),
+ "joined_at": ctx.author.joined_at > datetime.utcnow() - timedelta(days=GateConf.minimum_days_member),
"total_messages": data["total_messages"] < GateConf.minimum_messages,
"voice_banned": data["voice_banned"],
"activity_blocks": data["activity_blocks"] < GateConf.minimum_activity_blocks
diff --git a/bot/exts/moderation/watchchannels/_watchchannel.py b/bot/exts/moderation/watchchannels/_watchchannel.py
index 7118dee02..f9fc12dc3 100644
--- a/bot/exts/moderation/watchchannels/_watchchannel.py
+++ b/bot/exts/moderation/watchchannels/_watchchannel.py
@@ -342,11 +342,14 @@ class WatchChannel(metaclass=CogABCMeta):
"""Takes care of unloading the cog and canceling the consumption task."""
self.log.trace("Unloading the cog")
if self._consume_task and not self._consume_task.done():
+ def done_callback(task: asyncio.Task) -> None:
+ """Log a message when the consume task has been cancelled."""
+ try:
+ task.result()
+ except asyncio.CancelledError:
+ self.log.info(
+ f"The consume task of {type(self).__name__} was canceled. Messages may be lost."
+ )
+
+ self._consume_task.add_done_callback(done_callback)
self._consume_task.cancel()
- try:
- self._consume_task.result()
- except asyncio.CancelledError as e:
- self.log.exception(
- "The consume task was canceled. Messages may be lost.",
- exc_info=e
- )
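
The removed teardown called Task.result() immediately after Task.cancel(), while the task was still pending, which raises InvalidStateError rather than the expected CancelledError; the done callback defers that inspection until the task has actually finished. A minimal standalone sketch of the pattern outside the cog:

import asyncio


async def main() -> None:
    async def consumer() -> None:
        await asyncio.sleep(3600)  # Stand-in for the message-consuming loop.

    task = asyncio.create_task(consumer())

    def done_callback(task: asyncio.Task) -> None:
        # Runs only once the task has finished, so inspecting the result is safe here.
        try:
            task.result()
        except asyncio.CancelledError:
            print("Consume task was cancelled; queued messages may be lost.")

    task.add_done_callback(done_callback)
    task.cancel()
    try:
        await task  # Let the cancellation actually complete.
    except asyncio.CancelledError:
        pass


asyncio.run(main())
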
diff --git a/bot/exts/utils/bot.py b/bot/exts/utils/bot.py
index 69d623581..a4c828f95 100644
--- a/bot/exts/utils/bot.py
+++ b/bot/exts/utils/bot.py
@@ -5,7 +5,7 @@ from discord import Embed, TextChannel
from discord.ext.commands import Cog, Context, command, group, has_any_role
from bot.bot import Bot
-from bot.constants import Guild, MODERATION_ROLES, Roles, URLs
+from bot.constants import Guild, MODERATION_ROLES, URLs
log = logging.getLogger(__name__)
@@ -17,13 +17,11 @@ class BotCog(Cog, name="Bot"):
self.bot = bot
@group(invoke_without_command=True, name="bot", hidden=True)
- @has_any_role(Roles.verified)
async def botinfo_group(self, ctx: Context) -> None:
"""Bot informational commands."""
await ctx.send_help(ctx.command)
@botinfo_group.command(name='about', aliases=('info',), hidden=True)
- @has_any_role(Roles.verified)
async def about_command(self, ctx: Context) -> None:
"""Get information about the bot."""
embed = Embed(
diff --git a/bot/exts/utils/clean.py b/bot/exts/utils/clean.py
index bf25cb4c2..8acaf9131 100644
--- a/bot/exts/utils/clean.py
+++ b/bot/exts/utils/clean.py
@@ -191,7 +191,7 @@ class Clean(Cog):
channel_id=Channels.mod_log,
)
- @group(invoke_without_command=True, name="clean", aliases=["purge"])
+ @group(invoke_without_command=True, name="clean", aliases=["clear", "purge"])
@has_any_role(*MODERATION_ROLES)
async def clean_group(self, ctx: Context) -> None:
"""Commands for cleaning messages in channels."""
diff --git a/bot/exts/utils/utils.py b/bot/exts/utils/utils.py
index df9d6661d..37e855588 100644
--- a/bot/exts/utils/utils.py
+++ b/bot/exts/utils/utils.py
@@ -2,10 +2,7 @@ import difflib
import logging
import re
import unicodedata
-from datetime import datetime, timedelta
-from email.parser import HeaderParser
-from io import StringIO
-from typing import Dict, Optional, Tuple, Union
+from typing import Tuple, Union
from discord import Colour, Embed, utils
from discord.ext.commands import BadArgument, Cog, Context, clean_content, command, has_any_role
@@ -17,7 +14,6 @@ from bot.converters import Snowflake
from bot.decorators import in_whitelist
from bot.pagination import LinePaginator
from bot.utils import messages
-from bot.utils.cache import AsyncCache
from bot.utils.time import time_since
log = logging.getLogger(__name__)
@@ -44,28 +40,12 @@ If the implementation is easy to explain, it may be a good idea.
Namespaces are one honking great idea -- let's do more of those!
"""
-ICON_URL = "https://www.python.org/static/opengraph-icon-200x200.png"
-
-pep_cache = AsyncCache()
-
-# Add GitHub token when it's set to raise limit of requests per hour
-GITHUB_HEADERS = {}
-if Keys.github:
- GITHUB_HEADERS["Authorization"] = f"token {Keys.github}"
-
class Utils(Cog):
"""A selection of utilities which don't have a clear category."""
- BASE_PEP_URL = "http://www.python.org/dev/peps/pep-"
- BASE_GITHUB_PEP_URL = "https://raw.githubusercontent.com/python/peps/master/pep-"
- PEPS_LISTING_API_URL = "https://api.github.com/repos/python/peps/contents?ref=master"
-
def __init__(self, bot: Bot):
self.bot = bot
- self.peps: Dict[int, str] = {}
- self.last_refreshed_peps: Optional[datetime] = None
- self.bot.loop.create_task(self.refresh_peps_urls())
@command()
@in_whitelist(channels=(Channels.bot_commands,), roles=STAFF_ROLES)
@@ -212,126 +192,6 @@ class Utils(Cog):
for reaction in options:
await message.add_reaction(reaction)
- # region: PEP
-
- async def refresh_peps_urls(self) -> None:
- """Refresh PEP URLs listing in every 3 hours."""
- # Wait until HTTP client is available
- await self.bot.wait_until_ready()
- log.trace("Started refreshing PEP URLs.")
-
- async with self.bot.http_session.get(self.PEPS_LISTING_API_URL, headers=GITHUB_HEADERS) as resp:
- listing = await resp.json()
-
- log.trace("Got PEP URLs listing from GitHub API")
-
- for file in listing:
- name = file["name"]
- if name.startswith("pep-") and name.endswith((".rst", ".txt")):
- pep_number = name.replace("pep-", "").split(".")[0]
- self.peps[int(pep_number)] = file["download_url"]
-
- self.last_refreshed_peps = datetime.now()
- log.info("Successfully refreshed PEP URLs listing.")
-
- @command(name='pep', aliases=('get_pep', 'p'))
- async def pep_command(self, ctx: Context, pep_number: int) -> None:
- """Fetches information about a PEP and sends it to the channel."""
- # Trigger typing in chat to show users that bot is responding
- await ctx.trigger_typing()
-
- # Handle PEP 0 directly because it's not in .rst or .txt so it can't be accessed like other PEPs.
- if pep_number == 0:
- pep_embed = self.get_pep_zero_embed()
- success = True
- else:
- success = False
- if not (pep_embed := await self.validate_pep_number(pep_number)):
- pep_embed, success = await self.get_pep_embed(pep_number)
-
- await ctx.send(embed=pep_embed)
- if success:
- log.trace(f"PEP {pep_number} getting and sending finished successfully. Increasing stat.")
- self.bot.stats.incr(f"pep_fetches.{pep_number}")
- else:
- log.trace(f"Getting PEP {pep_number} failed. Error embed sent.")
-
- @staticmethod
- def get_pep_zero_embed() -> Embed:
- """Get information embed about PEP 0."""
- pep_embed = Embed(
- title="**PEP 0 - Index of Python Enhancement Proposals (PEPs)**",
- url="https://www.python.org/dev/peps/"
- )
- pep_embed.set_thumbnail(url=ICON_URL)
- pep_embed.add_field(name="Status", value="Active")
- pep_embed.add_field(name="Created", value="13-Jul-2000")
- pep_embed.add_field(name="Type", value="Informational")
-
- return pep_embed
-
- async def validate_pep_number(self, pep_nr: int) -> Optional[Embed]:
- """Validate is PEP number valid. When it isn't, return error embed, otherwise None."""
- if (
- pep_nr not in self.peps
- and (self.last_refreshed_peps + timedelta(minutes=30)) <= datetime.now()
- and len(str(pep_nr)) < 5
- ):
- await self.refresh_peps_urls()
-
- if pep_nr not in self.peps:
- log.trace(f"PEP {pep_nr} was not found")
- return Embed(
- title="PEP not found",
- description=f"PEP {pep_nr} does not exist.",
- colour=Colour.red()
- )
-
- return None
-
- def generate_pep_embed(self, pep_header: Dict, pep_nr: int) -> Embed:
- """Generate PEP embed based on PEP headers data."""
- # Assemble the embed
- pep_embed = Embed(
- title=f"**PEP {pep_nr} - {pep_header['Title']}**",
- description=f"[Link]({self.BASE_PEP_URL}{pep_nr:04})",
- )
-
- pep_embed.set_thumbnail(url=ICON_URL)
-
- # Add the interesting information
- fields_to_check = ("Status", "Python-Version", "Created", "Type")
- for field in fields_to_check:
- # Check for a PEP metadata field that is present but has an empty value
- # embed field values can't contain an empty string
- if pep_header.get(field, ""):
- pep_embed.add_field(name=field, value=pep_header[field])
-
- return pep_embed
-
- @pep_cache(arg_offset=1)
- async def get_pep_embed(self, pep_nr: int) -> Tuple[Embed, bool]:
- """Fetch, generate and return PEP embed. Second item of return tuple show does getting success."""
- response = await self.bot.http_session.get(self.peps[pep_nr])
-
- if response.status == 200:
- log.trace(f"PEP {pep_nr} found")
- pep_content = await response.text()
-
- # Taken from https://github.com/python/peps/blob/master/pep0/pep.py#L179
- pep_header = HeaderParser().parse(StringIO(pep_content))
- return self.generate_pep_embed(pep_header, pep_nr), True
- else:
- log.trace(
- f"The user requested PEP {pep_nr}, but the response had an unexpected status code: {response.status}."
- )
- return Embed(
- title="Unexpected error",
- description="Unexpected HTTP error during PEP search. Please let us know.",
- colour=Colour.red()
- ), False
- # endregion
-
def setup(bot: Bot) -> None:
"""Load the Utils cog."""
diff --git a/bot/log.py b/bot/log.py
index 13141de40..0935666d1 100644
--- a/bot/log.py
+++ b/bot/log.py
@@ -6,7 +6,6 @@ from pathlib import Path
import coloredlogs
import sentry_sdk
-from sentry_sdk.integrations.aiohttp import AioHttpIntegration
from sentry_sdk.integrations.logging import LoggingIntegration
from sentry_sdk.integrations.redis import RedisIntegration
@@ -67,9 +66,9 @@ def setup_sentry() -> None:
dsn=constants.Bot.sentry_dsn,
integrations=[
sentry_logging,
- AioHttpIntegration(),
RedisIntegration(),
- ]
+ ],
+ release=f"bot@{constants.GIT_SHA}"
)
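
Tagging the Sentry release with the Git SHA ties every reported event to the exact build that produced it. A minimal standalone sketch of the same pattern, assuming the SHA reaches the process through a GIT_SHA environment variable and the DSN through SENTRY_DSN (both names are assumptions for this sketch, not taken from the diff):

import logging
import os

import sentry_sdk
from sentry_sdk.integrations.logging import LoggingIntegration
from sentry_sdk.integrations.redis import RedisIntegration

git_sha = os.environ.get("GIT_SHA", "development")

sentry_sdk.init(
    dsn=os.environ.get("SENTRY_DSN"),
    integrations=[
        # Send WARNING-and-above log records as events; keep DEBUG-and-above as breadcrumbs.
        LoggingIntegration(level=logging.DEBUG, event_level=logging.WARNING),
        RedisIntegration(),
    ],
    release=f"bot@{git_sha}",  # e.g. "bot@<commit sha>" for a production build
)
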
diff --git a/config-default.yml b/config-default.yml
index 9e1c9c012..5a97ec402 100644
--- a/config-default.yml
+++ b/config-default.yml
@@ -328,7 +328,7 @@ filter:
keys:
site_api: !ENV "BOT_API_KEY"
- github: !ENV "GITHUB_TOKEN"
+ github: !ENV "GITHUB_API_KEY"
urls:
@@ -532,7 +532,7 @@ verification:
voice_gate:
- minimum_days_verified: 3 # How many days the user must have been verified for
+ minimum_days_member: 3 # How many days the user must have been a member for
minimum_messages: 50 # How many messages a user must have to be eligible for voice
bot_message_delete_delay: 10 # Seconds before deleting bot's response in Voice Gate
minimum_activity_blocks: 3 # Number of 10 minute blocks during which a user must have been active
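
The renamed keys are still resolved from the environment via the !ENV tag. The loader itself is not part of this diff; the following is only a sketch of how such a tag can be wired up with PyYAML, to illustrate the mechanism rather than the bot's actual implementation:

import os

import yaml


def env_constructor(loader: yaml.SafeLoader, node: yaml.ScalarNode) -> str:
    """Resolve an `!ENV "VAR_NAME"` node to that environment variable's value."""
    return os.environ.get(loader.construct_scalar(node), "")


yaml.SafeLoader.add_constructor("!ENV", env_constructor)

os.environ["GITHUB_API_KEY"] = "example-token"
config = yaml.load('github: !ENV "GITHUB_API_KEY"', Loader=yaml.SafeLoader)
print(config)  # {'github': 'example-token'}
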
diff --git a/tests/bot/exts/info/test_information.py b/tests/bot/exts/info/test_information.py
index 254b0a867..d077be960 100644
--- a/tests/bot/exts/info/test_information.py
+++ b/tests/bot/exts/info/test_information.py
@@ -355,7 +355,7 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
self.assertEqual(
textwrap.dedent(f"""
Joined: {"1 year ago"}
- Verified: {"False"}
+ Verified: {"True"}
Roles: &Moderators
""").strip(),
embed.fields[1].value
diff --git a/tests/bot/exts/moderation/test_silence.py b/tests/bot/exts/moderation/test_silence.py
index 104293d8e..fa5fc9e81 100644
--- a/tests/bot/exts/moderation/test_silence.py
+++ b/tests/bot/exts/moderation/test_silence.py
@@ -117,15 +117,6 @@ class SilenceCogTests(unittest.IsolatedAsyncioTestCase):
self.bot.get_guild.assert_called_once_with(Guild.id)
@autospec(silence, "SilenceNotifier", pass_mocks=False)
- async def test_async_init_got_role(self):
- """Got `Roles.verified` role from guild."""
- guild = self.bot.get_guild()
- guild.get_role.side_effect = lambda id_: Mock(id=id_)
-
- await self.cog._async_init()
- self.assertEqual(self.cog._verified_role.id, Roles.verified)
-
- @autospec(silence, "SilenceNotifier", pass_mocks=False)
async def test_async_init_got_channels(self):
"""Got channels from bot."""
self.bot.get_channel.side_effect = lambda id_: MockTextChannel(id=id_)
@@ -302,7 +293,7 @@ class SilenceTests(unittest.IsolatedAsyncioTestCase):
self.assertFalse(self.overwrite.send_messages)
self.assertFalse(self.overwrite.add_reactions)
self.channel.set_permissions.assert_awaited_once_with(
- self.cog._verified_role,
+ self.cog._everyone_role,
overwrite=self.overwrite
)
@@ -435,7 +426,7 @@ class UnsilenceTests(unittest.IsolatedAsyncioTestCase):
"""Channel's `send_message` and `add_reactions` overwrites were restored."""
await self.cog._unsilence(self.channel)
self.channel.set_permissions.assert_awaited_once_with(
- self.cog._verified_role,
+ self.cog._everyone_role,
overwrite=self.overwrite,
)
@@ -449,7 +440,7 @@ class UnsilenceTests(unittest.IsolatedAsyncioTestCase):
await self.cog._unsilence(self.channel)
self.channel.set_permissions.assert_awaited_once_with(
- self.cog._verified_role,
+ self.cog._everyone_role,
overwrite=self.overwrite,
)
diff --git a/tests/bot/test_api.py b/tests/bot/test_api.py
index 99e942813..76bcb481d 100644
--- a/tests/bot/test_api.py
+++ b/tests/bot/test_api.py
@@ -13,14 +13,6 @@ class APIClientTests(unittest.IsolatedAsyncioTestCase):
cls.error_api_response = MagicMock()
cls.error_api_response.status = 999
- def test_loop_is_not_running_by_default(self):
- """The event loop should not be running by default."""
- self.assertFalse(api.loop_is_running())
-
- async def test_loop_is_running_in_async_context(self):
- """The event loop should be running in an async context."""
- self.assertTrue(api.loop_is_running())
-
def test_response_code_error_default_initialization(self):
"""Test the default initialization of `ResponseCodeError` without `text` or `json`"""
error = api.ResponseCodeError(response=self.error_api_response)
diff --git a/tests/helpers.py b/tests/helpers.py
index 870f66197..496363ae3 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -230,7 +230,7 @@ class MockMember(CustomMockMixin, unittest.mock.Mock, ColourMixin, HashableMixin
spec_set = member_instance
def __init__(self, roles: Optional[Iterable[MockRole]] = None, **kwargs) -> None:
- default_kwargs = {'name': 'member', 'id': next(self.discord_id), 'bot': False}
+ default_kwargs = {'name': 'member', 'id': next(self.discord_id), 'bot': False, "pending": False}
super().__init__(**collections.ChainMap(kwargs, default_kwargs))
self.roles = [MockRole(name="@everyone", position=1, id=0)]
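
Defaulting pending to False keeps existing tests behaving like already-screened members, while verification tests can opt in explicitly. A short usage sketch (the assertions are illustrative, not taken from the test suite):

from tests.helpers import MockMember

# Members default to pending=False (already screened); override it per test as needed.
screened = MockMember()
unscreened = MockMember(pending=True)
assert screened.pending is False
assert unscreened.pending is True
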