author Numerlor <[email protected]> 2021-01-11 03:13:04 +0100
committer Numerlor <[email protected]> 2021-01-11 03:13:04 +0100
commit 24e432c75347eb23025eba6cf37030976ba846e8 (patch)
tree b8523c34af5d0af73ba451856d10e03b099e44b5
parent Remove old reference to CachedParser and unused const (diff)
parent Merge pull request #1350 from python-discord/mbaruh/developerectomy (diff)
Merge remote-tracking branch 'upstream/master' into doc-imp
-rw-r--r--  .github/CODEOWNERS                                   |    4
-rw-r--r--  .github/workflows/build.yml                          |    2
-rw-r--r--  .github/workflows/sentry_release.yml                 |   24
-rw-r--r--  .pre-commit-config.yaml                              |    2
-rw-r--r--  Dockerfile                                           |   11
-rw-r--r--  Pipfile                                              |    4
-rw-r--r--  Pipfile.lock                                         |  363
-rw-r--r--  bot/api.py                                           |   73
-rw-r--r--  bot/bot.py                                           |  102
-rw-r--r--  bot/constants.py                                     |   19
-rw-r--r--  bot/exts/backend/error_handler.py                    |   10
-rw-r--r--  bot/exts/info/information.py                         |   47
-rw-r--r--  bot/exts/info/pep.py                                 |  164
-rw-r--r--  bot/exts/info/reddit.py                              |    2
-rw-r--r--  bot/exts/info/tags.py                                |    4
-rw-r--r--  bot/exts/moderation/infraction/_scheduler.py         |   34
-rw-r--r--  bot/exts/moderation/silence.py                       |   16
-rw-r--r--  bot/exts/moderation/verification.py                  |  737
-rw-r--r--  bot/exts/moderation/voice_gate.py                    |   11
-rw-r--r--  bot/exts/moderation/watchchannels/_watchchannel.py   |   17
-rw-r--r--  bot/exts/moderation/watchchannels/talentpool.py      |   16
-rw-r--r--  bot/exts/utils/bot.py                                |    4
-rw-r--r--  bot/exts/utils/clean.py                              |    2
-rw-r--r--  bot/exts/utils/jams.py                               |    4
-rw-r--r--  bot/exts/utils/utils.py                              |  137
-rw-r--r--  bot/log.py                                           |    5
-rw-r--r--  bot/resources/elements.json                          |    1
-rw-r--r--  bot/resources/tags/codeblock.md                      |    4
-rw-r--r--  bot/rules/burst_shared.py                            |   11
-rw-r--r--  config-default.yml                                   |   20
-rw-r--r--  tests/bot/exts/info/test_information.py              |    1
-rw-r--r--  tests/bot/exts/moderation/test_silence.py            |   15
-rw-r--r--  tests/bot/exts/utils/test_jams.py                    |    4
-rw-r--r--  tests/bot/test_api.py                                |    8
-rw-r--r--  tests/helpers.py                                     |    2
35 files changed, 646 insertions(+), 1234 deletions(-)
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 73e303325..ad813d893 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -6,9 +6,11 @@ bot/exts/info/codeblock/** @MarkKoz
bot/exts/utils/extensions.py @MarkKoz
bot/exts/utils/snekbox.py @MarkKoz @Akarys42
bot/exts/help_channels/** @MarkKoz @Akarys42
-bot/exts/moderation/** @Akarys42 @mbaruh @Den4200
+bot/exts/moderation/** @Akarys42 @mbaruh @Den4200 @ks129
bot/exts/info/** @Akarys42 @mbaruh @Den4200
bot/exts/filters/** @mbaruh
+bot/exts/fun/** @ks129
+bot/exts/utils/** @ks129
# Utils
bot/utils/extensions.py @MarkKoz
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 6152f1543..6c97e8784 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -55,3 +55,5 @@ jobs:
tags: |
ghcr.io/python-discord/bot:latest
ghcr.io/python-discord/bot:${{ steps.sha_tag.outputs.tag }}
+ build-args: |
+ git_sha=${{ github.sha }}
diff --git a/.github/workflows/sentry_release.yml b/.github/workflows/sentry_release.yml
new file mode 100644
index 000000000..b8d92e90a
--- /dev/null
+++ b/.github/workflows/sentry_release.yml
@@ -0,0 +1,24 @@
+name: Create Sentry release
+
+on:
+ push:
+ branches:
+ - master
+
+jobs:
+ create_sentry_release:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@master
+
+ - name: Create a Sentry.io release
+ uses: tclindner/[email protected]
+ env:
+ SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
+ SENTRY_ORG: python-discord
+ SENTRY_PROJECT: bot
+ with:
+ tagName: ${{ github.sha }}
+ environment: production
+ releaseNamePrefix: bot@
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 876d32b15..1597592ca 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -21,6 +21,6 @@ repos:
name: Flake8
description: This hook runs flake8 within our project's pipenv environment.
entry: pipenv run flake8
- language: python
+ language: system
types: [python]
require_serial: true
diff --git a/Dockerfile b/Dockerfile
index 06a538b2a..5d0380b44 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,10 +1,19 @@
FROM python:3.8-slim
+# Define Git SHA build argument
+ARG git_sha="development"
+
# Set pip to have cleaner logs and no saved cache
ENV PIP_NO_CACHE_DIR=false \
PIPENV_HIDE_EMOJIS=1 \
PIPENV_IGNORE_VIRTUALENVS=1 \
- PIPENV_NOSPIN=1
+ PIPENV_NOSPIN=1 \
+ GIT_SHA=$git_sha
+
+RUN apt-get -y update \
+ && apt-get install -y \
+ git \
+ && rm -rf /var/lib/apt/lists/*
# Install pipenv
RUN pip install -U pipenv
diff --git a/Pipfile b/Pipfile
index a92f64f59..45b759c25 100644
--- a/Pipfile
+++ b/Pipfile
@@ -14,7 +14,7 @@ beautifulsoup4 = "~=4.9"
colorama = {version = "~=0.4.3",sys_platform = "== 'win32'"}
coloredlogs = "~=14.0"
deepdiff = "~=4.0"
-"discord.py" = "~=1.5.0"
+"discord.py" = {git = "https://github.com/Rapptz/discord.py.git", ref = "94f76e63947b102e5de6dae9a2cd687b308033"}
feedparser = "~=5.2"
fuzzywuzzy = "~=0.17"
lxml = "~=4.4"
@@ -22,7 +22,7 @@ markdownify = "~=0.6.1"
more_itertools = "~=8.2"
python-dateutil = "~=2.8"
pyyaml = "~=5.1"
-sentry-sdk = "~=0.14"
+sentry-sdk = "~=0.19"
statsd = "~=3.3"
emoji = "~=0.6"
diff --git a/Pipfile.lock b/Pipfile.lock
index 927786f93..7fe041806 100644
--- a/Pipfile.lock
+++ b/Pipfile.lock
@@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
- "sha256": "a033c8f3593c5429469fde8790d26d3acd85c18bb6eddd06ee30d7d282191432"
+ "sha256": "f5286278b549c5f297710bedf1b85a54b89e6fbd29e5be9ad9d5583ab2b88818"
},
"pipfile-spec": 6,
"requires": {
@@ -34,22 +34,46 @@
},
"aiohttp": {
"hashes": [
- "sha256:1a4160579ffbc1b69e88cb6ca8bb0fbd4947dfcbf9fb1e2a4fc4c7a4a986c1fe",
- "sha256:206c0ccfcea46e1bddc91162449c20c72f308aebdcef4977420ef329c8fcc599",
- "sha256:2ad493de47a8f926386fa6d256832de3095ba285f325db917c7deae0b54a9fc8",
- "sha256:319b490a5e2beaf06891f6711856ea10591cfe84fe9f3e71a721aa8f20a0872a",
- "sha256:470e4c90da36b601676fe50c49a60d34eb8c6593780930b1aa4eea6f508dfa37",
- "sha256:60f4caa3b7f7a477f66ccdd158e06901e1d235d572283906276e3803f6b098f5",
- "sha256:66d64486172b032db19ea8522328b19cfb78a3e1e5b62ab6a0567f93f073dea0",
- "sha256:687461cd974722110d1763b45c5db4d2cdee8d50f57b00c43c7590d1dd77fc5c",
- "sha256:698cd7bc3c7d1b82bb728bae835724a486a8c376647aec336aa21a60113c3645",
- "sha256:797456399ffeef73172945708810f3277f794965eb6ec9bd3a0c007c0476be98",
- "sha256:a885432d3cabc1287bcf88ea94e1826d3aec57fd5da4a586afae4591b061d40d",
- "sha256:c506853ba52e516b264b106321c424d03f3ddef2813246432fa9d1cefd361c81",
- "sha256:fb83326d8295e8840e4ba774edf346e87eca78ba8a89c55d2690352842c15ba5"
- ],
- "index": "pypi",
- "version": "==3.6.3"
+ "sha256:0b795072bb1bf87b8620120a6373a3c61bfcb8da7e5c2377f4bb23ff4f0b62c9",
+ "sha256:0d438c8ca703b1b714e82ed5b7a4412c82577040dadff479c08405e2a715564f",
+ "sha256:16a3cb5df5c56f696234ea9e65e227d1ebe9c18aa774d36ff42f532139066a5f",
+ "sha256:1edfd82a98c5161497bbb111b2b70c0813102ad7e0aa81cbeb34e64c93863005",
+ "sha256:2406dc1dda01c7f6060ab586e4601f18affb7a6b965c50a8c90ff07569cf782a",
+ "sha256:2858b2504c8697beb9357be01dc47ef86438cc1cb36ecb6991796d19475faa3e",
+ "sha256:2a7b7640167ab536c3cb90cfc3977c7094f1c5890d7eeede8b273c175c3910fd",
+ "sha256:3228b7a51e3ed533f5472f54f70fd0b0a64c48dc1649a0f0e809bec312934d7a",
+ "sha256:328b552513d4f95b0a2eea4c8573e112866107227661834652a8984766aa7656",
+ "sha256:39f4b0a6ae22a1c567cb0630c30dd082481f95c13ca528dc501a7766b9c718c0",
+ "sha256:3b0036c978cbcc4a4512278e98e3e6d9e6b834dc973206162eddf98b586ef1c6",
+ "sha256:3ea8c252d8df5e9166bcf3d9edced2af132f4ead8ac422eac723c5781063709a",
+ "sha256:41608c0acbe0899c852281978492f9ce2c6fbfaf60aff0cefc54a7c4516b822c",
+ "sha256:59d11674964b74a81b149d4ceaff2b674b3b0e4d0f10f0be1533e49c4a28408b",
+ "sha256:5e479df4b2d0f8f02133b7e4430098699450e1b2a826438af6bec9a400530957",
+ "sha256:684850fb1e3e55c9220aad007f8386d8e3e477c4ec9211ae54d968ecdca8c6f9",
+ "sha256:6ccc43d68b81c424e46192a778f97da94ee0630337c9bbe5b2ecc9b0c1c59001",
+ "sha256:6d42debaf55450643146fabe4b6817bb2a55b23698b0434107e892a43117285e",
+ "sha256:710376bf67d8ff4500a31d0c207b8941ff4fba5de6890a701d71680474fe2a60",
+ "sha256:756ae7efddd68d4ea7d89c636b703e14a0c686688d42f588b90778a3c2fc0564",
+ "sha256:77149002d9386fae303a4a162e6bce75cc2161347ad2ba06c2f0182561875d45",
+ "sha256:78e2f18a82b88cbc37d22365cf8d2b879a492faedb3f2975adb4ed8dfe994d3a",
+ "sha256:7d9b42127a6c0bdcc25c3dcf252bb3ddc70454fac593b1b6933ae091396deb13",
+ "sha256:8389d6044ee4e2037dca83e3f6994738550f6ee8cfb746762283fad9b932868f",
+ "sha256:9c1a81af067e72261c9cbe33ea792893e83bc6aa987bfbd6fdc1e5e7b22777c4",
+ "sha256:c1e0920909d916d3375c7a1fdb0b1c78e46170e8bb42792312b6eb6676b2f87f",
+ "sha256:c68fdf21c6f3573ae19c7ee65f9ff185649a060c9a06535e9c3a0ee0bbac9235",
+ "sha256:c733ef3bdcfe52a1a75564389bad4064352274036e7e234730526d155f04d914",
+ "sha256:c9c58b0b84055d8bc27b7df5a9d141df4ee6ff59821f922dd73155861282f6a3",
+ "sha256:d03abec50df423b026a5aa09656bd9d37f1e6a49271f123f31f9b8aed5dc3ea3",
+ "sha256:d2cfac21e31e841d60dc28c0ec7d4ec47a35c608cb8906435d47ef83ffb22150",
+ "sha256:dcc119db14757b0c7bce64042158307b9b1c76471e655751a61b57f5a0e4d78e",
+ "sha256:df3a7b258cc230a65245167a202dd07320a5af05f3d41da1488ba0fa05bc9347",
+ "sha256:df48a623c58180874d7407b4d9ec06a19b84ed47f60a3884345b1a5099c1818b",
+ "sha256:e1b95972a0ae3f248a899cdbac92ba2e01d731225f566569311043ce2226f5e7",
+ "sha256:f326b3c1bbfda5b9308252ee0dcb30b612ee92b0e105d4abec70335fab5b1245",
+ "sha256:f411cb22115cb15452d099fec0ee636b06cf81bfb40ed9c02d30c8dc2bc2e3d1"
+ ],
+ "index": "pypi",
+ "version": "==3.7.3"
},
"aioping": {
"hashes": [
@@ -192,13 +216,13 @@
"index": "pypi",
"version": "==4.3.2"
},
+ "discord-py": {
+ "git": "https://github.com/Rapptz/discord.py.git",
+ "ref": "94f76e63947b102e5de6dae9a2cd687b308033dd"
+ },
"discord.py": {
- "hashes": [
- "sha256:2367359e31f6527f8a936751fc20b09d7495dd6a76b28c8fb13d4ca6c55b7563",
- "sha256:def00dc50cf36d21346d71bc89f0cad8f18f9a3522978dc18c7796287d47de8b"
- ],
- "index": "pypi",
- "version": "==1.5.1"
+ "git": "https://github.com/Rapptz/discord.py.git",
+ "ref": "94f76e63947b102e5de6dae9a2cd687b308033"
},
"emoji": {
"hashes": [
@@ -293,11 +317,11 @@
},
"idna": {
"hashes": [
- "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6",
- "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"
+ "sha256:5205d03e7bcbb919cc9c19885f9920d622ca52448306f2377daede5cf3faac16",
+ "sha256:c5b02147e01ea9920e6b0a3f1f7bb833612d507592c837a6c49552768f4054e1"
],
- "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
- "version": "==2.10"
+ "markers": "python_version >= '3.4'",
+ "version": "==3.1"
},
"lxml": {
"hashes": [
@@ -344,11 +368,11 @@
},
"markdownify": {
"hashes": [
- "sha256:30be8340724e706c9e811c27fe8c1542cf74a15b46827924fff5c54b40dd9b0d",
- "sha256:a69588194fd76634f0139d6801b820fd652dc5eeba9530e90d323dfdc0155252"
+ "sha256:31d7c13ac2ada8bfc7535a25fee6622ca720e1b5f2d4a9cbc429d167c21f886d",
+ "sha256:7489fd5c601536996a376c4afbcd1dd034db7690af807120681461e82fbc0acc"
],
"index": "pypi",
- "version": "==0.5.3"
+ "version": "==0.6.1"
},
"more-itertools": {
"hashes": [
@@ -360,26 +384,46 @@
},
"multidict": {
"hashes": [
- "sha256:1ece5a3369835c20ed57adadc663400b5525904e53bae59ec854a5d36b39b21a",
- "sha256:275ca32383bc5d1894b6975bb4ca6a7ff16ab76fa622967625baeebcf8079000",
- "sha256:3750f2205b800aac4bb03b5ae48025a64e474d2c6cc79547988ba1d4122a09e2",
- "sha256:4538273208e7294b2659b1602490f4ed3ab1c8cf9dbdd817e0e9db8e64be2507",
- "sha256:5141c13374e6b25fe6bf092052ab55c0c03d21bd66c94a0e3ae371d3e4d865a5",
- "sha256:51a4d210404ac61d32dada00a50ea7ba412e6ea945bbe992e4d7a595276d2ec7",
- "sha256:5cf311a0f5ef80fe73e4f4c0f0998ec08f954a6ec72b746f3c179e37de1d210d",
- "sha256:6513728873f4326999429a8b00fc7ceddb2509b01d5fd3f3be7881a257b8d463",
- "sha256:7388d2ef3c55a8ba80da62ecfafa06a1c097c18032a501ffd4cabbc52d7f2b19",
- "sha256:9456e90649005ad40558f4cf51dbb842e32807df75146c6d940b6f5abb4a78f3",
- "sha256:c026fe9a05130e44157b98fea3ab12969e5b60691a276150db9eda71710cd10b",
- "sha256:d14842362ed4cf63751648e7672f7174c9818459d169231d03c56e84daf90b7c",
- "sha256:e0d072ae0f2a179c375f67e3da300b47e1a83293c554450b29c900e50afaae87",
- "sha256:f07acae137b71af3bb548bd8da720956a3bc9f9a0b87733e0899226a2317aeb7",
- "sha256:fbb77a75e529021e7c4a8d4e823d88ef4d23674a202be4f5addffc72cbb91430",
- "sha256:fcfbb44c59af3f8ea984de67ec7c306f618a3ec771c2843804069917a8f2e255",
- "sha256:feed85993dbdb1dbc29102f50bca65bdc68f2c0c8d352468c25b54874f23c39d"
+ "sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a",
+ "sha256:051012ccee979b2b06be928a6150d237aec75dd6bf2d1eeeb190baf2b05abc93",
+ "sha256:05c20b68e512166fddba59a918773ba002fdd77800cad9f55b59790030bab632",
+ "sha256:07b42215124aedecc6083f1ce6b7e5ec5b50047afa701f3442054373a6deb656",
+ "sha256:0e3c84e6c67eba89c2dbcee08504ba8644ab4284863452450520dad8f1e89b79",
+ "sha256:0e929169f9c090dae0646a011c8b058e5e5fb391466016b39d21745b48817fd7",
+ "sha256:1ab820665e67373de5802acae069a6a05567ae234ddb129f31d290fc3d1aa56d",
+ "sha256:25b4e5f22d3a37ddf3effc0710ba692cfc792c2b9edfb9c05aefe823256e84d5",
+ "sha256:2e68965192c4ea61fff1b81c14ff712fc7dc15d2bd120602e4a3494ea6584224",
+ "sha256:2f1a132f1c88724674271d636e6b7351477c27722f2ed789f719f9e3545a3d26",
+ "sha256:37e5438e1c78931df5d3c0c78ae049092877e5e9c02dd1ff5abb9cf27a5914ea",
+ "sha256:3a041b76d13706b7fff23b9fc83117c7b8fe8d5fe9e6be45eee72b9baa75f348",
+ "sha256:3a4f32116f8f72ecf2a29dabfb27b23ab7cdc0ba807e8459e59a93a9be9506f6",
+ "sha256:46c73e09ad374a6d876c599f2328161bcd95e280f84d2060cf57991dec5cfe76",
+ "sha256:46dd362c2f045095c920162e9307de5ffd0a1bfbba0a6e990b344366f55a30c1",
+ "sha256:4b186eb7d6ae7c06eb4392411189469e6a820da81447f46c0072a41c748ab73f",
+ "sha256:54fd1e83a184e19c598d5e70ba508196fd0bbdd676ce159feb412a4a6664f952",
+ "sha256:585fd452dd7782130d112f7ddf3473ffdd521414674c33876187e101b588738a",
+ "sha256:5cf3443199b83ed9e955f511b5b241fd3ae004e3cb81c58ec10f4fe47c7dce37",
+ "sha256:6a4d5ce640e37b0efcc8441caeea8f43a06addace2335bd11151bc02d2ee31f9",
+ "sha256:7df80d07818b385f3129180369079bd6934cf70469f99daaebfac89dca288359",
+ "sha256:806068d4f86cb06af37cd65821554f98240a19ce646d3cd24e1c33587f313eb8",
+ "sha256:830f57206cc96ed0ccf68304141fec9481a096c4d2e2831f311bde1c404401da",
+ "sha256:929006d3c2d923788ba153ad0de8ed2e5ed39fdbe8e7be21e2f22ed06c6783d3",
+ "sha256:9436dc58c123f07b230383083855593550c4d301d2532045a17ccf6eca505f6d",
+ "sha256:9dd6e9b1a913d096ac95d0399bd737e00f2af1e1594a787e00f7975778c8b2bf",
+ "sha256:ace010325c787c378afd7f7c1ac66b26313b3344628652eacd149bdd23c68841",
+ "sha256:b47a43177a5e65b771b80db71e7be76c0ba23cc8aa73eeeb089ed5219cdbe27d",
+ "sha256:b797515be8743b771aa868f83563f789bbd4b236659ba52243b735d80b29ed93",
+ "sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f",
+ "sha256:d5c65bdf4484872c4af3150aeebe101ba560dcfb34488d9a8ff8dbcd21079647",
+ "sha256:d81eddcb12d608cc08081fa88d046c78afb1bf8107e6feab5d43503fea74a635",
+ "sha256:dc862056f76443a0db4509116c5cd480fe1b6a2d45512a653f9a855cc0517456",
+ "sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda",
+ "sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5",
+ "sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281",
+ "sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80"
],
- "markers": "python_version >= '3.5'",
- "version": "==4.7.6"
+ "markers": "python_version >= '3.6'",
+ "version": "==5.1.0"
},
"ordered-set": {
"hashes": [
@@ -456,19 +500,19 @@
},
"pyyaml": {
"hashes": [
- "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf",
- "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c",
+ "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97",
+ "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76",
+ "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2",
"sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e",
- "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a",
"sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648",
- "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2",
- "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97",
+ "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf",
"sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f",
- "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76",
"sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2",
+ "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee",
"sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a",
"sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d",
- "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee"
+ "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c",
+ "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a"
],
"index": "pypi",
"version": "==5.3.1"
@@ -520,6 +564,14 @@
"index": "pypi",
"version": "==3.3.0"
},
+ "typing-extensions": {
+ "hashes": [
+ "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918",
+ "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c",
+ "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"
+ ],
+ "version": "==3.7.4.3"
+ },
"urllib3": {
"hashes": [
"sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08",
@@ -530,26 +582,46 @@
},
"yarl": {
"hashes": [
- "sha256:040b237f58ff7d800e6e0fd89c8439b841f777dd99b4a9cca04d6935564b9409",
- "sha256:17668ec6722b1b7a3a05cc0167659f6c95b436d25a36c2d52db0eca7d3f72593",
- "sha256:3a584b28086bc93c888a6c2aa5c92ed1ae20932f078c46509a66dce9ea5533f2",
- "sha256:4439be27e4eee76c7632c2427ca5e73703151b22cae23e64adb243a9c2f565d8",
- "sha256:48e918b05850fffb070a496d2b5f97fc31d15d94ca33d3d08a4f86e26d4e7c5d",
- "sha256:9102b59e8337f9874638fcfc9ac3734a0cfadb100e47d55c20d0dc6087fb4692",
- "sha256:9b930776c0ae0c691776f4d2891ebc5362af86f152dd0da463a6614074cb1b02",
- "sha256:b3b9ad80f8b68519cc3372a6ca85ae02cc5a8807723ac366b53c0f089db19e4a",
- "sha256:bc2f976c0e918659f723401c4f834deb8a8e7798a71be4382e024bcc3f7e23a8",
- "sha256:c22c75b5f394f3d47105045ea551e08a3e804dc7e01b37800ca35b58f856c3d6",
- "sha256:c52ce2883dc193824989a9b97a76ca86ecd1fa7955b14f87bf367a61b6232511",
- "sha256:ce584af5de8830d8701b8979b18fcf450cef9a382b1a3c8ef189bedc408faf1e",
- "sha256:da456eeec17fa8aa4594d9a9f27c0b1060b6a75f2419fe0c00609587b2695f4a",
- "sha256:db6db0f45d2c63ddb1a9d18d1b9b22f308e52c83638c26b422d520a815c4b3fb",
- "sha256:df89642981b94e7db5596818499c4b2219028f2a528c9c37cc1de45bf2fd3a3f",
- "sha256:f18d68f2be6bf0e89f1521af2b1bb46e66ab0018faafa81d70f358153170a317",
- "sha256:f379b7f83f23fe12823085cd6b906edc49df969eb99757f58ff382349a3303c6"
+ "sha256:00d7ad91b6583602eb9c1d085a2cf281ada267e9a197e8b7cae487dadbfa293e",
+ "sha256:0355a701b3998dcd832d0dc47cc5dedf3874f966ac7f870e0f3a6788d802d434",
+ "sha256:15263c3b0b47968c1d90daa89f21fcc889bb4b1aac5555580d74565de6836366",
+ "sha256:2ce4c621d21326a4a5500c25031e102af589edb50c09b321049e388b3934eec3",
+ "sha256:31ede6e8c4329fb81c86706ba8f6bf661a924b53ba191b27aa5fcee5714d18ec",
+ "sha256:324ba3d3c6fee56e2e0b0d09bf5c73824b9f08234339d2b788af65e60040c959",
+ "sha256:329412812ecfc94a57cd37c9d547579510a9e83c516bc069470db5f75684629e",
+ "sha256:4736eaee5626db8d9cda9eb5282028cc834e2aeb194e0d8b50217d707e98bb5c",
+ "sha256:4953fb0b4fdb7e08b2f3b3be80a00d28c5c8a2056bb066169de00e6501b986b6",
+ "sha256:4c5bcfc3ed226bf6419f7a33982fb4b8ec2e45785a0561eb99274ebbf09fdd6a",
+ "sha256:547f7665ad50fa8563150ed079f8e805e63dd85def6674c97efd78eed6c224a6",
+ "sha256:5b883e458058f8d6099e4420f0cc2567989032b5f34b271c0827de9f1079a424",
+ "sha256:63f90b20ca654b3ecc7a8d62c03ffa46999595f0167d6450fa8383bab252987e",
+ "sha256:68dc568889b1c13f1e4745c96b931cc94fdd0defe92a72c2b8ce01091b22e35f",
+ "sha256:69ee97c71fee1f63d04c945f56d5d726483c4762845400a6795a3b75d56b6c50",
+ "sha256:6d6283d8e0631b617edf0fd726353cb76630b83a089a40933043894e7f6721e2",
+ "sha256:72a660bdd24497e3e84f5519e57a9ee9220b6f3ac4d45056961bf22838ce20cc",
+ "sha256:73494d5b71099ae8cb8754f1df131c11d433b387efab7b51849e7e1e851f07a4",
+ "sha256:7356644cbed76119d0b6bd32ffba704d30d747e0c217109d7979a7bc36c4d970",
+ "sha256:8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10",
+ "sha256:8aa3decd5e0e852dc68335abf5478a518b41bf2ab2f330fe44916399efedfae0",
+ "sha256:97b5bdc450d63c3ba30a127d018b866ea94e65655efaf889ebeabc20f7d12406",
+ "sha256:9ede61b0854e267fd565e7527e2f2eb3ef8858b301319be0604177690e1a3896",
+ "sha256:b2e9a456c121e26d13c29251f8267541bd75e6a1ccf9e859179701c36a078643",
+ "sha256:b5dfc9a40c198334f4f3f55880ecf910adebdcb2a0b9a9c23c9345faa9185721",
+ "sha256:bafb450deef6861815ed579c7a6113a879a6ef58aed4c3a4be54400ae8871478",
+ "sha256:c49ff66d479d38ab863c50f7bb27dee97c6627c5fe60697de15529da9c3de724",
+ "sha256:ce3beb46a72d9f2190f9e1027886bfc513702d748047b548b05dab7dfb584d2e",
+ "sha256:d26608cf178efb8faa5ff0f2d2e77c208f471c5a3709e577a7b3fd0445703ac8",
+ "sha256:d597767fcd2c3dc49d6eea360c458b65643d1e4dbed91361cf5e36e53c1f8c96",
+ "sha256:d5c32c82990e4ac4d8150fd7652b972216b204de4e83a122546dce571c1bdf25",
+ "sha256:d8d07d102f17b68966e2de0e07bfd6e139c7c02ef06d3a0f8d2f0f055e13bb76",
+ "sha256:e46fba844f4895b36f4c398c5af062a9808d1f26b2999c58909517384d5deda2",
+ "sha256:e6b5460dc5ad42ad2b36cca524491dfcaffbfd9c8df50508bddc354e787b8dc2",
+ "sha256:f040bcc6725c821a4c0665f3aa96a4d0805a7aaf2caf266d256b8ed71b9f041c",
+ "sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a",
+ "sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71"
],
- "markers": "python_version >= '3.5'",
- "version": "==1.5.1"
+ "markers": "python_version >= '3.6'",
+ "version": "==1.6.3"
}
},
"develop": {
@@ -592,43 +664,58 @@
},
"coverage": {
"hashes": [
- "sha256:0203acd33d2298e19b57451ebb0bed0ab0c602e5cf5a818591b4918b1f97d516",
- "sha256:0f313707cdecd5cd3e217fc68c78a960b616604b559e9ea60cc16795c4304259",
- "sha256:1c6703094c81fa55b816f5ae542c6ffc625fec769f22b053adb42ad712d086c9",
- "sha256:1d44bb3a652fed01f1f2c10d5477956116e9b391320c94d36c6bf13b088a1097",
- "sha256:280baa8ec489c4f542f8940f9c4c2181f0306a8ee1a54eceba071a449fb870a0",
- "sha256:29a6272fec10623fcbe158fdf9abc7a5fa032048ac1d8631f14b50fbfc10d17f",
- "sha256:2b31f46bf7b31e6aa690d4c7a3d51bb262438c6dcb0d528adde446531d0d3bb7",
- "sha256:2d43af2be93ffbad25dd959899b5b809618a496926146ce98ee0b23683f8c51c",
- "sha256:381ead10b9b9af5f64646cd27107fb27b614ee7040bb1226f9c07ba96625cbb5",
- "sha256:47a11bdbd8ada9b7ee628596f9d97fbd3851bd9999d398e9436bd67376dbece7",
- "sha256:4d6a42744139a7fa5b46a264874a781e8694bb32f1d76d8137b68138686f1729",
- "sha256:50691e744714856f03a86df3e2bff847c2acede4c191f9a1da38f088df342978",
- "sha256:530cc8aaf11cc2ac7430f3614b04645662ef20c348dce4167c22d99bec3480e9",
- "sha256:582ddfbe712025448206a5bc45855d16c2e491c2dd102ee9a2841418ac1c629f",
- "sha256:63808c30b41f3bbf65e29f7280bf793c79f54fb807057de7e5238ffc7cc4d7b9",
- "sha256:71b69bd716698fa62cd97137d6f2fdf49f534decb23a2c6fc80813e8b7be6822",
- "sha256:7858847f2d84bf6e64c7f66498e851c54de8ea06a6f96a32a1d192d846734418",
- "sha256:78e93cc3571fd928a39c0b26767c986188a4118edc67bc0695bc7a284da22e82",
- "sha256:7f43286f13d91a34fadf61ae252a51a130223c52bfefb50310d5b2deb062cf0f",
- "sha256:86e9f8cd4b0cdd57b4ae71a9c186717daa4c5a99f3238a8723f416256e0b064d",
- "sha256:8f264ba2701b8c9f815b272ad568d555ef98dfe1576802ab3149c3629a9f2221",
- "sha256:9342dd70a1e151684727c9c91ea003b2fb33523bf19385d4554f7897ca0141d4",
- "sha256:9361de40701666b034c59ad9e317bae95c973b9ff92513dd0eced11c6adf2e21",
- "sha256:9669179786254a2e7e57f0ecf224e978471491d660aaca833f845b72a2df3709",
- "sha256:aac1ba0a253e17889550ddb1b60a2063f7474155465577caa2a3b131224cfd54",
- "sha256:aef72eae10b5e3116bac6957de1df4d75909fc76d1499a53fb6387434b6bcd8d",
- "sha256:bd3166bb3b111e76a4f8e2980fa1addf2920a4ca9b2b8ca36a3bc3dedc618270",
- "sha256:c1b78fb9700fc961f53386ad2fd86d87091e06ede5d118b8a50dea285a071c24",
- "sha256:c3888a051226e676e383de03bf49eb633cd39fc829516e5334e69b8d81aae751",
- "sha256:c5f17ad25d2c1286436761b462e22b5020d83316f8e8fcb5deb2b3151f8f1d3a",
- "sha256:c851b35fc078389bc16b915a0a7c1d5923e12e2c5aeec58c52f4aa8085ac8237",
- "sha256:cb7df71de0af56000115eafd000b867d1261f786b5eebd88a0ca6360cccfaca7",
- "sha256:cedb2f9e1f990918ea061f28a0f0077a07702e3819602d3507e2ff98c8d20636",
- "sha256:e8caf961e1b1a945db76f1b5fa9c91498d15f545ac0ababbe575cfab185d3bd8"
- ],
- "index": "pypi",
- "version": "==5.3"
+ "sha256:08b3ba72bd981531fd557f67beee376d6700fba183b167857038997ba30dd297",
+ "sha256:2757fa64e11ec12220968f65d086b7a29b6583d16e9a544c889b22ba98555ef1",
+ "sha256:3102bb2c206700a7d28181dbe04d66b30780cde1d1c02c5f3c165cf3d2489497",
+ "sha256:3498b27d8236057def41de3585f317abae235dd3a11d33e01736ffedb2ef8606",
+ "sha256:378ac77af41350a8c6b8801a66021b52da8a05fd77e578b7380e876c0ce4f528",
+ "sha256:38f16b1317b8dd82df67ed5daa5f5e7c959e46579840d77a67a4ceb9cef0a50b",
+ "sha256:3911c2ef96e5ddc748a3c8b4702c61986628bb719b8378bf1e4a6184bbd48fe4",
+ "sha256:3a3c3f8863255f3c31db3889f8055989527173ef6192a283eb6f4db3c579d830",
+ "sha256:3b14b1da110ea50c8bcbadc3b82c3933974dbeea1832e814aab93ca1163cd4c1",
+ "sha256:535dc1e6e68fad5355f9984d5637c33badbdc987b0c0d303ee95a6c979c9516f",
+ "sha256:6f61319e33222591f885c598e3e24f6a4be3533c1d70c19e0dc59e83a71ce27d",
+ "sha256:723d22d324e7997a651478e9c5a3120a0ecbc9a7e94071f7e1954562a8806cf3",
+ "sha256:76b2775dda7e78680d688daabcb485dc87cf5e3184a0b3e012e1d40e38527cc8",
+ "sha256:782a5c7df9f91979a7a21792e09b34a658058896628217ae6362088b123c8500",
+ "sha256:7e4d159021c2029b958b2363abec4a11db0ce8cd43abb0d9ce44284cb97217e7",
+ "sha256:8dacc4073c359f40fcf73aede8428c35f84639baad7e1b46fce5ab7a8a7be4bb",
+ "sha256:8f33d1156241c43755137288dea619105477961cfa7e47f48dbf96bc2c30720b",
+ "sha256:8ffd4b204d7de77b5dd558cdff986a8274796a1e57813ed005b33fd97e29f059",
+ "sha256:93a280c9eb736a0dcca19296f3c30c720cb41a71b1f9e617f341f0a8e791a69b",
+ "sha256:9a4f66259bdd6964d8cf26142733c81fb562252db74ea367d9beb4f815478e72",
+ "sha256:9a9d4ff06804920388aab69c5ea8a77525cf165356db70131616acd269e19b36",
+ "sha256:a2070c5affdb3a5e751f24208c5c4f3d5f008fa04d28731416e023c93b275277",
+ "sha256:a4857f7e2bc6921dbd487c5c88b84f5633de3e7d416c4dc0bb70256775551a6c",
+ "sha256:a607ae05b6c96057ba86c811d9c43423f35e03874ffb03fbdcd45e0637e8b631",
+ "sha256:a66ca3bdf21c653e47f726ca57f46ba7fc1f260ad99ba783acc3e58e3ebdb9ff",
+ "sha256:ab110c48bc3d97b4d19af41865e14531f300b482da21783fdaacd159251890e8",
+ "sha256:b239711e774c8eb910e9b1ac719f02f5ae4bf35fa0420f438cdc3a7e4e7dd6ec",
+ "sha256:be0416074d7f253865bb67630cf7210cbc14eb05f4099cc0f82430135aaa7a3b",
+ "sha256:c46643970dff9f5c976c6512fd35768c4a3819f01f61169d8cdac3f9290903b7",
+ "sha256:c5ec71fd4a43b6d84ddb88c1df94572479d9a26ef3f150cef3dacefecf888105",
+ "sha256:c6e5174f8ca585755988bc278c8bb5d02d9dc2e971591ef4a1baabdf2d99589b",
+ "sha256:c89b558f8a9a5a6f2cfc923c304d49f0ce629c3bd85cb442ca258ec20366394c",
+ "sha256:cc44e3545d908ecf3e5773266c487ad1877be718d9dc65fc7eb6e7d14960985b",
+ "sha256:cc6f8246e74dd210d7e2b56c76ceaba1cc52b025cd75dbe96eb48791e0250e98",
+ "sha256:cd556c79ad665faeae28020a0ab3bda6cd47d94bec48e36970719b0b86e4dcf4",
+ "sha256:ce6f3a147b4b1a8b09aae48517ae91139b1b010c5f36423fa2b866a8b23df879",
+ "sha256:ceb499d2b3d1d7b7ba23abe8bf26df5f06ba8c71127f188333dddcf356b4b63f",
+ "sha256:cef06fb382557f66d81d804230c11ab292d94b840b3cb7bf4450778377b592f4",
+ "sha256:e448f56cfeae7b1b3b5bcd99bb377cde7c4eb1970a525c770720a352bc4c8044",
+ "sha256:e52d3d95df81c8f6b2a1685aabffadf2d2d9ad97203a40f8d61e51b70f191e4e",
+ "sha256:ee2f1d1c223c3d2c24e3afbb2dd38be3f03b1a8d6a83ee3d9eb8c36a52bee899",
+ "sha256:f2c6888eada180814b8583c3e793f3f343a692fc802546eed45f40a001b1169f",
+ "sha256:f51dbba78d68a44e99d484ca8c8f604f17e957c1ca09c3ebc2c7e3bbd9ba0448",
+ "sha256:f54de00baf200b4539a5a092a759f000b5f45fd226d6d25a76b0dff71177a714",
+ "sha256:fa10fee7e32213f5c7b0d6428ea92e3a3fdd6d725590238a3f92c0de1c78b9d2",
+ "sha256:fabeeb121735d47d8eab8671b6b031ce08514c86b7ad8f7d5490a7b6dcd6267d",
+ "sha256:fac3c432851038b3e6afe086f777732bcf7f6ebbfd90951fa04ee53db6d0bcdd",
+ "sha256:fda29412a66099af6d6de0baa6bd7c52674de177ec2ad2630ca264142d69c6c7",
+ "sha256:ff1330e8bc996570221b450e2d539134baa9465f5cb98aff0e0f73f34172e0ae"
+ ],
+ "index": "pypi",
+ "version": "==5.3.1"
},
"coveralls": {
"hashes": [
@@ -668,11 +755,11 @@
},
"flake8-annotations": {
"hashes": [
- "sha256:0bcebb0792f1f96d617ded674dca7bf64181870bfe5dace353a1483551f8e5f1",
- "sha256:bebd11a850f6987a943ce8cdff4159767e0f5f89b3c88aca64680c2175ee02df"
+ "sha256:3a377140556aecf11fa9f3bb18c10db01f5ea56dc79a730e2ec9b4f1f49e2055",
+ "sha256:e17947a48a5b9f632fe0c72682fc797c385e451048e7dfb20139f448a074cb3e"
],
"index": "pypi",
- "version": "==2.4.1"
+ "version": "==2.5.0"
},
"flake8-bugbear": {
"hashes": [
@@ -715,11 +802,11 @@
},
"flake8-tidy-imports": {
"hashes": [
- "sha256:62059ca07d8a4926b561d392cbab7f09ee042350214a25cf12823384a45d27dd",
- "sha256:c30b40337a2e6802ba3bb611c26611154a27e94c53fc45639e3e282169574fd3"
+ "sha256:52e5f2f987d3d5597538d5941153409ebcab571635835b78f522c7bf03ca23bc",
+ "sha256:76e36fbbfdc8e3c5017f9a216c2855a298be85bc0631e66777f4e6a07a859dc4"
],
"index": "pypi",
- "version": "==4.1.0"
+ "version": "==4.2.1"
},
"flake8-todo": {
"hashes": [
@@ -730,19 +817,19 @@
},
"identify": {
"hashes": [
- "sha256:943cd299ac7f5715fcb3f684e2fc1594c1e0f22a90d15398e5888143bd4144b5",
- "sha256:cc86e6a9a390879dcc2976cef169dd9cc48843ed70b7380f321d1b118163c60e"
+ "sha256:18994e850ba50c37bcaed4832be8b354d6a06c8fb31f54e0e7ece76d32f69bc8",
+ "sha256:892473bf12e655884132a3a32aca737a3cbefaa34a850ff52d501773a45837bc"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
- "version": "==1.5.10"
+ "version": "==1.5.12"
},
"idna": {
"hashes": [
- "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6",
- "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"
+ "sha256:5205d03e7bcbb919cc9c19885f9920d622ca52448306f2377daede5cf3faac16",
+ "sha256:c5b02147e01ea9920e6b0a3f1f7bb833612d507592c837a6c49552768f4054e1"
],
- "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
- "version": "==2.10"
+ "markers": "python_version >= '3.4'",
+ "version": "==3.1"
},
"mccabe": {
"hashes": [
@@ -800,30 +887,30 @@
},
"pyyaml": {
"hashes": [
- "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf",
- "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c",
+ "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97",
+ "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76",
+ "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2",
"sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e",
- "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a",
"sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648",
- "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2",
- "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97",
+ "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf",
"sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f",
- "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76",
"sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2",
+ "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee",
"sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a",
"sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d",
- "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee"
+ "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c",
+ "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a"
],
"index": "pypi",
"version": "==5.3.1"
},
"requests": {
"hashes": [
- "sha256:7f1a0b932f4a60a1a65caa4263921bb7d9ee911957e0ae4a23a6dd08185ad5f8",
- "sha256:e786fa28d8c9154e6a4de5d46a1d921b8749f8b74e28bde23768e5e16eece998"
+ "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804",
+ "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
- "version": "==2.25.0"
+ "version": "==2.25.1"
},
"six": {
"hashes": [
@@ -858,11 +945,11 @@
},
"virtualenv": {
"hashes": [
- "sha256:54b05fc737ea9c9ee9f8340f579e5da5b09fb64fd010ab5757eb90268616907c",
- "sha256:b7a8ec323ee02fb2312f098b6b4c9de99559b462775bc8fe3627a73706603c1b"
+ "sha256:205a7577275dd0d9223c730dd498e21a8910600085c3dee97412b041fc4b853b",
+ "sha256:7992b8de87e544a4ab55afc2240bf8388c4e3b5765d03784dad384bfdf9097ee"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
- "version": "==20.2.2"
+ "version": "==20.3.0"
}
}
}
diff --git a/bot/api.py b/bot/api.py
index 4b8520582..d93f9f2ba 100644
--- a/bot/api.py
+++ b/bot/api.py
@@ -37,64 +37,27 @@ class APIClient:
session: Optional[aiohttp.ClientSession] = None
loop: asyncio.AbstractEventLoop = None
- def __init__(self, loop: asyncio.AbstractEventLoop, **kwargs):
+ def __init__(self, **session_kwargs):
auth_headers = {
'Authorization': f"Token {Keys.site_api}"
}
- if 'headers' in kwargs:
- kwargs['headers'].update(auth_headers)
+ if 'headers' in session_kwargs:
+ session_kwargs['headers'].update(auth_headers)
else:
- kwargs['headers'] = auth_headers
+ session_kwargs['headers'] = auth_headers
- self.session = None
- self.loop = loop
-
- self._ready = asyncio.Event(loop=loop)
- self._creation_task = None
- self._default_session_kwargs = kwargs
-
- self.recreate()
+ # aiohttp will complain if APIClient gets instantiated outside a coroutine. Thankfully, we
+ # don't and shouldn't need to do that, so we can avoid scheduling a task to create it.
+ self.session = aiohttp.ClientSession(**session_kwargs)
@staticmethod
def _url_for(endpoint: str) -> str:
return f"{URLs.site_schema}{URLs.site_api}/{quote_url(endpoint)}"
- async def _create_session(self, **session_kwargs) -> None:
- """
- Create the aiohttp session with `session_kwargs` and set the ready event.
-
- `session_kwargs` is merged with `_default_session_kwargs` and overwrites its values.
- If an open session already exists, it will first be closed.
- """
- await self.close()
- self.session = aiohttp.ClientSession(**{**self._default_session_kwargs, **session_kwargs})
- self._ready.set()
-
async def close(self) -> None:
- """Close the aiohttp session and unset the ready event."""
- if self.session:
- await self.session.close()
-
- self._ready.clear()
-
- def recreate(self, force: bool = False, **session_kwargs) -> None:
- """
- Schedule the aiohttp session to be created with `session_kwargs` if it's been closed.
-
- If `force` is True, the session will be recreated even if an open one exists. If a task to
- create the session is pending, it will be cancelled.
-
- `session_kwargs` is merged with the kwargs given when the `APIClient` was created and
- overwrites those default kwargs.
- """
- if force or self.session is None or self.session.closed:
- if force and self._creation_task:
- self._creation_task.cancel()
-
- # Don't schedule a task if one is already in progress.
- if force or self._creation_task is None or self._creation_task.done():
- self._creation_task = self.loop.create_task(self._create_session(**session_kwargs))
+ """Close the aiohttp session."""
+ await self.session.close()
async def maybe_raise_for_status(self, response: aiohttp.ClientResponse, should_raise: bool) -> None:
"""Raise ResponseCodeError for non-OK response if an exception should be raised."""
@@ -108,8 +71,6 @@ class APIClient:
async def request(self, method: str, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> dict:
"""Send an HTTP request to the site API and return the JSON response."""
- await self._ready.wait()
-
async with self.session.request(method.upper(), self._url_for(endpoint), **kwargs) as resp:
await self.maybe_raise_for_status(resp, raise_for_status)
return await resp.json()
@@ -132,25 +93,9 @@ class APIClient:
async def delete(self, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> Optional[dict]:
"""Site API DELETE."""
- await self._ready.wait()
-
async with self.session.delete(self._url_for(endpoint), **kwargs) as resp:
if resp.status == 204:
return None
await self.maybe_raise_for_status(resp, raise_for_status)
return await resp.json()
-
-
-def loop_is_running() -> bool:
- """
- Determine if there is a running asyncio event loop.
-
- This helps enable "call this when event loop is running" logic (see: Twisted's `callWhenRunning`),
- which is currently not provided by asyncio.
- """
- try:
- asyncio.get_running_loop()
- except RuntimeError:
- return False
- return True
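
The session-recreation machinery (the ready event, the creation task, and the loop_is_running helper) is gone: APIClient now builds its aiohttp session eagerly in __init__, so it only needs to be constructed from inside a running event loop. A minimal usage sketch under that assumption; the endpoint mirrors the filter-list call used elsewhere in the bot, and the surrounding script is purely illustrative:

    import asyncio

    from bot.api import APIClient


    async def main() -> None:
        # Instantiating inside a coroutine keeps aiohttp happy, since the
        # ClientSession is now created directly in __init__ rather than in a task.
        api_client = APIClient()
        try:
            filter_lists = await api_client.get("bot/filter-lists")
            print(f"Fetched {len(filter_lists)} filter list entries.")
        finally:
            # close() is now a plain session close; there is no ready event to clear.
            await api_client.close()


    asyncio.run(main())
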
diff --git a/bot/bot.py b/bot/bot.py
index f71f5d1fb..d5f108575 100644
--- a/bot/bot.py
+++ b/bot/bot.py
@@ -3,7 +3,8 @@ import logging
import socket
import warnings
from collections import defaultdict
-from typing import Dict, Optional
+from contextlib import suppress
+from typing import Dict, List, Optional
import aiohttp
import discord
@@ -32,7 +33,7 @@ class Bot(commands.Bot):
self.http_session: Optional[aiohttp.ClientSession] = None
self.redis_session = redis_session
- self.api_client = api.APIClient(loop=self.loop)
+ self.api_client: Optional[api.APIClient] = None
self.filter_list_cache = defaultdict(dict)
self._connector = None
@@ -70,6 +71,9 @@ class Bot(commands.Bot):
attempt + 1
)
+ # All tasks that need to block closing until finished
+ self.closing_tasks: List[asyncio.Task] = []
+
async def cache_filter_list_data(self) -> None:
"""Cache all the data in the FilterList on the site."""
full_cache = await self.api_client.get('bot/filter-lists')
@@ -77,46 +81,6 @@ class Bot(commands.Bot):
for item in full_cache:
self.insert_item_into_filter_list_cache(item)
- def _recreate(self) -> None:
- """Re-create the connector, aiohttp session, the APIClient and the Redis session."""
- # Use asyncio for DNS resolution instead of threads so threads aren't spammed.
- # Doesn't seem to have any state with regards to being closed, so no need to worry?
- self._resolver = aiohttp.AsyncResolver()
-
- # Its __del__ does send a warning but it doesn't always show up for some reason.
- if self._connector and not self._connector._closed:
- log.warning(
- "The previous connector was not closed; it will remain open and be overwritten"
- )
-
- if self.redis_session.closed:
- # If the RedisSession was somehow closed, we try to reconnect it
- # here. Normally, this shouldn't happen.
- self.loop.create_task(self.redis_session.connect())
-
- # Use AF_INET as its socket family to prevent HTTPS related problems both locally
- # and in production.
- self._connector = aiohttp.TCPConnector(
- resolver=self._resolver,
- family=socket.AF_INET,
- )
-
- # Client.login() will call HTTPClient.static_login() which will create a session using
- # this connector attribute.
- self.http.connector = self._connector
-
- # Its __del__ does send a warning but it doesn't always show up for some reason.
- if self.http_session and not self.http_session.closed:
- log.warning(
- "The previous session was not closed; it will remain open and be overwritten"
- )
-
- self.http_session = aiohttp.ClientSession(connector=self._connector)
- self.api_client.recreate(force=True, connector=self._connector)
-
- # Build the FilterList cache
- self.loop.create_task(self.cache_filter_list_data())
-
@classmethod
def create(cls) -> "Bot":
"""Create and return an instance of a Bot."""
@@ -180,21 +144,29 @@ class Bot(commands.Bot):
return command
def clear(self) -> None:
- """
- Clears the internal state of the bot and recreates the connector and sessions.
-
- Will cause a DeprecationWarning if called outside a coroutine.
- """
- # Because discord.py recreates the HTTPClient session, may as well follow suit and recreate
- # our own stuff here too.
- self._recreate()
- super().clear()
+ """Not implemented! Re-instantiate the bot instead of attempting to re-use a closed one."""
+ raise NotImplementedError("Re-using a Bot object after closing it is not supported.")
async def close(self) -> None:
"""Close the Discord connection and the aiohttp session, connector, statsd client, and resolver."""
+ # Done before super().close() to allow tasks to finish before the HTTP session closes.
+ for ext in list(self.extensions):
+ with suppress(Exception):
+ self.unload_extension(ext)
+
+ for cog in list(self.cogs):
+ with suppress(Exception):
+ self.remove_cog(cog)
+
+ # Wait until all tasks that must complete before the bot closes are done
+ log.trace("Waiting for tasks before closing.")
+ await asyncio.gather(*self.closing_tasks)
+
+ # Now actually do full close of bot
await super().close()
- await self.api_client.close()
+ if self.api_client:
+ await self.api_client.close()
if self.http_session:
await self.http_session.close()
@@ -229,7 +201,31 @@ class Bot(commands.Bot):
async def login(self, *args, **kwargs) -> None:
"""Re-create the connector and set up sessions before logging into Discord."""
- self._recreate()
+ # Use asyncio for DNS resolution instead of threads so threads aren't spammed.
+ self._resolver = aiohttp.AsyncResolver()
+
+ # Use AF_INET as its socket family to prevent HTTPS related problems both locally
+ # and in production.
+ self._connector = aiohttp.TCPConnector(
+ resolver=self._resolver,
+ family=socket.AF_INET,
+ )
+
+ # Client.login() will call HTTPClient.static_login() which will create a session using
+ # this connector attribute.
+ self.http.connector = self._connector
+
+ self.http_session = aiohttp.ClientSession(connector=self._connector)
+ self.api_client = api.APIClient(connector=self._connector)
+
+ if self.redis_session.closed:
+ # If the RedisSession was somehow closed, we try to reconnect it
+ # here. Normally, this shouldn't happen.
+ await self.redis_session.connect()
+
+ # Build the FilterList cache
+ await self.cache_filter_list_data()
+
await self.stats.create_socket()
await super().login(*args, **kwargs)
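
Bot.close() now unloads extensions, removes cogs, and awaits everything in closing_tasks before the API client and HTTP session are torn down; the Reddit change further down registers its token-revocation task through exactly this list. A sketch of the general pattern from a cog's cog_unload, where the cog name and cleanup coroutine are illustrative only:

    import asyncio

    from discord.ext import commands

    from bot.bot import Bot


    class ExampleCog(commands.Cog):
        """Hypothetical cog showing how async cleanup can block Bot.close()."""

        def __init__(self, bot: Bot):
            self.bot = bot

        def cog_unload(self) -> None:
            # cog_unload must be synchronous, so schedule the coroutine and hand the
            # task to the bot; Bot.close() awaits closing_tasks before closing sessions.
            task = asyncio.create_task(self.cleanup())
            self.bot.closing_tasks.append(task)

        async def cleanup(self) -> None:
            # Any async teardown that still needs the bot's sessions to be open.
            ...
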
diff --git a/bot/constants.py b/bot/constants.py
index 08ae0d52f..d813046ab 100644
--- a/bot/constants.py
+++ b/bot/constants.py
@@ -434,7 +434,6 @@ class Channels(metaclass=YAMLGetter):
talent_pool: int
user_event_announcements: int
user_log: int
- verification: int
voice_chat: int
voice_gate: int
voice_log: int
@@ -471,8 +470,6 @@ class Roles(metaclass=YAMLGetter):
python_community: int
sprinters: int
team_leaders: int
- unverified: int
- verified: int # This is the Developers role on PyDis, here named verified for readability reasons.
voice_verified: int
@@ -493,6 +490,7 @@ class Keys(metaclass=YAMLGetter):
section = "keys"
site_api: Optional[str]
+ github: Optional[str]
class URLs(metaclass=YAMLGetter):
@@ -593,20 +591,10 @@ class PythonNews(metaclass=YAMLGetter):
webhook: int
-class Verification(metaclass=YAMLGetter):
- section = "verification"
-
- unverified_after: int
- kicked_after: int
- reminder_frequency: int
- bot_message_delete_delay: int
- kick_confirmation_threshold: float
-
-
class VoiceGate(metaclass=YAMLGetter):
section = "voice_gate"
- minimum_days_verified: int
+ minimum_days_member: int
minimum_messages: int
bot_message_delete_delay: int
minimum_activity_blocks: int
@@ -656,6 +644,9 @@ MODERATION_CHANNELS = Guild.moderation_channels
# Category combinations
MODERATION_CATEGORIES = Guild.moderation_categories
+# Git SHA for Sentry
+GIT_SHA = os.environ.get("GIT_SHA", "development")
+
# Bot replies
NEGATIVE_REPLIES = [
"Noooooo!!",
diff --git a/bot/exts/backend/error_handler.py b/bot/exts/backend/error_handler.py
index c643d346e..5b5840858 100644
--- a/bot/exts/backend/error_handler.py
+++ b/bot/exts/backend/error_handler.py
@@ -8,7 +8,7 @@ from sentry_sdk import push_scope
from bot.api import ResponseCodeError
from bot.bot import Bot
-from bot.constants import Channels, Colours
+from bot.constants import Colours
from bot.converters import TagNameConverter
from bot.errors import LockedResourceError
from bot.utils.checks import InWhitelistCheckFailure
@@ -47,7 +47,6 @@ class ErrorHandler(Cog):
* If CommandNotFound is raised when invoking the tag (determined by the presence of the
`invoked_from_error_handler` attribute), this error is treated as being unexpected
and therefore sends an error message
- * Commands in the verification channel are ignored
2. UserInputError: see `handle_user_input_error`
3. CheckFailure: see `handle_check_failure`
4. CommandOnCooldown: send an error message in the invoking context
@@ -63,10 +62,9 @@ class ErrorHandler(Cog):
if isinstance(e, errors.CommandNotFound) and not hasattr(ctx, "invoked_from_error_handler"):
if await self.try_silence(ctx):
return
- if ctx.channel.id != Channels.verification:
- # Try to look for a tag with the command's name
- await self.try_get_tag(ctx)
- return # Exit early to avoid logging.
+ # Try to look for a tag with the command's name
+ await self.try_get_tag(ctx)
+ return # Exit early to avoid logging.
elif isinstance(e, errors.UserInputError):
await self.handle_user_input_error(ctx, e)
elif isinstance(e, errors.CheckFailure):
diff --git a/bot/exts/info/information.py b/bot/exts/info/information.py
index 5aaf85e5a..38e760ee3 100644
--- a/bot/exts/info/information.py
+++ b/bot/exts/info/information.py
@@ -11,6 +11,7 @@ from discord.abc import GuildChannel
from discord.ext.commands import BucketType, Cog, Context, Paginator, command, group, has_any_role
from bot import constants
+from bot.api import ResponseCodeError
from bot.bot import Bot
from bot.converters import FetchedMember
from bot.decorators import in_whitelist
@@ -21,7 +22,6 @@ from bot.utils.time import time_since
log = logging.getLogger(__name__)
-
STATUS_EMOTES = {
Status.offline: constants.Emojis.status_offline,
Status.dnd: constants.Emojis.status_dnd,
@@ -224,13 +224,16 @@ class Information(Cog):
if is_set and (emoji := getattr(constants.Emojis, f"badge_{badge}", None)):
badges.append(emoji)
+ activity = await self.user_messages(user)
+
if on_server:
joined = time_since(user.joined_at, max_units=3)
roles = ", ".join(role.mention for role in user.roles[1:])
- membership = textwrap.dedent(f"""
- Joined: {joined}
- Roles: {roles or None}
- """).strip()
+ membership = {"Joined": joined, "Verified": not user.pending, "Roles": roles or None}
+ if not is_mod_channel(ctx.channel):
+ membership.pop("Verified")
+
+ membership = textwrap.dedent("\n".join([f"{key}: {value}" for key, value in membership.items()]))
else:
roles = None
membership = "The user is not a member of the server"
@@ -252,6 +255,8 @@ class Information(Cog):
# Show more verbose output in moderation channels for infractions and nominations
if is_mod_channel(ctx.channel):
+ fields.append(activity)
+
fields.append(await self.expanded_user_infraction_counts(user))
fields.append(await self.user_nomination_counts(user))
else:
@@ -354,6 +359,30 @@ class Information(Cog):
return "Nominations", "\n".join(output)
+ async def user_messages(self, user: FetchedMember) -> Tuple[Union[bool, str], Tuple[str, str]]:
+ """
+ Gets the amount of messages for `member`.
+
+ Fetches information from the metricity database that's hosted by the site.
+ If the database returns a code besides a 404, then many parts of the bot are broken including this one.
+ """
+ activity_output = []
+
+ try:
+ user_activity = await self.bot.api_client.get(f"bot/users/{user.id}/metricity_data")
+ except ResponseCodeError as e:
+ if e.status == 404:
+ activity_output = "No activity"
+ else:
+ activity_output.append(user_activity["total_messages"] or "No messages")
+ activity_output.append(user_activity["activity_blocks"] or "No activity")
+
+ activity_output = "\n".join(
+ f"{name}: {metric}" for name, metric in zip(["Messages", "Activity blocks"], activity_output)
+ )
+
+ return ("Activity", activity_output)
+
def format_fields(self, mapping: Mapping[str, Any], field_width: Optional[int] = None) -> str:
"""Format a mapping to be readable to a human."""
# sorting is technically superfluous but nice if you want to look for a specific field
@@ -390,10 +419,14 @@ class Information(Cog):
return out.rstrip()
@cooldown_with_role_bypass(2, 60 * 3, BucketType.member, bypass_roles=constants.STAFF_ROLES)
- @group(invoke_without_command=True, enabled=False)
+ @group(invoke_without_command=True)
@in_whitelist(channels=(constants.Channels.bot_commands,), roles=constants.STAFF_ROLES)
async def raw(self, ctx: Context, *, message: Message, json: bool = False) -> None:
"""Shows information about the raw API response."""
+ if ctx.author not in message.channel.members:
+ await ctx.send(":x: You do not have permissions to see the channel this message is in.")
+ return
+
# I *guess* it could be deleted right as the command is invoked but I felt like it wasn't worth handling
# doing this extra request is also much easier than trying to convert everything back into a dictionary again
raw_data = await ctx.bot.http.get_message(message.channel.id, message.id)
@@ -425,7 +458,7 @@ class Information(Cog):
for page in paginator.pages:
await ctx.send(page)
- @raw.command(enabled=False)
+ @raw.command()
async def json(self, ctx: Context, message: Message) -> None:
"""Shows information about the raw API response in a copy-pasteable Python format."""
await ctx.invoke(self.raw, message=message, json=True)
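
The new user_messages helper leans on try/except/else: the else branch only runs when the metricity request raised nothing, so the message and activity-block counts are appended only on success, while a 404 collapses to "No activity". A small standalone illustration of that control flow (the helper name and return format are illustrative, not the cog's exact output):

    from bot.api import APIClient, ResponseCodeError


    async def fetch_activity_summary(api_client: APIClient, user_id: int) -> str:
        try:
            data = await api_client.get(f"bot/users/{user_id}/metricity_data")
        except ResponseCodeError as e:
            # The user is unknown to the metricity database.
            if e.status == 404:
                return "No activity"
            raise
        else:
            # Only reached when the request above succeeded.
            return f"Messages: {data['total_messages']}\nActivity blocks: {data['activity_blocks']}"
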
diff --git a/bot/exts/info/pep.py b/bot/exts/info/pep.py
new file mode 100644
index 000000000..8ac96bbdb
--- /dev/null
+++ b/bot/exts/info/pep.py
@@ -0,0 +1,164 @@
+import logging
+from datetime import datetime, timedelta
+from email.parser import HeaderParser
+from io import StringIO
+from typing import Dict, Optional, Tuple
+
+from discord import Colour, Embed
+from discord.ext.commands import Cog, Context, command
+
+from bot.bot import Bot
+from bot.constants import Keys
+from bot.utils.cache import AsyncCache
+
+log = logging.getLogger(__name__)
+
+ICON_URL = "https://www.python.org/static/opengraph-icon-200x200.png"
+BASE_PEP_URL = "http://www.python.org/dev/peps/pep-"
+PEPS_LISTING_API_URL = "https://api.github.com/repos/python/peps/contents?ref=master"
+
+pep_cache = AsyncCache()
+
+GITHUB_API_HEADERS = {}
+if Keys.github:
+ GITHUB_API_HEADERS["Authorization"] = f"token {Keys.github}"
+
+
+class PythonEnhancementProposals(Cog):
+ """Cog for displaying information about PEPs."""
+
+ def __init__(self, bot: Bot):
+ self.bot = bot
+ self.peps: Dict[int, str] = {}
+ # To avoid situations where we don't have last datetime, set this to now.
+ self.last_refreshed_peps: datetime = datetime.now()
+ self.bot.loop.create_task(self.refresh_peps_urls())
+
+ async def refresh_peps_urls(self) -> None:
+ """Refresh PEP URLs listing in every 3 hours."""
+ # Wait until HTTP client is available
+ await self.bot.wait_until_ready()
+ log.trace("Started refreshing PEP URLs.")
+ self.last_refreshed_peps = datetime.now()
+
+ async with self.bot.http_session.get(
+ PEPS_LISTING_API_URL,
+ headers=GITHUB_API_HEADERS
+ ) as resp:
+ if resp.status != 200:
+ log.warning(f"Fetching PEP URLs from GitHub API failed with code {resp.status}")
+ return
+
+ listing = await resp.json()
+
+ log.trace("Got PEP URLs listing from GitHub API")
+
+ for file in listing:
+ name = file["name"]
+ if name.startswith("pep-") and name.endswith((".rst", ".txt")):
+ pep_number = name.replace("pep-", "").split(".")[0]
+ self.peps[int(pep_number)] = file["download_url"]
+
+ log.info("Successfully refreshed PEP URLs listing.")
+
+ @staticmethod
+ def get_pep_zero_embed() -> Embed:
+ """Get information embed about PEP 0."""
+ pep_embed = Embed(
+ title="**PEP 0 - Index of Python Enhancement Proposals (PEPs)**",
+ url="https://www.python.org/dev/peps/"
+ )
+ pep_embed.set_thumbnail(url=ICON_URL)
+ pep_embed.add_field(name="Status", value="Active")
+ pep_embed.add_field(name="Created", value="13-Jul-2000")
+ pep_embed.add_field(name="Type", value="Informational")
+
+ return pep_embed
+
+ async def validate_pep_number(self, pep_nr: int) -> Optional[Embed]:
+ """Validate is PEP number valid. When it isn't, return error embed, otherwise None."""
+ if (
+ pep_nr not in self.peps
+ and (self.last_refreshed_peps + timedelta(minutes=30)) <= datetime.now()
+ and len(str(pep_nr)) < 5
+ ):
+ await self.refresh_peps_urls()
+
+ if pep_nr not in self.peps:
+ log.trace(f"PEP {pep_nr} was not found")
+ return Embed(
+ title="PEP not found",
+ description=f"PEP {pep_nr} does not exist.",
+ colour=Colour.red()
+ )
+
+ return None
+
+ def generate_pep_embed(self, pep_header: Dict, pep_nr: int) -> Embed:
+ """Generate PEP embed based on PEP headers data."""
+ # Assemble the embed
+ pep_embed = Embed(
+ title=f"**PEP {pep_nr} - {pep_header['Title']}**",
+ description=f"[Link]({BASE_PEP_URL}{pep_nr:04})",
+ )
+
+ pep_embed.set_thumbnail(url=ICON_URL)
+
+ # Add the interesting information
+ fields_to_check = ("Status", "Python-Version", "Created", "Type")
+ for field in fields_to_check:
+ # Check for a PEP metadata field that is present but has an empty value
+ # embed field values can't contain an empty string
+ if pep_header.get(field, ""):
+ pep_embed.add_field(name=field, value=pep_header[field])
+
+ return pep_embed
+
+ @pep_cache(arg_offset=1)
+ async def get_pep_embed(self, pep_nr: int) -> Tuple[Embed, bool]:
+ """Fetch, generate and return PEP embed. Second item of return tuple show does getting success."""
+ response = await self.bot.http_session.get(self.peps[pep_nr])
+
+ if response.status == 200:
+ log.trace(f"PEP {pep_nr} found")
+ pep_content = await response.text()
+
+ # Taken from https://github.com/python/peps/blob/master/pep0/pep.py#L179
+ pep_header = HeaderParser().parse(StringIO(pep_content))
+ return self.generate_pep_embed(pep_header, pep_nr), True
+ else:
+ log.trace(
+ f"The user requested PEP {pep_nr}, but the response had an unexpected status code: {response.status}."
+ )
+ return Embed(
+ title="Unexpected error",
+ description="Unexpected HTTP error during PEP search. Please let us know.",
+ colour=Colour.red()
+ ), False
+
+ @command(name='pep', aliases=('get_pep', 'p'))
+ async def pep_command(self, ctx: Context, pep_number: int) -> None:
+ """Fetches information about a PEP and sends it to the channel."""
+ # Trigger typing in chat to show users that bot is responding
+ await ctx.trigger_typing()
+
+ # Handle PEP 0 directly because it's not in .rst or .txt so it can't be accessed like other PEPs.
+ if pep_number == 0:
+ pep_embed = self.get_pep_zero_embed()
+ success = True
+ else:
+ success = False
+ if not (pep_embed := await self.validate_pep_number(pep_number)):
+ pep_embed, success = await self.get_pep_embed(pep_number)
+
+ await ctx.send(embed=pep_embed)
+ if success:
+ log.trace(f"PEP {pep_number} getting and sending finished successfully. Increasing stat.")
+ self.bot.stats.incr(f"pep_fetches.{pep_number}")
+ else:
+ log.trace(f"Getting PEP {pep_number} failed. Error embed sent.")
+
+
+def setup(bot: Bot) -> None:
+ """Load the PEP cog."""
+ bot.add_cog(PythonEnhancementProposals(bot))
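
get_pep_embed is wrapped in pep_cache(arg_offset=1), so repeated lookups of the same PEP skip the HTTP round trip, with the offset of 1 dropping self from the cache key. bot.utils.cache.AsyncCache is not shown in this diff, so the following is only a hypothetical minimal sketch of such a decorator, not the bot's actual implementation:

    import functools
    from collections import OrderedDict


    class AsyncCache:
        """Hypothetical cache for coroutine results, keyed on positional arguments."""

        def __init__(self, max_size: int = 128):
            self._cache = OrderedDict()
            self._max_size = max_size

        def __call__(self, arg_offset: int = 0):
            def decorator(func):
                @functools.wraps(func)
                async def wrapper(*args):
                    key = args[arg_offset:]  # arg_offset=1 skips `self` on bound methods
                    if key not in self._cache:
                        if len(self._cache) >= self._max_size:
                            self._cache.popitem(last=False)  # evict the oldest entry
                        self._cache[key] = await func(*args)
                    return self._cache[key]
                return wrapper
            return decorator
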
diff --git a/bot/exts/info/reddit.py b/bot/exts/info/reddit.py
index bad4c504d..6790be762 100644
--- a/bot/exts/info/reddit.py
+++ b/bot/exts/info/reddit.py
@@ -45,7 +45,7 @@ class Reddit(Cog):
"""Stop the loop task and revoke the access token when the cog is unloaded."""
self.auto_poster_loop.cancel()
if self.access_token and self.access_token.expires_at > datetime.utcnow():
- asyncio.create_task(self.revoke_access_token())
+ self.bot.closing_tasks.append(asyncio.create_task(self.revoke_access_token()))
async def init_reddit_ready(self) -> None:
"""Sets the reddit webhook when the cog is loaded."""
diff --git a/bot/exts/info/tags.py b/bot/exts/info/tags.py
index 8f15f932b..da4154316 100644
--- a/bot/exts/info/tags.py
+++ b/bot/exts/info/tags.py
@@ -46,7 +46,7 @@ class Tags(Cog):
"embed": {
"description": file.read_text(encoding="utf8"),
},
- "restricted_to": "developers",
+ "restricted_to": None,
"location": f"/bot/{file}"
}
@@ -63,7 +63,7 @@ class Tags(Cog):
@staticmethod
def check_accessibility(user: Member, tag: dict) -> bool:
"""Check if user can access a tag."""
- return tag["restricted_to"].lower() in [role.name.lower() for role in user.roles]
+ return not tag["restricted_to"] or tag["restricted_to"].lower() in [role.name.lower() for role in user.roles]
@staticmethod
def _fuzzy_search(search: str, target: str) -> float:
diff --git a/bot/exts/moderation/infraction/_scheduler.py b/bot/exts/moderation/infraction/_scheduler.py
index c062ae7f8..242b2d30f 100644
--- a/bot/exts/moderation/infraction/_scheduler.py
+++ b/bot/exts/moderation/infraction/_scheduler.py
@@ -74,8 +74,21 @@ class InfractionScheduler:
return
# Allowing mod log since this is a passive action that should be logged.
- await apply_coro
- log.info(f"Re-applied {infraction['type']} to user {infraction['user']} upon rejoining.")
+ try:
+ await apply_coro
+ except discord.HTTPException as e:
+ # If the user left the guild right after joining, before the action completed, roles can't be applied.
+ if e.code == 10007 or e.status == 404:
+ log.info(
+ f"Can't reapply {infraction['type']} to user {infraction['user']} because user left the guild."
+ )
+ else:
+ log.exception(
+ f"Got unexpected HTTPException (HTTP {e.status}, Discord code {e.code})"
+ f"when awaiting {infraction['type']} coroutine for {infraction['user']}."
+ )
+ else:
+ log.info(f"Re-applied {infraction['type']} to user {infraction['user']} upon rejoining.")
async def apply_infraction(
self,
@@ -178,6 +191,10 @@ class InfractionScheduler:
log_msg = f"Failed to apply {' '.join(infr_type.split('_'))} infraction #{id_} to {user}"
if isinstance(e, discord.Forbidden):
log.warning(f"{log_msg}: bot lacks permissions.")
+ elif e.code == 10007 or e.status == 404:
+ log.info(
+ f"Can't apply {infraction['type']} to user {infraction['user']} because user left from guild."
+ )
else:
log.exception(log_msg)
failed = True
@@ -352,9 +369,16 @@ class InfractionScheduler:
log_text["Failure"] = "The bot lacks permissions to do this (role hierarchy?)"
log_content = mod_role.mention
except discord.HTTPException as e:
- log.exception(f"Failed to deactivate infraction #{id_} ({type_})")
- log_text["Failure"] = f"HTTPException with status {e.status} and code {e.code}."
- log_content = mod_role.mention
+ if e.code == 10007 or e.status == 404:
+ log.info(
+ f"Can't pardon {infraction['type']} for user {infraction['user']} because user left the guild."
+ )
+ log_text["Failure"] = "User left the guild."
+ log_content = mod_role.mention
+ else:
+ log.exception(f"Failed to deactivate infraction #{id_} ({type_})")
+ log_text["Failure"] = f"HTTPException with status {e.status} and code {e.code}."
+ log_content = mod_role.mention
# Check if the user is currently being watched by Big Brother.
try:
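Each of the scheduler changes above special-cases the "member is gone" failure mode. A small helper illustrating the same check (the function name is made up for this sketch; 10007 is Discord's Unknown Member error code):

import discord

def member_left_guild(error: discord.HTTPException) -> bool:
    """Return True if the HTTP error means the target member is no longer in the guild."""
    # Discord reports a missing member either as error code 10007 or as a plain 404.
    return error.code == 10007 or error.status == 404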
diff --git a/bot/exts/moderation/silence.py b/bot/exts/moderation/silence.py
index e6712b3b6..2a7ca932e 100644
--- a/bot/exts/moderation/silence.py
+++ b/bot/exts/moderation/silence.py
@@ -72,7 +72,7 @@ class SilenceNotifier(tasks.Loop):
class Silence(commands.Cog):
- """Commands for stopping channel messages for `verified` role in a channel."""
+ """Commands for stopping channel messages for `everyone` role in a channel."""
# Maps muted channel IDs to their previous overwrites for send_message and add_reactions.
# Overwrites are stored as JSON.
@@ -93,7 +93,7 @@ class Silence(commands.Cog):
await self.bot.wait_until_guild_available()
guild = self.bot.get_guild(Guild.id)
- self._verified_role = guild.get_role(Roles.verified)
+ self._everyone_role = guild.default_role
self._mod_alerts_channel = self.bot.get_channel(Channels.mod_alerts)
self.notifier = SilenceNotifier(self.bot.get_channel(Channels.mod_log))
await self._reschedule()
@@ -142,7 +142,7 @@ class Silence(commands.Cog):
async def _unsilence_wrapper(self, channel: TextChannel) -> None:
"""Unsilence `channel` and send a success/failure message."""
if not await self._unsilence(channel):
- overwrite = channel.overwrites_for(self._verified_role)
+ overwrite = channel.overwrites_for(self._everyone_role)
if overwrite.send_messages is False or overwrite.add_reactions is False:
await channel.send(MSG_UNSILENCE_MANUAL)
else:
@@ -152,14 +152,14 @@ class Silence(commands.Cog):
async def _set_silence_overwrites(self, channel: TextChannel) -> bool:
"""Set silence permission overwrites for `channel` and return True if successful."""
- overwrite = channel.overwrites_for(self._verified_role)
+ overwrite = channel.overwrites_for(self._everyone_role)
prev_overwrites = dict(send_messages=overwrite.send_messages, add_reactions=overwrite.add_reactions)
if channel.id in self.scheduler or all(val is False for val in prev_overwrites.values()):
return False
overwrite.update(send_messages=False, add_reactions=False)
- await channel.set_permissions(self._verified_role, overwrite=overwrite)
+ await channel.set_permissions(self._everyone_role, overwrite=overwrite)
await self.previous_overwrites.set(channel.id, json.dumps(prev_overwrites))
return True
@@ -188,14 +188,14 @@ class Silence(commands.Cog):
log.info(f"Tried to unsilence channel #{channel} ({channel.id}) but the channel was not silenced.")
return False
- overwrite = channel.overwrites_for(self._verified_role)
+ overwrite = channel.overwrites_for(self._everyone_role)
if prev_overwrites is None:
log.info(f"Missing previous overwrites for #{channel} ({channel.id}); defaulting to None.")
overwrite.update(send_messages=None, add_reactions=None)
else:
overwrite.update(**json.loads(prev_overwrites))
- await channel.set_permissions(self._verified_role, overwrite=overwrite)
+ await channel.set_permissions(self._everyone_role, overwrite=overwrite)
log.info(f"Unsilenced channel #{channel} ({channel.id}).")
self.scheduler.cancel(channel.id)
@@ -207,7 +207,7 @@ class Silence(commands.Cog):
await self._mod_alerts_channel.send(
f"<@&{Roles.admins}> Restored overwrites with default values after unsilencing "
f"{channel.mention}. Please check that the `Send Messages` and `Add Reactions` "
- f"overwrites for {self._verified_role.mention} are at their desired values."
+ f"overwrites for {self._everyone_role.mention} are at their desired values."
)
return True
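Silence now edits the @everyone overwrite on the channel directly. A minimal sketch of the silencing half of that round-trip, assuming a discord.py TextChannel (persistence of the previous overwrites, e.g. in Redis, is left out):

import json

async def silence_channel(channel) -> str:
    """Deny send_messages/add_reactions for @everyone and return the previous values as JSON."""
    everyone = channel.guild.default_role
    overwrite = channel.overwrites_for(everyone)
    previous = {"send_messages": overwrite.send_messages, "add_reactions": overwrite.add_reactions}
    overwrite.update(send_messages=False, add_reactions=False)
    await channel.set_permissions(everyone, overwrite=overwrite)
    return json.dumps(previous)  # Store this so the unsilence can restore the old overwrites.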
diff --git a/bot/exts/moderation/verification.py b/bot/exts/moderation/verification.py
index c42c6588f..2a24c8ec6 100644
--- a/bot/exts/moderation/verification.py
+++ b/bot/exts/moderation/verification.py
@@ -1,27 +1,18 @@
-import asyncio
import logging
import typing as t
-from contextlib import suppress
-from datetime import datetime, timedelta
import discord
-from async_rediscache import RedisCache
-from discord.ext import tasks
-from discord.ext.commands import Cog, Context, command, group, has_any_role
-from discord.utils import snowflake_time
+from discord.ext.commands import Cog, Context, command, has_any_role
from bot import constants
-from bot.api import ResponseCodeError
from bot.bot import Bot
-from bot.decorators import has_no_roles, in_whitelist
-from bot.exts.moderation.modlog import ModLog
-from bot.utils.checks import InWhitelistCheckFailure, has_no_roles_check
-from bot.utils.messages import format_user
+from bot.decorators import in_whitelist
+from bot.utils.checks import InWhitelistCheckFailure
log = logging.getLogger(__name__)
# Sent via DMs once user joins the guild
-ON_JOIN_MESSAGE = f"""
+ON_JOIN_MESSAGE = """
Welcome to Python Discord!
To show you what kind of community we are, we've created this video:
@@ -29,33 +20,10 @@ https://youtu.be/ZH26PuX3re0
As a new user, you have read-only access to a few select channels to give you a taste of what our server is like. \
In order to see the rest of the channels and to send messages, you first have to accept our rules.
-
-Please visit <#{constants.Channels.verification}> to get started. Thank you!
"""
-# Sent via DMs once user verifies
VERIFIED_MESSAGE = f"""
-Thanks for verifying yourself!
-
-For your records, these are the documents you accepted:
-
-`1)` Our rules, here: <https://pythondiscord.com/pages/rules>
-`2)` Our privacy policy, here: <https://pythondiscord.com/pages/privacy> - you can find information on how to have \
-your information removed here as well.
-
-Feel free to review them at any point!
-
-Additionally, if you'd like to receive notifications for the announcements \
-we post in <#{constants.Channels.announcements}>
-from time to time, you can send `!subscribe` to <#{constants.Channels.bot_commands}> at any time \
-to assign yourself the **Announcements** role. We'll mention this role every time we make an announcement.
-
-If you'd like to unsubscribe from the announcement notifications, simply send `!unsubscribe` to \
-<#{constants.Channels.bot_commands}>.
-"""
-
-ALTERNATE_VERIFIED_MESSAGE = f"""
-Thanks for accepting our rules!
+You are now verified!
You can find a copy of our rules for reference at <https://pythondiscord.com/pages/rules>.
@@ -71,61 +39,6 @@ To introduce you to our community, we've made the following video:
https://youtu.be/ZH26PuX3re0
"""
-# Sent via DMs to users kicked for failing to verify
-KICKED_MESSAGE = f"""
-Hi! You have been automatically kicked from Python Discord as you have failed to accept our rules \
-within `{constants.Verification.kicked_after}` days. If this was an accident, please feel free to join us again!
-
-{constants.Guild.invite}
-"""
-
-# Sent periodically in the verification channel
-REMINDER_MESSAGE = f"""
-<@&{constants.Roles.unverified}>
-
-Welcome to Python Discord! Please read the documents mentioned above and type `!accept` to gain permissions \
-to send messages in the community!
-
-You will be kicked if you don't verify within `{constants.Verification.kicked_after}` days.
-""".strip()
-
-# An async function taking a Member param
-Request = t.Callable[[discord.Member], t.Awaitable]
-
-
-class StopExecution(Exception):
- """Signals that a task should halt immediately & alert admins."""
-
- def __init__(self, reason: discord.HTTPException) -> None:
- super().__init__()
- self.reason = reason
-
-
-class Limit(t.NamedTuple):
- """Composition over config for throttling requests."""
-
- batch_size: int # Amount of requests after which to pause
- sleep_secs: int # Sleep this many seconds after each batch
-
-
-def mention_role(role_id: int) -> discord.AllowedMentions:
- """Construct an allowed mentions instance that allows pinging `role_id`."""
- return discord.AllowedMentions(roles=[discord.Object(role_id)])
-
-
-def is_verified(member: discord.Member) -> bool:
- """
- Check whether `member` is considered verified.
-
- Members are considered verified if they have at least 1 role other than
- the default role (@everyone) and the @Unverified role.
- """
- unverified_roles = {
- member.guild.get_role(constants.Roles.unverified),
- member.guild.default_role,
- }
- return len(set(member.roles) - unverified_roles) > 0
-
async def safe_dm(coro: t.Coroutine) -> None:
"""
@@ -150,411 +63,16 @@ class Verification(Cog):
"""
User verification and role management.
- There are two internal tasks in this cog:
-
- * `update_unverified_members`
- * Unverified members are given the @Unverified role after configured `unverified_after` days
- * Unverified members are kicked after configured `kicked_after` days
- * `ping_unverified`
- * Periodically ping the @Unverified role in the verification channel
-
Statistics are collected in the 'verification.' namespace.
- Moderators+ can use the `verification` command group to start or stop both internal
- tasks, if necessary. Settings are persisted in Redis across sessions.
-
- Additionally, this cog offers the !accept, !subscribe and !unsubscribe commands,
- and keeps the verification channel clean by deleting messages.
+ Additionally, this cog offers the !subscribe and !unsubscribe commands.
"""
- # Persist task settings & last sent `REMINDER_MESSAGE` id
- # RedisCache[
- # "tasks_running": int (0 or 1),
- # "last_reminder": int (discord.Message.id),
- # ]
- task_cache = RedisCache()
-
- # Create a cache for storing recipients of the alternate welcome DM.
- member_gating_cache = RedisCache()
-
def __init__(self, bot: Bot) -> None:
"""Start internal tasks."""
self.bot = bot
- self.bot.loop.create_task(self._maybe_start_tasks())
-
- def cog_unload(self) -> None:
- """
- Cancel internal tasks.
-
- This is necessary, as tasks are not automatically cancelled on cog unload.
- """
- self._stop_tasks(gracefully=False)
-
- @property
- def mod_log(self) -> ModLog:
- """Get currently loaded ModLog cog instance."""
- return self.bot.get_cog("ModLog")
-
- async def _maybe_start_tasks(self) -> None:
- """
- Poll Redis to check whether internal tasks should start.
-
- Redis must be interfaced with from an async function.
- """
- log.trace("Checking whether background tasks should begin")
- setting: t.Optional[int] = await self.task_cache.get("tasks_running") # This can be None if never set
-
- if setting:
- log.trace("Background tasks will be started")
- self.update_unverified_members.start()
- self.ping_unverified.start()
-
- def _stop_tasks(self, *, gracefully: bool) -> None:
- """
- Stop the update users & ping @Unverified tasks.
-
- If `gracefully` is True, the tasks will be able to finish their current iteration.
- Otherwise, they are cancelled immediately.
- """
- log.info(f"Stopping internal tasks ({gracefully=})")
- if gracefully:
- self.update_unverified_members.stop()
- self.ping_unverified.stop()
- else:
- self.update_unverified_members.cancel()
- self.ping_unverified.cancel()
-
- # region: automatically update unverified users
-
- async def _verify_kick(self, n_members: int) -> bool:
- """
- Determine whether `n_members` is a reasonable amount of members to kick.
-
- First, `n_members` is checked against the size of the PyDis guild. If `n_members` are
- more than the configured `kick_confirmation_threshold` of the guild, the operation
- must be confirmed by staff in #core-dev. Otherwise, the operation is seen as safe.
- """
- log.debug(f"Checking whether {n_members} members are safe to kick")
-
- await self.bot.wait_until_guild_available() # Ensure cache is populated before we grab the guild
- pydis = self.bot.get_guild(constants.Guild.id)
-
- percentage = n_members / len(pydis.members)
- if percentage < constants.Verification.kick_confirmation_threshold:
- log.debug(f"Kicking {percentage:.2%} of the guild's population is seen as safe")
- return True
-
- # Since `n_members` is a suspiciously large number, we will ask for confirmation
- log.debug("Amount of users is too large, requesting staff confirmation")
-
- core_dev_channel = pydis.get_channel(constants.Channels.dev_core)
- core_dev_ping = f"<@&{constants.Roles.core_developers}>"
-
- confirmation_msg = await core_dev_channel.send(
- f"{core_dev_ping} Verification determined that `{n_members}` members should be kicked as they haven't "
- f"verified in `{constants.Verification.kicked_after}` days. This is `{percentage:.2%}` of the guild's "
- f"population. Proceed?",
- allowed_mentions=mention_role(constants.Roles.core_developers),
- )
-
- options = (constants.Emojis.incident_actioned, constants.Emojis.incident_unactioned)
- for option in options:
- await confirmation_msg.add_reaction(option)
-
- core_dev_ids = [member.id for member in pydis.get_role(constants.Roles.core_developers).members]
-
- def check(reaction: discord.Reaction, user: discord.User) -> bool:
- """Check whether `reaction` is a valid reaction to `confirmation_msg`."""
- return (
- reaction.message.id == confirmation_msg.id # Reacted to `confirmation_msg`
- and str(reaction.emoji) in options # With one of `options`
- and user.id in core_dev_ids # By a core developer
- )
-
- timeout = 60 * 5 # Seconds, i.e. 5 minutes
- try:
- choice, _ = await self.bot.wait_for("reaction_add", check=check, timeout=timeout)
- except asyncio.TimeoutError:
- log.debug("Staff prompt not answered, aborting operation")
- return False
- finally:
- with suppress(discord.HTTPException):
- await confirmation_msg.clear_reactions()
-
- result = str(choice) == constants.Emojis.incident_actioned
- log.debug(f"Received answer: {choice}, result: {result}")
-
- # Edit the prompt message to reflect the final choice
- if result is True:
- result_msg = f":ok_hand: {core_dev_ping} Request to kick `{n_members}` members was authorized!"
- else:
- result_msg = f":warning: {core_dev_ping} Request to kick `{n_members}` members was denied!"
-
- with suppress(discord.HTTPException):
- await confirmation_msg.edit(content=result_msg)
-
- return result
-
- async def _alert_admins(self, exception: discord.HTTPException) -> None:
- """
- Ping @Admins with information about `exception`.
-
- This is used when a critical `exception` caused a verification task to abort.
- """
- await self.bot.wait_until_guild_available()
- log.info(f"Sending admin alert regarding exception: {exception}")
-
- admins_channel = self.bot.get_guild(constants.Guild.id).get_channel(constants.Channels.admins)
- ping = f"<@&{constants.Roles.admins}>"
-
- await admins_channel.send(
- f"{ping} Aborted updating unverified users due to the following exception:\n"
- f"```{exception}```\n"
- f"Internal tasks will be stopped.",
- allowed_mentions=mention_role(constants.Roles.admins),
- )
-
- async def _send_requests(self, members: t.Collection[discord.Member], request: Request, limit: Limit) -> int:
- """
- Pass `members` one by one to `request` handling Discord exceptions.
-
- This coroutine serves as a generic `request` executor for kicking members and adding
- roles, as it allows us to define the error handling logic in one place only.
-
- Any `request` has the ability to completely abort the execution by raising `StopExecution`.
- In such a case, the @Admins will be alerted of the reason attribute.
-
- To avoid rate-limits, pass a `limit` configuring the batch size and the amount of seconds
- to sleep between batches.
-
- Returns the amount of successful requests. Failed requests are logged at info level.
- """
- log.trace(f"Sending {len(members)} requests")
- n_success, bad_statuses = 0, set()
-
- for progress, member in enumerate(members, start=1):
- if is_verified(member): # Member could have verified in the meantime
- continue
- try:
- await request(member)
- except StopExecution as stop_execution:
- await self._alert_admins(stop_execution.reason)
- await self.task_cache.set("tasks_running", 0)
- self._stop_tasks(gracefully=True) # Gracefully finish current iteration, then stop
- break
- except discord.HTTPException as http_exc:
- bad_statuses.add(http_exc.status)
- else:
- n_success += 1
-
- if progress % limit.batch_size == 0:
- log.trace(f"Processed {progress} requests, pausing for {limit.sleep_secs} seconds")
- await asyncio.sleep(limit.sleep_secs)
-
- if bad_statuses:
- log.info(f"Failed to send {len(members) - n_success} requests due to following statuses: {bad_statuses}")
-
- return n_success
-
- async def _add_kick_note(self, member: discord.Member) -> None:
- """
- Post a note regarding `member` being kicked to site.
-
- Allows keeping track of kicked members for auditing purposes.
- """
- payload = {
- "active": False,
- "actor": self.bot.user.id, # Bot actions this autonomously
- "expires_at": None,
- "hidden": True,
- "reason": "Verification kick",
- "type": "note",
- "user": member.id,
- }
-
- log.trace(f"Posting kick note for member {member} ({member.id})")
- try:
- await self.bot.api_client.post("bot/infractions", json=payload)
- except ResponseCodeError as api_exc:
- log.warning("Failed to post kick note", exc_info=api_exc)
-
- async def _kick_members(self, members: t.Collection[discord.Member]) -> int:
- """
- Kick `members` from the PyDis guild.
-
- Due to strict ratelimits on sending messages (120 requests / 60 secs), we sleep for a second
- after each 2 requests to allow breathing room for other features.
-
- Note that this is a potentially destructive operation. Returns the amount of successful requests.
- """
- log.info(f"Kicking {len(members)} members (not verified after {constants.Verification.kicked_after} days)")
-
- async def kick_request(member: discord.Member) -> None:
- """Send `KICKED_MESSAGE` to `member` and kick them from the guild."""
- try:
- await safe_dm(member.send(KICKED_MESSAGE)) # Suppress disabled DMs
- except discord.HTTPException as suspicious_exception:
- raise StopExecution(reason=suspicious_exception)
- await member.kick(reason=f"User has not verified in {constants.Verification.kicked_after} days")
- await self._add_kick_note(member)
-
- n_kicked = await self._send_requests(members, kick_request, Limit(batch_size=2, sleep_secs=1))
- self.bot.stats.incr("verification.kicked", count=n_kicked)
-
- return n_kicked
-
- async def _give_role(self, members: t.Collection[discord.Member], role: discord.Role) -> int:
- """
- Give `role` to all `members`.
-
- We pause for a second after batches of 25 requests to ensure ratelimits aren't exceeded.
+ self.pending_members = set()
- Returns the amount of successful requests.
- """
- log.info(
- f"Assigning {role} role to {len(members)} members (not verified "
- f"after {constants.Verification.unverified_after} days)"
- )
-
- async def role_request(member: discord.Member) -> None:
- """Add `role` to `member`."""
- await member.add_roles(role, reason=f"Not verified after {constants.Verification.unverified_after} days")
-
- return await self._send_requests(members, role_request, Limit(batch_size=25, sleep_secs=1))
-
- async def _check_members(self) -> t.Tuple[t.Set[discord.Member], t.Set[discord.Member]]:
- """
- Check in on the verification status of PyDis members.
-
- This coroutine finds two sets of users:
- * Not verified after configured `unverified_after` days, should be given the @Unverified role
- * Not verified after configured `kicked_after` days, should be kicked from the guild
-
- These sets are always disjoint, i.e. share no common members.
- """
- await self.bot.wait_until_guild_available() # Ensure cache is ready
- pydis = self.bot.get_guild(constants.Guild.id)
-
- unverified = pydis.get_role(constants.Roles.unverified)
- current_dt = datetime.utcnow() # Discord timestamps are UTC
-
- # Users to be given the @Unverified role, and those to be kicked, these should be entirely disjoint
- for_role, for_kick = set(), set()
-
- log.debug("Checking verification status of guild members")
- for member in pydis.members:
-
- # Skip verified members, bots, and members for which we do not know their join date,
- # this should be extremely rare but docs mention that it can happen
- if is_verified(member) or member.bot or member.joined_at is None:
- continue
-
- # At this point, we know that `member` is an unverified user, and we will decide what
- # to do with them based on time passed since their join date
- since_join = current_dt - member.joined_at
-
- if since_join > timedelta(days=constants.Verification.kicked_after):
- for_kick.add(member) # User should be removed from the guild
-
- elif (
- since_join > timedelta(days=constants.Verification.unverified_after)
- and unverified not in member.roles
- ):
- for_role.add(member) # User should be given the @Unverified role
-
- log.debug(f"Found {len(for_role)} users for {unverified} role, {len(for_kick)} users to be kicked")
- return for_role, for_kick
-
- @tasks.loop(minutes=30)
- async def update_unverified_members(self) -> None:
- """
- Periodically call `_check_members` and update unverified members accordingly.
-
- After each run, a summary will be sent to the modlog channel. If a suspiciously high
- amount of members to be kicked is found, the operation is guarded by `_verify_kick`.
- """
- log.info("Updating unverified guild members")
-
- await self.bot.wait_until_guild_available()
- unverified = self.bot.get_guild(constants.Guild.id).get_role(constants.Roles.unverified)
-
- for_role, for_kick = await self._check_members()
-
- if not for_role:
- role_report = f"Found no users to be assigned the {unverified.mention} role."
- else:
- n_roles = await self._give_role(for_role, unverified)
- role_report = f"Assigned {unverified.mention} role to `{n_roles}`/`{len(for_role)}` members."
-
- if not for_kick:
- kick_report = "Found no users to be kicked."
- elif not await self._verify_kick(len(for_kick)):
- kick_report = f"Not authorized to kick `{len(for_kick)}` members."
- else:
- n_kicks = await self._kick_members(for_kick)
- kick_report = f"Kicked `{n_kicks}`/`{len(for_kick)}` members from the guild."
-
- await self.mod_log.send_log_message(
- icon_url=self.bot.user.avatar_url,
- colour=discord.Colour.blurple(),
- title="Verification system",
- text=f"{kick_report}\n{role_report}",
- )
-
- # endregion
- # region: periodically ping @Unverified
-
- @tasks.loop(hours=constants.Verification.reminder_frequency)
- async def ping_unverified(self) -> None:
- """
- Delete latest `REMINDER_MESSAGE` and send it again.
-
- This utilizes RedisCache to persist the latest reminder message id.
- """
- await self.bot.wait_until_guild_available()
- verification = self.bot.get_guild(constants.Guild.id).get_channel(constants.Channels.verification)
-
- last_reminder: t.Optional[int] = await self.task_cache.get("last_reminder")
-
- if last_reminder is not None:
- log.trace(f"Found verification reminder message in cache, deleting: {last_reminder}")
-
- with suppress(discord.HTTPException): # If something goes wrong, just ignore it
- await self.bot.http.delete_message(verification.id, last_reminder)
-
- log.trace("Sending verification reminder")
- new_reminder = await verification.send(
- REMINDER_MESSAGE, allowed_mentions=mention_role(constants.Roles.unverified),
- )
-
- await self.task_cache.set("last_reminder", new_reminder.id)
-
- @ping_unverified.before_loop
- async def _before_first_ping(self) -> None:
- """
- Sleep until `REMINDER_MESSAGE` should be sent again.
-
- If latest reminder is not cached, exit instantly. Otherwise, wait wait until the
- configured `reminder_frequency` has passed.
- """
- last_reminder: t.Optional[int] = await self.task_cache.get("last_reminder")
-
- if last_reminder is None:
- log.trace("Latest verification reminder message not cached, task will not wait")
- return
-
- # Convert cached message id into a timestamp
- time_since = datetime.utcnow() - snowflake_time(last_reminder)
- log.trace(f"Time since latest verification reminder: {time_since}")
-
- to_sleep = timedelta(hours=constants.Verification.reminder_frequency) - time_since
- log.trace(f"Time to sleep until next ping: {to_sleep}")
-
- # Delta can be negative if `reminder_frequency` has already passed
- secs = max(to_sleep.total_seconds(), 0)
- await asyncio.sleep(secs)
-
- # endregion
# region: listeners
@Cog.listener()
@@ -565,22 +83,11 @@ class Verification(Cog):
raw_member = await self.bot.http.get_member(member.guild.id, member.id)
- # If the user has the is_pending flag set, they will be using the alternate
+ # If the user has the pending flag set, they will be using the alternate
# gate and will not need a welcome DM with verification instructions.
# We will send them an alternate DM once they verify with the welcome
- # video.
- if raw_member.get("is_pending"):
- await self.member_gating_cache.set(member.id, True)
-
- # TODO: Temporary, remove soon after asking joe.
- await self.mod_log.send_log_message(
- icon_url=self.bot.user.avatar_url,
- colour=discord.Colour.blurple(),
- title="New native gated user",
- channel_id=constants.Channels.user_log,
- text=f"<@{member.id}> ({member.id})",
- )
-
+ # video when they pass the gate.
+ if raw_member.get("pending"):
return
log.trace(f"Sending on join message to new member: {member.id}")
@@ -592,193 +99,18 @@ class Verification(Cog):
@Cog.listener()
async def on_member_update(self, before: discord.Member, after: discord.Member) -> None:
"""Check if we need to send a verification DM to a gated user."""
- before_roles = [role.id for role in before.roles]
- after_roles = [role.id for role in after.roles]
-
- if constants.Roles.verified not in before_roles and constants.Roles.verified in after_roles:
- if await self.member_gating_cache.pop(after.id):
- try:
- # If the member has not received a DM from our !accept command
- # and has gone through the alternate gating system we should send
- # our alternate welcome DM which includes info such as our welcome
- # video.
- await safe_dm(after.send(ALTERNATE_VERIFIED_MESSAGE))
- except discord.HTTPException:
- log.exception("DM dispatch failed on unexpected error code")
-
- @Cog.listener()
- async def on_message(self, message: discord.Message) -> None:
- """Check new message event for messages to the checkpoint channel & process."""
- if message.channel.id != constants.Channels.verification:
- return # Only listen for #checkpoint messages
-
- if message.content == REMINDER_MESSAGE:
- return # Ignore bots own verification reminder
-
- if message.author.bot:
- # They're a bot, delete their message after the delay.
- await message.delete(delay=constants.Verification.bot_message_delete_delay)
- return
-
- # if a user mentions a role or guild member
- # alert the mods in mod-alerts channel
- if message.mentions or message.role_mentions:
- log.debug(
- f"{message.author} mentioned one or more users "
- f"and/or roles in {message.channel.name}"
- )
-
- embed_text = (
- f"{format_user(message.author)} sent a message in "
- f"{message.channel.mention} that contained user and/or role mentions."
- f"\n\n**Original message:**\n>>> {message.content}"
- )
-
- # Send pretty mod log embed to mod-alerts
- await self.mod_log.send_log_message(
- icon_url=constants.Icons.filtering,
- colour=discord.Colour(constants.Colours.soft_red),
- title=f"User/Role mentioned in {message.channel.name}",
- text=embed_text,
- thumbnail=message.author.avatar_url_as(static_format="png"),
- channel_id=constants.Channels.mod_alerts,
- )
-
- ctx: Context = await self.bot.get_context(message)
- if ctx.command is not None and ctx.command.name == "accept":
- return
-
- if any(r.id == constants.Roles.verified for r in ctx.author.roles):
- log.info(
- f"{ctx.author} posted '{ctx.message.content}' "
- "in the verification channel, but is already verified."
- )
- return
-
- log.debug(
- f"{ctx.author} posted '{ctx.message.content}' in the verification "
- "channel. We are providing instructions how to verify."
- )
- await ctx.send(
- f"{ctx.author.mention} Please type `!accept` to verify that you accept our rules, "
- f"and gain access to the rest of the server.",
- delete_after=20
- )
-
- log.trace(f"Deleting the message posted by {ctx.author}")
- with suppress(discord.NotFound):
- await ctx.message.delete()
-
- # endregion
- # region: task management commands
-
- @has_any_role(*constants.MODERATION_ROLES)
- @group(name="verification")
- async def verification_group(self, ctx: Context) -> None:
- """Manage internal verification tasks."""
- if ctx.invoked_subcommand is None:
- await ctx.send_help(ctx.command)
-
- @verification_group.command(name="status")
- async def status_cmd(self, ctx: Context) -> None:
- """Check whether verification tasks are running."""
- log.trace("Checking status of verification tasks")
-
- if self.update_unverified_members.is_running():
- update_status = f"{constants.Emojis.incident_actioned} Member update task is running."
- else:
- update_status = f"{constants.Emojis.incident_unactioned} Member update task is **not** running."
-
- mention = f"<@&{constants.Roles.unverified}>"
- if self.ping_unverified.is_running():
- ping_status = f"{constants.Emojis.incident_actioned} Ping {mention} task is running."
- else:
- ping_status = f"{constants.Emojis.incident_unactioned} Ping {mention} task is **not** running."
-
- embed = discord.Embed(
- title="Verification system",
- description=f"{update_status}\n{ping_status}",
- colour=discord.Colour.blurple(),
- )
- await ctx.send(embed=embed)
-
- @verification_group.command(name="start")
- async def start_cmd(self, ctx: Context) -> None:
- """Start verification tasks if they are not already running."""
- log.info("Starting verification tasks")
-
- if not self.update_unverified_members.is_running():
- self.update_unverified_members.start()
-
- if not self.ping_unverified.is_running():
- self.ping_unverified.start()
-
- await self.task_cache.set("tasks_running", 1)
-
- colour = discord.Colour.blurple()
- await ctx.send(embed=discord.Embed(title="Verification system", description="Done. :ok_hand:", colour=colour))
-
- @verification_group.command(name="stop", aliases=["kill"])
- async def stop_cmd(self, ctx: Context) -> None:
- """Stop verification tasks."""
- log.info("Stopping verification tasks")
-
- self._stop_tasks(gracefully=False)
- await self.task_cache.set("tasks_running", 0)
-
- colour = discord.Colour.blurple()
- await ctx.send(embed=discord.Embed(title="Verification system", description="Tasks canceled.", colour=colour))
+ if before.pending is True and after.pending is False:
+ try:
+ # The member has just passed Discord's membership screening
+ # (their pending flag went from True to False), so we send our
+ # welcome DM, which includes info such as our welcome video.
+ await safe_dm(after.send(VERIFIED_MESSAGE))
+ except discord.HTTPException:
+ log.exception("DM dispatch failed on unexpected error code")
# endregion
- # region: accept and subscribe commands
-
- def _bump_verified_stats(self, verified_member: discord.Member) -> None:
- """
- Increment verification stats for `verified_member`.
-
- Each member falls into one of the three categories:
- * Verified within 24 hours after joining
- * Does not have @Unverified role yet
- * Does have @Unverified role
-
- Stats for member kicking are handled separately.
- """
- if verified_member.joined_at is None: # Docs mention this can happen
- return
-
- if (datetime.utcnow() - verified_member.joined_at) < timedelta(hours=24):
- category = "accepted_on_day_one"
- elif constants.Roles.unverified not in [role.id for role in verified_member.roles]:
- category = "accepted_before_unverified"
- else:
- category = "accepted_after_unverified"
-
- log.trace(f"Bumping verification stats in category: {category}")
- self.bot.stats.incr(f"verification.{category}")
-
- @command(name='accept', aliases=('verified', 'accepted'), hidden=True)
- @has_no_roles(constants.Roles.verified)
- @in_whitelist(channels=(constants.Channels.verification,))
- async def accept_command(self, ctx: Context, *_) -> None: # We don't actually care about the args
- """Accept our rules and gain access to the rest of the server."""
- log.debug(f"{ctx.author} called !accept. Assigning the 'Developer' role.")
- await ctx.author.add_roles(discord.Object(constants.Roles.verified), reason="Accepted the rules")
-
- self._bump_verified_stats(ctx.author) # This checks for @Unverified so make sure it's not yet removed
-
- if constants.Roles.unverified in [role.id for role in ctx.author.roles]:
- log.debug(f"Removing Unverified role from: {ctx.author}")
- await ctx.author.remove_roles(discord.Object(constants.Roles.unverified))
-
- try:
- await safe_dm(ctx.author.send(VERIFIED_MESSAGE))
- except discord.HTTPException:
- log.exception(f"Sending welcome message failed for {ctx.author}.")
- finally:
- log.trace(f"Deleting accept message by {ctx.author}.")
- with suppress(discord.NotFound):
- self.mod_log.ignore(constants.Event.message_delete, ctx.message.id)
- await ctx.message.delete()
+ # region: subscribe commands
@command(name='subscribe')
@in_whitelist(channels=(constants.Channels.bot_commands,))
@@ -839,30 +171,23 @@ class Verification(Cog):
if isinstance(error, InWhitelistCheckFailure):
error.handled = True
- @staticmethod
- async def bot_check(ctx: Context) -> bool:
- """Block any command within the verification channel that is not !accept."""
- is_verification = ctx.channel.id == constants.Channels.verification
- if is_verification and await has_no_roles_check(ctx, *constants.MODERATION_ROLES):
- return ctx.command.name == "accept"
- else:
- return True
-
@command(name='verify')
@has_any_role(*constants.MODERATION_ROLES)
- async def apply_developer_role(self, ctx: Context, user: discord.Member) -> None:
- """Command for moderators to apply the Developer role to any user."""
+ async def perform_manual_verification(self, ctx: Context, user: discord.Member) -> None:
+ """Command for moderators to verify any user."""
log.trace(f'verify command called by {ctx.author} for {user.id}.')
- developer_role = self.bot.get_guild(constants.Guild.id).get_role(constants.Roles.verified)
- if developer_role in user.roles:
- log.trace(f'{user.id} is already a developer, aborting.')
- await ctx.send(f'{constants.Emojis.cross_mark} {user} is already a developer.')
+ if not user.pending:
+ log.trace(f'{user.id} is already verified, aborting.')
+ await ctx.send(f'{constants.Emojis.cross_mark} {user.mention} is already verified.')
return
- await user.add_roles(developer_role)
- log.trace(f'Developer role successfully applied to {user.id}')
- await ctx.send(f'{constants.Emojis.check_mark} Developer role applied to {user}.')
+ # Adding a role automatically verifies the user, so we add and remove the Announcements role.
+ temporary_role = self.bot.get_guild(constants.Guild.id).get_role(constants.Roles.announcements)
+ await user.add_roles(temporary_role)
+ await user.remove_roles(temporary_role)
+ log.trace(f'{user.id} manually verified.')
+ await ctx.send(f'{constants.Emojis.check_mark} {user.mention} is now verified.')
# endregion
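The rewritten cog leans on Discord's membership screening: members carry a pending flag, and assigning any role completes screening for them, which is what the new !verify command exploits. A hedged sketch of that trick in isolation (the role passed in is an arbitrary temporary role):

import discord

async def manually_verify(member: discord.Member, temporary_role: discord.Role) -> None:
    """Clear a member's membership-screening 'pending' state by briefly assigning a role."""
    if not member.pending:
        return  # Already verified; nothing to do.
    # Adding any role marks the member as having passed the gate,
    # so the role is removed again straight away.
    await member.add_roles(temporary_role)
    await member.remove_roles(temporary_role)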
diff --git a/bot/exts/moderation/voice_gate.py b/bot/exts/moderation/voice_gate.py
index 4d48d2c1b..0cbce6a51 100644
--- a/bot/exts/moderation/voice_gate.py
+++ b/bot/exts/moderation/voice_gate.py
@@ -5,7 +5,6 @@ from datetime import datetime, timedelta
import discord
from async_rediscache import RedisCache
-from dateutil import parser
from discord import Colour, Member, VoiceState
from discord.ext.commands import Cog, Context, command
@@ -29,7 +28,7 @@ FAILED_MESSAGE = (
)
MESSAGE_FIELD_MAP = {
- "verified_at": f"have been verified for less than {GateConf.minimum_days_verified} days",
+ "joined_at": f"have been on the server for less than {GateConf.minimum_days_member} days",
"voice_banned": "have an active voice ban infraction",
"total_messages": f"have sent less than {GateConf.minimum_messages} messages",
"activity_blocks": f"have been active for fewer than {GateConf.minimum_activity_blocks} ten-minute blocks",
@@ -149,14 +148,8 @@ class VoiceGate(Cog):
await ctx.author.send(embed=embed)
return
- # Pre-parse this for better code style
- if data["verified_at"] is not None:
- data["verified_at"] = parser.isoparse(data["verified_at"])
- else:
- data["verified_at"] = datetime.utcnow() - timedelta(days=3)
-
checks = {
- "verified_at": data["verified_at"] > datetime.utcnow() - timedelta(days=GateConf.minimum_days_verified),
+ "joined_at": ctx.author.joined_at > datetime.utcnow() - timedelta(days=GateConf.minimum_days_member),
"total_messages": data["total_messages"] < GateConf.minimum_messages,
"voice_banned": data["voice_banned"],
"activity_blocks": data["activity_blocks"] < GateConf.minimum_activity_blocks
diff --git a/bot/exts/moderation/watchchannels/_watchchannel.py b/bot/exts/moderation/watchchannels/_watchchannel.py
index 7118dee02..f9fc12dc3 100644
--- a/bot/exts/moderation/watchchannels/_watchchannel.py
+++ b/bot/exts/moderation/watchchannels/_watchchannel.py
@@ -342,11 +342,14 @@ class WatchChannel(metaclass=CogABCMeta):
"""Takes care of unloading the cog and canceling the consumption task."""
self.log.trace("Unloading the cog")
if self._consume_task and not self._consume_task.done():
+ def done_callback(task: asyncio.Task) -> None:
+ """Send exception when consuming task have been cancelled."""
+ try:
+ task.result()
+ except asyncio.CancelledError:
+ self.log.info(
+ f"The consume task of {type(self).__name__} was canceled. Messages may be lost."
+ )
+
+ self._consume_task.add_done_callback(done_callback)
self._consume_task.cancel()
- try:
- self._consume_task.result()
- except asyncio.CancelledError as e:
- self.log.exception(
- "The consume task was canceled. Messages may be lost.",
- exc_info=e
- )
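The replacement code works around the fact that task.result() called right after task.cancel() cannot observe the cancellation, since the task is only actually cancelled once the event loop runs again; a done callback fires at that later point. A standalone sketch of the pattern:

import asyncio
import logging

log = logging.getLogger(__name__)

def cancel_with_logging(task: asyncio.Task) -> None:
    """Cancel `task` and log once the cancellation has actually taken effect."""

    def done_callback(finished: asyncio.Task) -> None:
        try:
            finished.result()
        except asyncio.CancelledError:
            log.info("Consume task was cancelled. Messages may be lost.")

    task.add_done_callback(done_callback)
    task.cancel()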
diff --git a/bot/exts/moderation/watchchannels/talentpool.py b/bot/exts/moderation/watchchannels/talentpool.py
index a77dbe156..df2ce586e 100644
--- a/bot/exts/moderation/watchchannels/talentpool.py
+++ b/bot/exts/moderation/watchchannels/talentpool.py
@@ -64,12 +64,12 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):
@nomination_group.command(name='watch', aliases=('w', 'add', 'a'), root_aliases=("nominate",))
@has_any_role(*STAFF_ROLES)
- async def watch_command(self, ctx: Context, user: FetchedMember, *, reason: str) -> None:
+ async def watch_command(self, ctx: Context, user: FetchedMember, *, reason: str = '') -> None:
"""
Relay messages sent by the given `user` to the `#talent-pool` channel.
- A `reason` for adding the user to the talent pool is required and will be displayed
- in the header when relaying messages of this user to the channel.
+ A `reason` for adding the user to the talent pool is optional.
+ If given, it will be displayed in the header when relaying this user's messages to the channel.
"""
if user.bot:
await ctx.send(f":x: I'm sorry {ctx.author}, I'm afraid I can't do that. I only watch humans.")
@@ -202,7 +202,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):
f"{self.api_endpoint}/{nomination_id}",
json={field: reason}
)
-
+ await self.fetch_user_cache() # Update cache.
await ctx.send(f":white_check_mark: Updated the {field} of the nomination!")
@Cog.listener()
@@ -243,8 +243,8 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):
actor = guild.get_member(actor_id)
active = nomination_object["active"]
- log.debug(active)
- log.debug(type(nomination_object["inserted_at"]))
+
+ reason = nomination_object["reason"] or "*None*"
start_date = time.format_infraction(nomination_object["inserted_at"])
if active:
@@ -254,7 +254,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):
Status: **Active**
Date: {start_date}
Actor: {actor.mention if actor else actor_id}
- Reason: {nomination_object["reason"]}
+ Reason: {reason}
Nomination ID: `{nomination_object["id"]}`
===============
"""
@@ -267,7 +267,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):
Status: Inactive
Date: {start_date}
Actor: {actor.mention if actor else actor_id}
- Reason: {nomination_object["reason"]}
+ Reason: {reason}
End date: {end_date}
Unwatch reason: {nomination_object["end_reason"]}
diff --git a/bot/exts/utils/bot.py b/bot/exts/utils/bot.py
index 69d623581..a4c828f95 100644
--- a/bot/exts/utils/bot.py
+++ b/bot/exts/utils/bot.py
@@ -5,7 +5,7 @@ from discord import Embed, TextChannel
from discord.ext.commands import Cog, Context, command, group, has_any_role
from bot.bot import Bot
-from bot.constants import Guild, MODERATION_ROLES, Roles, URLs
+from bot.constants import Guild, MODERATION_ROLES, URLs
log = logging.getLogger(__name__)
@@ -17,13 +17,11 @@ class BotCog(Cog, name="Bot"):
self.bot = bot
@group(invoke_without_command=True, name="bot", hidden=True)
- @has_any_role(Roles.verified)
async def botinfo_group(self, ctx: Context) -> None:
"""Bot informational commands."""
await ctx.send_help(ctx.command)
@botinfo_group.command(name='about', aliases=('info',), hidden=True)
- @has_any_role(Roles.verified)
async def about_command(self, ctx: Context) -> None:
"""Get information about the bot."""
embed = Embed(
diff --git a/bot/exts/utils/clean.py b/bot/exts/utils/clean.py
index bf25cb4c2..8acaf9131 100644
--- a/bot/exts/utils/clean.py
+++ b/bot/exts/utils/clean.py
@@ -191,7 +191,7 @@ class Clean(Cog):
channel_id=Channels.mod_log,
)
- @group(invoke_without_command=True, name="clean", aliases=["purge"])
+ @group(invoke_without_command=True, name="clean", aliases=["clear", "purge"])
@has_any_role(*MODERATION_ROLES)
async def clean_group(self, ctx: Context) -> None:
"""Commands for cleaning messages in channels."""
diff --git a/bot/exts/utils/jams.py b/bot/exts/utils/jams.py
index 1c0988343..98fbcb303 100644
--- a/bot/exts/utils/jams.py
+++ b/bot/exts/utils/jams.py
@@ -93,10 +93,6 @@ class CodeJams(commands.Cog):
connect=True
),
guild.default_role: PermissionOverwrite(read_messages=False, connect=False),
- guild.get_role(Roles.verified): PermissionOverwrite(
- read_messages=False,
- connect=False
- )
}
# Rest of members should just have read_messages
diff --git a/bot/exts/utils/utils.py b/bot/exts/utils/utils.py
index 8e7e6ba36..eb92dfca7 100644
--- a/bot/exts/utils/utils.py
+++ b/bot/exts/utils/utils.py
@@ -2,10 +2,7 @@ import difflib
import logging
import re
import unicodedata
-from datetime import datetime, timedelta
-from email.parser import HeaderParser
-from io import StringIO
-from typing import Dict, Optional, Tuple, Union
+from typing import Tuple, Union
from discord import Colour, Embed, utils
from discord.ext.commands import BadArgument, Cog, Context, clean_content, command, has_any_role
@@ -17,7 +14,6 @@ from bot.converters import Snowflake
from bot.decorators import in_whitelist
from bot.pagination import LinePaginator
from bot.utils import messages
-from bot.utils.cache import AsyncCache
from bot.utils.time import time_since
log = logging.getLogger(__name__)
@@ -44,23 +40,12 @@ If the implementation is easy to explain, it may be a good idea.
Namespaces are one honking great idea -- let's do more of those!
"""
-ICON_URL = "https://www.python.org/static/opengraph-icon-200x200.png"
-
-pep_cache = AsyncCache()
-
class Utils(Cog):
"""A selection of utilities which don't have a clear category."""
- BASE_PEP_URL = "http://www.python.org/dev/peps/pep-"
- BASE_GITHUB_PEP_URL = "https://raw.githubusercontent.com/python/peps/master/pep-"
- PEPS_LISTING_API_URL = "https://api.github.com/repos/python/peps/contents?ref=master"
-
def __init__(self, bot: Bot):
self.bot = bot
- self.peps: Dict[int, str] = {}
- self.last_refreshed_peps: Optional[datetime] = None
- self.bot.loop.create_task(self.refresh_peps_urls())
@command()
@in_whitelist(channels=(Channels.bot_commands,), roles=STAFF_ROLES)
@@ -207,126 +192,6 @@ class Utils(Cog):
for reaction in options:
await message.add_reaction(reaction)
- # region: PEP
-
- async def refresh_peps_urls(self) -> None:
- """Refresh PEP URLs listing in every 3 hours."""
- # Wait until HTTP client is available
- await self.bot.wait_until_ready()
- log.trace("Started refreshing PEP URLs.")
-
- async with self.bot.http_session.get(self.PEPS_LISTING_API_URL) as resp:
- listing = await resp.json()
-
- log.trace("Got PEP URLs listing from GitHub API")
-
- for file in listing:
- name = file["name"]
- if name.startswith("pep-") and name.endswith((".rst", ".txt")):
- pep_number = name.replace("pep-", "").split(".")[0]
- self.peps[int(pep_number)] = file["download_url"]
-
- self.last_refreshed_peps = datetime.now()
- log.info("Successfully refreshed PEP URLs listing.")
-
- @command(name='pep', aliases=('get_pep', 'p'))
- async def pep_command(self, ctx: Context, pep_number: int) -> None:
- """Fetches information about a PEP and sends it to the channel."""
- # Trigger typing in chat to show users that bot is responding
- await ctx.trigger_typing()
-
- # Handle PEP 0 directly because it's not in .rst or .txt so it can't be accessed like other PEPs.
- if pep_number == 0:
- pep_embed = self.get_pep_zero_embed()
- success = True
- else:
- success = False
- if not (pep_embed := await self.validate_pep_number(pep_number)):
- pep_embed, success = await self.get_pep_embed(pep_number)
-
- await ctx.send(embed=pep_embed)
- if success:
- log.trace(f"PEP {pep_number} getting and sending finished successfully. Increasing stat.")
- self.bot.stats.incr(f"pep_fetches.{pep_number}")
- else:
- log.trace(f"Getting PEP {pep_number} failed. Error embed sent.")
-
- @staticmethod
- def get_pep_zero_embed() -> Embed:
- """Get information embed about PEP 0."""
- pep_embed = Embed(
- title="**PEP 0 - Index of Python Enhancement Proposals (PEPs)**",
- url="https://www.python.org/dev/peps/"
- )
- pep_embed.set_thumbnail(url=ICON_URL)
- pep_embed.add_field(name="Status", value="Active")
- pep_embed.add_field(name="Created", value="13-Jul-2000")
- pep_embed.add_field(name="Type", value="Informational")
-
- return pep_embed
-
- async def validate_pep_number(self, pep_nr: int) -> Optional[Embed]:
- """Validate is PEP number valid. When it isn't, return error embed, otherwise None."""
- if (
- pep_nr not in self.peps
- and (self.last_refreshed_peps + timedelta(minutes=30)) <= datetime.now()
- and len(str(pep_nr)) < 5
- ):
- await self.refresh_peps_urls()
-
- if pep_nr not in self.peps:
- log.trace(f"PEP {pep_nr} was not found")
- return Embed(
- title="PEP not found",
- description=f"PEP {pep_nr} does not exist.",
- colour=Colour.red()
- )
-
- return None
-
- def generate_pep_embed(self, pep_header: Dict, pep_nr: int) -> Embed:
- """Generate PEP embed based on PEP headers data."""
- # Assemble the embed
- pep_embed = Embed(
- title=f"**PEP {pep_nr} - {pep_header['Title']}**",
- description=f"[Link]({self.BASE_PEP_URL}{pep_nr:04})",
- )
-
- pep_embed.set_thumbnail(url=ICON_URL)
-
- # Add the interesting information
- fields_to_check = ("Status", "Python-Version", "Created", "Type")
- for field in fields_to_check:
- # Check for a PEP metadata field that is present but has an empty value
- # embed field values can't contain an empty string
- if pep_header.get(field, ""):
- pep_embed.add_field(name=field, value=pep_header[field])
-
- return pep_embed
-
- @pep_cache(arg_offset=1)
- async def get_pep_embed(self, pep_nr: int) -> Tuple[Embed, bool]:
- """Fetch, generate and return PEP embed. Second item of return tuple show does getting success."""
- response = await self.bot.http_session.get(self.peps[pep_nr])
-
- if response.status == 200:
- log.trace(f"PEP {pep_nr} found")
- pep_content = await response.text()
-
- # Taken from https://github.com/python/peps/blob/master/pep0/pep.py#L179
- pep_header = HeaderParser().parse(StringIO(pep_content))
- return self.generate_pep_embed(pep_header, pep_nr), True
- else:
- log.trace(
- f"The user requested PEP {pep_nr}, but the response had an unexpected status code: {response.status}."
- )
- return Embed(
- title="Unexpected error",
- description="Unexpected HTTP error during PEP search. Please let us know.",
- colour=Colour.red()
- ), False
- # endregion
-
def setup(bot: Bot) -> None:
"""Load the Utils cog."""
diff --git a/bot/log.py b/bot/log.py
index 13141de40..0935666d1 100644
--- a/bot/log.py
+++ b/bot/log.py
@@ -6,7 +6,6 @@ from pathlib import Path
import coloredlogs
import sentry_sdk
-from sentry_sdk.integrations.aiohttp import AioHttpIntegration
from sentry_sdk.integrations.logging import LoggingIntegration
from sentry_sdk.integrations.redis import RedisIntegration
@@ -67,9 +66,9 @@ def setup_sentry() -> None:
dsn=constants.Bot.sentry_dsn,
integrations=[
sentry_logging,
- AioHttpIntegration(),
RedisIntegration(),
- ]
+ ],
+ release=f"bot@{constants.GIT_SHA}"
)
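The Sentry release is now tagged with the git SHA that the build workflow passes into the image as a build argument. A rough sketch of the resulting init call, assuming the SHA reaches the process via an environment variable (the variable names here are assumptions, not the project's actual configuration):

import os

import sentry_sdk
from sentry_sdk.integrations.logging import LoggingIntegration
from sentry_sdk.integrations.redis import RedisIntegration

GIT_SHA = os.environ.get("GIT_SHA", "development")

sentry_sdk.init(
    dsn=os.environ.get("BOT_SENTRY_DSN"),
    integrations=[LoggingIntegration(), RedisIntegration()],
    release=f"bot@{GIT_SHA}",  # Matches the tag created by the Sentry release workflow above.
)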
diff --git a/bot/resources/elements.json b/bot/resources/elements.json
index 2dc9b6fd6..a3ac5b99f 100644
--- a/bot/resources/elements.json
+++ b/bot/resources/elements.json
@@ -32,7 +32,6 @@
"gallium",
"germanium",
"arsenic",
- "selenium",
"bromine",
"krypton",
"rubidium",
diff --git a/bot/resources/tags/codeblock.md b/bot/resources/tags/codeblock.md
index 8d48bdf06..ac64656e5 100644
--- a/bot/resources/tags/codeblock.md
+++ b/bot/resources/tags/codeblock.md
@@ -1,7 +1,7 @@
Here's how to format Python code on Discord:
-\```py
+\`\`\`py
print('Hello world!')
-\```
+\`\`\`
**These are backticks, not quotes.** Check [this](https://superuser.com/questions/254076/how-do-i-type-the-tick-and-backtick-characters-on-windows/254077#254077) out if you can't find the backtick key.
diff --git a/bot/rules/burst_shared.py b/bot/rules/burst_shared.py
index 0e66df69c..bbe9271b3 100644
--- a/bot/rules/burst_shared.py
+++ b/bot/rules/burst_shared.py
@@ -2,20 +2,11 @@ from typing import Dict, Iterable, List, Optional, Tuple
from discord import Member, Message
-from bot.constants import Channels
-
async def apply(
last_message: Message, recent_messages: List[Message], config: Dict[str, int]
) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]:
- """
- Detects repeated messages sent by multiple users.
-
- This filter never triggers in the verification channel.
- """
- if last_message.channel.id == Channels.verification:
- return
-
+ """Detects repeated messages sent by multiple users."""
total_recent = len(recent_messages)
if total_recent > config['max']:
diff --git a/config-default.yml b/config-default.yml
index 006743342..175460a31 100644
--- a/config-default.yml
+++ b/config-default.yml
@@ -173,7 +173,6 @@ guild:
# Special
bot_commands: &BOT_CMD 267659945086812160
esoteric: 470884583684964352
- verification: 352442727016693763
voice_gate: 764802555427029012
# Staff
@@ -244,8 +243,6 @@ guild:
python_community: &PY_COMMUNITY_ROLE 458226413825294336
sprinters: &SPRINTERS 758422482289426471
- unverified: 739794855945044069
- verified: 352427296948486144 # @Developers on PyDis
voice_verified: 764802720779337729
# Staff
@@ -323,6 +320,7 @@ filter:
keys:
site_api: !ENV "BOT_API_KEY"
+ github: !ENV "GITHUB_API_KEY"
urls:
@@ -488,7 +486,7 @@ redirect_output:
duck_pond:
- threshold: 4
+ threshold: 5
channel_blacklist:
- *ANNOUNCEMENTS
- *PYNEWS_CHANNEL
@@ -513,20 +511,8 @@ python_news:
webhook: *PYNEWS_WEBHOOK
-verification:
- unverified_after: 3 # Days after which non-Developers receive the @Unverified role
- kicked_after: 30 # Days after which non-Developers get kicked from the guild
- reminder_frequency: 28 # Hours between @Unverified pings
- bot_message_delete_delay: 10 # Seconds before deleting bots response in #verification
-
- # Number in range [0, 1] determining the percentage of unverified users that are safe
- # to be kicked from the guild in one batch, any larger amount will require staff confirmation,
- # set this to 0 to require explicit approval for batches of any size
- kick_confirmation_threshold: 0.01 # 1%
-
-
voice_gate:
- minimum_days_verified: 3 # How many days the user must have been verified for
+ minimum_days_member: 3 # How many days the user must have been a member for
minimum_messages: 50 # How many messages a user must have to be eligible for voice
bot_message_delete_delay: 10 # Seconds before deleting bot's response in Voice Gate
minimum_activity_blocks: 3 # Number of 10 minute blocks during which a user must have been active
diff --git a/tests/bot/exts/info/test_information.py b/tests/bot/exts/info/test_information.py
index daede54c5..d077be960 100644
--- a/tests/bot/exts/info/test_information.py
+++ b/tests/bot/exts/info/test_information.py
@@ -355,6 +355,7 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
self.assertEqual(
textwrap.dedent(f"""
Joined: {"1 year ago"}
+ Verified: {"True"}
Roles: &Moderators
""").strip(),
embed.fields[1].value
diff --git a/tests/bot/exts/moderation/test_silence.py b/tests/bot/exts/moderation/test_silence.py
index 104293d8e..fa5fc9e81 100644
--- a/tests/bot/exts/moderation/test_silence.py
+++ b/tests/bot/exts/moderation/test_silence.py
@@ -117,15 +117,6 @@ class SilenceCogTests(unittest.IsolatedAsyncioTestCase):
self.bot.get_guild.assert_called_once_with(Guild.id)
@autospec(silence, "SilenceNotifier", pass_mocks=False)
- async def test_async_init_got_role(self):
- """Got `Roles.verified` role from guild."""
- guild = self.bot.get_guild()
- guild.get_role.side_effect = lambda id_: Mock(id=id_)
-
- await self.cog._async_init()
- self.assertEqual(self.cog._verified_role.id, Roles.verified)
-
- @autospec(silence, "SilenceNotifier", pass_mocks=False)
async def test_async_init_got_channels(self):
"""Got channels from bot."""
self.bot.get_channel.side_effect = lambda id_: MockTextChannel(id=id_)
@@ -302,7 +293,7 @@ class SilenceTests(unittest.IsolatedAsyncioTestCase):
self.assertFalse(self.overwrite.send_messages)
self.assertFalse(self.overwrite.add_reactions)
self.channel.set_permissions.assert_awaited_once_with(
- self.cog._verified_role,
+ self.cog._everyone_role,
overwrite=self.overwrite
)
@@ -435,7 +426,7 @@ class UnsilenceTests(unittest.IsolatedAsyncioTestCase):
"""Channel's `send_message` and `add_reactions` overwrites were restored."""
await self.cog._unsilence(self.channel)
self.channel.set_permissions.assert_awaited_once_with(
- self.cog._verified_role,
+ self.cog._everyone_role,
overwrite=self.overwrite,
)
@@ -449,7 +440,7 @@ class UnsilenceTests(unittest.IsolatedAsyncioTestCase):
await self.cog._unsilence(self.channel)
self.channel.set_permissions.assert_awaited_once_with(
- self.cog._verified_role,
+ self.cog._everyone_role,
overwrite=self.overwrite,
)
diff --git a/tests/bot/exts/utils/test_jams.py b/tests/bot/exts/utils/test_jams.py
index 45e7b5b51..85d6a1173 100644
--- a/tests/bot/exts/utils/test_jams.py
+++ b/tests/bot/exts/utils/test_jams.py
@@ -118,11 +118,9 @@ class JamCreateTeamTests(unittest.IsolatedAsyncioTestCase):
self.assertTrue(overwrites[member].read_messages)
self.assertTrue(overwrites[member].connect)
- # Everyone and verified role overwrite
+ # Everyone role overwrite
self.assertFalse(overwrites[self.guild.default_role].read_messages)
self.assertFalse(overwrites[self.guild.default_role].connect)
- self.assertFalse(overwrites[self.guild.get_role(Roles.verified)].read_messages)
- self.assertFalse(overwrites[self.guild.get_role(Roles.verified)].connect)
async def test_team_channels_creation(self):
"""Should create new voice and text channel for team."""
diff --git a/tests/bot/test_api.py b/tests/bot/test_api.py
index 99e942813..76bcb481d 100644
--- a/tests/bot/test_api.py
+++ b/tests/bot/test_api.py
@@ -13,14 +13,6 @@ class APIClientTests(unittest.IsolatedAsyncioTestCase):
cls.error_api_response = MagicMock()
cls.error_api_response.status = 999
- def test_loop_is_not_running_by_default(self):
- """The event loop should not be running by default."""
- self.assertFalse(api.loop_is_running())
-
- async def test_loop_is_running_in_async_context(self):
- """The event loop should be running in an async context."""
- self.assertTrue(api.loop_is_running())
-
def test_response_code_error_default_initialization(self):
"""Test the default initialization of `ResponseCodeError` without `text` or `json`"""
error = api.ResponseCodeError(response=self.error_api_response)
diff --git a/tests/helpers.py b/tests/helpers.py
index 870f66197..496363ae3 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -230,7 +230,7 @@ class MockMember(CustomMockMixin, unittest.mock.Mock, ColourMixin, HashableMixin
spec_set = member_instance
def __init__(self, roles: Optional[Iterable[MockRole]] = None, **kwargs) -> None:
- default_kwargs = {'name': 'member', 'id': next(self.discord_id), 'bot': False}
+ default_kwargs = {'name': 'member', 'id': next(self.discord_id), 'bot': False, "pending": False}
super().__init__(**collections.ChainMap(kwargs, default_kwargs))
self.roles = [MockRole(name="@everyone", position=1, id=0)]
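MockMember now defaults pending to False via ChainMap, so existing tests are unaffected while new ones can opt in. A short sketch of that default-merging idiom in isolation (the helper below is illustrative only):

import collections

def build_member_kwargs(**kwargs) -> dict:
    """Merge caller kwargs over the helper's defaults; caller values win."""
    default_kwargs = {"name": "member", "id": 1, "bot": False, "pending": False}
    return dict(collections.ChainMap(kwargs, default_kwargs))

print(build_member_kwargs(pending=True)["pending"])  # True: caller override
print(build_member_kwargs()["bot"])                  # False: default applies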