| author | 2020-07-22 12:38:52 -0700 |
|---|---|
| committer | 2020-07-22 12:38:52 -0700 |
| commit | cd50c7f8f1ad4c68b2e941d636f4c411d1fb8a44 (patch) |
| tree | 199ff60aa57ff3c7eb9269621606afabd3193850 |
| parent | Jam Tests: space out lines for readability (diff) |
| parent | Use max_units for time since join in user command instead of precision (diff) |
Merge branch 'master' into jam-test
63 files changed, 3594 insertions, 869 deletions
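Among the changes merged below, `bot/cogs/clean.py` gains a `!clean message` command that deletes every message up to a given one. Because discord.py's `TextChannel.delete_messages` accepts at most 100 messages per call (the diff's own comments note this), the new code chunks the collected messages before deleting. The following is a minimal sketch of that batching pattern, not the repository's code; the helper name `delete_in_batches` is illustrative only.

```python
from typing import List

import discord


async def delete_in_batches(channel: discord.TextChannel, messages: List[discord.Message]) -> None:
    """Delete `messages` in chunks of 100, the bulk-delete limit enforced by discord.py."""
    # Step through the list 100 messages at a time; each slice stays within the API limit.
    for i in range(0, len(messages), 100):
        await channel.delete_messages(messages[i:i + 100])
```

This mirrors the loop added in `Clean._clean_messages`, where `channel.purge` is used when no target message is given and manual batching via `delete_messages` is used otherwise.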
| diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml new file mode 100644 index 000000000..8760b35ec --- /dev/null +++ b/.github/workflows/codeql-analysis.yml @@ -0,0 +1,32 @@ +name: "Code scanning - action" + +on: +  push: +  pull_request: +  schedule: +    - cron: '0 12 * * *' + +jobs: +  CodeQL-Build: + +    runs-on: ubuntu-latest + +    steps: +    - name: Checkout repository +      uses: actions/checkout@v2 +      with: +        fetch-depth: 2 + +    - run: git checkout HEAD^2 +      if: ${{ github.event_name == 'pull_request' }} + +    - name: Initialize CodeQL +      uses: github/codeql-action/init@v1 +      with: +        languages: python + +    - name: Autobuild +      uses: github/codeql-action/autobuild@v1 + +    - name: Perform CodeQL Analysis +      uses: github/codeql-action/analyze@v1 diff --git a/Dockerfile b/Dockerfile index 06a538b2a..0b1674e7a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -6,6 +6,11 @@ ENV PIP_NO_CACHE_DIR=false \      PIPENV_IGNORE_VIRTUALENVS=1 \      PIPENV_NOSPIN=1 +RUN apt-get -y update \ +    && apt-get install -y \ +        git \ +    && rm -rf /var/lib/apt/lists/* +  # Install pipenv  RUN pip install -U pipenv @@ -12,7 +12,7 @@ beautifulsoup4 = "~=4.9"  colorama = {version = "~=0.4.3",sys_platform = "== 'win32'"}  coloredlogs = "~=14.0"  deepdiff = "~=4.0" -discord.py = "~=1.3.2" +discord-py = {git = "https://github.com/Rapptz/discord.py.git",ref = "0bc15fa130b8f01fe2d67446a2184d474b0d0ba7"}  fakeredis = "~=1.4"  feedparser = "~=5.2"  fuzzywuzzy = "~=0.17" @@ -50,4 +50,5 @@ precommit = "pre-commit install"  build = "docker build -t pythondiscord/bot:latest -f Dockerfile ."  push = "docker push pythondiscord/bot:latest"  test = "coverage run -m unittest" +html = "coverage html"  report = "coverage report" diff --git a/Pipfile.lock b/Pipfile.lock index 0e591710c..4b9d092d4 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@  {      "_meta": {          "hash": { -            "sha256": "0297accc3d614d3da8080b89d56ef7fe489c28a0ada8102df396a604af7ee330" +            "sha256": "8a53baefbbd2a0f3fbaf831f028b23d257a5e28b5efa1260661d74604f4113b8"          },          "pipfile-spec": 6,          "requires": { @@ -63,6 +63,7 @@                  "sha256:41a9d4eb17db805f30ed172f3f609fe0c2b16657fb15b1b67df19d251dd93c0d",                  "sha256:7c19477a9450824cb79f9949fd238f4148e2c0dca67756a2868863c387209f04"              ], +            "markers": "python_version >= '3.6'",              "version": "==3.2.2"          },          "alabaster": { @@ -77,6 +78,7 @@                  "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f",                  "sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3"              ], +            "markers": "python_full_version >= '3.5.3'",              "version": "==3.0.1"          },          "attrs": { @@ -84,6 +86,7 @@                  "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c",                  "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"              ], +            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",              "version": "==19.3.0"          },          "babel": { @@ -91,6 +94,7 @@                  "sha256:1aac2ae2d0d8ea368fa90906567f5c08463d98ade155c0c4bfedd6a0f7160e38",                  "sha256:d670ea0b10f8b723672d3a6abeb87b565b244da220d76b4dba1b66269ec152d4"              ], +            "markers": "python_version >= '2.7' 
and python_version not in '3.0, 3.1, 3.2, 3.3'",              "version": "==2.8.0"          },          "beautifulsoup4": { @@ -104,10 +108,10 @@          },          "certifi": {              "hashes": [ -                "sha256:1d987a998c75633c40847cc966fcf5904906c920a7f17ef374f5aa4282abd304", -                "sha256:51fcb31174be6e6664c5f69e3e1691a2d72a1a12e90f872cbdb1567eb47b6519" +                "sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3", +                "sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41"              ], -            "version": "==2020.4.5.1" +            "version": "==2020.6.20"          },          "cffi": {              "hashes": [ @@ -154,7 +158,6 @@                  "sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff",                  "sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"              ], -            "index": "pypi",              "markers": "sys_platform == 'win32'",              "version": "==0.4.3"          }, @@ -174,26 +177,16 @@              "index": "pypi",              "version": "==4.3.2"          }, -        "discord": { -            "hashes": [ -                "sha256:9d4debb4a37845543bd4b92cb195bc53a302797333e768e70344222857ff1559", -                "sha256:ff6653655e342e7721dfb3f10421345fd852c2a33f2cca912b1c39b3778a9429" -            ], -            "index": "pypi", -            "version": "==1.0.1" -        }, -        "discord.py": { -            "hashes": [ -                "sha256:406871b06d86c3dc49fba63238519f28628dac946fef8a0e22988ff58ec05580", -                "sha256:ad00e34c72d2faa8db2157b651d05f3c415d7d05078e7e41dc9e8dc240051beb" -            ], -            "version": "==1.3.3" +        "discord-py": { +            "git": "https://github.com/Rapptz/discord.py.git", +            "ref": "0bc15fa130b8f01fe2d67446a2184d474b0d0ba7"          },          "docutils": {              "hashes": [                  "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af",                  "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"              ], +            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",              "version": "==0.16"          },          "fakeredis": { @@ -264,6 +257,7 @@                  "sha256:fa2dc05b87d97acc1c6ae63f3e0f39eae5246565232484b08db6bf2dc1580678",                  "sha256:fe7d6ce9f6a5fbe24f09d95ea93e9c7271abc4e1565da511e1449b107b4d7848"              ], +            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",              "version": "==1.0.1"          },          "humanfriendly": { @@ -271,20 +265,23 @@                  "sha256:bf52ec91244819c780341a3438d5d7b09f431d3f113a475147ac9b7b167a3d12",                  "sha256:e78960b31198511f45fd455534ae7645a6207d33e512d2e842c766d15d9c8080"              ], +            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",              "version": "==8.2"          },          "idna": {              "hashes": [ -                "sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb", -                "sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa" +                "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", +                "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"              ], -            
"version": "==2.9" +            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", +            "version": "==2.10"          },          "imagesize": {              "hashes": [                  "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1",                  "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"              ], +            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",              "version": "==1.2.0"          },          "jinja2": { @@ -292,6 +289,7 @@                  "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0",                  "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035"              ], +            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",              "version": "==2.11.2"          },          "lxml": { @@ -370,15 +368,16 @@                  "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7",                  "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"              ], +            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",              "version": "==1.1.1"          },          "more-itertools": {              "hashes": [ -                "sha256:558bb897a2232f5e4f8e2399089e35aecb746e1f9191b6584a151647e89267be", -                "sha256:7818f596b1e87be009031c7653d01acc46ed422e6656b394b0f765ce66ed4982" +                "sha256:68c70cc7167bdf5c7c9d8f6954a7837089c6a36bf565383919bb595efb8a17e5", +                "sha256:b78134b2063dd214000685165d81c154522c3ee0a1c0d4d113c80361c234c5a2"              ],              "index": "pypi", -            "version": "==8.3.0" +            "version": "==8.4.0"          },          "multidict": {              "hashes": [ @@ -400,19 +399,22 @@                  "sha256:fcfbb44c59af3f8ea984de67ec7c306f618a3ec771c2843804069917a8f2e255",                  "sha256:feed85993dbdb1dbc29102f50bca65bdc68f2c0c8d352468c25b54874f23c39d"              ], +            "markers": "python_version >= '3.5'",              "version": "==4.7.6"          },          "ordered-set": {              "hashes": [ -                "sha256:a31008c57f9c9776b12eb8841b1f61d1e4d70dfbbe8875ccfa2403c54af3d51b" +                "sha256:ba93b2df055bca202116ec44b9bead3df33ea63a7d5827ff8e16738b97f33a95"              ], -            "version": "==4.0.1" +            "markers": "python_version >= '3.5'", +            "version": "==4.0.2"          },          "packaging": {              "hashes": [                  "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8",                  "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"              ], +            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",              "version": "==20.4"          },          "pamqp": { @@ -461,6 +463,7 @@                  "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0",                  "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"              ], +            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",              "version": "==2.20"          },          "pygments": { @@ -468,6 +471,7 @@                  "sha256:647344a061c249a3b74e230c739f434d7ea4d8b1d5f3721bc0f3558049b38f44",                  
"sha256:ff7a40b4860b727ab48fad6360eb351cc1b33cbf9b15a0f689ca5353e9463324"              ], +            "markers": "python_version >= '3.5'",              "version": "==2.6.1"          },          "pyparsing": { @@ -475,6 +479,7 @@                  "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1",                  "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"              ], +            "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",              "version": "==2.4.7"          },          "python-dateutil": { @@ -511,32 +516,34 @@          },          "redis": {              "hashes": [ -                "sha256:2ef11f489003f151777c064c5dbc6653dfb9f3eade159bcadc524619fddc2242", -                "sha256:6d65e84bc58091140081ee9d9c187aab0480097750fac44239307a3bdf0b1251" +                "sha256:0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2", +                "sha256:432b788c4530cfe16d8d943a09d40ca6c16149727e4afe8c2c9d5580c59d9f24"              ], -            "version": "==3.5.2" +            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", +            "version": "==3.5.3"          },          "requests": {              "hashes": [ -                "sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee", -                "sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6" +                "sha256:b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b", +                "sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898"              ],              "index": "pypi", -            "version": "==2.23.0" +            "version": "==2.24.0"          },          "sentry-sdk": {              "hashes": [ -                "sha256:0e5e947d0f7a969314aa23669a94a9712be5a688ff069ff7b9fc36c66adc160c", -                "sha256:799a8bf76b012e3030a881be00e97bc0b922ce35dde699c6537122b751d80e2c" +                "sha256:da06bc3641e81ec2c942f87a0676cd9180044fa3d1697524a0005345997542e2", +                "sha256:e80d61af85d99a1222c1a3e2a24023618374cd50a99673aa7fa3cf920e7d813b"              ],              "index": "pypi", -            "version": "==0.14.4" +            "version": "==0.16.0"          },          "six": {              "hashes": [                  "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",                  "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"              ], +            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",              "version": "==1.15.0"          },          "snowballstemmer": { @@ -548,16 +555,17 @@          },          "sortedcontainers": {              "hashes": [ -                "sha256:974e9a32f56b17c1bac2aebd9dcf197f3eb9cd30553c5852a3187ad162e1a03a", -                "sha256:d9e96492dd51fae31e60837736b38fe42a187b5404c16606ff7ee7cd582d4c60" +                "sha256:4e73a757831fc3ca4de2859c422564239a31d8213d09a2a666e375807034d2ba", +                "sha256:c633ebde8580f241f274c1f8994a665c0e54a17724fecd0cae2f079e09c36d3f"              ], -            "version": "==2.1.0" +            "version": "==2.2.2"          },          "soupsieve": {              "hashes": [                  "sha256:1634eea42ab371d3d346309b93df7870a88610f0725d47528be902a0d95ecc55",                  "sha256:a59dc181727e95d25f781f0eb4fd1825ff45590ec8ff49eadfd7f1a537cc0232"              ], +   
         "markers": "python_version >= '3.5'",              "version": "==2.0.1"          },          "sphinx": { @@ -573,6 +581,7 @@                  "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a",                  "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"              ], +            "markers": "python_version >= '3.5'",              "version": "==1.0.2"          },          "sphinxcontrib-devhelp": { @@ -580,6 +589,7 @@                  "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e",                  "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"              ], +            "markers": "python_version >= '3.5'",              "version": "==1.0.2"          },          "sphinxcontrib-htmlhelp": { @@ -587,6 +597,7 @@                  "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f",                  "sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b"              ], +            "markers": "python_version >= '3.5'",              "version": "==1.0.3"          },          "sphinxcontrib-jsmath": { @@ -594,6 +605,7 @@                  "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178",                  "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"              ], +            "markers": "python_version >= '3.5'",              "version": "==1.0.1"          },          "sphinxcontrib-qthelp": { @@ -601,6 +613,7 @@                  "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72",                  "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"              ], +            "markers": "python_version >= '3.5'",              "version": "==1.0.3"          },          "sphinxcontrib-serializinghtml": { @@ -608,6 +621,7 @@                  "sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc",                  "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a"              ], +            "markers": "python_version >= '3.5'",              "version": "==1.1.4"          },          "statsd": { @@ -623,6 +637,7 @@                  "sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527",                  "sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115"              ], +            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'",              "version": "==1.25.9"          },          "websockets": { @@ -650,6 +665,7 @@                  "sha256:e898a0863421650f0bebac8ba40840fc02258ef4714cb7e1fd76b6a6354bda36",                  "sha256:f8a7bff6e8664afc4e6c28b983845c5bc14965030e3fb98789734d416af77c4b"              ], +            "markers": "python_full_version >= '3.6.1'",              "version": "==8.1"          },          "yarl": { @@ -672,6 +688,7 @@                  "sha256:d8cdee92bc930d8b09d8bd2043cedd544d9c8bd7436a77678dd602467a993080",                  "sha256:e15199cdb423316e15f108f51249e44eb156ae5dba232cb73be555324a1d49c2"              ], +            "markers": "python_version >= '3.5'",              "version": "==1.4.2"          }      }, @@ -688,6 +705,7 @@                  "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c",                  "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"              ], +            "markers": 
"python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",              "version": "==19.3.0"          },          "cfgv": { @@ -695,50 +713,55 @@                  "sha256:1ccf53320421aeeb915275a196e23b3b8ae87dea8ac6698b1638001d4a486d53",                  "sha256:c8e8f552ffcc6194f4e18dd4f68d9aef0c0d58ae7e7be8c82bee3c5e9edfa513"              ], +            "markers": "python_full_version >= '3.6.1'",              "version": "==3.1.0"          },          "coverage": {              "hashes": [ -                "sha256:00f1d23f4336efc3b311ed0d807feb45098fc86dee1ca13b3d6768cdab187c8a", -                "sha256:01333e1bd22c59713ba8a79f088b3955946e293114479bbfc2e37d522be03355", -                "sha256:0cb4be7e784dcdc050fc58ef05b71aa8e89b7e6636b99967fadbdba694cf2b65", -                "sha256:0e61d9803d5851849c24f78227939c701ced6704f337cad0a91e0972c51c1ee7", -                "sha256:1601e480b9b99697a570cea7ef749e88123c04b92d84cedaa01e117436b4a0a9", -                "sha256:2742c7515b9eb368718cd091bad1a1b44135cc72468c731302b3d641895b83d1", -                "sha256:2d27a3f742c98e5c6b461ee6ef7287400a1956c11421eb574d843d9ec1f772f0", -                "sha256:402e1744733df483b93abbf209283898e9f0d67470707e3c7516d84f48524f55", -                "sha256:5c542d1e62eece33c306d66fe0a5c4f7f7b3c08fecc46ead86d7916684b36d6c", -                "sha256:5f2294dbf7875b991c381e3d5af2bcc3494d836affa52b809c91697449d0eda6", -                "sha256:6402bd2fdedabbdb63a316308142597534ea8e1895f4e7d8bf7476c5e8751fef", -                "sha256:66460ab1599d3cf894bb6baee8c684788819b71a5dc1e8fa2ecc152e5d752019", -                "sha256:782caea581a6e9ff75eccda79287daefd1d2631cc09d642b6ee2d6da21fc0a4e", -                "sha256:79a3cfd6346ce6c13145731d39db47b7a7b859c0272f02cdb89a3bdcbae233a0", -                "sha256:7a5bdad4edec57b5fb8dae7d3ee58622d626fd3a0be0dfceda162a7035885ecf", -                "sha256:8fa0cbc7ecad630e5b0f4f35b0f6ad419246b02bc750de7ac66db92667996d24", -                "sha256:a027ef0492ede1e03a8054e3c37b8def89a1e3c471482e9f046906ba4f2aafd2", -                "sha256:a3f3654d5734a3ece152636aad89f58afc9213c6520062db3978239db122f03c", -                "sha256:a82b92b04a23d3c8a581fc049228bafde988abacba397d57ce95fe95e0338ab4", -                "sha256:acf3763ed01af8410fc36afea23707d4ea58ba7e86a8ee915dfb9ceff9ef69d0", -                "sha256:adeb4c5b608574a3d647011af36f7586811a2c1197c861aedb548dd2453b41cd", -                "sha256:b83835506dfc185a319031cf853fa4bb1b3974b1f913f5bb1a0f3d98bdcded04", -                "sha256:bb28a7245de68bf29f6fb199545d072d1036a1917dca17a1e75bbb919e14ee8e", -                "sha256:bf9cb9a9fd8891e7efd2d44deb24b86d647394b9705b744ff6f8261e6f29a730", -                "sha256:c317eaf5ff46a34305b202e73404f55f7389ef834b8dbf4da09b9b9b37f76dd2", -                "sha256:dbe8c6ae7534b5b024296464f387d57c13caa942f6d8e6e0346f27e509f0f768", -                "sha256:de807ae933cfb7f0c7d9d981a053772452217df2bf38e7e6267c9cbf9545a796", -                "sha256:dead2ddede4c7ba6cb3a721870f5141c97dc7d85a079edb4bd8d88c3ad5b20c7", -                "sha256:dec5202bfe6f672d4511086e125db035a52b00f1648d6407cc8e526912c0353a", -                "sha256:e1ea316102ea1e1770724db01998d1603ed921c54a86a2efcb03428d5417e489", -                "sha256:f90bfc4ad18450c80b024036eaf91e4a246ae287701aaa88eaebebf150868052" -            ], -            "index": "pypi", -            "version": "==5.1" +                "sha256:0fc4e0d91350d6f43ef6a61f64a48e917637e1dcfcba4b4b7d543c628ef82c2d", +                
"sha256:10f2a618a6e75adf64329f828a6a5b40244c1c50f5ef4ce4109e904e69c71bd2", +                "sha256:12eaccd86d9a373aea59869bc9cfa0ab6ba8b1477752110cb4c10d165474f703", +                "sha256:1874bdc943654ba46d28f179c1846f5710eda3aeb265ff029e0ac2b52daae404", +                "sha256:1dcebae667b73fd4aa69237e6afb39abc2f27520f2358590c1b13dd90e32abe7", +                "sha256:1e58fca3d9ec1a423f1b7f2aa34af4f733cbfa9020c8fe39ca451b6071237405", +                "sha256:214eb2110217f2636a9329bc766507ab71a3a06a8ea30cdeebb47c24dce5972d", +                "sha256:25fe74b5b2f1b4abb11e103bb7984daca8f8292683957d0738cd692f6a7cc64c", +                "sha256:32ecee61a43be509b91a526819717d5e5650e009a8d5eda8631a59c721d5f3b6", +                "sha256:3740b796015b889e46c260ff18b84683fa2e30f0f75a171fb10d2bf9fb91fc70", +                "sha256:3b2c34690f613525672697910894b60d15800ac7e779fbd0fccf532486c1ba40", +                "sha256:41d88736c42f4a22c494c32cc48a05828236e37c991bd9760f8923415e3169e4", +                "sha256:42fa45a29f1059eda4d3c7b509589cc0343cd6bbf083d6118216830cd1a51613", +                "sha256:4bb385a747e6ae8a65290b3df60d6c8a692a5599dc66c9fa3520e667886f2e10", +                "sha256:509294f3e76d3f26b35083973fbc952e01e1727656d979b11182f273f08aa80b", +                "sha256:5c74c5b6045969b07c9fb36b665c9cac84d6c174a809fc1b21bdc06c7836d9a0", +                "sha256:60a3d36297b65c7f78329b80120f72947140f45b5c7a017ea730f9112b40f2ec", +                "sha256:6f91b4492c5cde83bfe462f5b2b997cdf96a138f7c58b1140f05de5751623cf1", +                "sha256:7403675df5e27745571aba1c957c7da2dacb537c21e14007ec3a417bf31f7f3d", +                "sha256:87bdc8135b8ee739840eee19b184804e5d57f518578ffc797f5afa2c3c297913", +                "sha256:8a3decd12e7934d0254939e2bf434bf04a5890c5bf91a982685021786a08087e", +                "sha256:9702e2cb1c6dec01fb8e1a64c015817c0800a6eca287552c47a5ee0ebddccf62", +                "sha256:a4d511012beb967a39580ba7d2549edf1e6865a33e5fe51e4dce550522b3ac0e", +                "sha256:bbb387811f7a18bdc61a2ea3d102be0c7e239b0db9c83be7bfa50f095db5b92a", +                "sha256:bfcc811883699ed49afc58b1ed9f80428a18eb9166422bce3c31a53dba00fd1d", +                "sha256:c32aa13cc3fe86b0f744dfe35a7f879ee33ac0a560684fef0f3e1580352b818f", +                "sha256:ca63dae130a2e788f2b249200f01d7fa240f24da0596501d387a50e57aa7075e", +                "sha256:d54d7ea74cc00482a2410d63bf10aa34ebe1c49ac50779652106c867f9986d6b", +                "sha256:d67599521dff98ec8c34cd9652cbcfe16ed076a2209625fca9dc7419b6370e5c", +                "sha256:d82db1b9a92cb5c67661ca6616bdca6ff931deceebb98eecbd328812dab52032", +                "sha256:d9ad0a988ae20face62520785ec3595a5e64f35a21762a57d115dae0b8fb894a", +                "sha256:ebf2431b2d457ae5217f3a1179533c456f3272ded16f8ed0b32961a6d90e38ee", +                "sha256:ed9a21502e9223f563e071759f769c3d6a2e1ba5328c31e86830368e8d78bc9c", +                "sha256:f50632ef2d749f541ca8e6c07c9928a37f87505ce3a9f20c8446ad310f1aa87b" +            ], +            "index": "pypi", +            "version": "==5.2"          },          "distlib": {              "hashes": [ -                "sha256:2e166e231a26b36d6dfe35a48c4464346620f8645ed0ace01ee31822b288de21" +                "sha256:8c09de2c67b3e7deef7184574fc060ab8a793e7adbb183d942c389c8b13c52fb", +                "sha256:edf6116872c863e1aa9d5bb7cb5e05a022c519a4594dc703843343a9ddd9bff1"              ], -            "version": "==0.3.0" +            "version": "==0.3.1"          },          "filelock": {        
      "hashes": [ @@ -749,19 +772,19 @@          },          "flake8": {              "hashes": [ -                "sha256:c69ac1668e434d37a2d2880b3ca9aafd54b3a10a3ac1ab101d22f29e29cf8634", -                "sha256:ccaa799ef9893cebe69fdfefed76865aeaefbb94cb8545617b2298786a4de9a5" +                "sha256:15e351d19611c887e482fb960eae4d44845013cc142d42896e9862f775d8cf5c", +                "sha256:f04b9fcbac03b0a3e58c0ab3a0ecc462e023a9faf046d57794184028123aa208"              ],              "index": "pypi", -            "version": "==3.8.2" +            "version": "==3.8.3"          },          "flake8-annotations": {              "hashes": [ -                "sha256:9091d920406a7ff10e401e0dd1baa396d1d7d2e3d101a9beecf815f5894ad554", -                "sha256:f59fdceb8c8f380a20aed20e1ba8a57bde05935958166c52be2249f113f7ab75" +                "sha256:babc81a17a5f1a63464195917e20d3e8663fb712b3633d4522dbfc407cff31b3", +                "sha256:fcd833b415726a7a374922c95a5c47a7a4d8ea71cb4a586369c665e7476146e1"              ],              "index": "pypi", -            "version": "==2.1.0" +            "version": "==2.2.0"          },          "flake8-bugbear": {              "hashes": [ @@ -819,10 +842,11 @@          },          "identify": {              "hashes": [ -                "sha256:0f3c3aac62b51b86fea6ff52fe8ff9e06f57f10411502443809064d23e16f1c2", -                "sha256:f9ad3d41f01e98eb066b6e05c5b184fd1e925fadec48eb165b4e01c72a1ef3a7" +                "sha256:c4d07f2b979e3931894170a9e0d4b8281e6905ea6d018c326f7ffefaf20db680", +                "sha256:dac33eff90d57164e289fb20bf4e131baef080947ee9bf45efcd0da8d19064bf"              ], -            "version": "==1.4.16" +            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", +            "version": "==1.4.21"          },          "mccabe": {              "hashes": [ @@ -833,31 +857,32 @@          },          "nodeenv": {              "hashes": [ -                "sha256:5b2438f2e42af54ca968dd1b374d14a1194848955187b0e5e4be1f73813a5212" +                "sha256:4b0b77afa3ba9b54f4b6396e60b0c83f59eaeb2d63dc3cc7a70f7f4af96c82bc"              ], -            "version": "==1.3.5" +            "version": "==1.4.0"          },          "pep8-naming": {              "hashes": [ -                "sha256:5d9f1056cb9427ce344e98d1a7f5665710e2f20f748438e308995852cfa24164", -                "sha256:f3b4a5f9dd72b991bf7d8e2a341d2e1aa3a884a769b5aaac4f56825c1763bf3a" +                "sha256:a1dd47dd243adfe8a83616e27cf03164960b507530f155db94e10b36a6cd6724", +                "sha256:f43bfe3eea7e0d73e8b5d07d6407ab47f2476ccaeff6937c84275cd30b016738"              ],              "index": "pypi", -            "version": "==0.10.0" +            "version": "==0.11.1"          },          "pre-commit": {              "hashes": [ -                "sha256:5559e09afcac7808933951ffaf4ff9aac524f31efbc3f24d021540b6c579813c", -                "sha256:703e2e34cbe0eedb0d319eff9f7b83e2022bb5a3ab5289a6a8841441076514d0" +                "sha256:1657663fdd63a321a4a739915d7d03baedd555b25054449090f97bb0cb30a915", +                "sha256:e8b1315c585052e729ab7e99dcca5698266bedce9067d21dc909c23e3ceed626"              ],              "index": "pypi", -            "version": "==2.4.0" +            "version": "==2.6.0"          },          "pycodestyle": {              "hashes": [                  "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367",                  
"sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e"              ], +            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",              "version": "==2.6.0"          },          "pydocstyle": { @@ -865,6 +890,7 @@                  "sha256:da7831660b7355307b32778c4a0dbfb137d89254ef31a2b2978f50fc0b4d7586",                  "sha256:f4f5d210610c2d153fae39093d44224c17429e2ad7da12a8b419aba5c2f614b5"              ], +            "markers": "python_version >= '3.5'",              "version": "==5.0.2"          },          "pyflakes": { @@ -872,6 +898,7 @@                  "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92",                  "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8"              ], +            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",              "version": "==2.2.0"          },          "pyyaml": { @@ -896,6 +923,7 @@                  "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",                  "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"              ], +            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",              "version": "==1.15.0"          },          "snowballstemmer": { @@ -922,10 +950,11 @@          },          "virtualenv": {              "hashes": [ -                "sha256:a116629d4e7f4d03433b8afa27f43deba09d48bc48f5ecefa4f015a178efb6cf", -                "sha256:a730548b27366c5e6cbdf6f97406d861cccece2e22275e8e1a757aeff5e00c70" +                "sha256:c11a475400e98450403c0364eb3a2d25d42f71cf1493da64390487b666de4324", +                "sha256:e10cc66f40cbda459720dfe1d334c4dc15add0d80f09108224f171006a97a172"              ], -            "version": "==20.0.21" +            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", +            "version": "==20.0.26"          }      }  } @@ -1,6 +1,6 @@  # Python Utility Bot -[](https://discord.gg/2B963hn) +[](https://discord.gg/2B963hn)  [](https://dev.azure.com/python-discord/Python%20Discord/_build/latest?definitionId=1&branchName=master)  [](https://dev.azure.com/python-discord/Python%20Discord/_apis/build/status/Bot?branchName=master)  [](https://dev.azure.com/python-discord/Python%20Discord/_apis/build/status/Bot?branchName=master) diff --git a/bot/__main__.py b/bot/__main__.py index 4e0d4a111..5382f5502 100644 --- a/bot/__main__.py +++ b/bot/__main__.py @@ -24,11 +24,13 @@ sentry_sdk.init(      ]  ) +allowed_roles = [discord.Object(id_) for id_ in constants.MODERATION_ROLES]  bot = Bot(      command_prefix=when_mentioned_or(constants.Bot.prefix),      activity=discord.Game(name="Commands: !help"),      case_insensitive=True,      max_messages=10_000, +    allowed_mentions=discord.AllowedMentions(everyone=False, roles=allowed_roles)  )  # Internal/debug @@ -52,6 +54,7 @@ bot.load_extension("bot.cogs.verification")  # Feature cogs  bot.load_extension("bot.cogs.alias")  bot.load_extension("bot.cogs.defcon") +bot.load_extension("bot.cogs.dm_relay")  bot.load_extension("bot.cogs.duck_pond")  bot.load_extension("bot.cogs.eval")  bot.load_extension("bot.cogs.information") @@ -63,6 +66,7 @@ bot.load_extension("bot.cogs.reddit")  bot.load_extension("bot.cogs.reminders")  bot.load_extension("bot.cogs.site")  bot.load_extension("bot.cogs.snekbox") +bot.load_extension("bot.cogs.source")  bot.load_extension("bot.cogs.stats")  
bot.load_extension("bot.cogs.sync")  bot.load_extension("bot.cogs.tags") diff --git a/bot/cogs/clean.py b/bot/cogs/clean.py index 368d91c85..f436e531a 100644 --- a/bot/cogs/clean.py +++ b/bot/cogs/clean.py @@ -45,6 +45,7 @@ class Clean(Cog):          bots_only: bool = False,          user: User = None,          regex: Optional[str] = None, +        until_message: Optional[Message] = None,      ) -> None:          """A helper function that does the actual message cleaning."""          def predicate_bots_only(message: Message) -> bool: @@ -129,6 +130,20 @@ class Clean(Cog):                  if not self.cleaning:                      return +                # If we are looking for specific message. +                if until_message: + +                    # we could use ID's here however in case if the message we are looking for gets deleted, +                    # we won't have a way to figure that out thus checking for datetime should be more reliable +                    if message.created_at < until_message.created_at: +                        # means we have found the message until which we were supposed to be deleting. +                        break + +                    # Since we will be using `delete_messages` method of a TextChannel and we need message objects to +                    # use it as well as to send logs we will start appending messages here instead adding them from +                    # purge. +                    messages.append(message) +                  # If the message passes predicate, let's save it.                  if predicate is None or predicate(message):                      message_ids.append(message.id) @@ -138,7 +153,14 @@ class Clean(Cog):          # Now let's delete the actual messages with purge.          self.mod_log.ignore(Event.message_delete, *message_ids)          for channel in channels: -            messages += await channel.purge(limit=amount, check=predicate) +            if until_message: +                for i in range(0, len(messages), 100): +                    # while purge automatically handles the amount of messages +                    # delete_messages only allows for up to 100 messages at once +                    # thus we need to paginate the amount to always be <= 100 +                    await channel.delete_messages(messages[i:i + 100]) +            else: +                messages += await channel.purge(limit=amount, check=predicate)          # Reverse the list to restore chronological order          if messages: @@ -221,6 +243,17 @@ class Clean(Cog):          """Delete all messages that match a certain regex, stop cleaning after traversing `amount` messages."""          await self._clean_messages(amount, ctx, regex=regex, channels=channels) +    @clean_group.command(name="message", aliases=["messages"]) +    @with_role(*MODERATION_ROLES) +    async def clean_message(self, ctx: Context, message: Message) -> None: +        """Delete all messages until certain message, stop cleaning after hitting the `message`.""" +        await self._clean_messages( +            CleanMessages.message_limit, +            ctx, +            channels=[message.channel], +            until_message=message +        ) +      @clean_group.command(name="stop", aliases=["cancel", "abort"])      @with_role(*MODERATION_ROLES)      async def clean_cancel(self, ctx: Context) -> None: diff --git a/bot/cogs/dm_relay.py b/bot/cogs/dm_relay.py new file mode 100644 index 000000000..0d8f340b4 --- /dev/null +++ b/bot/cogs/dm_relay.py @@ -0,0 +1,124 @@ +import 
logging +from typing import Optional + +import discord +from discord import Color +from discord.ext import commands +from discord.ext.commands import Cog + +from bot import constants +from bot.bot import Bot +from bot.converters import UserMentionOrID +from bot.utils import RedisCache +from bot.utils.checks import in_whitelist_check, with_role_check +from bot.utils.messages import send_attachments +from bot.utils.webhooks import send_webhook + +log = logging.getLogger(__name__) + + +class DMRelay(Cog): +    """Relay direct messages to and from the bot.""" + +    # RedisCache[str, t.Union[discord.User.id, discord.Member.id]] +    dm_cache = RedisCache() + +    def __init__(self, bot: Bot): +        self.bot = bot +        self.webhook_id = constants.Webhooks.dm_log +        self.webhook = None +        self.bot.loop.create_task(self.fetch_webhook()) + +    @commands.command(aliases=("reply",)) +    async def send_dm(self, ctx: commands.Context, member: Optional[UserMentionOrID], *, message: str) -> None: +        """ +        Allows you to send a DM to a user from the bot. + +        If `member` is not provided, it will send to the last user who DM'd the bot. + +        This feature should be used extremely sparingly. Use ModMail if you need to have a serious +        conversation with a user. This is just for responding to extraordinary DMs, having a little +        fun with users, and telling people they are DMing the wrong bot. + +        NOTE: This feature will be removed if it is overused. +        """ +        if not member: +            user_id = await self.dm_cache.get("last_user") +            member = ctx.guild.get_member(user_id) if user_id else None + +        # If we still don't have a Member at this point, give up +        if not member: +            log.debug("This bot has never gotten a DM, or the RedisCache has been cleared.") +            await ctx.message.add_reaction("❌") +            return + +        try: +            await member.send(message) +        except discord.errors.Forbidden: +            log.debug("User has disabled DMs.") +            await ctx.message.add_reaction("❌") +        else: +            await ctx.message.add_reaction("✅") +            self.bot.stats.incr("dm_relay.dm_sent") + +    async def fetch_webhook(self) -> None: +        """Fetches the webhook object, so we can post to it.""" +        await self.bot.wait_until_guild_available() + +        try: +            self.webhook = await self.bot.fetch_webhook(self.webhook_id) +        except discord.HTTPException: +            log.exception(f"Failed to fetch webhook with id `{self.webhook_id}`") + +    @Cog.listener() +    async def on_message(self, message: discord.Message) -> None: +        """Relays the message's content and attachments to the dm_log channel.""" +        # Only relay DMs from humans +        if message.author.bot or message.guild or self.webhook is None: +            return + +        if message.clean_content: +            await send_webhook( +                webhook=self.webhook, +                content=message.clean_content, +                username=f"{message.author.display_name} ({message.author.id})", +                avatar_url=message.author.avatar_url +            ) +            await self.dm_cache.set("last_user", message.author.id) +            self.bot.stats.incr("dm_relay.dm_received") + +        # Handle any attachments +        if message.attachments: +            try: +                await send_attachments(message, self.webhook) +            except 
(discord.errors.Forbidden, discord.errors.NotFound): +                e = discord.Embed( +                    description=":x: **This message contained an attachment, but it could not be retrieved**", +                    color=Color.red() +                ) +                await send_webhook( +                    webhook=self.webhook, +                    embed=e, +                    username=f"{message.author.display_name} ({message.author.id})", +                    avatar_url=message.author.avatar_url +                ) +            except discord.HTTPException: +                log.exception("Failed to send an attachment to the webhook") + +    def cog_check(self, ctx: commands.Context) -> bool: +        """Only allow moderators to invoke the commands in this cog.""" +        checks = [ +            with_role_check(ctx, *constants.MODERATION_ROLES), +            in_whitelist_check( +                ctx, +                channels=[constants.Channels.dm_log], +                redirect=None, +                fail_silently=True, +            ) +        ] +        return all(checks) + + +def setup(bot: Bot) -> None: +    """Load the DMRelay  cog.""" +    bot.add_cog(DMRelay(bot)) diff --git a/bot/cogs/duck_pond.py b/bot/cogs/duck_pond.py index 37d1786a2..7021069fa 100644 --- a/bot/cogs/duck_pond.py +++ b/bot/cogs/duck_pond.py @@ -1,5 +1,5 @@  import logging -from typing import Optional, Union +from typing import Union  import discord  from discord import Color, Embed, Member, Message, RawReactionActionEvent, User, errors @@ -8,6 +8,7 @@ from discord.ext.commands import Cog  from bot import constants  from bot.bot import Bot  from bot.utils.messages import send_attachments +from bot.utils.webhooks import send_webhook  log = logging.getLogger(__name__) @@ -18,6 +19,7 @@ class DuckPond(Cog):      def __init__(self, bot: Bot):          self.bot = bot          self.webhook_id = constants.Webhooks.duck_pond +        self.webhook = None          self.bot.loop.create_task(self.fetch_webhook())      async def fetch_webhook(self) -> None: @@ -47,24 +49,6 @@ class DuckPond(Cog):                          return True          return False -    async def send_webhook( -        self, -        content: Optional[str] = None, -        username: Optional[str] = None, -        avatar_url: Optional[str] = None, -        embed: Optional[Embed] = None, -    ) -> None: -        """Send a webhook to the duck_pond channel.""" -        try: -            await self.webhook.send( -                content=content, -                username=username, -                avatar_url=avatar_url, -                embed=embed -            ) -        except discord.HTTPException: -            log.exception("Failed to send a message to the Duck Pool webhook") -      async def count_ducks(self, message: Message) -> int:          """          Count the number of ducks in the reactions of a specific message. 
@@ -94,10 +78,9 @@ class DuckPond(Cog):      async def relay_message(self, message: Message) -> None:          """Relays the message's content and attachments to the duck pond channel.""" -        clean_content = message.clean_content - -        if clean_content: -            await self.send_webhook( +        if message.clean_content: +            await send_webhook( +                webhook=self.webhook,                  content=message.clean_content,                  username=message.author.display_name,                  avatar_url=message.author.avatar_url @@ -111,7 +94,8 @@ class DuckPond(Cog):                      description=":x: **This message contained an attachment, but it could not be retrieved**",                      color=Color.red()                  ) -                await self.send_webhook( +                await send_webhook( +                    webhook=self.webhook,                      embed=e,                      username=message.author.display_name,                      avatar_url=message.author.avatar_url diff --git a/bot/cogs/error_handler.py b/bot/cogs/error_handler.py index 5de961116..233851e41 100644 --- a/bot/cogs/error_handler.py +++ b/bot/cogs/error_handler.py @@ -170,7 +170,7 @@ class ErrorHandler(Cog):              await prepared_help_command              self.bot.stats.incr("errors.too_many_arguments")          elif isinstance(e, errors.BadArgument): -            await ctx.send(f"Bad argument: {e}\n") +            await ctx.send("Bad argument: Please double-check your input arguments and try again.\n")              await prepared_help_command              self.bot.stats.incr("errors.bad_argument")          elif isinstance(e, errors.BadUnionArgument): diff --git a/bot/cogs/filtering.py b/bot/cogs/filtering.py index 1d9fddb12..bd665f424 100644 --- a/bot/cogs/filtering.py +++ b/bot/cogs/filtering.py @@ -1,10 +1,13 @@ +import asyncio  import logging  import re -from typing import Optional, Union +from datetime import datetime, timedelta +from typing import List, Mapping, Optional, Tuple, Union +import dateutil  import discord.errors  from dateutil.relativedelta import relativedelta -from discord import Colour, Member, Message, TextChannel +from discord import Colour, HTTPException, Member, Message, NotFound, TextChannel  from discord.ext.commands import Cog  from discord.utils import escape_markdown @@ -14,6 +17,8 @@ from bot.constants import (      Channels, Colours,      Filter, Icons, URLs  ) +from bot.utils.redis_cache import RedisCache +from bot.utils.scheduling import Scheduler  log = logging.getLogger(__name__) @@ -40,6 +45,8 @@ TOKEN_WATCHLIST_PATTERNS = [  ]  WATCHLIST_PATTERNS = WORD_WATCHLIST_PATTERNS + TOKEN_WATCHLIST_PATTERNS +DAYS_BETWEEN_ALERTS = 3 +  def expand_spoilers(text: str) -> str:      """Return a string containing all interpretations of a spoilered message.""" @@ -49,11 +56,19 @@ def expand_spoilers(text: str) -> str:      ) +OFFENSIVE_MSG_DELETE_TIME = timedelta(days=Filter.offensive_msg_delete_days) + +  class Filtering(Cog):      """Filtering out invites, blacklisting domains, and warning us of certain regular expressions.""" +    # Redis cache mapping a user ID to the last timestamp a bad nickname alert was sent +    name_alerts = RedisCache() +      def __init__(self, bot: Bot):          self.bot = bot +        self.scheduler = Scheduler(self.__class__.__name__) +        self.name_lock = asyncio.Lock()          staff_mistake_str = "If you believe this was a mistake, please let staff know!"          
self.filters = { @@ -66,7 +81,8 @@ class Filtering(Cog):                  "notification_msg": (                      "Your post has been removed for abusing Unicode character rendering (aka Zalgo text). "                      f"{staff_mistake_str}" -                ) +                ), +                "schedule_deletion": False              },              "filter_invites": {                  "enabled": Filter.filter_invites, @@ -77,7 +93,8 @@ class Filtering(Cog):                  "notification_msg": (                      f"Per Rule 6, your invite link has been removed. {staff_mistake_str}\n\n"                      r"Our server rules can be found here: <https://pythondiscord.com/pages/rules>" -                ) +                ), +                "schedule_deletion": False              },              "filter_domains": {                  "enabled": Filter.filter_domains, @@ -87,22 +104,27 @@ class Filtering(Cog):                  "user_notification": Filter.notify_user_domains,                  "notification_msg": (                      f"Your URL has been removed because it matched a blacklisted domain. {staff_mistake_str}" -                ) +                ), +                "schedule_deletion": False              },              "watch_regex": {                  "enabled": Filter.watch_regex,                  "function": self._has_watch_regex_match,                  "type": "watchlist",                  "content_only": True, +                "schedule_deletion": True              },              "watch_rich_embeds": {                  "enabled": Filter.watch_rich_embeds,                  "function": self._has_rich_embed,                  "type": "watchlist",                  "content_only": False, -            }, +                "schedule_deletion": False +            }          } +        self.bot.loop.create_task(self.reschedule_offensive_msg_deletion()) +      @property      def mod_log(self) -> ModLog:          """Get currently loaded ModLog cog instance.""" @@ -112,6 +134,7 @@ class Filtering(Cog):      async def on_message(self, msg: Message) -> None:          """Invoke message filter for new messages."""          await self._filter_message(msg) +        await self.check_bad_words_in_name(msg.author)      @Cog.listener()      async def on_message_edit(self, before: Message, after: Message) -> None: @@ -126,24 +149,116 @@ class Filtering(Cog):              delta = relativedelta(after.edited_at, before.edited_at).microseconds          await self._filter_message(after, delta) -    async def _filter_message(self, msg: Message, delta: Optional[int] = None) -> None: -        """Filter the input message to see if it violates any of our rules, and then respond accordingly.""" +    @staticmethod +    def get_name_matches(name: str) -> List[re.Match]: +        """Check bad words from passed string (name). 
Return list of matches.""" +        matches = [] +        for pattern in WATCHLIST_PATTERNS: +            if match := pattern.search(name): +                matches.append(match) +        return matches + +    async def check_send_alert(self, member: Member) -> bool: +        """When there is less than 3 days after last alert, return `False`, otherwise `True`.""" +        if last_alert := await self.name_alerts.get(member.id): +            last_alert = datetime.utcfromtimestamp(last_alert) +            if datetime.utcnow() - timedelta(days=DAYS_BETWEEN_ALERTS) < last_alert: +                log.trace(f"Last alert was too recent for {member}'s nickname.") +                return False + +        return True + +    async def check_bad_words_in_name(self, member: Member) -> None: +        """Send a mod alert every 3 days if a username still matches a watchlist pattern.""" +        # Use lock to avoid race conditions +        async with self.name_lock: +            # Check whether the users display name contains any words in our blacklist +            matches = self.get_name_matches(member.display_name) + +            if not matches or not await self.check_send_alert(member): +                return + +            log.info(f"Sending bad nickname alert for '{member.display_name}' ({member.id}).") + +            log_string = ( +                f"**User:** {member.mention} (`{member.id}`)\n" +                f"**Display Name:** {member.display_name}\n" +                f"**Bad Matches:** {', '.join(match.group() for match in matches)}" +            ) + +            await self.mod_log.send_log_message( +                icon_url=Icons.token_removed, +                colour=Colours.soft_red, +                title="Username filtering alert", +                text=log_string, +                channel_id=Channels.mod_alerts, +                thumbnail=member.avatar_url +            ) + +            # Update time when alert sent +            await self.name_alerts.set(member.id, datetime.utcnow().timestamp()) + +    async def filter_eval(self, result: str, msg: Message) -> bool: +        """ +        Filter the result of an !eval to see if it violates any of our rules, and then respond accordingly. + +        Also requires the original message, to check whether to filter and for mod logs. +        Returns whether a filter was triggered or not. +        """ +        filter_triggered = False          # Should we filter this message? -        role_whitelisted = False +        if self._check_filter(msg): +            for filter_name, _filter in self.filters.items(): +                # Is this specific filter enabled in the config? +                # We also do not need to worry about filters that take the full message, +                # since all we have is an arbitrary string. +                if _filter["enabled"] and _filter["content_only"]: +                    match = await _filter["function"](result) -        if type(msg.author) is Member:  # Only Member has roles, not User. 
-            for role in msg.author.roles: -                if role.id in Filter.role_whitelist: -                    role_whitelisted = True +                    if match: +                        # If this is a filter (not a watchlist), we set the variable so we know +                        # that it has been triggered +                        if _filter["type"] == "filter": +                            filter_triggered = True -        filter_message = ( -            msg.channel.id not in Filter.channel_whitelist  # Channel not in whitelist -            and not role_whitelisted                        # Role not in whitelist -            and not msg.author.bot                          # Author not a bot -        ) +                        # We do not have to check against DM channels since !eval cannot be used there. +                        channel_str = f"in {msg.channel.mention}" + +                        message_content, additional_embeds, additional_embeds_msg = self._add_stats( +                            filter_name, match, result +                        ) + +                        message = ( +                            f"The {filter_name} {_filter['type']} was triggered " +                            f"by **{msg.author}** " +                            f"(`{msg.author.id}`) {channel_str} using !eval with " +                            f"[the following message]({msg.jump_url}):\n\n" +                            f"{message_content}" +                        ) + +                        log.debug(message) + +                        # Send pretty mod log embed to mod-alerts +                        await self.mod_log.send_log_message( +                            icon_url=Icons.filtering, +                            colour=Colour(Colours.soft_red), +                            title=f"{_filter['type'].title()} triggered!", +                            text=message, +                            thumbnail=msg.author.avatar_url_as(static_format="png"), +                            channel_id=Channels.mod_alerts, +                            ping_everyone=Filter.ping_everyone, +                            additional_embeds=additional_embeds, +                            additional_embeds_msg=additional_embeds_msg +                        ) -        # If none of the above, we can start filtering. -        if filter_message: +                        break  # We don't want multiple filters to trigger + +        return filter_triggered + +    async def _filter_message(self, msg: Message, delta: Optional[int] = None) -> None: +        """Filter the input message to see if it violates any of our rules, and then respond accordingly.""" +        # Should we filter this message? +        if self._check_filter(msg):              for filter_name, _filter in self.filters.items():                  # Is this specific filter enabled in the config?                  if _filter["enabled"]: @@ -183,21 +298,28 @@ class Filtering(Cog):                              if _filter["user_notification"]:                                  await self.notify_member(msg.author, _filter["notification_msg"], msg.channel) +                        # If the message is classed as offensive, we store it in the site db and +                        # it will be deleted it after one week. 
+                        if _filter["schedule_deletion"] and not is_private: +                            delete_date = (msg.created_at + OFFENSIVE_MSG_DELETE_TIME).isoformat() +                            data = { +                                'id': msg.id, +                                'channel_id': msg.channel.id, +                                'delete_date': delete_date +                            } + +                            await self.bot.api_client.post('bot/offensive-messages', json=data) +                            self.schedule_msg_delete(data) +                            log.trace(f"Offensive message {msg.id} will be deleted on {delete_date}") +                          if is_private:                              channel_str = "via DM"                          else:                              channel_str = f"in {msg.channel.mention}" -                        # Word and match stats for watch_regex -                        if filter_name == "watch_regex": -                            surroundings = match.string[max(match.start() - 10, 0): match.end() + 10] -                            message_content = ( -                                f"**Match:** '{match[0]}'\n" -                                f"**Location:** '...{escape_markdown(surroundings)}...'\n" -                                f"\n**Original Message:**\n{escape_markdown(msg.content)}" -                            ) -                        else:  # Use content of discord Message -                            message_content = msg.content +                        message_content, additional_embeds, additional_embeds_msg = self._add_stats( +                            filter_name, match, msg.content +                        )                          message = (                              f"The {filter_name} {_filter['type']} was triggered " @@ -209,30 +331,6 @@ class Filtering(Cog):                          log.debug(message) -                        self.bot.stats.incr(f"filters.{filter_name}") - -                        additional_embeds = None -                        additional_embeds_msg = None - -                        # The function returns True for invalid invites. -                        # They have no data so additional embeds can't be created for them. 
-                        if filter_name == "filter_invites" and match is not True: -                            additional_embeds = [] -                            for invite, data in match.items(): -                                embed = discord.Embed(description=( -                                    f"**Members:**\n{data['members']}\n" -                                    f"**Active:**\n{data['active']}" -                                )) -                                embed.set_author(name=data["name"]) -                                embed.set_thumbnail(url=data["icon"]) -                                embed.set_footer(text=f"Guild Invite Code: {invite}") -                                additional_embeds.append(embed) -                            additional_embeds_msg = "For the following guild(s):" - -                        elif filter_name == "watch_rich_embeds": -                            additional_embeds = msg.embeds -                            additional_embeds_msg = "With the following embed(s):" -                          # Send pretty mod log embed to mod-alerts                          await self.mod_log.send_log_message(                              icon_url=Icons.filtering, @@ -248,6 +346,63 @@ class Filtering(Cog):                          break  # We don't want multiple filters to trigger +    def _add_stats(self, name: str, match: Union[re.Match, dict, bool, List[discord.Embed]], content: str) -> Tuple[ +        str, Optional[List[discord.Embed]], Optional[str] +    ]: +        """Adds relevant statistical information to the relevant filter and increments the bot's stats.""" +        # Word and match stats for watch_regex +        if name == "watch_regex": +            surroundings = match.string[max(match.start() - 10, 0): match.end() + 10] +            message_content = ( +                f"**Match:** '{match[0]}'\n" +                f"**Location:** '...{escape_markdown(surroundings)}...'\n" +                f"\n**Original Message:**\n{escape_markdown(content)}" +            ) +        else:  # Use original content +            message_content = content + +        additional_embeds = None +        additional_embeds_msg = None + +        self.bot.stats.incr(f"filters.{name}") + +        # The function returns True for invalid invites. +        # They have no data so additional embeds can't be created for them. +        if name == "filter_invites" and match is not True: +            additional_embeds = [] +            for invite, data in match.items(): +                embed = discord.Embed(description=( +                    f"**Members:**\n{data['members']}\n" +                    f"**Active:**\n{data['active']}" +                )) +                embed.set_author(name=data["name"]) +                embed.set_thumbnail(url=data["icon"]) +                embed.set_footer(text=f"Guild Invite Code: {invite}") +                additional_embeds.append(embed) +            additional_embeds_msg = "For the following guild(s):" + +        elif name == "watch_rich_embeds": +            additional_embeds = match +            additional_embeds_msg = "With the following embed(s):" + +        return message_content, additional_embeds, additional_embeds_msg + +    @staticmethod +    def _check_filter(msg: Message) -> bool: +        """Check whitelists to see if we should filter this message.""" +        role_whitelisted = False + +        if type(msg.author) is Member:  # Only Member has roles, not User. 
+            for role in msg.author.roles: +                if role.id in Filter.role_whitelist: +                    role_whitelisted = True + +        return ( +            msg.channel.id not in Filter.channel_whitelist  # Channel not in whitelist +            and not role_whitelisted                        # Role not in whitelist +            and not msg.author.bot                          # Author not a bot +        ) +      @staticmethod      async def _has_watch_regex_match(text: str) -> Union[bool, re.Match]:          """ @@ -300,7 +455,7 @@ class Filtering(Cog):          Attempts to catch some of common ways to try to cheat the system.          """ -        # Remove backslashes to prevent escape character aroundfuckery like +        # Remove backslashes to prevent escape character around fuckery like          # discord\.gg/gdudes-pony-farm          text = text.replace("\\", "") @@ -340,7 +495,7 @@ class Filtering(Cog):          return invite_data if invite_data else False      @staticmethod -    async def _has_rich_embed(msg: Message) -> bool: +    async def _has_rich_embed(msg: Message) -> Union[bool, List[discord.Embed]]:          """Determines if `msg` contains any rich embeds not auto-generated from a URL."""          if msg.embeds:              for embed in msg.embeds: @@ -349,7 +504,7 @@ class Filtering(Cog):                      if not embed.url or embed.url not in urls:                          # If `embed.url` does not exist or if `embed.url` is not part of the content                          # of the message, it's unlikely to be an auto-generated embed by Discord. -                        return True +                        return msg.embeds                      else:                          log.trace(                              "Found a rich embed sent by a regular user account, " @@ -369,6 +524,44 @@ class Filtering(Cog):          except discord.errors.Forbidden:              await channel.send(f"{filtered_member.mention} {reason}") +    def schedule_msg_delete(self, msg: dict) -> None: +        """Delete an offensive message once its deletion date is reached.""" +        delete_at = dateutil.parser.isoparse(msg['delete_date']).replace(tzinfo=None) +        self.scheduler.schedule_at(delete_at, msg['id'], self.delete_offensive_msg(msg)) + +    async def reschedule_offensive_msg_deletion(self) -> None: +        """Get all the pending message deletion from the API and reschedule them.""" +        await self.bot.wait_until_ready() +        response = await self.bot.api_client.get('bot/offensive-messages',) + +        now = datetime.utcnow() + +        for msg in response: +            delete_at = dateutil.parser.isoparse(msg['delete_date']).replace(tzinfo=None) + +            if delete_at < now: +                await self.delete_offensive_msg(msg) +            else: +                self.schedule_msg_delete(msg) + +    async def delete_offensive_msg(self, msg: Mapping[str, str]) -> None: +        """Delete an offensive message, and then delete it from the db.""" +        try: +            channel = self.bot.get_channel(msg['channel_id']) +            if channel: +                msg_obj = await channel.fetch_message(msg['id']) +                await msg_obj.delete() +        except NotFound: +            log.info( +                f"Tried to delete message {msg['id']}, but the message can't be found " +                f"(it has been probably already deleted)." 
+            ) +        except HTTPException as e: +            log.warning(f"Failed to delete message {msg['id']}: status {e.status}") + +        await self.bot.api_client.delete(f'bot/offensive-messages/{msg["id"]}') +        log.info(f"Deleted the offensive message with id {msg['id']}.") +  def setup(bot: Bot) -> None:      """Load the Filtering cog.""" diff --git a/bot/cogs/help.py b/bot/cogs/help.py index 542f19139..3d1d6fd10 100644 --- a/bot/cogs/help.py +++ b/bot/cogs/help.py @@ -8,6 +8,7 @@ from typing import List, Union  from discord import Colour, Embed, Member, Message, NotFound, Reaction, User  from discord.ext.commands import Bot, Cog, Command, Context, Group, HelpCommand  from fuzzywuzzy import fuzz, process +from fuzzywuzzy.utils import full_process  from bot import constants  from bot.constants import Channels, Emojis, STAFF_ROLES @@ -36,13 +37,12 @@ async def help_cleanup(bot: Bot, author: Member, message: Message) -> None:      await message.add_reaction(DELETE_EMOJI) -    try: -        await bot.wait_for("reaction_add", check=check, timeout=300) -        await message.delete() -    except TimeoutError: -        await message.remove_reaction(DELETE_EMOJI, bot.user) -    except NotFound: -        pass +    with suppress(NotFound): +        try: +            await bot.wait_for("reaction_add", check=check, timeout=300) +            await message.delete() +        except TimeoutError: +            await message.remove_reaction(DELETE_EMOJI, bot.user)  class HelpQueryNotFound(ValueError): @@ -146,7 +146,13 @@ class CustomHelpCommand(HelpCommand):          Will return an instance of the `HelpQueryNotFound` exception with the error message and possible matches.          """          choices = await self.get_all_help_choices() -        result = process.extractBests(string, choices, scorer=fuzz.ratio, score_cutoff=60) + +        # Run fuzzywuzzy's processor beforehand, and avoid matching if processed string is empty +        # This avoids fuzzywuzzy from raising a warning on inputs with only non-alphanumeric characters +        if (processed := full_process(string)): +            result = process.extractBests(processed, choices, scorer=fuzz.ratio, score_cutoff=60, processor=None) +        else: +            result = []          return HelpQueryNotFound(f'Query "{string}" not found.', dict(result)) @@ -299,7 +305,7 @@ class CustomHelpCommand(HelpCommand):              embed,              prefix=description,              max_lines=COMMANDS_PER_PAGE, -            max_size=2040, +            max_size=2000,          )      async def send_bot_help(self, mapping: dict) -> None: @@ -346,7 +352,7 @@ class CustomHelpCommand(HelpCommand):              # add any remaining command help that didn't get added in the last iteration above.              
pages.append(page) -        await LinePaginator.paginate(pages, self.context, embed=embed, max_lines=1, max_size=2040) +        await LinePaginator.paginate(pages, self.context, embed=embed, max_lines=1, max_size=2000)  class Help(Cog): diff --git a/bot/cogs/help_channels.py b/bot/cogs/help_channels.py index 70cef339a..0c8cbb417 100644 --- a/bot/cogs/help_channels.py +++ b/bot/cogs/help_channels.py @@ -1,12 +1,10 @@  import asyncio -import inspect  import json  import logging  import random  import typing as t  from collections import deque -from contextlib import suppress -from datetime import datetime +from datetime import datetime, timedelta, timezone  from pathlib import Path  import discord @@ -15,6 +13,7 @@ from discord.ext import commands  from bot import constants  from bot.bot import Bot +from bot.utils import RedisCache  from bot.utils.checks import with_role_check  from bot.utils.scheduling import Scheduler @@ -22,7 +21,7 @@ log = logging.getLogger(__name__)  ASKING_GUIDE_URL = "https://pythondiscord.com/pages/asking-good-questions/"  MAX_CHANNELS_PER_CATEGORY = 50 -EXCLUDED_CHANNELS = (constants.Channels.how_to_get_help,) +EXCLUDED_CHANNELS = (constants.Channels.how_to_get_help, constants.Channels.cooldown)  HELP_CHANNEL_TOPIC = """  This is a Python help channel. You can claim your own help channel in the Python Help: Available category. @@ -35,9 +34,6 @@ and will be yours until it has been inactive for {constants.HelpChannels.idle_mi  is closed manually with `!close`. When that happens, it will be set to **dormant** and moved into \  the **Help: Dormant** category. -You may claim a new channel once every {constants.HelpChannels.claim_minutes} minutes. If you \ -currently cannot send a message in this channel, it means you are on cooldown and need to wait. -  Try to write the best question you can by providing a detailed description and telling us what \  you've tried already. For more information on asking a good question, \  check out our guide on [asking good questions]({ASKING_GUIDE_URL}). @@ -57,14 +53,7 @@ through our guide for [asking a good question]({ASKING_GUIDE_URL}).  CoroutineFunc = t.Callable[..., t.Coroutine] -class TaskData(t.NamedTuple): -    """Data for a scheduled task.""" - -    wait_time: int -    callback: t.Awaitable - - -class HelpChannels(Scheduler, commands.Cog): +class HelpChannels(commands.Cog):      """      Manage the help channel system of the guild. @@ -99,13 +88,23 @@ class HelpChannels(Scheduler, commands.Cog):      Help channels are named after the chemical elements in `bot/resources/elements.json`.      """ -    def __init__(self, bot: Bot): -        super().__init__() +    # This cache tracks which channels are claimed by which members. +    # RedisCache[discord.TextChannel.id, t.Union[discord.User.id, discord.Member.id]] +    help_channel_claimants = RedisCache() + +    # This cache maps a help channel to whether it has had any +    # activity other than the original claimant. True being no other +    # activity and False being other activity. 
+    # RedisCache[discord.TextChannel.id, bool] +    unanswered = RedisCache() +    # This dictionary maps a help channel to the time it was claimed +    # RedisCache[discord.TextChannel.id, UtcPosixTimestamp] +    claim_times = RedisCache() + +    def __init__(self, bot: Bot):          self.bot = bot -        self.help_channel_claimants: ( -            t.Dict[discord.TextChannel, t.Union[discord.Member, discord.User]] -        ) = {} +        self.scheduler = Scheduler(self.__class__.__name__)          # Categories          self.available_category: discord.CategoryChannel = None @@ -125,16 +124,6 @@ class HelpChannels(Scheduler, commands.Cog):          self.on_message_lock = asyncio.Lock()          self.init_task = self.bot.loop.create_task(self.init_cog()) -        # Stats - -        # This dictionary maps a help channel to the time it was claimed -        self.claim_times: t.Dict[int, datetime] = {} - -        # This dictionary maps a help channel to whether it has had any -        # activity other than the original claimant. True being no other -        # activity and False being other activity. -        self.unanswered: t.Dict[int, bool] = {} -      def cog_unload(self) -> None:          """Cancel the init task and scheduled tasks when the cog unloads."""          log.trace("Cog unload: cancelling the init_cog task") @@ -144,7 +133,7 @@ class HelpChannels(Scheduler, commands.Cog):          for task in self.queue_tasks:              task.cancel() -        self.cancel_all() +        self.scheduler.cancel_all()      def create_channel_queue(self) -> asyncio.Queue:          """ @@ -197,7 +186,7 @@ class HelpChannels(Scheduler, commands.Cog):      async def dormant_check(self, ctx: commands.Context) -> bool:          """Return True if the user is the help channel claimant or passes the role check.""" -        if self.help_channel_claimants.get(ctx.channel) == ctx.author: +        if await self.help_channel_claimants.get(ctx.channel.id) == ctx.author.id:              log.trace(f"{ctx.author} is the help channel claimant, passing the check for dormant.")              self.bot.stats.incr("help.dormant_invoke.claimant")              return True @@ -222,15 +211,17 @@ class HelpChannels(Scheduler, commands.Cog):          log.trace("close command invoked; checking if the channel is in-use.")          if ctx.channel.category == self.in_use_category:              if await self.dormant_check(ctx): -                with suppress(KeyError): -                    del self.help_channel_claimants[ctx.channel] +                # Remove the claimant and the cooldown role +                await self.help_channel_claimants.delete(ctx.channel.id)                  await self.remove_cooldown_role(ctx.author) +                  # Ignore missing task when cooldown has passed but the channel still isn't dormant. 
-                self.cancel_task(ctx.author.id, ignore_missing=True) +                if ctx.author.id in self.scheduler: +                    self.scheduler.cancel(ctx.author.id)                  await self.move_to_dormant(ctx.channel, "command") -                self.cancel_task(ctx.channel.id) +                self.scheduler.cancel(ctx.channel.id)          else:              log.debug(f"{ctx.author} invoked command 'dormant' outside an in-use help channel") @@ -284,6 +275,15 @@ class HelpChannels(Scheduler, commands.Cog):              if channel.category_id == category.id and not self.is_excluded_channel(channel):                  yield channel +    async def get_in_use_time(self, channel_id: int) -> t.Optional[timedelta]: +        """Return the duration `channel_id` has been in use. Return None if it's not in use.""" +        log.trace(f"Calculating in use time for channel {channel_id}.") + +        claimed_timestamp = await self.claim_times.get(channel_id) +        if claimed_timestamp: +            claimed = datetime.utcfromtimestamp(claimed_timestamp) +            return datetime.utcnow() - claimed +      @staticmethod      def get_names() -> t.List[str]:          """ @@ -386,7 +386,7 @@ class HelpChannels(Scheduler, commands.Cog):          log.trace("Initialising the cog.")          await self.init_categories() -        await self.reset_send_permissions() +        await self.check_cooldowns()          self.channel_queue = self.create_channel_queue()          self.name_queue = self.create_name_queue() @@ -463,16 +463,15 @@ class HelpChannels(Scheduler, commands.Cog):          else:              # Cancel the existing task, if any.              if has_task: -                self.cancel_task(channel.id) - -            data = TaskData(idle_seconds - time_elapsed, self.move_idle_channel(channel)) +                self.scheduler.cancel(channel.id) +            delay = idle_seconds - time_elapsed              log.info(                  f"#{channel} ({channel.id}) is still active; " -                f"scheduling it to be moved after {data.wait_time} seconds." +                f"scheduling it to be moved after {delay} seconds."              
) -            self.schedule_task(channel.id, data) +            self.scheduler.schedule_later(delay, channel.id, self.move_idle_channel(channel))      async def move_to_bottom_position(self, channel: discord.TextChannel, category_id: int, **options) -> None:          """ @@ -546,19 +545,17 @@ class HelpChannels(Scheduler, commands.Cog):          self.bot.stats.incr(f"help.dormant_calls.{caller}") -        if channel.id in self.claim_times: -            claimed = self.claim_times[channel.id] -            in_use_time = datetime.now() - claimed +        in_use_time = await self.get_in_use_time(channel.id) +        if in_use_time:              self.bot.stats.timing("help.in_use_time", in_use_time) -        if channel.id in self.unanswered: -            if self.unanswered[channel.id]: -                self.bot.stats.incr("help.sessions.unanswered") -            else: -                self.bot.stats.incr("help.sessions.answered") +        unanswered = await self.unanswered.get(channel.id) +        if unanswered: +            self.bot.stats.incr("help.sessions.unanswered") +        elif unanswered is not None: +            self.bot.stats.incr("help.sessions.answered")          log.trace(f"Position of #{channel} ({channel.id}) is actually {channel.position}.") -          log.trace(f"Sending dormant message for #{channel} ({channel.id}).")          embed = discord.Embed(description=DORMANT_MSG)          await channel.send(embed=embed) @@ -579,8 +576,7 @@ class HelpChannels(Scheduler, commands.Cog):          timeout = constants.HelpChannels.idle_minutes * 60          log.trace(f"Scheduling #{channel} ({channel.id}) to become dormant in {timeout} sec.") -        data = TaskData(timeout, self.move_idle_channel(channel)) -        self.schedule_task(channel.id, data) +        self.scheduler.schedule_later(timeout, channel.id, self.move_idle_channel(channel))          self.report_stats()      async def notify(self) -> None: @@ -615,11 +611,13 @@ class HelpChannels(Scheduler, commands.Cog):              channel = self.bot.get_channel(constants.HelpChannels.notify_channel)              mentions = " ".join(f"<@&{role}>" for role in constants.HelpChannels.notify_roles) +            allowed_roles = [discord.Object(id_) for id_ in constants.HelpChannels.notify_roles]              message = await channel.send(                  f"{mentions} A new available help channel is needed but there "                  f"are no more dormant ones. Consider freeing up some in-use channels manually by " -                f"using the `{constants.Bot.prefix}dormant` command within the channels." +                f"using the `{constants.Bot.prefix}dormant` command within the channels.", +                allowed_mentions=discord.AllowedMentions(everyone=False, roles=allowed_roles)              )              self.bot.stats.incr("help.out_of_channel_alerts") @@ -637,17 +635,17 @@ class HelpChannels(Scheduler, commands.Cog):          if self.is_in_category(channel, constants.Categories.help_in_use):              log.trace(f"Checking if #{channel} ({channel.id}) has been answered.") -            # Check if there is an entry in unanswered (does not persist across restarts) -            if channel.id in self.unanswered: -                claimant = self.help_channel_claimants.get(channel) -                if not claimant: -                    # The mapping for this channel was lost, we can't do anything. 
+            # Check if there is an entry in unanswered +            if await self.unanswered.contains(channel.id): +                claimant_id = await self.help_channel_claimants.get(channel.id) +                if not claimant_id: +                    # The mapping for this channel doesn't exist, we can't do anything.                      return                  # Check the message did not come from the claimant -                if claimant.id != message.author.id: +                if claimant_id != message.author.id:                      # Mark the channel as answered -                    self.unanswered[channel.id] = False +                    await self.unanswered.set(channel.id, False)      @commands.Cog.listener()      async def on_message(self, message: discord.Message) -> None: @@ -680,12 +678,15 @@ class HelpChannels(Scheduler, commands.Cog):              await self.move_to_in_use(channel)              await self.revoke_send_permissions(message.author)              # Add user with channel for dormant check. -            self.help_channel_claimants[channel] = message.author +            await self.help_channel_claimants.set(channel.id, message.author.id)              self.bot.stats.incr("help.claimed") -            self.claim_times[channel.id] = datetime.now() -            self.unanswered[channel.id] = True +            # Must use a timezone-aware datetime to ensure a correct POSIX timestamp. +            timestamp = datetime.now(timezone.utc).timestamp() +            await self.claim_times.set(channel.id, timestamp) + +            await self.unanswered.set(channel.id, True)              log.trace(f"Releasing on_message lock for {message.id}.") @@ -710,25 +711,38 @@ class HelpChannels(Scheduler, commands.Cog):          log.info(f"Claimant of #{msg.channel} ({msg.author}) deleted message, channel is empty now. Rescheduling task.")          # Cancel existing dormant task before scheduling new. -        self.cancel_task(msg.channel.id) +        self.scheduler.cancel(msg.channel.id) -        task = TaskData(constants.HelpChannels.deleted_idle_minutes * 60, self.move_idle_channel(msg.channel)) -        self.schedule_task(msg.channel.id, task) +        delay = constants.HelpChannels.deleted_idle_minutes * 60 +        self.scheduler.schedule_later(delay, msg.channel.id, self.move_idle_channel(msg.channel))      async def is_empty(self, channel: discord.TextChannel) -> bool:          """Return True if the most recent message in `channel` is the bot's `AVAILABLE_MSG`."""          msg = await self.get_last_message(channel)          return self.match_bot_embed(msg, AVAILABLE_MSG) -    async def reset_send_permissions(self) -> None: -        """Reset send permissions in the Available category for claimants.""" -        log.trace("Resetting send permissions in the Available category.") +    async def check_cooldowns(self) -> None: +        """Remove expired cooldowns and re-schedule active ones.""" +        log.trace("Checking all cooldowns to remove or re-schedule them.")          guild = self.bot.get_guild(constants.Guild.id) +        cooldown = constants.HelpChannels.claim_minutes * 60 -        # TODO: replace with a persistent cache cause checking every member is quite slow -        for member in guild.members: -            if self.is_claimant(member): +        for channel_id, member_id in await self.help_channel_claimants.items(): +            member = guild.get_member(member_id) +            if not member: +                continue  # Member probably left the guild. 
+ +            in_use_time = await self.get_in_use_time(channel_id) + +            if not in_use_time or in_use_time.seconds > cooldown: +                # Remove the role if no claim time could be retrieved or if the cooldown expired. +                # Since the channel is in the claimants cache, it is definitely strange for a time +                # to not exist. However, it isn't a reason to keep the user stuck with a cooldown.                  await self.remove_cooldown_role(member) +            else: +                # The member is still on a cooldown; re-schedule it for the remaining time. +                delay = cooldown - in_use_time.seconds +                self.scheduler.schedule_later(delay, member.id, self.remove_cooldown_role(member))      async def add_cooldown_role(self, member: discord.Member) -> None:          """Add the help cooldown role to `member`.""" @@ -779,13 +793,11 @@ class HelpChannels(Scheduler, commands.Cog):          # Cancel the existing task, if any.          # Would mean the user somehow bypassed the lack of permissions (e.g. user is guild owner). -        self.cancel_task(member.id, ignore_missing=True) +        if member.id in self.scheduler: +            self.scheduler.cancel(member.id) -        timeout = constants.HelpChannels.claim_minutes * 60 -        callback = self.remove_cooldown_role(member) - -        log.trace(f"Scheduling {member}'s ({member.id}) send message permissions to be reinstated.") -        self.schedule_task(member.id, TaskData(timeout, callback)) +        delay = constants.HelpChannels.claim_minutes * 60 +        self.scheduler.schedule_later(delay, member.id, self.remove_cooldown_role(member))      async def send_available_message(self, channel: discord.TextChannel) -> None:          """Send the available message by editing a dormant message or sending a new message.""" @@ -827,21 +839,6 @@ class HelpChannels(Scheduler, commands.Cog):          return channel -    async def _scheduled_task(self, data: TaskData) -> None: -        """Await the `data.callback` coroutine after waiting for `data.wait_time` seconds.""" -        try: -            log.trace(f"Waiting {data.wait_time} seconds before awaiting callback.") -            await asyncio.sleep(data.wait_time) - -            # Use asyncio.shield to prevent callback from cancelling itself. -            # The parent task (_scheduled_task) will still get cancelled. 
-            log.trace("Done waiting; now awaiting the callback.") -            await asyncio.shield(data.callback) -        finally: -            if inspect.iscoroutine(data.callback): -                log.trace("Explicitly closing coroutine.") -                data.callback.close() -  def validate_config() -> None:      """Raise a ValueError if the cog's config is invalid.""" diff --git a/bot/cogs/information.py b/bot/cogs/information.py index f0bd1afdb..d6090d481 100644 --- a/bot/cogs/information.py +++ b/bot/cogs/information.py @@ -226,7 +226,7 @@ class Information(Cog):          if user.nick:              name = f"{user.nick} ({name})" -        joined = time_since(user.joined_at, precision="days") +        joined = time_since(user.joined_at, max_units=3)          roles = ", ".join(role.mention for role in user.roles[1:])          description = [ diff --git a/bot/cogs/moderation/__init__.py b/bot/cogs/moderation/__init__.py index 6880ca1bd..995187ef0 100644 --- a/bot/cogs/moderation/__init__.py +++ b/bot/cogs/moderation/__init__.py @@ -1,15 +1,19 @@  from bot.bot import Bot +from .incidents import Incidents  from .infractions import Infractions  from .management import ModManagement  from .modlog import ModLog  from .silence import Silence +from .slowmode import Slowmode  from .superstarify import Superstarify  def setup(bot: Bot) -> None: -    """Load the Infractions, ModManagement, ModLog, Silence, and Superstarify cogs.""" +    """Load the Incidents, Infractions, ModManagement, ModLog, Silence, Slowmode and Superstarify cogs.""" +    bot.add_cog(Incidents(bot))      bot.add_cog(Infractions(bot))      bot.add_cog(ModLog(bot))      bot.add_cog(ModManagement(bot))      bot.add_cog(Silence(bot)) +    bot.add_cog(Slowmode(bot))      bot.add_cog(Superstarify(bot)) diff --git a/bot/cogs/moderation/incidents.py b/bot/cogs/moderation/incidents.py new file mode 100644 index 000000000..3605ab1d2 --- /dev/null +++ b/bot/cogs/moderation/incidents.py @@ -0,0 +1,407 @@ +import asyncio +import logging +import typing as t +from datetime import datetime +from enum import Enum + +import discord +from discord.ext.commands import Cog + +from bot.bot import Bot +from bot.constants import Channels, Colours, Emojis, Guild, Webhooks +from bot.utils.messages import sub_clyde + +log = logging.getLogger(__name__) + +# Amount of messages for `crawl_task` to process at most on start-up - limited to 50 +# as in practice, there should never be this many messages, and if there are, +# something has likely gone very wrong +CRAWL_LIMIT = 50 + +# Seconds for `crawl_task` to sleep after adding reactions to a message +CRAWL_SLEEP = 2 + + +class Signal(Enum): +    """ +    Recognized incident status signals. + +    This binds emoji to actions. The bot will only react to emoji linked here. +    All other signals are seen as invalid. 
+    """ + +    ACTIONED = Emojis.incident_actioned +    NOT_ACTIONED = Emojis.incident_unactioned +    INVESTIGATING = Emojis.incident_investigating + + +# Reactions from non-mod roles will be removed +ALLOWED_ROLES: t.Set[int] = set(Guild.moderation_roles) + +# Message must have all of these emoji to pass the `has_signals` check +ALL_SIGNALS: t.Set[str] = {signal.value for signal in Signal} + +# An embed coupled with an optional file to be dispatched +# If the file is not None, the embed attempts to show it in its body +FileEmbed = t.Tuple[discord.Embed, t.Optional[discord.File]] + + +async def download_file(attachment: discord.Attachment) -> t.Optional[discord.File]: +    """ +    Download & return `attachment` file. + +    If the download fails, the reason is logged and None will be returned. +    404 and 403 errors are only logged at debug level. +    """ +    log.debug(f"Attempting to download attachment: {attachment.filename}") +    try: +        return await attachment.to_file() +    except (discord.NotFound, discord.Forbidden) as exc: +        log.debug(f"Failed to download attachment: {exc}") +    except Exception: +        log.exception("Failed to download attachment") + + +async def make_embed(incident: discord.Message, outcome: Signal, actioned_by: discord.Member) -> FileEmbed: +    """ +    Create an embed representation of `incident` for the #incidents-archive channel. + +    The name & discriminator of `actioned_by` and `outcome` will be presented in the +    embed footer. Additionally, the embed is coloured based on `outcome`. + +    The author of `incident` is not shown in the embed. It is assumed that this piece +    of information will be relayed in other ways, e.g. webhook username. + +    As mentions in embeds do not ping, we do not need to use `incident.clean_content`. + +    If `incident` contains attachments, the first attachment will be downloaded and +    returned alongside the embed. The embed attempts to display the attachment. +    Should the download fail, we fallback on linking the `proxy_url`, which should +    remain functional for some time after the original message is deleted. 
+    """ +    log.trace(f"Creating embed for {incident.id=}") + +    if outcome is Signal.ACTIONED: +        colour = Colours.soft_green +        footer = f"Actioned by {actioned_by}" +    else: +        colour = Colours.soft_red +        footer = f"Rejected by {actioned_by}" + +    embed = discord.Embed( +        description=incident.content, +        timestamp=datetime.utcnow(), +        colour=colour, +    ) +    embed.set_footer(text=footer, icon_url=actioned_by.avatar_url) + +    if incident.attachments: +        attachment = incident.attachments[0]  # User-sent messages can only contain one attachment +        file = await download_file(attachment) + +        if file is not None: +            embed.set_image(url=f"attachment://{attachment.filename}")  # Embed displays the attached file +        else: +            embed.set_author(name="[Failed to relay attachment]", url=attachment.proxy_url)  # Embed links the file +    else: +        file = None + +    return embed, file + + +def is_incident(message: discord.Message) -> bool: +    """True if `message` qualifies as an incident, False otherwise.""" +    conditions = ( +        message.channel.id == Channels.incidents,  # Message sent in #incidents +        not message.author.bot,                    # Not by a bot +        not message.content.startswith("#"),       # Doesn't start with a hash +        not message.pinned,                        # And isn't header +    ) +    return all(conditions) + + +def own_reactions(message: discord.Message) -> t.Set[str]: +    """Get the set of reactions placed on `message` by the bot itself.""" +    return {str(reaction.emoji) for reaction in message.reactions if reaction.me} + + +def has_signals(message: discord.Message) -> bool: +    """True if `message` already has all `Signal` reactions, False otherwise.""" +    return ALL_SIGNALS.issubset(own_reactions(message)) + + +async def add_signals(incident: discord.Message) -> None: +    """ +    Add `Signal` member emoji to `incident` as reactions. + +    If the emoji has already been placed on `incident` by the bot, it will be skipped. +    """ +    existing_reacts = own_reactions(incident) + +    for signal_emoji in Signal: +        if signal_emoji.value in existing_reacts:  # This would not raise, but it is a superfluous API call +            log.trace(f"Skipping emoji as it's already been placed: {signal_emoji}") +        else: +            log.trace(f"Adding reaction: {signal_emoji}") +            await incident.add_reaction(signal_emoji.value) + + +class Incidents(Cog): +    """ +    Automation for the #incidents channel. + +    This cog does not provide a command API, it only reacts to the following events. 
+ +    On start-up: +        * Crawl #incidents and add missing `Signal` emoji where appropriate +        * This is to retro-actively add the available options for messages which +          were sent while the bot wasn't listening +        * Pinned messages and message starting with # do not qualify as incidents +        * See: `crawl_incidents` + +    On message: +        * Add `Signal` member emoji if message qualifies as an incident +        * Ignore messages starting with # +            * Use this if verbal communication is necessary +            * Each such message must be deleted manually once appropriate +        * See: `on_message` + +    On reaction: +        * Remove reaction if not permitted +            * User does not have any of the roles in `ALLOWED_ROLES` +            * Used emoji is not a `Signal` member +        * If `Signal.ACTIONED` or `Signal.NOT_ACTIONED` were chosen, attempt to +          relay the incident message to #incidents-archive +        * If relay successful, delete original message +        * See: `on_raw_reaction_add` + +    Please refer to function docstrings for implementation details. +    """ + +    def __init__(self, bot: Bot) -> None: +        """Prepare `event_lock` and schedule `crawl_task` on start-up.""" +        self.bot = bot + +        self.event_lock = asyncio.Lock() +        self.crawl_task = self.bot.loop.create_task(self.crawl_incidents()) + +    async def crawl_incidents(self) -> None: +        """ +        Crawl #incidents and add missing emoji where necessary. + +        This is to catch-up should an incident be reported while the bot wasn't listening. +        After adding each reaction, we take a short break to avoid drowning in ratelimits. + +        Once this task is scheduled, listeners that change messages should await it. +        The crawl assumes that the channel history doesn't change as we go over it. + +        Behaviour is configured by: `CRAWL_LIMIT`, `CRAWL_SLEEP`. +        """ +        await self.bot.wait_until_guild_available() +        incidents: discord.TextChannel = self.bot.get_channel(Channels.incidents) + +        log.debug(f"Crawling messages in #incidents: {CRAWL_LIMIT=}, {CRAWL_SLEEP=}") +        async for message in incidents.history(limit=CRAWL_LIMIT): + +            if not is_incident(message): +                log.trace(f"Skipping message {message.id}: not an incident") +                continue + +            if has_signals(message): +                log.trace(f"Skipping message {message.id}: already has all signals") +                continue + +            await add_signals(message) +            await asyncio.sleep(CRAWL_SLEEP) + +        log.debug("Crawl task finished!") + +    async def archive(self, incident: discord.Message, outcome: Signal, actioned_by: discord.Member) -> bool: +        """ +        Relay an embed representation of `incident` to the #incidents-archive channel. + +        The following pieces of information are relayed: +            * Incident message content (as embed description) +            * Incident attachment (if image, shown in archive embed) +            * Incident author name (as webhook author) +            * Incident author avatar (as webhook avatar) +            * Resolution signal `outcome` (as embed colour & footer) +            * Moderator `actioned_by` (name & discriminator shown in footer) + +        If `incident` contains an attachment, we try to add it to the archive embed. 
There is
+        no handling of extensions / file types - we simply dispatch the attachment file with the
+        webhook, and try to display it in the embed. Testing indicates that if the attachment
+        cannot be displayed (e.g. a text file), it's invisible in the embed, with no error.
+
+        Return True if the relay finishes successfully. If anything goes wrong, meaning
+        not all information was relayed, return False. This signals that the original
+        message is not safe to be deleted, as we will lose some information.
+        """
+        log.debug(f"Archiving incident: {incident.id} (outcome: {outcome}, actioned by: {actioned_by})")
+        embed, attachment_file = await make_embed(incident, outcome, actioned_by)
+
+        try:
+            webhook = await self.bot.fetch_webhook(Webhooks.incidents_archive)
+            await webhook.send(
+                embed=embed,
+                username=sub_clyde(incident.author.name),
+                avatar_url=incident.author.avatar_url,
+                file=attachment_file,
+            )
+        except Exception:
+            log.exception(f"Failed to archive incident {incident.id} to #incidents-archive")
+            return False
+        else:
+            log.trace("Message archived successfully!")
+            return True
+
+    def make_confirmation_task(self, incident: discord.Message, timeout: int = 5) -> asyncio.Task:
+        """
+        Create a task to wait `timeout` seconds for `incident` to be deleted.
+
+        If `timeout` passes, this will raise `asyncio.TimeoutError`, signaling that we haven't
+        been able to confirm that the message was deleted.
+        """
+        log.trace(f"Confirmation task will wait {timeout=} seconds for {incident.id=} to be deleted")
+
+        def check(payload: discord.RawReactionActionEvent) -> bool:
+            return payload.message_id == incident.id
+
+        coroutine = self.bot.wait_for(event="raw_message_delete", check=check, timeout=timeout)
+        return self.bot.loop.create_task(coroutine)
+
+    async def process_event(self, reaction: str, incident: discord.Message, member: discord.Member) -> None:
+        """
+        Process a `reaction_add` event in #incidents.
+
+        First, we check that the reaction is a recognized `Signal` member, and that it was sent by
+        a permitted user (at least one role in `ALLOWED_ROLES`). If not, the reaction is removed.
+
+        If the reaction was either `Signal.ACTIONED` or `Signal.NOT_ACTIONED`, we attempt to relay
+        the report to #incidents-archive. If successful, the original message is deleted.
+
+        We do not release `event_lock` until we receive the corresponding `message_delete` event.
+        This ensures that if there is a racing event awaiting the lock, it will fail to find the
+        message, and will abort. There is a `timeout` to ensure that this doesn't hold the lock
+        forever should something go wrong.
+        """ +        members_roles: t.Set[int] = {role.id for role in member.roles} +        if not members_roles & ALLOWED_ROLES:  # Intersection is truthy on at least 1 common element +            log.debug(f"Removing invalid reaction: user {member} is not permitted to send signals") +            await incident.remove_reaction(reaction, member) +            return + +        try: +            signal = Signal(reaction) +        except ValueError: +            log.debug(f"Removing invalid reaction: emoji {reaction} is not a valid signal") +            await incident.remove_reaction(reaction, member) +            return + +        log.trace(f"Received signal: {signal}") + +        if signal not in (Signal.ACTIONED, Signal.NOT_ACTIONED): +            log.debug("Reaction was valid, but no action is currently defined for it") +            return + +        relay_successful = await self.archive(incident, signal, actioned_by=member) +        if not relay_successful: +            log.trace("Original message will not be deleted as we failed to relay it to the archive") +            return + +        timeout = 5  # Seconds +        confirmation_task = self.make_confirmation_task(incident, timeout) + +        log.trace("Deleting original message") +        await incident.delete() + +        log.trace(f"Awaiting deletion confirmation: {timeout=} seconds") +        try: +            await confirmation_task +        except asyncio.TimeoutError: +            log.warning(f"Did not receive incident deletion confirmation within {timeout} seconds!") +        else: +            log.trace("Deletion was confirmed") + +    async def resolve_message(self, message_id: int) -> t.Optional[discord.Message]: +        """ +        Get `discord.Message` for `message_id` from cache, or API. + +        We first look into the local cache to see if the message is present. + +        If not, we try to fetch the message from the API. This is necessary for messages +        which were sent before the bot's current session. + +        In an edge-case, it is also possible that the message was already deleted, and +        the API will respond with a 404. In such a case, None will be returned. +        This signals that the event for `message_id` should be ignored. +        """ +        await self.bot.wait_until_guild_available()  # First make sure that the cache is ready +        log.trace(f"Resolving message for: {message_id=}") +        message: t.Optional[discord.Message] = self.bot._connection._get_message(message_id) + +        if message is not None: +            log.trace("Message was found in cache") +            return message + +        log.trace("Message not found, attempting to fetch") +        try: +            message = await self.bot.get_channel(Channels.incidents).fetch_message(message_id) +        except discord.NotFound: +            log.trace("Message doesn't exist, it was likely already relayed") +        except Exception: +            log.exception(f"Failed to fetch message {message_id}!") +        else: +            log.trace("Message fetched successfully!") +            return message + +    @Cog.listener() +    async def on_raw_reaction_add(self, payload: discord.RawReactionActionEvent) -> None: +        """ +        Pre-process `payload` and pass it to `process_event` if appropriate. + +        We abort instantly if `payload` doesn't relate to a message sent in #incidents, +        or if it was sent by a bot. 
+ +        If `payload` relates to a message in #incidents, we first ensure that `crawl_task` has +        finished, to make sure we don't mutate channel state as we're crawling it. + +        Next, we acquire `event_lock` - to prevent racing, events are processed one at a time. + +        Once we have the lock, the `discord.Message` object for this event must be resolved. +        If the lock was previously held by an event which successfully relayed the incident, +        this will fail and we abort the current event. + +        Finally, with both the lock and the `discord.Message` instance in our hands, we delegate +        to `process_event` to handle the event. + +        The justification for using a raw listener is the need to receive events for messages +        which were not cached in the current session. As a result, a certain amount of +        complexity is introduced, but at the moment this doesn't appear to be avoidable. +        """ +        if payload.channel_id != Channels.incidents or payload.member.bot: +            return + +        log.trace(f"Received reaction add event in #incidents, waiting for crawler: {self.crawl_task.done()=}") +        await self.crawl_task + +        log.trace(f"Acquiring event lock: {self.event_lock.locked()=}") +        async with self.event_lock: +            message = await self.resolve_message(payload.message_id) + +            if message is None: +                log.debug("Listener will abort as related message does not exist!") +                return + +            if not is_incident(message): +                log.debug("Ignoring event for a non-incident message") +                return + +            await self.process_event(str(payload.emoji), message, payload.member) +            log.trace("Releasing event lock") + +    @Cog.listener() +    async def on_message(self, message: discord.Message) -> None: +        """Pass `message` to `add_signals` if and only if it satisfies `is_incident`.""" +        if is_incident(message): +            await add_signals(message) diff --git a/bot/cogs/moderation/infractions.py b/bot/cogs/moderation/infractions.py index 5bfaad796..8df642428 100644 --- a/bot/cogs/moderation/infractions.py +++ b/bot/cogs/moderation/infractions.py @@ -53,7 +53,7 @@ class Infractions(InfractionScheduler, commands.Cog):      # region: Permanent infractions      @command() -    async def warn(self, ctx: Context, user: Member, *, reason: str = None) -> None: +    async def warn(self, ctx: Context, user: Member, *, reason: t.Optional[str] = None) -> None:          """Warn a user for the given reason."""          infraction = await utils.post_infraction(ctx, user, "warning", reason, active=False)          if infraction is None: @@ -62,12 +62,12 @@ class Infractions(InfractionScheduler, commands.Cog):          await self.apply_infraction(ctx, infraction, user)      @command() -    async def kick(self, ctx: Context, user: Member, *, reason: str = None) -> None: +    async def kick(self, ctx: Context, user: Member, *, reason: t.Optional[str] = None) -> None:          """Kick a user for the given reason.""" -        await self.apply_kick(ctx, user, reason, active=False) +        await self.apply_kick(ctx, user, reason)      @command() -    async def ban(self, ctx: Context, user: FetchedMember, *, reason: str = None) -> None: +    async def ban(self, ctx: Context, user: FetchedMember, *, reason: t.Optional[str] = None) -> None:          """Permanently ban a user for the given reason and stop watching them with Big Brother."""     
     await self.apply_ban(ctx, user, reason) @@ -75,7 +75,7 @@ class Infractions(InfractionScheduler, commands.Cog):      # region: Temporary infractions      @command(aliases=["mute"]) -    async def tempmute(self, ctx: Context, user: Member, duration: Expiry, *, reason: str = None) -> None: +    async def tempmute(self, ctx: Context, user: Member, duration: Expiry, *, reason: t.Optional[str] = None) -> None:          """          Temporarily mute a user for the given reason and duration. @@ -94,7 +94,14 @@ class Infractions(InfractionScheduler, commands.Cog):          await self.apply_mute(ctx, user, reason, expires_at=duration)      @command() -    async def tempban(self, ctx: Context, user: FetchedMember, duration: Expiry, *, reason: str = None) -> None: +    async def tempban( +        self, +        ctx: Context, +        user: FetchedMember, +        duration: Expiry, +        *, +        reason: t.Optional[str] = None +    ) -> None:          """          Temporarily ban a user for the given reason and duration. @@ -116,7 +123,7 @@ class Infractions(InfractionScheduler, commands.Cog):      # region: Permanent shadow infractions      @command(hidden=True) -    async def note(self, ctx: Context, user: FetchedMember, *, reason: str = None) -> None: +    async def note(self, ctx: Context, user: FetchedMember, *, reason: t.Optional[str] = None) -> None:          """Create a private note for a user with the given reason without notifying the user."""          infraction = await utils.post_infraction(ctx, user, "note", reason, hidden=True, active=False)          if infraction is None: @@ -125,12 +132,12 @@ class Infractions(InfractionScheduler, commands.Cog):          await self.apply_infraction(ctx, infraction, user)      @command(hidden=True, aliases=['shadowkick', 'skick']) -    async def shadow_kick(self, ctx: Context, user: Member, *, reason: str = None) -> None: +    async def shadow_kick(self, ctx: Context, user: Member, *, reason: t.Optional[str] = None) -> None:          """Kick a user for the given reason without notifying the user.""" -        await self.apply_kick(ctx, user, reason, hidden=True, active=False) +        await self.apply_kick(ctx, user, reason, hidden=True)      @command(hidden=True, aliases=['shadowban', 'sban']) -    async def shadow_ban(self, ctx: Context, user: FetchedMember, *, reason: str = None) -> None: +    async def shadow_ban(self, ctx: Context, user: FetchedMember, *, reason: t.Optional[str] = None) -> None:          """Permanently ban a user for the given reason without notifying the user."""          await self.apply_ban(ctx, user, reason, hidden=True) @@ -138,7 +145,13 @@ class Infractions(InfractionScheduler, commands.Cog):      # region: Temporary shadow infractions      @command(hidden=True, aliases=["shadowtempmute, stempmute", "shadowmute", "smute"]) -    async def shadow_tempmute(self, ctx: Context, user: Member, duration: Expiry, *, reason: str = None) -> None: +    async def shadow_tempmute( +        self, ctx: Context, +        user: Member, +        duration: Expiry, +        *, +        reason: t.Optional[str] = None +    ) -> None:          """          Temporarily mute a user for the given reason and duration without notifying the user. 
@@ -163,7 +176,7 @@ class Infractions(InfractionScheduler, commands.Cog):          user: FetchedMember,          duration: Expiry,          *, -        reason: str = None +        reason: t.Optional[str] = None      ) -> None:          """          Temporarily ban a user for the given reason and duration without notifying the user. @@ -198,7 +211,7 @@ class Infractions(InfractionScheduler, commands.Cog):      # endregion      # region: Base apply functions -    async def apply_mute(self, ctx: Context, user: Member, reason: str, **kwargs) -> None: +    async def apply_mute(self, ctx: Context, user: Member, reason: t.Optional[str], **kwargs) -> None:          """Apply a mute infraction with kwargs passed to `post_infraction`."""          if await utils.get_active_infraction(ctx, user, "mute"):              return @@ -218,7 +231,7 @@ class Infractions(InfractionScheduler, commands.Cog):          await self.apply_infraction(ctx, infraction, user, action())      @respect_role_hierarchy() -    async def apply_kick(self, ctx: Context, user: Member, reason: str, **kwargs) -> None: +    async def apply_kick(self, ctx: Context, user: Member, reason: t.Optional[str], **kwargs) -> None:          """Apply a kick infraction with kwargs passed to `post_infraction`."""          infraction = await utils.post_infraction(ctx, user, "kick", reason, active=False, **kwargs)          if infraction is None: @@ -226,11 +239,14 @@ class Infractions(InfractionScheduler, commands.Cog):          self.mod_log.ignore(Event.member_remove, user.id) -        action = user.kick(reason=textwrap.shorten(reason, width=512, placeholder="...")) +        if reason: +            reason = textwrap.shorten(reason, width=512, placeholder="...") + +        action = user.kick(reason=reason)          await self.apply_infraction(ctx, infraction, user, action)      @respect_role_hierarchy() -    async def apply_ban(self, ctx: Context, user: UserSnowflake, reason: str, **kwargs) -> None: +    async def apply_ban(self, ctx: Context, user: UserSnowflake, reason: t.Optional[str], **kwargs) -> None:          """          Apply a ban infraction with kwargs passed to `post_infraction`. 
@@ -259,9 +275,10 @@ class Infractions(InfractionScheduler, commands.Cog):          self.mod_log.ignore(Event.member_remove, user.id) -        truncated_reason = textwrap.shorten(reason, width=512, placeholder="...") +        if reason: +            reason = textwrap.shorten(reason, width=512, placeholder="...") -        action = ctx.guild.ban(user, reason=truncated_reason, delete_message_days=0) +        action = ctx.guild.ban(user, reason=reason, delete_message_days=0)          await self.apply_infraction(ctx, infraction, user, action)          if infraction.get('expires_at') is not None: @@ -281,7 +298,7 @@ class Infractions(InfractionScheduler, commands.Cog):      # endregion      # region: Base pardon functions -    async def pardon_mute(self, user_id: int, guild: discord.Guild, reason: str) -> t.Dict[str, str]: +    async def pardon_mute(self, user_id: int, guild: discord.Guild, reason: t.Optional[str]) -> t.Dict[str, str]:          """Remove a user's muted role, DM them a notification, and return a log dict."""          user = guild.get_member(user_id)          log_text = {} @@ -307,7 +324,7 @@ class Infractions(InfractionScheduler, commands.Cog):          return log_text -    async def pardon_ban(self, user_id: int, guild: discord.Guild, reason: str) -> t.Dict[str, str]: +    async def pardon_ban(self, user_id: int, guild: discord.Guild, reason: t.Optional[str]) -> t.Dict[str, str]:          """Remove a user's ban on the Discord guild and return a log dict."""          user = discord.Object(user_id)          log_text = {} diff --git a/bot/cogs/moderation/management.py b/bot/cogs/moderation/management.py index c39c7f3bc..672bb0e9c 100644 --- a/bot/cogs/moderation/management.py +++ b/bot/cogs/moderation/management.py @@ -135,11 +135,11 @@ class ModManagement(commands.Cog):          if 'expires_at' in request_data:              # A scheduled task should only exist if the old infraction wasn't permanent              if old_infraction['expires_at']: -                self.infractions_cog.cancel_task(new_infraction['id']) +                self.infractions_cog.scheduler.cancel(new_infraction['id'])              # If the infraction was not marked as permanent, schedule a new expiration task              if request_data['expires_at']: -                self.infractions_cog.schedule_task(new_infraction['id'], new_infraction) +                self.infractions_cog.schedule_expiration(new_infraction)              log_text += f"""                  Previous expiry: {old_infraction['expires_at'] or "Permanent"} @@ -268,12 +268,12 @@ class ModManagement(commands.Cog):              User: {self.bot.get_user(user_id)} (`{user_id}`)              Type: **{infraction["type"]}**              Shadow: {hidden} -            Reason: {infraction["reason"] or "*None*"}              Created: {created}              Expires: {expires}              Remaining: {remaining}              Actor: {actor.mention if actor else actor_id}              ID: `{infraction["id"]}` +            Reason: {infraction["reason"] or "*None*"}              {"**===============**" if active else "==============="}          """) diff --git a/bot/cogs/moderation/modlog.py b/bot/cogs/moderation/modlog.py index 41472c64c..0a63f57b8 100644 --- a/bot/cogs/moderation/modlog.py +++ b/bot/cogs/moderation/modlog.py @@ -24,7 +24,6 @@ GUILD_CHANNEL = t.Union[discord.CategoryChannel, discord.TextChannel, discord.Vo  CHANNEL_CHANGES_UNSUPPORTED = ("permissions",)  CHANNEL_CHANGES_SUPPRESSED = ("_overwrites", "position") -MEMBER_CHANGES_SUPPRESSED = 
("status", "activities", "_client_status", "nick")  ROLE_CHANGES_UNSUPPORTED = ("colour", "permissions")  VOICE_STATE_ATTRIBUTES = { @@ -122,7 +121,12 @@ class ModLog(Cog, name="ModLog"):                  content = "@everyone"          channel = self.bot.get_channel(channel_id) -        log_message = await channel.send(content=content, embed=embed, files=files) +        log_message = await channel.send( +            content=content, +            embed=embed, +            files=files, +            allowed_mentions=discord.AllowedMentions(everyone=True) +        )          if additional_embeds:              if additional_embeds_msg: @@ -452,6 +456,21 @@ class ModLog(Cog, name="ModLog"):              channel_id=Channels.mod_log          ) +    @staticmethod +    def get_role_diff(before: t.List[discord.Role], after: t.List[discord.Role]) -> t.List[str]: +        """Return a list of strings describing the roles added and removed.""" +        changes = [] +        before_roles = set(before) +        after_roles = set(after) + +        for role in (before_roles - after_roles): +            changes.append(f"**Role removed:** {role.name} (`{role.id}`)") + +        for role in (after_roles - before_roles): +            changes.append(f"**Role added:** {role.name} (`{role.id}`)") + +        return changes +      @Cog.listener()      async def on_member_update(self, before: discord.Member, after: discord.Member) -> None:          """Log member update event to user log.""" @@ -462,74 +481,27 @@ class ModLog(Cog, name="ModLog"):              self._ignored[Event.member_update].remove(before.id)              return -        diff = DeepDiff(before, after) -        changes = [] -        done = [] - -        diff_values = {} - -        diff_values.update(diff.get("values_changed", {})) -        diff_values.update(diff.get("type_changes", {})) -        diff_values.update(diff.get("iterable_item_removed", {})) -        diff_values.update(diff.get("iterable_item_added", {})) - -        diff_user = DeepDiff(before._user, after._user) - -        diff_values.update(diff_user.get("values_changed", {})) -        diff_values.update(diff_user.get("type_changes", {})) -        diff_values.update(diff_user.get("iterable_item_removed", {})) -        diff_values.update(diff_user.get("iterable_item_added", {})) +        changes = self.get_role_diff(before.roles, after.roles) -        for key, value in diff_values.items(): -            if not key:  # Not sure why, but it happens -                continue - -            key = key[5:]  # Remove "root." prefix - -            if "[" in key: -                key = key.split("[", 1)[0] +        # The regex is a simple way to exclude all sequence and mapping types. +        diff = DeepDiff(before, after, exclude_regex_paths=r".*\[.*") -            if "." in key: -                key = key.split(".", 1)[0] +        # A type change seems to always take precedent over a value change. Furthermore, it will +        # include the value change along with the type change anyway. Therefore, it's OK to +        # "overwrite" values_changed; in practice there will never even be anything to overwrite. +        diff_values = {**diff.get("values_changed", {}), **diff.get("type_changes", {})} -            if key in done or key in MEMBER_CHANGES_SUPPRESSED: +        for attr, value in diff_values.items(): +            if not attr:  # Not sure why, but it happens.                  
continue -            if key == "_roles": -                new_roles = after.roles -                old_roles = before.roles - -                for role in old_roles: -                    if role not in new_roles: -                        changes.append(f"**Role removed:** {role.name} (`{role.id}`)") - -                for role in new_roles: -                    if role not in old_roles: -                        changes.append(f"**Role added:** {role.name} (`{role.id}`)") - -            else: -                new = value.get("new_value") -                old = value.get("old_value") - -                if new and old: -                    changes.append(f"**{key.title()}:** `{old}` **→** `{new}`") - -            done.append(key) - -        if before.name != after.name: -            changes.append( -                f"**Username:** `{before.name}` **→** `{after.name}`" -            ) +            attr = attr[5:]  # Remove "root." prefix. +            attr = attr.replace("_", " ").replace(".", " ").capitalize() -        if before.discriminator != after.discriminator: -            changes.append( -                f"**Discriminator:** `{before.discriminator}` **→** `{after.discriminator}`" -            ) +            new = value.get("new_value") +            old = value.get("old_value") -        if before.display_name != after.display_name: -            changes.append( -                f"**Display name:** `{before.display_name}` **→** `{after.display_name}`" -            ) +            changes.append(f"**{attr}:** `{old}` **→** `{new}`")          if not changes:              return @@ -543,8 +515,10 @@ class ModLog(Cog, name="ModLog"):          message = f"**{member_str}** (`{after.id}`)\n{message}"          await self.send_log_message( -            Icons.user_update, Colour.blurple(), -            "Member updated", message, +            icon_url=Icons.user_update, +            colour=Colour.blurple(), +            title="Member updated", +            text=message,              thumbnail=after.avatar_url_as(static_format="png"),              channel_id=Channels.user_log          ) diff --git a/bot/cogs/moderation/scheduler.py b/bot/cogs/moderation/scheduler.py index b03d89537..601e238c9 100644 --- a/bot/cogs/moderation/scheduler.py +++ b/bot/cogs/moderation/scheduler.py @@ -1,4 +1,3 @@ -import asyncio  import logging  import textwrap  import typing as t @@ -23,13 +22,13 @@ from .utils import UserSnowflake  log = logging.getLogger(__name__) -class InfractionScheduler(Scheduler): +class InfractionScheduler:      """Handles the application, pardoning, and expiration of infractions."""      def __init__(self, bot: Bot, supported_infractions: t.Container[str]): -        super().__init__() -          self.bot = bot +        self.scheduler = Scheduler(self.__class__.__name__) +          self.bot.loop.create_task(self.reschedule_infractions(supported_infractions))      @property @@ -49,7 +48,7 @@ class InfractionScheduler(Scheduler):          )          for infraction in infractions:              if infraction["expires_at"] is not None and infraction["type"] in supported_infractions: -                self.schedule_task(infraction["id"], infraction) +                self.schedule_expiration(infraction)      async def reapply_infraction(          self, @@ -127,18 +126,17 @@ class InfractionScheduler(Scheduler):                      dm_result = ":incoming_envelope: "                      dm_log_text = "\nDM: Sent" +        end_msg = ""          if infraction["actor"] == self.bot.user.id:              
log.trace(                  f"Infraction #{id_} actor is bot; including the reason in the confirmation message."              ) - -            end_msg = f" (reason: {textwrap.shorten(reason, width=1500, placeholder='...')})" +            if reason: +                end_msg = f" (reason: {textwrap.shorten(reason, width=1500, placeholder='...')})"          elif ctx.channel.id not in STAFF_CHANNELS:              log.trace(                  f"Infraction #{id_} context is not in a staff channel; omitting infraction count."              ) - -            end_msg = ""          else:              log.trace(f"Fetching total infraction count for {user}.") @@ -156,7 +154,7 @@ class InfractionScheduler(Scheduler):                  await action_coro                  if expiry:                      # Schedule the expiration of the infraction. -                    self.schedule_task(infraction["id"], infraction) +                    self.schedule_expiration(infraction)              except discord.HTTPException as e:                  # Accordingly display that applying the infraction failed.                  confirm_msg = ":x: failed to apply" @@ -279,7 +277,7 @@ class InfractionScheduler(Scheduler):                  # Cancel pending expiration task.                  if infraction["expires_at"] is not None: -                    self.cancel_task(infraction["id"]) +                    self.scheduler.cancel(infraction["id"])          # Accordingly display whether the user was successfully notified via DM.          dm_emoji = "" @@ -416,7 +414,7 @@ class InfractionScheduler(Scheduler):          # Cancel the expiration task.          if infraction["expires_at"] is not None: -            self.cancel_task(infraction["id"]) +            self.scheduler.cancel(infraction["id"])          # Send a log message to the mod log.          if send_log: @@ -450,7 +448,7 @@ class InfractionScheduler(Scheduler):          """          raise NotImplementedError -    async def _scheduled_task(self, infraction: utils.Infraction) -> None: +    def schedule_expiration(self, infraction: utils.Infraction) -> None:          """          Marks an infraction expired after the delay from time of scheduling to time of expiration. @@ -458,8 +456,4 @@ class InfractionScheduler(Scheduler):          expiration task is cancelled.          """          expiry = dateutil.parser.isoparse(infraction["expires_at"]).replace(tzinfo=None) -        await time.wait_until(expiry) - -        # Because deactivate_infraction() explicitly cancels this scheduled task, it is shielded -        # to avoid prematurely cancelling itself. 
-        await asyncio.shield(self.deactivate_infraction(infraction)) +        self.scheduler.schedule_at(expiry, infraction["id"], self.deactivate_infraction(infraction)) diff --git a/bot/cogs/moderation/silence.py b/bot/cogs/moderation/silence.py index 25febfa51..ae4fb7b64 100644 --- a/bot/cogs/moderation/silence.py +++ b/bot/cogs/moderation/silence.py @@ -11,6 +11,7 @@ from bot.bot import Bot  from bot.constants import Channels, Emojis, Guild, MODERATION_ROLES, Roles  from bot.converters import HushDurationConverter  from bot.utils.checks import with_role_check +from bot.utils.scheduling import Scheduler  log = logging.getLogger(__name__) @@ -58,7 +59,9 @@ class Silence(commands.Cog):      def __init__(self, bot: Bot):          self.bot = bot +        self.scheduler = Scheduler(self.__class__.__name__)          self.muted_channels = set() +          self._get_instance_vars_task = self.bot.loop.create_task(self._get_instance_vars())          self._get_instance_vars_event = asyncio.Event() @@ -90,9 +93,8 @@ class Silence(commands.Cog):              return          await ctx.send(f"{Emojis.check_mark} silenced current channel for {duration} minute(s).") -        await asyncio.sleep(duration*60) -        log.info("Unsilencing channel after set delay.") -        await ctx.invoke(self.unsilence) + +        self.scheduler.schedule_later(duration * 60, ctx.channel.id, ctx.invoke(self.unsilence))      @commands.command(aliases=("unhush",))      async def unsilence(self, ctx: Context) -> None: @@ -103,7 +105,9 @@ class Silence(commands.Cog):          """          await self._get_instance_vars_event.wait()          log.debug(f"Unsilencing channel #{ctx.channel} from {ctx.author}'s command.") -        if await self._unsilence(ctx.channel): +        if not await self._unsilence(ctx.channel): +            await ctx.send(f"{Emojis.cross_mark} current channel was not silenced.") +        else:              await ctx.send(f"{Emojis.check_mark} unsilenced current channel.")      async def _silence(self, channel: TextChannel, persistent: bool, duration: Optional[int]) -> bool: @@ -140,6 +144,7 @@ class Silence(commands.Cog):          if current_overwrite.send_messages is False:              await channel.set_permissions(self._verified_role, **dict(current_overwrite, send_messages=None))              log.info(f"Unsilenced channel #{channel} ({channel.id}).") +            self.scheduler.cancel(channel.id)              self.notifier.remove_channel(channel)              self.muted_channels.discard(channel)              return True diff --git a/bot/cogs/moderation/slowmode.py b/bot/cogs/moderation/slowmode.py new file mode 100644 index 000000000..1d055afac --- /dev/null +++ b/bot/cogs/moderation/slowmode.py @@ -0,0 +1,97 @@ +import logging +from datetime import datetime +from typing import Optional + +from dateutil.relativedelta import relativedelta +from discord import TextChannel +from discord.ext.commands import Cog, Context, group + +from bot.bot import Bot +from bot.constants import Emojis, MODERATION_ROLES +from bot.converters import DurationDelta +from bot.decorators import with_role_check +from bot.utils import time + +log = logging.getLogger(__name__) + +SLOWMODE_MAX_DELAY = 21600  # seconds + + +class Slowmode(Cog): +    """Commands for getting and setting slowmode delays of text channels.""" + +    def __init__(self, bot: Bot) -> None: +        self.bot = bot + +    @group(name='slowmode', aliases=['sm'], invoke_without_command=True) +    async def slowmode_group(self, ctx: Context) -> None: +       
 """Get or set the slowmode delay for the text channel this was invoked in or a given text channel.""" +        await ctx.send_help(ctx.command) + +    @slowmode_group.command(name='get', aliases=['g']) +    async def get_slowmode(self, ctx: Context, channel: Optional[TextChannel]) -> None: +        """Get the slowmode delay for a text channel.""" +        # Use the channel this command was invoked in if one was not given +        if channel is None: +            channel = ctx.channel + +        delay = relativedelta(seconds=channel.slowmode_delay) +        humanized_delay = time.humanize_delta(delay) + +        await ctx.send(f'The slowmode delay for {channel.mention} is {humanized_delay}.') + +    @slowmode_group.command(name='set', aliases=['s']) +    async def set_slowmode(self, ctx: Context, channel: Optional[TextChannel], delay: DurationDelta) -> None: +        """Set the slowmode delay for a text channel.""" +        # Use the channel this command was invoked in if one was not given +        if channel is None: +            channel = ctx.channel + +        # Convert `dateutil.relativedelta.relativedelta` to `datetime.timedelta` +        # Must do this to get the delta in a particular unit of time +        utcnow = datetime.utcnow() +        slowmode_delay = (utcnow + delay - utcnow).total_seconds() + +        humanized_delay = time.humanize_delta(delay) + +        # Ensure the delay is within discord's limits +        if slowmode_delay <= SLOWMODE_MAX_DELAY: +            log.info(f'{ctx.author} set the slowmode delay for #{channel} to {humanized_delay}.') + +            await channel.edit(slowmode_delay=slowmode_delay) +            await ctx.send( +                f'{Emojis.check_mark} The slowmode delay for {channel.mention} is now {humanized_delay}.' +            ) + +        else: +            log.info( +                f'{ctx.author} tried to set the slowmode delay of #{channel} to {humanized_delay}, ' +                'which is not between 0 and 6 hours.' +            ) + +            await ctx.send( +                f'{Emojis.cross_mark} The slowmode delay must be between 0 and 6 hours.' +            ) + +    @slowmode_group.command(name='reset', aliases=['r']) +    async def reset_slowmode(self, ctx: Context, channel: Optional[TextChannel]) -> None: +        """Reset the slowmode delay for a text channel to 0 seconds.""" +        # Use the channel this command was invoked in if one was not given +        if channel is None: +            channel = ctx.channel + +        log.info(f'{ctx.author} reset the slowmode delay for #{channel} to 0 seconds.') + +        await channel.edit(slowmode_delay=0) +        await ctx.send( +            f'{Emojis.check_mark} The slowmode delay for {channel.mention} has been reset to 0 seconds.' 
+        ) + +    def cog_check(self, ctx: Context) -> bool: +        """Only allow moderators to invoke the commands in this cog.""" +        return with_role_check(ctx, *MODERATION_ROLES) + + +def setup(bot: Bot) -> None: +    """Load the Slowmode cog.""" +    bot.add_cog(Slowmode(bot)) diff --git a/bot/cogs/moderation/superstarify.py b/bot/cogs/moderation/superstarify.py index 45a010f00..867de815a 100644 --- a/bot/cogs/moderation/superstarify.py +++ b/bot/cogs/moderation/superstarify.py @@ -146,7 +146,7 @@ class Superstarify(InfractionScheduler, Cog):          log.debug(f"Changing nickname of {member} to {forced_nick}.")          self.mod_log.ignore(constants.Event.member_update, member.id)          await member.edit(nick=forced_nick, reason=reason) -        self.schedule_task(id_, infraction) +        self.schedule_expiration(infraction)          # Send a DM to the user to notify them of their new infraction.          await utils.notify_infraction( diff --git a/bot/cogs/python_news.py b/bot/cogs/python_news.py index d15d0371e..0ab5738a4 100644 --- a/bot/cogs/python_news.py +++ b/bot/cogs/python_news.py @@ -10,6 +10,7 @@ from discord.ext.tasks import loop  from bot import constants  from bot.bot import Bot +from bot.utils.webhooks import send_webhook  PEPS_RSS_URL = "https://www.python.org/dev/peps/peps.rss/" @@ -99,13 +100,21 @@ class PythonNews(Cog):              ):                  continue -            msg = await self.send_webhook( +            # Build an embed and send a webhook +            embed = discord.Embed(                  title=new["title"],                  description=new["summary"],                  timestamp=new_datetime,                  url=new["link"], -                webhook_profile_name=data["feed"]["title"], -                footer=data["feed"]["title"] +                colour=constants.Colours.soft_green +            ) +            embed.set_footer(text=data["feed"]["title"], icon_url=AVATAR_URL) +            msg = await send_webhook( +                webhook=self.webhook, +                username=data["feed"]["title"], +                embed=embed, +                avatar_url=AVATAR_URL, +                wait=True,              )              payload["data"]["pep"].append(pep_nr) @@ -160,15 +169,29 @@ class PythonNews(Cog):                  content = email_information["content"]                  link = THREAD_URL.format(id=thread["href"].split("/")[-2], list=maillist) -                msg = await self.send_webhook( + +                # Build an embed and send a message to the webhook +                embed = discord.Embed(                      title=thread_information["subject"],                      description=content[:500] + f"... 
[continue reading]({link})" if len(content) > 500 else content,                      timestamp=new_date,                      url=link, -                    author=f"{email_information['sender_name']} ({email_information['sender']['address']})", -                    author_url=MAILMAN_PROFILE_URL.format(id=email_information["sender"]["mailman_id"]), -                    webhook_profile_name=self.webhook_names[maillist], -                    footer=f"Posted to {self.webhook_names[maillist]}" +                    colour=constants.Colours.soft_green +                ) +                embed.set_author( +                    name=f"{email_information['sender_name']} ({email_information['sender']['address']})", +                    url=MAILMAN_PROFILE_URL.format(id=email_information["sender"]["mailman_id"]), +                ) +                embed.set_footer( +                    text=f"Posted to {self.webhook_names[maillist]}", +                    icon_url=AVATAR_URL, +                ) +                msg = await send_webhook( +                    webhook=self.webhook, +                    username=self.webhook_names[maillist], +                    embed=embed, +                    avatar_url=AVATAR_URL, +                    wait=True,                  )                  payload["data"][maillist].append(thread_information["thread_id"]) @@ -181,38 +204,6 @@ class PythonNews(Cog):          await self.bot.api_client.put("bot/bot-settings/news", json=payload) -    async def send_webhook(self, -                           title: str, -                           description: str, -                           timestamp: datetime, -                           url: str, -                           webhook_profile_name: str, -                           footer: str, -                           author: t.Optional[str] = None, -                           author_url: t.Optional[str] = None, -                           ) -> discord.Message: -        """Send webhook entry and return sent message.""" -        embed = discord.Embed( -            title=title, -            description=description, -            timestamp=timestamp, -            url=url, -            colour=constants.Colours.soft_green -        ) -        if author and author_url: -            embed.set_author( -                name=author, -                url=author_url -            ) -        embed.set_footer(text=footer, icon_url=AVATAR_URL) - -        return await self.webhook.send( -            embed=embed, -            username=webhook_profile_name, -            avatar_url=AVATAR_URL, -            wait=True -        ) -      async def get_thread_and_first_mail(self, maillist: str, thread_identifier: str) -> t.Tuple[t.Any, t.Any]:          """Get mail thread and first mail from mail.python.org based on `maillist` and `thread_identifier`."""          async with self.bot.http_session.get( diff --git a/bot/cogs/reddit.py b/bot/cogs/reddit.py index 3b77538a0..d853ab2ea 100644 --- a/bot/cogs/reddit.py +++ b/bot/cogs/reddit.py @@ -16,6 +16,7 @@ from bot.constants import Channels, ERROR_REPLIES, Emojis, Reddit as RedditConfi  from bot.converters import Subreddit  from bot.decorators import with_role  from bot.pagination import LinePaginator +from bot.utils.messages import sub_clyde  log = logging.getLogger(__name__) @@ -218,7 +219,8 @@ class Reddit(Cog):          for subreddit in RedditConfig.subreddits:              top_posts = await self.get_top_posts(subreddit=subreddit, time="day") -            message = await 
self.webhook.send(username=f"{subreddit} Top Daily Posts", embed=top_posts, wait=True) +            username = sub_clyde(f"{subreddit} Top Daily Posts") +            message = await self.webhook.send(username=username, embed=top_posts, wait=True)              if message.channel.is_news():                  await message.publish() @@ -228,8 +230,8 @@ class Reddit(Cog):          for subreddit in RedditConfig.subreddits:              # Send and pin the new weekly posts.              top_posts = await self.get_top_posts(subreddit=subreddit, time="week") - -            message = await self.webhook.send(wait=True, username=f"{subreddit} Top Weekly Posts", embed=top_posts) +            username = sub_clyde(f"{subreddit} Top Weekly Posts") +            message = await self.webhook.send(wait=True, username=username, embed=top_posts)              if subreddit.lower() == "r/python":                  if not self.channel: diff --git a/bot/cogs/reminders.py b/bot/cogs/reminders.py index c242d2920..b5998cc0e 100644 --- a/bot/cogs/reminders.py +++ b/bot/cogs/reminders.py @@ -9,28 +9,31 @@ from operator import itemgetter  import discord  from dateutil.parser import isoparse  from dateutil.relativedelta import relativedelta -from discord.ext.commands import Cog, Context, group +from discord.ext.commands import Cog, Context, Greedy, group  from bot.bot import Bot -from bot.constants import Guild, Icons, NEGATIVE_REPLIES, POSITIVE_REPLIES, STAFF_ROLES +from bot.constants import Guild, Icons, MODERATION_ROLES, POSITIVE_REPLIES, STAFF_ROLES  from bot.converters import Duration  from bot.pagination import LinePaginator  from bot.utils.checks import without_role_check +from bot.utils.messages import send_denial  from bot.utils.scheduling import Scheduler -from bot.utils.time import humanize_delta, wait_until +from bot.utils.time import humanize_delta  log = logging.getLogger(__name__)  WHITELISTED_CHANNELS = Guild.reminder_whitelist  MAXIMUM_REMINDERS = 5 +Mentionable = t.Union[discord.Member, discord.Role] -class Reminders(Scheduler, Cog): + +class Reminders(Cog):      """Provide in-channel reminder functionality."""      def __init__(self, bot: Bot):          self.bot = bot -        super().__init__() +        self.scheduler = Scheduler(self.__class__.__name__)          self.bot.loop.create_task(self.reschedule_reminders()) @@ -56,7 +59,7 @@ class Reminders(Scheduler, Cog):                  late = relativedelta(now, remind_at)                  await self.send_reminder(reminder, late)              else: -                self.schedule_task(reminder["id"], reminder) +                self.schedule_reminder(reminder)      def ensure_valid_reminder(          self, @@ -99,17 +102,58 @@ class Reminders(Scheduler, Cog):          await ctx.send(embed=embed) -    async def _scheduled_task(self, reminder: dict) -> None: +    @staticmethod +    async def _check_mentions(ctx: Context, mentions: t.Iterable[Mentionable]) -> t.Tuple[bool, str]: +        """ +        Returns whether or not the list of mentions is allowed. + +        Conditions: +        - Role reminders are Mods+ +        - Reminders for other users are Helpers+ + +        If mentions aren't allowed, also return the type of mention(s) disallowed. 
+        """ +        if without_role_check(ctx, *STAFF_ROLES): +            return False, "members/roles" +        elif without_role_check(ctx, *MODERATION_ROLES): +            return all(isinstance(mention, discord.Member) for mention in mentions), "roles" +        else: +            return True, "" + +    @staticmethod +    async def validate_mentions(ctx: Context, mentions: t.Iterable[Mentionable]) -> bool: +        """ +        Filter mentions to see if the user can mention, and sends a denial if not allowed. + +        Returns whether or not the validation is successful. +        """ +        mentions_allowed, disallowed_mentions = await Reminders._check_mentions(ctx, mentions) + +        if not mentions or mentions_allowed: +            return True +        else: +            await send_denial(ctx, f"You can't mention other {disallowed_mentions} in your reminder!") +            return False + +    def get_mentionables(self, mention_ids: t.List[int]) -> t.Iterator[Mentionable]: +        """Converts Role and Member ids to their corresponding objects if possible.""" +        guild = self.bot.get_guild(Guild.id) +        for mention_id in mention_ids: +            if (mentionable := (guild.get_member(mention_id) or guild.get_role(mention_id))): +                yield mentionable + +    def schedule_reminder(self, reminder: dict) -> None:          """A coroutine which sends the reminder once the time is reached, and cancels the running task."""          reminder_id = reminder["id"]          reminder_datetime = isoparse(reminder['expiration']).replace(tzinfo=None) -        # Send the reminder message once the desired duration has passed -        await wait_until(reminder_datetime) -        await self.send_reminder(reminder) +        async def _remind() -> None: +            await self.send_reminder(reminder) + +            log.debug(f"Deleting reminder {reminder_id} (the user has been reminded).") +            await self._delete_reminder(reminder_id) -        log.debug(f"Deleting reminder {reminder_id} (the user has been reminded).") -        await self._delete_reminder(reminder_id) +        self.scheduler.schedule_at(reminder_datetime, reminder_id, _remind())      async def _delete_reminder(self, reminder_id: str, cancel_task: bool = True) -> None:          """Delete a reminder from the database, given its ID, and cancel the running task.""" @@ -117,15 +161,28 @@ class Reminders(Scheduler, Cog):          if cancel_task:              # Now we can remove it from the schedule list -            self.cancel_task(reminder_id) +            self.scheduler.cancel(reminder_id) + +    async def _edit_reminder(self, reminder_id: int, payload: dict) -> dict: +        """ +        Edits a reminder in the database given the ID and payload. + +        Returns the edited reminder. 
+        """ +        # Send the request to update the reminder in the database +        reminder = await self.bot.api_client.patch( +            'bot/reminders/' + str(reminder_id), +            json=payload +        ) +        return reminder      async def _reschedule_reminder(self, reminder: dict) -> None:          """Reschedule a reminder object."""          log.trace(f"Cancelling old task #{reminder['id']}") -        self.cancel_task(reminder["id"]) +        self.scheduler.cancel(reminder["id"])          log.trace(f"Scheduling new task #{reminder['id']}") -        self.schedule_task(reminder["id"], reminder) +        self.schedule_reminder(reminder)      async def send_reminder(self, reminder: dict, late: relativedelta = None) -> None:          """Send the reminder.""" @@ -152,36 +209,39 @@ class Reminders(Scheduler, Cog):                  name=f"Sorry it arrived {humanize_delta(late, max_units=2)} late!"              ) +        additional_mentions = ' '.join( +            mentionable.mention for mentionable in self.get_mentionables(reminder["mentions"]) +        ) +          await channel.send( -            content=user.mention, +            content=f"{user.mention} {additional_mentions}",              embed=embed          )          await self._delete_reminder(reminder["id"])      @group(name="remind", aliases=("reminder", "reminders", "remindme"), invoke_without_command=True) -    async def remind_group(self, ctx: Context, expiration: Duration, *, content: str) -> None: +    async def remind_group( +        self, ctx: Context, mentions: Greedy[Mentionable], expiration: Duration, *, content: str +    ) -> None:          """Commands for managing your reminders.""" -        await ctx.invoke(self.new_reminder, expiration=expiration, content=content) +        await ctx.invoke(self.new_reminder, mentions=mentions, expiration=expiration, content=content)      @remind_group.command(name="new", aliases=("add", "create")) -    async def new_reminder(self, ctx: Context, expiration: Duration, *, content: str) -> t.Optional[discord.Message]: +    async def new_reminder( +        self, ctx: Context, mentions: Greedy[Mentionable], expiration: Duration, *, content: str +    ) -> None:          """          Set yourself a simple reminder.          Expiration is parsed per: http://strftime.org/          """ -        embed = discord.Embed() -          # If the user is not staff, we need to verify whether or not to make a reminder at all.          if without_role_check(ctx, *STAFF_ROLES):              # If they don't have permission to set a reminder in this channel              if ctx.channel.id not in WHITELISTED_CHANNELS: -                embed.colour = discord.Colour.red() -                embed.title = random.choice(NEGATIVE_REPLIES) -                embed.description = "Sorry, you can't do that here!" - -                return await ctx.send(embed=embed) +                await send_denial(ctx, "Sorry, you can't do that here!") +                return              # Get their current active reminders              active_reminders = await self.bot.api_client.get( @@ -194,11 +254,18 @@ class Reminders(Scheduler, Cog):              # Let's limit this, so we don't get 10 000              # reminders from kip or something like that :P              if len(active_reminders) > MAXIMUM_REMINDERS: -                embed.colour = discord.Colour.red() -                embed.title = random.choice(NEGATIVE_REPLIES) -                embed.description = "You have too many active reminders!" 
+                await send_denial(ctx, "You have too many active reminders!") +                return -                return await ctx.send(embed=embed) +        # Remove duplicate mentions +        mentions = set(mentions) +        mentions.discard(ctx.author) + +        # Filter mentions to see if the user can mention members/roles +        if not await self.validate_mentions(ctx, mentions): +            return + +        mention_ids = [mention.id for mention in mentions]          # Now we can attempt to actually set the reminder.          reminder = await self.bot.api_client.post( @@ -208,25 +275,30 @@ class Reminders(Scheduler, Cog):                  'channel_id': ctx.message.channel.id,                  'jump_url': ctx.message.jump_url,                  'content': content, -                'expiration': expiration.isoformat() +                'expiration': expiration.isoformat(), +                'mentions': mention_ids,              }          )          now = datetime.utcnow() - timedelta(seconds=1)          humanized_delta = humanize_delta(relativedelta(expiration, now)) +        mention_string = ( +            f"Your reminder will arrive in {humanized_delta} " +            f"and will mention {len(mentions)} other(s)!" +        )          # Confirm to the user that it worked.          await self._send_confirmation(              ctx, -            on_success=f"Your reminder will arrive in {humanized_delta}!", +            on_success=mention_string,              reminder_id=reminder["id"],              delivery_dt=expiration,          ) -        self.schedule_task(reminder["id"], reminder) +        self.schedule_reminder(reminder)      @remind_group.command(name="list") -    async def list_reminders(self, ctx: Context) -> t.Optional[discord.Message]: +    async def list_reminders(self, ctx: Context) -> None:          """View a paginated embed of all reminders for your user."""          # Get all the user's reminders from the database.          data = await self.bot.api_client.get( @@ -239,7 +311,7 @@ class Reminders(Scheduler, Cog):          # Make a list of tuples so it can be sorted by time.          reminders = sorted(              ( -                (rem['content'], rem['expiration'], rem['id']) +                (rem['content'], rem['expiration'], rem['id'], rem['mentions'])                  for rem in data              ),              key=itemgetter(1) @@ -247,13 +319,19 @@ class Reminders(Scheduler, Cog):          lines = [] -        for content, remind_at, id_ in reminders: +        for content, remind_at, id_, mentions in reminders:              # Parse and humanize the time, make it pretty :D              remind_datetime = isoparse(remind_at).replace(tzinfo=None)              time = humanize_delta(relativedelta(remind_datetime, now)) +            mentions = ", ".join( +                # Both Role and User objects have the `name` attribute +                mention.name for mention in self.get_mentionables(mentions) +            ) +            mention_string = f"\n**Mentions:** {mentions}" if mentions else "" +              text = textwrap.dedent(f""" -            **Reminder #{id_}:** *expires in {time}* (ID: {id_}) +            **Reminder #{id_}:** *expires in {time}* (ID: {id_}){mention_string}              {content}              """).strip() @@ -266,7 +344,8 @@ class Reminders(Scheduler, Cog):          # Remind the user that they have no reminders :^)          if not lines:              embed.description = "No active reminders could be found." 
-            return await ctx.send(embed=embed) +            await ctx.send(embed=embed) +            return          # Construct the embed and paginate it.          embed.colour = discord.Colour.blurple() @@ -286,37 +365,37 @@ class Reminders(Scheduler, Cog):      @edit_reminder_group.command(name="duration", aliases=("time",))      async def edit_reminder_duration(self, ctx: Context, id_: int, expiration: Duration) -> None:          """ -         Edit one of your reminder's expiration. +        Edit one of your reminder's expiration.          Expiration is parsed per: http://strftime.org/          """ -        # Send the request to update the reminder in the database -        reminder = await self.bot.api_client.patch( -            'bot/reminders/' + str(id_), -            json={'expiration': expiration.isoformat()} -        ) - -        # Send a confirmation message to the channel -        await self._send_confirmation( -            ctx, -            on_success="That reminder has been edited successfully!", -            reminder_id=id_, -            delivery_dt=expiration, -        ) - -        await self._reschedule_reminder(reminder) +        await self.edit_reminder(ctx, id_, {'expiration': expiration.isoformat()})      @edit_reminder_group.command(name="content", aliases=("reason",))      async def edit_reminder_content(self, ctx: Context, id_: int, *, content: str) -> None:          """Edit one of your reminder's content.""" -        # Send the request to update the reminder in the database -        reminder = await self.bot.api_client.patch( -            'bot/reminders/' + str(id_), -            json={'content': content} -        ) +        await self.edit_reminder(ctx, id_, {"content": content}) + +    @edit_reminder_group.command(name="mentions", aliases=("pings",)) +    async def edit_reminder_mentions(self, ctx: Context, id_: int, mentions: Greedy[Mentionable]) -> None: +        """Edit one of your reminder's mentions.""" +        # Remove duplicate mentions +        mentions = set(mentions) +        mentions.discard(ctx.author) + +        # Filter mentions to see if the user can mention members/roles +        if not await self.validate_mentions(ctx, mentions): +            return + +        mention_ids = [mention.id for mention in mentions] +        await self.edit_reminder(ctx, id_, {"mentions": mention_ids}) + +    async def edit_reminder(self, ctx: Context, id_: int, payload: dict) -> None: +        """Edits a reminder with the given payload, then sends a confirmation message.""" +        reminder = await self._edit_reminder(id_, payload) -        # Parse the reminder expiration back into a datetime for the confirmation message -        expiration = isoparse(reminder['expiration']).replace(tzinfo=None) +        # Parse the reminder expiration back into a datetime +        expiration = isoparse(reminder["expiration"]).replace(tzinfo=None)          # Send a confirmation message to the channel          await self._send_confirmation( diff --git a/bot/cogs/site.py b/bot/cogs/site.py index e61cd5003..ac29daa1d 100644 --- a/bot/cogs/site.py +++ b/bot/cogs/site.py @@ -33,7 +33,7 @@ class Site(Cog):          embed.colour = Colour.blurple()          embed.description = (              f"[Our official website]({url}) is an open-source community project " -            "created with Python and Flask. It contains information about the server " +            "created with Python and Django. 
It contains information about the server "              "itself, lets you sign up for upcoming events, has its own wiki, contains "              "a list of valuable learning resources, and much more."          ) diff --git a/bot/cogs/snekbox.py b/bot/cogs/snekbox.py index a2a7574d4..52c8b6f88 100644 --- a/bot/cogs/snekbox.py +++ b/bot/cogs/snekbox.py @@ -202,7 +202,7 @@ class Snekbox(Cog):                  output, paste_link = await self.format_output(results["stdout"])              icon = self.get_status_emoji(results) -            msg = f"{ctx.author.mention} {icon} {msg}.\n\n```py\n{output}\n```" +            msg = f"{ctx.author.mention} {icon} {msg}.\n\n```\n{output}\n```"              if paste_link:                  msg = f"{msg}\nFull output: {paste_link}" @@ -212,7 +212,14 @@ class Snekbox(Cog):              else:                  self.bot.stats.incr("snekbox.python.success") -            response = await ctx.send(msg) +            filter_cog = self.bot.get_cog("Filtering") +            filter_triggered = False +            if filter_cog: +                filter_triggered = await filter_cog.filter_eval(msg, ctx.message) +            if filter_triggered: +                response = await ctx.send("Attempt to circumvent filter detected. Moderator team has been alerted.") +            else: +                response = await ctx.send(msg)              self.bot.loop.create_task(                  wait_for_deletion(response, user_ids=(ctx.author.id,), client=ctx.bot)              ) diff --git a/bot/cogs/source.py b/bot/cogs/source.py new file mode 100644 index 000000000..f1db745cd --- /dev/null +++ b/bot/cogs/source.py @@ -0,0 +1,133 @@ +import inspect +from pathlib import Path +from typing import Optional, Tuple, Union + +from discord import Embed +from discord.ext import commands + +from bot.bot import Bot +from bot.constants import URLs + +SourceType = Union[commands.HelpCommand, commands.Command, commands.Cog, str, commands.ExtensionNotLoaded] + + +class SourceConverter(commands.Converter): +    """Convert an argument into a help command, tag, command, or cog.""" + +    async def convert(self, ctx: commands.Context, argument: str) -> SourceType: +        """Convert argument into source object.""" +        if argument.lower().startswith("help"): +            return ctx.bot.help_command + +        cog = ctx.bot.get_cog(argument) +        if cog: +            return cog + +        cmd = ctx.bot.get_command(argument) +        if cmd: +            return cmd + +        tags_cog = ctx.bot.get_cog("Tags") +        show_tag = True + +        if not tags_cog: +            show_tag = False +        elif argument.lower() in tags_cog._cache: +            return argument.lower() + +        raise commands.BadArgument( +            f"Unable to convert `{argument}` to valid command{', tag,' if show_tag else ''} or Cog." 
+        ) + + +class BotSource(commands.Cog): +    """Displays information about the bot's source code.""" + +    def __init__(self, bot: Bot): +        self.bot = bot + +    @commands.command(name="source", aliases=("src",)) +    async def source_command(self, ctx: commands.Context, *, source_item: SourceConverter = None) -> None: +        """Display information and a GitHub link to the source code of a command, tag, or cog.""" +        if not source_item: +            embed = Embed(title="Bot's GitHub Repository") +            embed.add_field(name="Repository", value=f"[Go to GitHub]({URLs.github_bot_repo})") +            embed.set_thumbnail(url="https://avatars1.githubusercontent.com/u/9919") +            await ctx.send(embed=embed) +            return + +        embed = await self.build_embed(source_item) +        await ctx.send(embed=embed) + +    def get_source_link(self, source_item: SourceType) -> Tuple[str, str, Optional[int]]: +        """Build GitHub link of source item, return this link, file location and first line number.""" +        if isinstance(source_item, commands.HelpCommand): +            src = type(source_item) +            filename = inspect.getsourcefile(src) +        elif isinstance(source_item, commands.Command): +            if source_item.cog_name == "Alias": +                cmd_name = source_item.callback.__name__.replace("_alias", "") +                cmd = self.bot.get_command(cmd_name.replace("_", " ")) +                src = cmd.callback.__code__ +                filename = src.co_filename +            else: +                src = source_item.callback.__code__ +                filename = src.co_filename +        elif isinstance(source_item, str): +            tags_cog = self.bot.get_cog("Tags") +            filename = tags_cog._cache[source_item]["location"] +        else: +            src = type(source_item) +            filename = inspect.getsourcefile(src) + +        if not isinstance(source_item, str): +            lines, first_line_no = inspect.getsourcelines(src) +            lines_extension = f"#L{first_line_no}-L{first_line_no+len(lines)-1}" +        else: +            first_line_no = None +            lines_extension = "" + +        # Handle tag file location differently than others to avoid errors in some cases +        if not first_line_no: +            file_location = Path(filename).relative_to("/bot/") +        else: +            file_location = Path(filename).relative_to(Path.cwd()).as_posix() + +        url = f"{URLs.github_bot_repo}/blob/master/{file_location}{lines_extension}" + +        return url, file_location, first_line_no or None + +    async def build_embed(self, source_object: SourceType) -> Optional[Embed]: +        """Build embed based on source object.""" +        url, location, first_line = self.get_source_link(source_object) + +        if isinstance(source_object, commands.HelpCommand): +            title = "Help Command" +            description = source_object.__doc__.splitlines()[1] +        elif isinstance(source_object, commands.Command): +            if source_object.cog_name == "Alias": +                cmd_name = source_object.callback.__name__.replace("_alias", "") +                cmd = self.bot.get_command(cmd_name.replace("_", " ")) +                description = cmd.short_doc +            else: +                description = source_object.short_doc + +            title = f"Command: {source_object.qualified_name}" +        elif isinstance(source_object, str): +            title = f"Tag: {source_object}" +          
  description = "" +        else: +            title = f"Cog: {source_object.qualified_name}" +            description = source_object.description.splitlines()[0] + +        embed = Embed(title=title, description=description) +        embed.add_field(name="Source Code", value=f"[Go to GitHub]({url})") +        line_text = f":{first_line}" if first_line else "" +        embed.set_footer(text=f"{location}{line_text}") + +        return embed + + +def setup(bot: Bot) -> None: +    """Load the BotSource cog.""" +    bot.add_cog(BotSource(bot)) diff --git a/bot/cogs/sync/cog.py b/bot/cogs/sync/cog.py index 7cc3726b2..5ace957e7 100644 --- a/bot/cogs/sync/cog.py +++ b/bot/cogs/sync/cog.py @@ -34,18 +34,22 @@ class Sync(Cog):          for syncer in (self.role_syncer, self.user_syncer):              await syncer.sync(guild) -    async def patch_user(self, user_id: int, updated_information: Dict[str, Any]) -> None: +    async def patch_user(self, user_id: int, json: Dict[str, Any], ignore_404: bool = False) -> None:          """Send a PATCH request to partially update a user in the database."""          try: -            await self.bot.api_client.patch(f"bot/users/{user_id}", json=updated_information) +            await self.bot.api_client.patch(f"bot/users/{user_id}", json=json)          except ResponseCodeError as e:              if e.response.status != 404:                  raise -            log.warning("Unable to update user, got 404. Assuming race condition from join event.") +            if not ignore_404: +                log.warning("Unable to update user, got 404. Assuming race condition from join event.")      @Cog.listener()      async def on_guild_role_create(self, role: Role) -> None:          """Adds newly create role to the database table over the API.""" +        if role.guild.id != constants.Guild.id: +            return +          await self.bot.api_client.post(              'bot/roles',              json={ @@ -60,11 +64,17 @@ class Sync(Cog):      @Cog.listener()      async def on_guild_role_delete(self, role: Role) -> None:          """Deletes role from the database when it's deleted from the guild.""" +        if role.guild.id != constants.Guild.id: +            return +          await self.bot.api_client.delete(f'bot/roles/{role.id}')      @Cog.listener()      async def on_guild_role_update(self, before: Role, after: Role) -> None:          """Syncs role with the database if any of the stored attributes were updated.""" +        if after.guild.id != constants.Guild.id: +            return +          was_updated = (              before.name != after.name              or before.colour != after.colour @@ -93,6 +103,9 @@ class Sync(Cog):          previously left), it will update the user's information. If the user is not yet known by          the database, the user is added.          
""" +        if member.guild.id != constants.Guild.id: +            return +          packed = {              'discriminator': int(member.discriminator),              'id': member.id, @@ -122,14 +135,20 @@ class Sync(Cog):      @Cog.listener()      async def on_member_remove(self, member: Member) -> None:          """Set the in_guild field to False when a member leaves the guild.""" -        await self.patch_user(member.id, updated_information={"in_guild": False}) +        if member.guild.id != constants.Guild.id: +            return + +        await self.patch_user(member.id, json={"in_guild": False})      @Cog.listener()      async def on_member_update(self, before: Member, after: Member) -> None:          """Update the roles of the member in the database if a change is detected.""" +        if after.guild.id != constants.Guild.id: +            return +          if before.roles != after.roles:              updated_information = {"roles": sorted(role.id for role in after.roles)} -            await self.patch_user(after.id, updated_information=updated_information) +            await self.patch_user(after.id, json=updated_information)      @Cog.listener()      async def on_user_update(self, before: User, after: User) -> None: @@ -140,7 +159,8 @@ class Sync(Cog):                  "name": after.name,                  "discriminator": int(after.discriminator),              } -            await self.patch_user(after.id, updated_information=updated_information) +            # A 404 likely means the user is in another guild. +            await self.patch_user(after.id, json=updated_information, ignore_404=True)      @commands.group(name='sync')      @commands.has_permissions(administrator=True) diff --git a/bot/cogs/sync/syncers.py b/bot/cogs/sync/syncers.py index 536455668..f7ba811bc 100644 --- a/bot/cogs/sync/syncers.py +++ b/bot/cogs/sync/syncers.py @@ -5,6 +5,7 @@ import typing as t  from collections import namedtuple  from functools import partial +import discord  from discord import Guild, HTTPException, Member, Message, Reaction, User  from discord.ext.commands import Context @@ -68,7 +69,11 @@ class Syncer(abc.ABC):                      )                      return None -            message = await channel.send(f"{self._CORE_DEV_MENTION}{msg_content}") +            allowed_roles = [discord.Object(constants.Roles.core_developers)] +            message = await channel.send( +                f"{self._CORE_DEV_MENTION}{msg_content}", +                allowed_mentions=discord.AllowedMentions(everyone=False, roles=allowed_roles) +            )          else:              await message.edit(content=msg_content) diff --git a/bot/cogs/tags.py b/bot/cogs/tags.py index 6f03a3475..3d76c5c08 100644 --- a/bot/cogs/tags.py +++ b/bot/cogs/tags.py @@ -47,6 +47,7 @@ class Tags(Cog):                          "description": file.read_text(encoding="utf8"),                      },                      "restricted_to": "developers", +                    "location": f"/bot/{file}"                  }                  # Convert to a list to allow negative indexing. 
diff --git a/bot/cogs/token_remover.py b/bot/cogs/token_remover.py index 6721f0e02..ef979f222 100644 --- a/bot/cogs/token_remover.py +++ b/bot/cogs/token_remover.py @@ -2,20 +2,22 @@ import base64  import binascii  import logging  import re -import struct  import typing as t -from datetime import datetime -from discord import Colour, Message +from discord import Colour, Message, NotFound  from discord.ext.commands import Cog -from discord.utils import snowflake_time +from bot import utils  from bot.bot import Bot  from bot.cogs.moderation import ModLog  from bot.constants import Channels, Colours, Event, Icons  log = logging.getLogger(__name__) +LOG_MESSAGE = ( +    "Censored a seemingly valid token sent by {author} (`{author_id}`) in {channel}, " +    "token was `{user_id}.{timestamp}.{hmac}`" +)  DELETION_MESSAGE_TEMPLATE = (      "Hey {mention}! I noticed you posted a seemingly valid Discord API "      "token in your message and have removed your message. " @@ -25,15 +27,22 @@ DELETION_MESSAGE_TEMPLATE = (      "Feel free to re-post it with the token removed. "      "If you believe this was a mistake, please let us know!"  ) -DISCORD_EPOCH_TIMESTAMP = datetime(2017, 1, 1) +DISCORD_EPOCH = 1_420_070_400  TOKEN_EPOCH = 1_293_840_000 -TOKEN_RE = re.compile( -    r"[^\s\.()\"']+"  # Matches token part 1: The user ID string, encoded as base64 -    r"\."             # Matches a literal dot between the token parts -    r"[^\s\.()\"']+"  # Matches token part 2: The creation timestamp, as an integer -    r"\."             # Matches a literal dot between the token parts -    r"[^\s\.()\"']+"  # Matches token part 3: The HMAC, unused by us, but check that it isn't empty -) + +# Three parts delimited by dots: user ID, creation timestamp, HMAC. +# The HMAC isn't parsed further, but it's in the regex to ensure it at least exists in the string. +# Each part only matches base64 URL-safe characters. +# Padding has never been observed, but the padding character '=' is matched just in case. +TOKEN_RE = re.compile(r"([\w\-=]+)\.([\w\-=]+)\.([\w\-=]+)", re.ASCII) + + +class Token(t.NamedTuple): +    """A Discord Bot token.""" + +    user_id: str +    timestamp: str +    hmac: str  class TokenRemover(Cog): @@ -54,6 +63,10 @@ class TokenRemover(Cog):          See: https://discordapp.com/developers/docs/reference#snowflakes          """ +        # Ignore DMs; can't delete messages in there anyway. 
+        if not msg.guild or msg.author.bot: +            return +          found_token = self.find_token_in_message(msg)          if found_token:              await self.take_action(msg, found_token) @@ -65,64 +78,61 @@ class TokenRemover(Cog):          See: https://discordapp.com/developers/docs/reference#snowflakes          """ -        found_token = self.find_token_in_message(after) -        if found_token: -            await self.take_action(after, found_token) +        await self.on_message(after) -    async def take_action(self, msg: Message, found_token: str) -> None: -        """Remove the `msg` containing a token an send a mod_log message.""" -        user_id, creation_timestamp, hmac = found_token.split('.') +    async def take_action(self, msg: Message, found_token: Token) -> None: +        """Remove the `msg` containing the `found_token` and send a mod log message."""          self.mod_log.ignore(Event.message_delete, msg.id) -        await msg.delete() + +        try: +            await msg.delete() +        except NotFound: +            log.debug(f"Failed to remove token in message {msg.id}: message already deleted.") +            return +          await msg.channel.send(DELETION_MESSAGE_TEMPLATE.format(mention=msg.author.mention)) -        message = ( -            "Censored a seemingly valid token sent by " -            f"{msg.author} (`{msg.author.id}`) in {msg.channel.mention}, token was " -            f"`{user_id}.{creation_timestamp}.{'x' * len(hmac)}`" -        ) -        log.debug(message) +        log_message = self.format_log_message(msg, found_token) +        log.debug(log_message)          # Send pretty mod log embed to mod-alerts          await self.mod_log.send_log_message(              icon_url=Icons.token_removed,              colour=Colour(Colours.soft_red),              title="Token removed!", -            text=message, +            text=log_message,              thumbnail=msg.author.avatar_url_as(static_format="png"),              channel_id=Channels.mod_alerts,          )          self.bot.stats.incr("tokens.removed_tokens") +    @staticmethod +    def format_log_message(msg: Message, token: Token) -> str: +        """Return the log message to send for `token` being censored in `msg`.""" +        return LOG_MESSAGE.format( +            author=msg.author, +            author_id=msg.author.id, +            channel=msg.channel.mention, +            user_id=token.user_id, +            timestamp=token.timestamp, +            hmac='x' * len(token.hmac), +        ) +      @classmethod -    def find_token_in_message(cls, msg: Message) -> t.Optional[str]: +    def find_token_in_message(cls, msg: Message) -> t.Optional[Token]:          """Return a seemingly valid token found in `msg` or `None` if no token is found.""" -        if msg.author.bot: -            return - -        # Use findall rather than search to guard against method calls prematurely returning the +        # Use finditer rather than search to guard against method calls prematurely returning the          # token check (e.g. 
`message.channel.send` also matches our token pattern) -        maybe_matches = TOKEN_RE.findall(msg.content) -        for substr in maybe_matches: -            if cls.is_maybe_token(substr): +        for match in TOKEN_RE.finditer(msg.content): +            token = Token(*match.groups()) +            if cls.is_valid_user_id(token.user_id) and cls.is_valid_timestamp(token.timestamp):                  # Short-circuit on first match -                return substr +                return token          # No matching substring          return -    @classmethod -    def is_maybe_token(cls, test_str: str) -> bool: -        """Check the provided string to see if it is a seemingly valid token.""" -        try: -            user_id, creation_timestamp, hmac = test_str.split('.') -        except ValueError: -            return False - -        if cls.is_valid_user_id(user_id) and cls.is_valid_timestamp(creation_timestamp): -            return True -      @staticmethod      def is_valid_user_id(b64_content: str) -> bool:          """ @@ -130,29 +140,41 @@ class TokenRemover(Cog):          See: https://discordapp.com/developers/docs/reference#snowflakes          """ -        b64_content += '=' * (-len(b64_content) % 4) +        b64_content = utils.pad_base64(b64_content)          try: -            content: bytes = base64.b64decode(b64_content) -            return content.decode('utf-8').isnumeric() -        except (binascii.Error, UnicodeDecodeError): +            decoded_bytes = base64.urlsafe_b64decode(b64_content) +            string = decoded_bytes.decode('utf-8') + +            # isdigit on its own would match a lot of other Unicode characters, hence the isascii. +            return string.isascii() and string.isdigit() +        except (binascii.Error, ValueError):              return False      @staticmethod      def is_valid_timestamp(b64_content: str) -> bool:          """ -        Check potential token to see if it contains a valid timestamp. +        Return True if `b64_content` decodes to a valid timestamp. -        See: https://discordapp.com/developers/docs/reference#snowflakes +        If the timestamp is greater than the Discord epoch, it's probably valid. +        See: https://i.imgur.com/7WdehGn.png          """ -        b64_content += '=' * (-len(b64_content) % 4) +        b64_content = utils.pad_base64(b64_content)          try: -            content = base64.urlsafe_b64decode(b64_content) -            snowflake = struct.unpack('i', content)[0] -        except (binascii.Error, struct.error): +            decoded_bytes = base64.urlsafe_b64decode(b64_content) +            timestamp = int.from_bytes(decoded_bytes, byteorder="big") +        except (binascii.Error, ValueError) as e: +            log.debug(f"Failed to decode token timestamp '{b64_content}': {e}") +            return False + +        # Seems like newer tokens don't need the epoch added, but add anyway since an upper bound +        # is not checked. 
+        if timestamp + TOKEN_EPOCH >= DISCORD_EPOCH: +            return True +        else: +            log.debug(f"Invalid token timestamp '{b64_content}': smaller than Discord epoch")              return False -        return snowflake_time(snowflake + TOKEN_EPOCH) < DISCORD_EPOCH_TIMESTAMP  def setup(bot: Bot) -> None: diff --git a/bot/cogs/utils.py b/bot/cogs/utils.py index 73b4a1c0a..697bf60ce 100644 --- a/bot/cogs/utils.py +++ b/bot/cogs/utils.py @@ -6,7 +6,7 @@ from email.parser import HeaderParser  from io import StringIO  from typing import Tuple, Union -from discord import Colour, Embed +from discord import Colour, Embed, utils  from discord.ext.commands import BadArgument, Cog, Context, command  from bot.bot import Bot @@ -145,7 +145,7 @@ class Utils(Cog):                  u_code = f"\\U{digit:>08}"              url = f"https://www.compart.com/en/unicode/U+{digit:>04}"              name = f"[{unicodedata.name(char, '')}]({url})" -            info = f"`{u_code.ljust(10)}`: {name} - {char}" +            info = f"`{u_code.ljust(10)}`: {name} - {utils.escape_markdown(char)}"              return info, u_code          charlist, rawlist = zip(*(get_info(c) for c in characters)) diff --git a/bot/cogs/watchchannels/bigbrother.py b/bot/cogs/watchchannels/bigbrother.py index 702d371f4..4d27a6333 100644 --- a/bot/cogs/watchchannels/bigbrother.py +++ b/bot/cogs/watchchannels/bigbrother.py @@ -35,14 +35,29 @@ class BigBrother(WatchChannel, Cog, name="Big Brother"):      @bigbrother_group.command(name='watched', aliases=('all', 'list'))      @with_role(*MODERATION_ROLES) -    async def watched_command(self, ctx: Context, update_cache: bool = True) -> None: +    async def watched_command( +        self, ctx: Context, oldest_first: bool = False, update_cache: bool = True +    ) -> None:          """          Shows the users that are currently being monitored by Big Brother. +        The optional kwarg `oldest_first` can be used to order the list by oldest watched. + +        The optional kwarg `update_cache` can be used to update the user +        cache using the API before listing the users. +        """ +        await self.list_watched_users(ctx, oldest_first=oldest_first, update_cache=update_cache) + +    @bigbrother_group.command(name='oldest') +    @with_role(*MODERATION_ROLES) +    async def oldest_command(self, ctx: Context, update_cache: bool = True) -> None: +        """ +        Shows Big Brother monitored users ordered by oldest watched. +          The optional kwarg `update_cache` can be used to update the user          cache using the API before listing the users.          
""" -        await self.list_watched_users(ctx, update_cache) +        await ctx.invoke(self.watched_command, oldest_first=True, update_cache=update_cache)      @bigbrother_group.command(name='watch', aliases=('w',))      @with_role(*MODERATION_ROLES) diff --git a/bot/cogs/watchchannels/talentpool.py b/bot/cogs/watchchannels/talentpool.py index 14547105f..89256e92e 100644 --- a/bot/cogs/watchchannels/talentpool.py +++ b/bot/cogs/watchchannels/talentpool.py @@ -38,14 +38,29 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):      @nomination_group.command(name='watched', aliases=('all', 'list'))      @with_role(*MODERATION_ROLES) -    async def watched_command(self, ctx: Context, update_cache: bool = True) -> None: +    async def watched_command( +        self, ctx: Context, oldest_first: bool = False, update_cache: bool = True +    ) -> None:          """          Shows the users that are currently being monitored in the talent pool. +        The optional kwarg `oldest_first` can be used to order the list by oldest nomination. + +        The optional kwarg `update_cache` can be used to update the user +        cache using the API before listing the users. +        """ +        await self.list_watched_users(ctx, oldest_first=oldest_first, update_cache=update_cache) + +    @nomination_group.command(name='oldest') +    @with_role(*MODERATION_ROLES) +    async def oldest_command(self, ctx: Context, update_cache: bool = True) -> None: +        """ +        Shows talent pool monitored users ordered by oldest nomination. +          The optional kwarg `update_cache` can be used to update the user          cache using the API before listing the users.          """ -        await self.list_watched_users(ctx, update_cache) +        await ctx.invoke(self.watched_command, oldest_first=True, update_cache=update_cache)      @nomination_group.command(name='watch', aliases=('w', 'add', 'a'))      @with_role(*STAFF_ROLES) @@ -224,7 +239,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):                  Status: **Active**                  Date: {start_date}                  Actor: {actor.mention if actor else actor_id} -                Reason: {textwrap.shorten(nomination_object["reason"], width=200, placeholder="...")} +                Reason: {nomination_object["reason"]}                  Nomination ID: `{nomination_object["id"]}`                  ===============                  """ @@ -237,10 +252,10 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):                  Status: Inactive                  Date: {start_date}                  Actor: {actor.mention if actor else actor_id} -                Reason: {textwrap.shorten(nomination_object["reason"], width=200, placeholder="...")} +                Reason: {nomination_object["reason"]}                  End date: {end_date} -                Unwatch reason: {textwrap.shorten(nomination_object["end_reason"], width=200, placeholder="...")} +                Unwatch reason: {nomination_object["end_reason"]}                  Nomination ID: `{nomination_object["id"]}`                  ===============                  """ diff --git a/bot/cogs/watchchannels/watchchannel.py b/bot/cogs/watchchannels/watchchannel.py index 436778c46..044077350 100644 --- a/bot/cogs/watchchannels/watchchannel.py +++ b/bot/cogs/watchchannels/watchchannel.py @@ -204,6 +204,7 @@ class WatchChannel(metaclass=CogABCMeta):          embed: Optional[Embed] = None,      ) -> None:          """Sends a message to the webhook with the specified kwargs.""" +        
username = messages.sub_clyde(username)          try:              await self.webhook.send(content=content, username=username, avatar_url=avatar_url, embed=embed)          except discord.HTTPException as exc: @@ -286,10 +287,14 @@ class WatchChannel(metaclass=CogABCMeta):          await self.webhook_send(embed=embed, username=msg.author.display_name, avatar_url=msg.author.avatar_url) -    async def list_watched_users(self, ctx: Context, update_cache: bool = True) -> None: +    async def list_watched_users( +        self, ctx: Context, oldest_first: bool = False, update_cache: bool = True +    ) -> None:          """          Gives an overview of the watched user list for this channel. +        The optional kwarg `oldest_first` orders the list by oldest entry. +          The optional kwarg `update_cache` specifies whether the cache should          be refreshed by polling the API.          """ @@ -304,7 +309,11 @@ class WatchChannel(metaclass=CogABCMeta):              time_delta = self._get_time_delta(inserted_at)              lines.append(f"• <@{user_id}> (added {time_delta})") +        if oldest_first: +            lines.reverse() +          lines = lines or ("There's nothing here yet.",) +          embed = Embed(              title=f"{self.__class__.__name__} watched users ({'updated' if update_cache else 'cached'})",              color=Color.blue() diff --git a/bot/cogs/webhook_remover.py b/bot/cogs/webhook_remover.py index 1b5c3f821..543869215 100644 --- a/bot/cogs/webhook_remover.py +++ b/bot/cogs/webhook_remover.py @@ -1,7 +1,7 @@  import logging  import re -from discord import Colour, Message +from discord import Colour, Message, NotFound  from discord.ext.commands import Cog  from bot.bot import Bot @@ -35,7 +35,13 @@ class WebhookRemover(Cog):          """Delete `msg` and send a warning that it contained the Discord webhook `redacted_url`."""          # Don't log this, due internal delete, not by user. Will make different entry.          self.mod_log.ignore(Event.message_delete, msg.id) -        await msg.delete() + +        try: +            await msg.delete() +        except NotFound: +            log.debug(f"Failed to remove webhook in message {msg.id}: message already deleted.") +            return +          await msg.channel.send(ALERT_MESSAGE_TEMPLATE.format(user=msg.author.mention))          message = ( @@ -59,6 +65,10 @@ class WebhookRemover(Cog):      @Cog.listener()      async def on_message(self, msg: Message) -> None:          """Check if a Discord webhook URL is in `message`.""" +        # Ignore DMs; can't delete messages in there anyway. 
+        if not msg.guild or msg.author.bot: +            return +          matches = WEBHOOK_URL_RE.search(msg.content)          if matches:              await self.delete_and_respond(msg, matches[1] + "xxx") diff --git a/bot/constants.py b/bot/constants.py index b31a9c99e..cf4f3f666 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -226,6 +226,7 @@ class Filter(metaclass=YAMLGetter):      notify_user_domains: bool      ping_everyone: bool +    offensive_msg_delete_days: int      guild_invite_whitelist: List[int]      domain_blacklist: List[str]      word_watchlist: List[str] @@ -271,6 +272,10 @@ class Emojis(metaclass=YAMLGetter):      status_idle: str      status_dnd: str +    incident_actioned: str +    incident_unactioned: str +    incident_investigating: str +      failmail: str      trashcan: str @@ -389,14 +394,17 @@ class Channels(metaclass=YAMLGetter):      attachment_log: int      big_brother_logs: int      bot_commands: int +    cooldown: int      defcon: int      dev_contrib: int      dev_core: int      dev_log: int +    dm_log: int      esoteric: int      helpers: int      how_to_get_help: int      incidents: int +    incidents_archive: int      message_log: int      meta: int      mod_alerts: int @@ -420,11 +428,13 @@ class Webhooks(metaclass=YAMLGetter):      section = "guild"      subsection = "webhooks" -    talent_pool: int      big_brother: int -    reddit: int -    duck_pond: int      dev_log: int +    dm_log: int +    duck_pond: int +    incidents_archive: int +    reddit: int +    talent_pool: int  class Roles(metaclass=YAMLGetter): @@ -458,6 +468,7 @@ class Guild(metaclass=YAMLGetter):      staff_channels: List[int]      staff_roles: List[int] +  class Keys(metaclass=YAMLGetter):      section = "keys" diff --git a/bot/converters.py b/bot/converters.py index 4deb59f87..4a0633951 100644 --- a/bot/converters.py +++ b/bot/converters.py @@ -181,8 +181,8 @@ class TagContentConverter(Converter):          return tag_content -class Duration(Converter): -    """Convert duration strings into UTC datetime.datetime objects.""" +class DurationDelta(Converter): +    """Convert duration strings into dateutil.relativedelta.relativedelta objects."""      duration_parser = re.compile(          r"((?P<years>\d+?) ?(years|year|Y|y) ?)?" @@ -194,9 +194,9 @@ class Duration(Converter):          r"((?P<seconds>\d+?) ?(seconds|second|S|s))?"      ) -    async def convert(self, ctx: Context, duration: str) -> datetime: +    async def convert(self, ctx: Context, duration: str) -> relativedelta:          """ -        Converts a `duration` string to a datetime object that's `duration` in the future. +        Converts a `duration` string to a relativedelta object.          The converter supports the following symbols for each unit of time:          - years: `Y`, `y`, `year`, `years` @@ -215,6 +215,20 @@ class Duration(Converter):          duration_dict = {unit: int(amount) for unit, amount in match.groupdict(default=0).items()}          delta = relativedelta(**duration_dict) + +        return delta + + +class Duration(DurationDelta): +    """Convert duration strings into UTC datetime.datetime objects.""" + +    async def convert(self, ctx: Context, duration: str) -> datetime: +        """ +        Converts a `duration` string to a datetime object that's `duration` in the future. + +        The converter supports the same symbols for each unit of time as its parent class. 
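As a rough illustration of what the two converters above produce (the input string is an arbitrary example), the parsed unit amounts feed straight into `dateutil`'s `relativedelta`, and `Duration` then anchors that delta to the current UTC time:

```python
from datetime import datetime

from dateutil.relativedelta import relativedelta

# "2h 30m" would be parsed by the regex above into {"hours": 2, "minutes": 30}
# (other units defaulting to 0), which DurationDelta turns into:
delta = relativedelta(hours=2, minutes=30)

# Duration then adds the delta to the current time to get an expiry datetime.
expiry = datetime.utcnow() + delta
print(delta, expiry)
```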
+        """ +        delta = await super().convert(ctx, duration)          now = datetime.utcnow()          try: @@ -316,6 +330,25 @@ def proxy_user(user_id: str) -> discord.Object:      return user +class UserMentionOrID(UserConverter): +    """ +    Converts to a `discord.User`, but only if a mention or userID is provided. + +    Unlike the default `UserConverter`, it does allow conversion from name, or name#descrim. + +    This is useful in cases where that lookup strategy would lead to ambiguity. +    """ + +    async def convert(self, ctx: Context, argument: str) -> discord.User: +        """Convert the `arg` to a `discord.User`.""" +        match = self._get_id_match(argument) or re.match(r'<@!?([0-9]+)>$', argument) + +        if match is not None: +            return await super().convert(ctx, argument) +        else: +            raise BadArgument(f"`{argument}` is not a User mention or a User ID.") + +  class FetchedUser(UserConverter):      """      Converts to a `discord.User` or, if it fails, a `discord.Object`. diff --git a/bot/pagination.py b/bot/pagination.py index 2aa3590ba..94c2d7c0c 100644 --- a/bot/pagination.py +++ b/bot/pagination.py @@ -37,12 +37,19 @@ class LinePaginator(Paginator):          The suffix appended at the end of every page. e.g. three backticks.      * max_size: `int`          The maximum amount of codepoints allowed in a page. +    * scale_to_size: `int` +        The maximum amount of characters a single line can scale up to.      * max_lines: `int`          The maximum amount of lines allowed in a page.      """      def __init__( -        self, prefix: str = '```', suffix: str = '```', max_size: int = 2000, max_lines: int = None +        self, +        prefix: str = '```', +        suffix: str = '```', +        max_size: int = 2000, +        scale_to_size: int = 2000, +        max_lines: t.Optional[int] = None      ) -> None:          """          This function overrides the Paginator.__init__ from inside discord.ext.commands. @@ -51,7 +58,21 @@ class LinePaginator(Paginator):          """          self.prefix = prefix          self.suffix = suffix + +        # Embeds that exceed 2048 characters will result in an HTTPException +        # (Discord API limit), so we've set a limit of 2000 +        if max_size > 2000: +            raise ValueError(f"max_size must be <= 2,000 characters. ({max_size} > 2000)") +          self.max_size = max_size - len(suffix) + +        if scale_to_size < max_size: +            raise ValueError(f"scale_to_size must be >= max_size. ({scale_to_size} < {max_size})") + +        if scale_to_size > 2000: +            raise ValueError(f"scale_to_size must be <= 2,000 characters. ({scale_to_size} > 2000)") + +        self.scale_to_size = scale_to_size - len(suffix)          self.max_lines = max_lines          self._current_page = [prefix]          self._linecount = 0 @@ -62,23 +83,38 @@ class LinePaginator(Paginator):          """          Adds a line to the current page. -        If the line exceeds the `self.max_size` then an exception is raised. +        If a line on a page exceeds `max_size` characters, then `max_size` will go up to +        `scale_to_size` for a single line before creating a new page for the overflow words. If it +        is still exceeded, the excess characters are stored and placed on the next pages unti +        there are none remaining (by word boundary). The line is truncated if `scale_to_size` is +        still exceeded after attempting to continue onto the next page. 
+ +        In the case that the page already contains one or more lines and the new lines would cause +        `max_size` to be exceeded, a new page is created. This is done in order to make a best +        effort to avoid breaking up single lines across pages, while keeping the total length of the +        page at a reasonable size.          This function overrides the `Paginator.add_line` from inside `discord.ext.commands`.          It overrides in order to allow us to configure the maximum number of lines per page.          """ -        if len(line) > self.max_size - len(self.prefix) - 2: -            raise RuntimeError('Line exceeds maximum page size %s' % (self.max_size - len(self.prefix) - 2)) - -        if self.max_lines is not None: -            if self._linecount >= self.max_lines: -                self._linecount = 0 -                self.close_page() - -            self._linecount += 1 -        if self._count + len(line) + 1 > self.max_size: -            self.close_page() +        remaining_words = None +        if len(line) > (max_chars := self.max_size - len(self.prefix) - 2): +            if len(line) > self.scale_to_size: +                line, remaining_words = self._split_remaining_words(line, max_chars) +                if len(line) > self.scale_to_size: +                    log.debug("Could not continue to next page, truncating line.") +                    line = line[:self.scale_to_size] + +        # Check if we should start a new page or continue the line on the current one +        if self.max_lines is not None and self._linecount >= self.max_lines: +            log.debug("max_lines exceeded, creating new page.") +            self._new_page() +        elif self._count + len(line) + 1 > self.max_size and self._linecount > 0: +            log.debug("max_size exceeded on page with lines, creating new page.") +            self._new_page() + +        self._linecount += 1          self._count += len(line) + 1          self._current_page.append(line) @@ -87,6 +123,65 @@ class LinePaginator(Paginator):              self._current_page.append('')              self._count += 1 +        # Start a new page if there were any overflow words +        if remaining_words: +            self._new_page() +            self.add_line(remaining_words) + +    def _new_page(self) -> None: +        """ +        Internal: start a new page for the paginator. + +        This closes the current page and resets the counters for the new page's line count and +        character count. +        """ +        self._linecount = 0 +        self._count = len(self.prefix) + 1 +        self.close_page() + +    def _split_remaining_words(self, line: str, max_chars: int) -> t.Tuple[str, t.Optional[str]]: +        """ +        Internal: split a line into two strings -- reduced_words and remaining_words. + +        reduced_words: the remaining words in `line`, after attempting to remove all words that +            exceed `max_chars` (rounding down to the nearest word boundary). + +        remaining_words: the words in `line` which exceed `max_chars`. This value is None if +            no words could be split from `line`. + +        If there are any remaining_words, an ellipses is appended to reduced_words and a +        continuation header is inserted before remaining_words to visually communicate the line +        continuation. + +        Return a tuple in the format (reduced_words, remaining_words). 
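A condensed, standalone sketch of the word-boundary split described above (not the cog's exact implementation, which follows below): fill the first chunk up to `max_chars`, then push the rest onto the next page behind a continuation header.

```python
from typing import Optional, Tuple

def split_at_word_boundary(line: str, max_chars: int) -> Tuple[str, Optional[str]]:
    """Simplified illustration of the _split_remaining_words behaviour."""
    kept, rest, count = [], [], 0
    for word in line.split(" "):
        if not rest and count + len(word) <= max_chars:
            kept.append(word)
            count += len(word) + 1  # account for the joining space
        else:
            rest.append(word)
    if not rest or not kept:  # nothing overflowed, or nothing could be kept
        return line, None
    return " ".join(kept) + "...", "(Continued)\n-----------\n" + " ".join(rest)

print(split_at_word_boundary("alpha bravo charlie delta echo", 12))
# ('alpha bravo...', '(Continued)\n-----------\ncharlie delta echo')
```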
+        """ +        reduced_words = [] +        remaining_words = [] + +        # "(Continued)" is used on a line by itself to indicate the continuation of last page +        continuation_header = "(Continued)\n-----------\n" +        reduced_char_count = 0 +        is_full = False + +        for word in line.split(" "): +            if not is_full: +                if len(word) + reduced_char_count <= max_chars: +                    reduced_words.append(word) +                    reduced_char_count += len(word) + 1 +                else: +                    # If reduced_words is empty, we were unable to split the words across pages +                    if not reduced_words: +                        return line, None +                    is_full = True +                    remaining_words.append(word) +            else: +                remaining_words.append(word) + +        return ( +            " ".join(reduced_words) + "..." if remaining_words else "", +            continuation_header + " ".join(remaining_words) if remaining_words else None +        ) +      @classmethod      async def paginate(          cls, @@ -97,6 +192,7 @@ class LinePaginator(Paginator):          suffix: str = "",          max_lines: t.Optional[int] = None,          max_size: int = 500, +        scale_to_size: int = 2000,          empty: bool = True,          restrict_to_user: User = None,          timeout: int = 300, @@ -142,7 +238,8 @@ class LinePaginator(Paginator):                  ))              ) -        paginator = cls(prefix=prefix, suffix=suffix, max_size=max_size, max_lines=max_lines) +        paginator = cls(prefix=prefix, suffix=suffix, max_size=max_size, max_lines=max_lines, +                        scale_to_size=scale_to_size)          current_page = 0          if not lines: diff --git a/bot/resources/tags/customcooldown.md b/bot/resources/tags/customcooldown.md new file mode 100644 index 000000000..ac7e70aee --- /dev/null +++ b/bot/resources/tags/customcooldown.md @@ -0,0 +1,20 @@ +**Cooldowns in discord.py** + +Cooldowns can be used in discord.py to rate-limit. In this example, we're using it in an on_message. + +```python +from discord.ext import commands + +message_cooldown = commands.CooldownMapping.from_cooldown(1.0, 60.0, commands.BucketType.user) + +async def on_message(message): +    bucket = message_cooldown.get_bucket(message) +    retry_after = bucket.update_rate_limit() +    if retry_after: +        await message.channel.send(f"Slow down! Try again in {retry_after} seconds.") +    else: +        await message.channel.send("Not ratelimited!") +``` + +`from_cooldown` takes the amount of `update_rate_limit()`s needed to trigger the cooldown, the time in which the cooldown is triggered, and a [`BucketType`](https://discordpy.readthedocs.io/en/latest/ext/commands/api.html#discord.discord.ext.commands.BucketType). diff --git a/bot/resources/tags/or-gotcha.md b/bot/resources/tags/or-gotcha.md index 00c2db1f8..d75a73d78 100644 --- a/bot/resources/tags/or-gotcha.md +++ b/bot/resources/tags/or-gotcha.md @@ -3,7 +3,7 @@ When checking if something is equal to one thing or another, you might think tha  if favorite_fruit == 'grapefruit' or 'lemon':      print("That's a weird favorite fruit to have.")  ``` -After all, that's how you would normally phrase it in plain English. In Python, however, you have to have _complete instructions on both sides of the logical operator_. +While this makes sense in English, it may not behave the way you would expect. 
In Python, you should have _[complete instructions on both sides of the logical operator](https://docs.python.org/3/reference/expressions.html#boolean-operations)_.  So, if you want to check if something is equal to one thing or another, there are two common ways:  ```py diff --git a/bot/resources/tags/range-len.md b/bot/resources/tags/range-len.md new file mode 100644 index 000000000..65665eccf --- /dev/null +++ b/bot/resources/tags/range-len.md @@ -0,0 +1,11 @@ +Iterating over `range(len(...))` is a common approach to accessing each item in an ordered collection. +```py +for i in range(len(my_list)): +    do_something(my_list[i]) +``` +The pythonic syntax is much simpler, and is guaranteed to produce elements in the same order: +```py +for item in my_list: +    do_something(item) +``` +Python has other solutions for cases when the index itself might be needed. To get the element at the same index from two or more lists, use [zip](https://docs.python.org/3/library/functions.html#zip). To get both the index and the element at that index, use [enumerate](https://docs.python.org/3/library/functions.html#enumerate). diff --git a/bot/utils/__init__.py b/bot/utils/__init__.py index c5a12d5e3..5a6e1811b 100644 --- a/bot/utils/__init__.py +++ b/bot/utils/__init__.py @@ -11,3 +11,8 @@ class CogABCMeta(CogMeta, ABCMeta):      """Metaclass for ABCs meant to be implemented as Cogs."""      pass + + +def pad_base64(data: str) -> str: +    """Return base64 `data` with padding characters to ensure its length is a multiple of 4.""" +    return data + "=" * (-len(data) % 4) diff --git a/bot/utils/messages.py b/bot/utils/messages.py index de8e186f3..670289941 100644 --- a/bot/utils/messages.py +++ b/bot/utils/messages.py @@ -1,14 +1,17 @@  import asyncio  import contextlib  import logging +import random +import re  from io import BytesIO  from typing import List, Optional, Sequence, Union -from discord import Client, Embed, File, Member, Message, Reaction, TextChannel, Webhook +from discord import Client, Colour, Embed, File, Member, Message, Reaction, TextChannel, Webhook  from discord.abc import Snowflake  from discord.errors import HTTPException +from discord.ext.commands import Context -from bot.constants import Emojis +from bot.constants import Emojis, NEGATIVE_REPLIES  log = logging.getLogger(__name__) @@ -86,7 +89,7 @@ async def send_attachments(                      else:                          await destination.send(                              file=attachment_file, -                            username=message.author.display_name, +                            username=sub_clyde(message.author.display_name),                              avatar_url=message.author.avatar_url                          )              elif link_large: @@ -97,7 +100,7 @@ async def send_attachments(              if link_large and e.status == 413:                  large.append(attachment)              else: -                log.warning(f"{failure_msg} with status {e.status}.") +                log.warning(f"{failure_msg} with status {e.status}.", exc_info=e)      if link_large and large:          desc = "\n".join(f"[{attachment.filename}]({attachment.url})" for attachment in large) @@ -109,8 +112,35 @@ async def send_attachments(          else:              await destination.send(                  embed=embed, -                username=message.author.display_name, +                username=sub_clyde(message.author.display_name),                  avatar_url=message.author.avatar_url              )      return urls + + 
+def sub_clyde(username: Optional[str]) -> Optional[str]: +    """ +    Replace "e"/"E" in any "clyde" in `username` with a Cyrillic "е"/"E" and return the new string. + +    Discord disallows "clyde" anywhere in the username for webhooks. It will return a 400. +    Return None only if `username` is None. +    """ +    def replace_e(match: re.Match) -> str: +        char = "е" if match[2] == "e" else "Е" +        return match[1] + char + +    if username: +        return re.sub(r"(clyd)(e)", replace_e, username, flags=re.I) +    else: +        return username  # Empty string or None + + +async def send_denial(ctx: Context, reason: str) -> None: +    """Send an embed denying the user with the given reason.""" +    embed = Embed() +    embed.colour = Colour.red() +    embed.title = random.choice(NEGATIVE_REPLIES) +    embed.description = reason + +    await ctx.send(embed=embed) diff --git a/bot/utils/redis_cache.py b/bot/utils/redis_cache.py index de80cee84..58cfe1df5 100644 --- a/bot/utils/redis_cache.py +++ b/bot/utils/redis_cache.py @@ -11,7 +11,7 @@ log = logging.getLogger(__name__)  # Type aliases  RedisKeyType = Union[str, int] -RedisValueType = Union[str, int, float] +RedisValueType = Union[str, int, float, bool]  RedisKeyOrValue = Union[RedisKeyType, RedisValueType]  # Prefix tuples @@ -20,6 +20,7 @@ _VALUE_PREFIXES = (      ("f|", float),      ("i|", int),      ("s|", str), +    ("b|", bool),  )  _KEY_PREFIXES = (      ("i|", int), @@ -47,8 +48,8 @@ class RedisCache:      behaves, and should be familiar to Python users. The biggest difference is that      all the public methods in this class are coroutines, and must be awaited. -    Because of limitations in Redis, this cache will only accept strings, integers and -    floats both for keys and values. +    Because of limitations in Redis, this cache will only accept strings and integers for keys, +    and strings, integers, floats and booleans for values.      Please note that this class MUST be created as a class attribute, and that that class      must also contain an attribute with an instance of our Bot. See `__get__` and `__set_name__` @@ -100,16 +101,7 @@ class RedisCache:      def _set_namespace(self, namespace: str) -> None:          """Try to set the namespace, but do not permit collisions.""" -        # We need a unique namespace, to prevent collisions. This loop -        # will try appending underscores to the end of the namespace until -        # it finds one that is unique. -        # -        # For example, if `john` and `john_`  are both taken, the namespace will -        # be `john__` at the end of this loop. -        while namespace in self._namespaces: -            namespace += "_" - -        log.trace(f"RedisCache setting namespace to {self._namespace}") +        log.trace(f"RedisCache setting namespace to {namespace}")          self._namespaces.append(namespace)          self._namespace = namespace @@ -117,8 +109,15 @@ class RedisCache:      def _to_typestring(key_or_value: RedisKeyOrValue, prefixes: _PrefixTuple) -> str:          """Turn a valid Redis type into a typestring."""          for prefix, _type in prefixes: -            if isinstance(key_or_value, _type): +            # Convert bools into integers before storing them. +            if type(key_or_value) is bool: +                bool_int = int(key_or_value) +                return f"{prefix}{bool_int}" + +            # isinstance is a bad idea here, because isintance(False, int) == True. 
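The comments in this hunk point at two easy-to-miss behaviours of Python's `bool`, which is why the cache uses `type(...) is ...` and a dedicated `b|` prefix. Both can be verified directly:

```python
# bool is a subclass of int, so isinstance() cannot tell the two apart:
print(isinstance(False, int))  # True
print(type(False) is int)      # False

# bool() on any non-empty string is True, so "False" must not be decoded with bool():
print(bool("False"))           # True
print(bool(int("0")))          # False -> why booleans are stored as "b|0" / "b|1"
```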
+            if type(key_or_value) is _type:                  return f"{prefix}{key_or_value}" +          raise TypeError(f"RedisCache._to_typestring only supports the following: {prefixes}.")      @staticmethod @@ -131,6 +130,13 @@ class RedisCache:          # Now we convert our unicode string back into the type it originally was.          for prefix, _type in prefixes:              if key_or_value.startswith(prefix): + +                # For booleans, we need special handling because bool("False") is True. +                if prefix == "b|": +                    value = key_or_value[len(prefix):] +                    return bool(int(value)) + +                # Otherwise we can just convert normally.                  return _type(key_or_value[len(prefix):])          raise TypeError(f"RedisCache._from_typestring only supports the following: {prefixes}.") diff --git a/bot/utils/scheduling.py b/bot/utils/scheduling.py index 8b778a093..03f31d78f 100644 --- a/bot/utils/scheduling.py +++ b/bot/utils/scheduling.py @@ -1,81 +1,126 @@  import asyncio  import contextlib +import inspect  import logging  import typing as t -from abc import abstractmethod +from datetime import datetime  from functools import partial -from bot.utils import CogABCMeta -log = logging.getLogger(__name__) +class Scheduler: +    """ +    Schedule the execution of coroutines and keep track of them. +    When instantiating a Scheduler, a name must be provided. This name is used to distinguish the +    instance's log messages from other instances. Using the name of the class or module containing +    the instance is suggested. -class Scheduler(metaclass=CogABCMeta): -    """Task scheduler.""" +    Coroutines can be scheduled immediately with `schedule` or in the future with `schedule_at` +    or `schedule_later`. A unique ID is required to be given in order to keep track of the +    resulting Tasks. Any scheduled task can be cancelled prematurely using `cancel` by providing +    the same ID used to schedule it.  The `in` operator is supported for checking if a task with a +    given ID is currently scheduled. -    def __init__(self): -        # Keep track of the child cog's name so the logs are clear. -        self.cog_name = self.__class__.__name__ +    Any exception raised in a scheduled task is logged when the task is done. +    """ -        self._scheduled_tasks: t.Dict[t.Hashable, asyncio.Task] = {} +    def __init__(self, name: str): +        self.name = name -    @abstractmethod -    async def _scheduled_task(self, task_object: t.Any) -> None: -        """ -        A coroutine which handles the scheduling. +        self._log = logging.getLogger(f"{__name__}.{name}") +        self._scheduled_tasks: t.Dict[t.Hashable, asyncio.Task] = {} -        This is added to the scheduled tasks, and should wait the task duration, execute the desired -        code, then clean up the task. +    def __contains__(self, task_id: t.Hashable) -> bool: +        """Return True if a task with the given `task_id` is currently scheduled.""" +        return task_id in self._scheduled_tasks -        For example, in Reminders this will wait for the reminder duration, send the reminder, -        then make a site API request to delete the reminder from the database. +    def schedule(self, task_id: t.Hashable, coroutine: t.Coroutine) -> None:          """ +        Schedule the execution of a `coroutine`. 
-    def schedule_task(self, task_id: t.Hashable, task_data: t.Any) -> None: +        If a task with `task_id` already exists, close `coroutine` instead of scheduling it. This +        prevents unawaited coroutine warnings. Don't pass a coroutine that'll be re-used elsewhere.          """ -        Schedules a task. +        self._log.trace(f"Scheduling task #{task_id}...") -        `task_data` is passed to the `Scheduler._scheduled_task()` coroutine. -        """ -        log.trace(f"{self.cog_name}: scheduling task #{task_id}...") +        msg = f"Cannot schedule an already started coroutine for #{task_id}" +        assert inspect.getcoroutinestate(coroutine) == "CORO_CREATED", msg          if task_id in self._scheduled_tasks: -            log.debug( -                f"{self.cog_name}: did not schedule task #{task_id}; task was already scheduled." -            ) +            self._log.debug(f"Did not schedule task #{task_id}; task was already scheduled.") +            coroutine.close()              return -        task = asyncio.create_task(self._scheduled_task(task_data)) +        task = asyncio.create_task(coroutine, name=f"{self.name}_{task_id}")          task.add_done_callback(partial(self._task_done_callback, task_id))          self._scheduled_tasks[task_id] = task -        log.debug(f"{self.cog_name}: scheduled task #{task_id} {id(task)}.") +        self._log.debug(f"Scheduled task #{task_id} {id(task)}.") + +    def schedule_at(self, time: datetime, task_id: t.Hashable, coroutine: t.Coroutine) -> None: +        """ +        Schedule `coroutine` to be executed at the given naïve UTC `time`. + +        If `time` is in the past, schedule `coroutine` immediately. + +        If a task with `task_id` already exists, close `coroutine` instead of scheduling it. This +        prevents unawaited coroutine warnings. Don't pass a coroutine that'll be re-used elsewhere. +        """ +        delay = (time - datetime.utcnow()).total_seconds() +        if delay > 0: +            coroutine = self._await_later(delay, task_id, coroutine) + +        self.schedule(task_id, coroutine) -    def cancel_task(self, task_id: t.Hashable, ignore_missing: bool = False) -> None: +    def schedule_later(self, delay: t.Union[int, float], task_id: t.Hashable, coroutine: t.Coroutine) -> None:          """ -        Unschedule the task identified by `task_id`. +        Schedule `coroutine` to be executed after the given `delay` number of seconds. -        If `ignore_missing` is True, a warning will not be sent if a task isn't found. +        If a task with `task_id` already exists, close `coroutine` instead of scheduling it. This +        prevents unawaited coroutine warnings. Don't pass a coroutine that'll be re-used elsewhere.          """ -        log.trace(f"{self.cog_name}: cancelling task #{task_id}...") -        task = self._scheduled_tasks.get(task_id) +        self.schedule(task_id, self._await_later(delay, task_id, coroutine)) -        if not task: -            if not ignore_missing: -                log.warning(f"{self.cog_name}: failed to unschedule {task_id} (no task found).") -            return +    def cancel(self, task_id: t.Hashable) -> None: +        """Unschedule the task identified by `task_id`. 
Log a warning if the task doesn't exist.""" +        self._log.trace(f"Cancelling task #{task_id}...") -        del self._scheduled_tasks[task_id] -        task.cancel() +        try: +            task = self._scheduled_tasks.pop(task_id) +        except KeyError: +            self._log.warning(f"Failed to unschedule {task_id} (no task found).") +        else: +            task.cancel() -        log.debug(f"{self.cog_name}: unscheduled task #{task_id} {id(task)}.") +            self._log.debug(f"Unscheduled task #{task_id} {id(task)}.")      def cancel_all(self) -> None:          """Unschedule all known tasks.""" -        log.debug(f"{self.cog_name}: unscheduling all tasks") +        self._log.debug("Unscheduling all tasks")          for task_id in self._scheduled_tasks.copy(): -            self.cancel_task(task_id, ignore_missing=True) +            self.cancel(task_id) + +    async def _await_later(self, delay: t.Union[int, float], task_id: t.Hashable, coroutine: t.Coroutine) -> None: +        """Await `coroutine` after the given `delay` number of seconds.""" +        try: +            self._log.trace(f"Waiting {delay} seconds before awaiting coroutine for #{task_id}.") +            await asyncio.sleep(delay) + +            # Use asyncio.shield to prevent the coroutine from cancelling itself. +            self._log.trace(f"Done waiting for #{task_id}; now awaiting the coroutine.") +            await asyncio.shield(coroutine) +        finally: +            # Close it to prevent unawaited coroutine warnings, +            # which would happen if the task was cancelled during the sleep. +            # Only close it if it's not been awaited yet. This check is important because the +            # coroutine may cancel this task, which would also trigger the finally block. +            state = inspect.getcoroutinestate(coroutine) +            if state == "CORO_CREATED": +                self._log.debug(f"Explicitly closing the coroutine for #{task_id}.") +                coroutine.close() +            else: +                self._log.debug(f"Finally block reached for #{task_id}; {state=}")      def _task_done_callback(self, task_id: t.Hashable, done_task: asyncio.Task) -> None:          """ @@ -84,24 +129,24 @@ class Scheduler(metaclass=CogABCMeta):          If `done_task` and the task associated with `task_id` are different, then the latter          will not be deleted. In this case, a new task was likely rescheduled with the same ID.          """ -        log.trace(f"{self.cog_name}: performing done callback for task #{task_id} {id(done_task)}.") +        self._log.trace(f"Performing done callback for task #{task_id} {id(done_task)}.")          scheduled_task = self._scheduled_tasks.get(task_id)          if scheduled_task and done_task is scheduled_task: -            # A task for the ID exists and its the same as the done task. +            # A task for the ID exists and is the same as the done task.              # Since this is the done callback, the task is already done so no need to cancel it. -            log.trace(f"{self.cog_name}: deleting task #{task_id} {id(done_task)}.") +            self._log.trace(f"Deleting task #{task_id} {id(done_task)}.")              del self._scheduled_tasks[task_id]          elif scheduled_task:              # A new task was likely rescheduled with the same ID. 
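To make the new `Scheduler` API concrete, here is a hypothetical usage sketch. `remind`, the task IDs, and the delays are invented for illustration; only the constructor and method signatures come from the class above.

```python
import asyncio
from datetime import datetime, timedelta

from bot.utils.scheduling import Scheduler

async def remind(user_id: int) -> None:
    # Hypothetical payload coroutine.
    print(f"Reminding {user_id}!")

async def main() -> None:
    scheduler = Scheduler("reminders")             # the name shows up in the log records
    scheduler.schedule_later(5, 42, remind(1234))  # run roughly 5 seconds from now
    scheduler.schedule_at(datetime.utcnow() + timedelta(hours=1), 43, remind(5678))

    print(42 in scheduler)  # True - __contains__ checks for a scheduled task ID
    scheduler.cancel(43)    # unschedule the second task before it ever runs

    await asyncio.sleep(6)  # give task #42 time to fire

asyncio.run(main())
```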
-            log.debug( -                f"{self.cog_name}: the scheduled task #{task_id} {id(scheduled_task)} " +            self._log.debug( +                f"The scheduled task #{task_id} {id(scheduled_task)} "                  f"and the done task {id(done_task)} differ."              )          elif not done_task.cancelled(): -            log.warning( -                f"{self.cog_name}: task #{task_id} not found while handling task {id(done_task)}! " +            self._log.warning( +                f"Task #{task_id} not found while handling task {id(done_task)}! "                  f"A task somehow got unscheduled improperly (i.e. deleted but not cancelled)."              ) @@ -109,7 +154,4 @@ class Scheduler(metaclass=CogABCMeta):              exception = done_task.exception()              # Log the exception if one exists.              if exception: -                log.error( -                    f"{self.cog_name}: error in task #{task_id} {id(done_task)}!", -                    exc_info=exception -                ) +                self._log.error(f"Error in task #{task_id} {id(done_task)}!", exc_info=exception) diff --git a/bot/utils/time.py b/bot/utils/time.py index 77060143c..47e49904b 100644 --- a/bot/utils/time.py +++ b/bot/utils/time.py @@ -20,7 +20,9 @@ def _stringify_time_unit(value: int, unit: str) -> str:      >>> _stringify_time_unit(0, "minutes")      "less than a minute"      """ -    if value == 1: +    if unit == "seconds" and value == 0: +        return "0 seconds" +    elif value == 1:          return f"{value} {unit[:-1]}"      elif value == 0:          return f"less than a {unit[:-1]}" diff --git a/bot/utils/webhooks.py b/bot/utils/webhooks.py new file mode 100644 index 000000000..66f82ec66 --- /dev/null +++ b/bot/utils/webhooks.py @@ -0,0 +1,34 @@ +import logging +from typing import Optional + +import discord +from discord import Embed + +from bot.utils.messages import sub_clyde + +log = logging.getLogger(__name__) + + +async def send_webhook( +        webhook: discord.Webhook, +        content: Optional[str] = None, +        username: Optional[str] = None, +        avatar_url: Optional[str] = None, +        embed: Optional[Embed] = None, +        wait: Optional[bool] = False +) -> discord.Message: +    """ +    Send a message using the provided webhook. + +    This uses sub_clyde() and tries for an HTTPException to ensure it doesn't crash. 
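Since both webhook helpers above rely on it, here is the clyde substitution in action. This condenses the `sub_clyde` helper added in `bot/utils/messages.py` above; the sample usernames are made up:

```python
import re

def sub_clyde(username):
    # Condensed from bot/utils/messages.py above: swap the Latin "e" in "clyde" for Cyrillic "е".
    def replace_e(match: re.Match) -> str:
        return match[1] + ("е" if match[2] == "e" else "Е")
    return re.sub(r"(clyd)(e)", replace_e, username, flags=re.I) if username else username

print(sub_clyde("Clyde the Great"))   # "Clydе the Great" - renders the same, but passes Discord's check
print(sub_clyde("clyde") == "clyde")  # False - the "е" is now U+0435 CYRILLIC SMALL LETTER IE
```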
+    """ +    try: +        return await webhook.send( +            content=content, +            username=sub_clyde(username), +            avatar_url=avatar_url, +            embed=embed, +            wait=wait, +        ) +    except discord.HTTPException: +        log.exception("Failed to send a message to the webhook!") diff --git a/config-default.yml b/config-default.yml index 3a1bdae54..ad6149f6f 100644 --- a/config-default.yml +++ b/config-default.yml @@ -38,6 +38,10 @@ style:          status_dnd:     "<:status_dnd:470326272082313216>"          status_offline: "<:status_offline:470326266537705472>" +        incident_actioned:      "<:incident_actioned:719645530128646266>" +        incident_unactioned:    "<:incident_unactioned:719645583245180960>" +        incident_investigating: "<:incident_investigating:719645658671480924>" +          failmail: "<:failmail:633660039931887616>"          trashcan: "<:trashcan:637136429717389331>" @@ -142,6 +146,7 @@ guild:          # Python Help: Available          how_to_get_help:    704250143020417084 +        cooldown:           720603994149486673          # Logs          attachment_log:     &ATTACH_LOG     649243850006855680 @@ -149,6 +154,7 @@ guild:          mod_log:            &MOD_LOG        282638479504965634          user_log:                           528976905546760203          voice_log:                          640292421988646961 +        dm_log:                             653713721625018428          # Off-topic          off_topic_0:    291284109232308226 @@ -166,12 +172,13 @@ guild:          admin_spam:         &ADMIN_SPAM     563594791770914816          defcon:             &DEFCON         464469101889454091          helpers:            &HELPERS        385474242440986624 +        incidents:                          714214212200562749 +        incidents_archive:                  720668923636351037          mods:               &MODS           305126844661760000          mod_alerts:         &MOD_ALERTS     473092532147060736          mod_spam:           &MOD_SPAM       620607373828030464          organisation:       &ORGANISATION   551789653284356126          staff_lounge:       &STAFF_LOUNGE   464905259261755392 -        incidents:                          714214212200562749          # Voice          admins_voice:       &ADMINS_VOICE   500734494840717332 @@ -244,16 +251,16 @@ guild:          - *HELPERS_ROLE      webhooks: -        talent_pool:                    569145364800602132 -        big_brother:                    569133704568373283 -        reddit:                         635408384794951680 -        duck_pond:                      637821475327311927 -        dev_log:                        680501655111729222 -        python_news:    &PYNEWS_WEBHOOK 704381182279942324 - +        big_brother:                        569133704568373283 +        dev_log:                            680501655111729222 +        dm_log:                             654567640664244225 +        duck_pond:                          637821475327311927 +        incidents_archive:                  720671599790915702 +        python_news:        &PYNEWS_WEBHOOK 704381182279942324 +        reddit:                             635408384794951680 +        talent_pool:                        569145364800602132  filter: -      # What do we filter?      filter_zalgo:       false      filter_invites:     true @@ -268,7 +275,8 @@ filter:      notify_user_domains:     false      # Filter configuration -    ping_everyone: true  # Ping @everyone when we send a mod-alert? 
+    ping_everyone:             true +    offensive_msg_delete_days: 7     # How many days before deleting an offensive message?      guild_invite_whitelist:          - 280033776820813825  # Functional Programming @@ -293,11 +301,22 @@ filter:          - 172018499005317120  # The Coding Den          - 666560367173828639  # PyWeek          - 702724176489873509  # Microsoft Python +        - 150662382874525696  # Microsoft Community          - 81384788765712384   # Discord API          - 613425648685547541  # Discord Developers          - 185590609631903755  # Blender Hub          - 420324994703163402  # /r/FlutterDev          - 488751051629920277  # Python Atlanta +        - 143867839282020352  # C# +        - 159039020565790721  # Django +        - 238666723824238602  # Programming Discussions +        - 433980600391696384  # JetBrains Community +        - 204621105720328193  # Raspberry Pi +        - 244230771232079873  # Programmers Hangout +        - 239433591950540801  # SpeakJS +        - 174075418410876928  # DevCord +        - 489222168727519232  # Unity +        - 494558898880118785  # Programmer Humor      domain_blacklist:          - pornhub.com @@ -329,6 +348,7 @@ filter:          - ssteam.site          - steamwalletgift.com          - discord.gift +        - lmgtfy.com      word_watchlist:          - goo+ks* diff --git a/tests/bot/cogs/moderation/test_incidents.py b/tests/bot/cogs/moderation/test_incidents.py new file mode 100644 index 000000000..435a1cd51 --- /dev/null +++ b/tests/bot/cogs/moderation/test_incidents.py @@ -0,0 +1,770 @@ +import asyncio +import enum +import logging +import typing as t +import unittest +from unittest.mock import AsyncMock, MagicMock, call, patch + +import aiohttp +import discord + +from bot.cogs.moderation import Incidents, incidents +from bot.constants import Colours +from tests.helpers import ( +    MockAsyncWebhook, +    MockAttachment, +    MockBot, +    MockMember, +    MockMessage, +    MockReaction, +    MockRole, +    MockTextChannel, +    MockUser, +) + + +class MockAsyncIterable: +    """ +    Helper for mocking asynchronous for loops. + +    It does not appear that the `unittest` library currently provides anything that would +    allow us to simply mock an async iterator, such as `discord.TextChannel.history`. + +    We therefore write our own helper to wrap a regular synchronous iterable, and feed +    its values via `__anext__` rather than `__next__`. + +    This class was written for the purposes of testing the `Incidents` cog - it may not +    be generic enough to be placed in the `tests.helpers` module. +    """ + +    def __init__(self, messages: t.Iterable): +        """Take a sync iterable to be wrapped.""" +        self.iter_messages = iter(messages) + +    def __aiter__(self): +        """Return `self` as we provide the `__anext__` method.""" +        return self + +    async def __anext__(self): +        """ +        Feed the next item, or raise `StopAsyncIteration`. + +        Since we're wrapping a sync iterator, it will communicate that it has been depleted +        by raising a `StopIteration`. The `async for` construct does not expect it, and we +        therefore need to substitute it for the appropriate exception type. 
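A runnable illustration of the pattern this test helper implements, using a stand-in class with invented names; the real `MockAsyncIterable` above does the same thing for mocked message histories:

```python
import asyncio

class AsyncIterableWrapper:
    """Stand-in for MockAsyncIterable: expose a sync iterable through the async-iterator protocol."""

    def __init__(self, items):
        self._items = iter(items)

    def __aiter__(self):
        return self

    async def __anext__(self):
        try:
            return next(self._items)
        except StopIteration:
            # `async for` expects StopAsyncIteration rather than StopIteration.
            raise StopAsyncIteration

async def main() -> None:
    async for item in AsyncIterableWrapper(["a", "b", "c"]):
        print(item)  # a, b, c

asyncio.run(main())
```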
+        """ +        try: +            return next(self.iter_messages) +        except StopIteration: +            raise StopAsyncIteration + + +class MockSignal(enum.Enum): +    A = "A" +    B = "B" + + +mock_404 = discord.NotFound( +    response=MagicMock(aiohttp.ClientResponse),  # Mock the erroneous response +    message="Not found", +) + + +class TestDownloadFile(unittest.IsolatedAsyncioTestCase): +    """Collection of tests for the `download_file` helper function.""" + +    async def test_download_file_success(self): +        """If `to_file` succeeds, function returns the acquired `discord.File`.""" +        file = MagicMock(discord.File, filename="bigbadlemon.jpg") +        attachment = MockAttachment(to_file=AsyncMock(return_value=file)) + +        acquired_file = await incidents.download_file(attachment) +        self.assertIs(file, acquired_file) + +    async def test_download_file_404(self): +        """If `to_file` encounters a 404, function handles the exception & returns None.""" +        attachment = MockAttachment(to_file=AsyncMock(side_effect=mock_404)) + +        acquired_file = await incidents.download_file(attachment) +        self.assertIsNone(acquired_file) + +    async def test_download_file_fail(self): +        """If `to_file` fails on a non-404 error, function logs the exception & returns None.""" +        arbitrary_error = discord.HTTPException(MagicMock(aiohttp.ClientResponse), "Arbitrary API error") +        attachment = MockAttachment(to_file=AsyncMock(side_effect=arbitrary_error)) + +        with self.assertLogs(logger=incidents.log, level=logging.ERROR): +            acquired_file = await incidents.download_file(attachment) + +        self.assertIsNone(acquired_file) + + +class TestMakeEmbed(unittest.IsolatedAsyncioTestCase): +    """Collection of tests for the `make_embed` helper function.""" + +    async def test_make_embed_actioned(self): +        """Embed is coloured green and footer contains 'Actioned' when `outcome=Signal.ACTIONED`.""" +        embed, file = await incidents.make_embed(MockMessage(), incidents.Signal.ACTIONED, MockMember()) + +        self.assertEqual(embed.colour.value, Colours.soft_green) +        self.assertIn("Actioned", embed.footer.text) + +    async def test_make_embed_not_actioned(self): +        """Embed is coloured red and footer contains 'Rejected' when `outcome=Signal.NOT_ACTIONED`.""" +        embed, file = await incidents.make_embed(MockMessage(), incidents.Signal.NOT_ACTIONED, MockMember()) + +        self.assertEqual(embed.colour.value, Colours.soft_red) +        self.assertIn("Rejected", embed.footer.text) + +    async def test_make_embed_content(self): +        """Incident content appears as embed description.""" +        incident = MockMessage(content="this is an incident") +        embed, file = await incidents.make_embed(incident, incidents.Signal.ACTIONED, MockMember()) + +        self.assertEqual(incident.content, embed.description) + +    async def test_make_embed_with_attachment_succeeds(self): +        """Incident's attachment is downloaded and displayed in the embed's image field.""" +        file = MagicMock(discord.File, filename="bigbadjoe.jpg") +        attachment = MockAttachment(filename="bigbadjoe.jpg") +        incident = MockMessage(content="this is an incident", attachments=[attachment]) + +        # Patch `download_file` to return our `file` +        with patch("bot.cogs.moderation.incidents.download_file", AsyncMock(return_value=file)): +            embed, returned_file = await 
incidents.make_embed(incident, incidents.Signal.ACTIONED, MockMember()) + +        self.assertIs(file, returned_file) +        self.assertEqual("attachment://bigbadjoe.jpg", embed.image.url) + +    async def test_make_embed_with_attachment_fails(self): +        """Incident's attachment fails to download, proxy url is linked instead.""" +        attachment = MockAttachment(proxy_url="discord.com/bigbadjoe.jpg") +        incident = MockMessage(content="this is an incident", attachments=[attachment]) + +        # Patch `download_file` to return None as if the download failed +        with patch("bot.cogs.moderation.incidents.download_file", AsyncMock(return_value=None)): +            embed, returned_file = await incidents.make_embed(incident, incidents.Signal.ACTIONED, MockMember()) + +        self.assertIsNone(returned_file) + +        # The author name field is simply expected to have something in it, we do not assert the message +        self.assertGreater(len(embed.author.name), 0) +        self.assertEqual(embed.author.url, "discord.com/bigbadjoe.jpg")  # However, it should link the exact url + + +@patch("bot.constants.Channels.incidents", 123) +class TestIsIncident(unittest.TestCase): +    """ +    Collection of tests for the `is_incident` helper function. + +    In `setUp`, we will create a mock message which should qualify as an incident. Each +    test case will then mutate this instance to make it **not** qualify, in various ways. + +    Notice that we patch the #incidents channel id globally for this class. +    """ + +    def setUp(self) -> None: +        """Prepare a mock message which should qualify as an incident.""" +        self.incident = MockMessage( +            channel=MockTextChannel(id=123), +            content="this is an incident", +            author=MockUser(bot=False), +            pinned=False, +        ) + +    def test_is_incident_true(self): +        """Message qualifies as an incident if unchanged.""" +        self.assertTrue(incidents.is_incident(self.incident)) + +    def check_false(self): +        """Assert that `self.incident` does **not** qualify as an incident.""" +        self.assertFalse(incidents.is_incident(self.incident)) + +    def test_is_incident_false_channel(self): +        """Message doesn't qualify if sent outside of #incidents.""" +        self.incident.channel = MockTextChannel(id=456) +        self.check_false() + +    def test_is_incident_false_content(self): +        """Message doesn't qualify if content begins with hash symbol.""" +        self.incident.content = "# this is a comment message" +        self.check_false() + +    def test_is_incident_false_author(self): +        """Message doesn't qualify if author is a bot.""" +        self.incident.author = MockUser(bot=True) +        self.check_false() + +    def test_is_incident_false_pinned(self): +        """Message doesn't qualify if it is pinned.""" +        self.incident.pinned = True +        self.check_false() + + +class TestOwnReactions(unittest.TestCase): +    """Assertions for the `own_reactions` function.""" + +    def test_own_reactions(self): +        """Only bot's own emoji are extracted from the input incident.""" +        reactions = ( +            MockReaction(emoji="A", me=True), +            MockReaction(emoji="B", me=True), +            MockReaction(emoji="C", me=False), +        ) +        message = MockMessage(reactions=reactions) +        self.assertSetEqual(incidents.own_reactions(message), {"A", "B"}) + + +@patch("bot.cogs.moderation.incidents.ALL_SIGNALS", 
{"A", "B"}) +class TestHasSignals(unittest.TestCase): +    """ +    Assertions for the `has_signals` function. + +    We patch `ALL_SIGNALS` globally. Each test function then patches `own_reactions` +    as appropriate. +    """ + +    def test_has_signals_true(self): +        """True when `own_reactions` returns all emoji in `ALL_SIGNALS`.""" +        message = MockMessage() +        own_reactions = MagicMock(return_value={"A", "B"}) + +        with patch("bot.cogs.moderation.incidents.own_reactions", own_reactions): +            self.assertTrue(incidents.has_signals(message)) + +    def test_has_signals_false(self): +        """False when `own_reactions` does not return all emoji in `ALL_SIGNALS`.""" +        message = MockMessage() +        own_reactions = MagicMock(return_value={"A", "C"}) + +        with patch("bot.cogs.moderation.incidents.own_reactions", own_reactions): +            self.assertFalse(incidents.has_signals(message)) + + +@patch("bot.cogs.moderation.incidents.Signal", MockSignal) +class TestAddSignals(unittest.IsolatedAsyncioTestCase): +    """ +    Assertions for the `add_signals` coroutine. + +    These are all fairly similar and could go into a single test function, but I found the +    patching & sub-testing fairly awkward in that case and decided to split them up +    to avoid unnecessary syntax noise. +    """ + +    def setUp(self): +        """Prepare a mock incident message for tests to use.""" +        self.incident = MockMessage() + +    @patch("bot.cogs.moderation.incidents.own_reactions", MagicMock(return_value=set())) +    async def test_add_signals_missing(self): +        """All emoji are added when none are present.""" +        await incidents.add_signals(self.incident) +        self.incident.add_reaction.assert_has_calls([call("A"), call("B")]) + +    @patch("bot.cogs.moderation.incidents.own_reactions", MagicMock(return_value={"A"})) +    async def test_add_signals_partial(self): +        """Only missing emoji are added when some are present.""" +        await incidents.add_signals(self.incident) +        self.incident.add_reaction.assert_has_calls([call("B")]) + +    @patch("bot.cogs.moderation.incidents.own_reactions", MagicMock(return_value={"A", "B"})) +    async def test_add_signals_present(self): +        """No emoji are added when all are present.""" +        await incidents.add_signals(self.incident) +        self.incident.add_reaction.assert_not_called() + + +class TestIncidents(unittest.IsolatedAsyncioTestCase): +    """ +    Tests for bound methods of the `Incidents` cog. + +    Use this as a base class for `Incidents` tests - it will prepare a fresh instance +    for each test function, but not make any assertions on its own. Tests can mutate +    the instance as they wish. +    """ + +    def setUp(self): +        """ +        Prepare a fresh `Incidents` instance for each test. + +        Note that this will not schedule `crawl_incidents` in the background, as everything +        is being mocked. The `crawl_task` attribute will end up being None. +        """ +        self.cog_instance = Incidents(MockBot()) + + +@patch("asyncio.sleep", AsyncMock())  # Prevent the coro from sleeping to speed up the test +class TestCrawlIncidents(TestIncidents): +    """ +    Tests for the `Incidents.crawl_incidents` coroutine. 
+
+    Apart from `test_crawl_incidents_waits_until_cache_ready`, all tests in this class
+    will patch the return values of `is_incident` and `has_signals` and then observe
+    whether the `AsyncMock` for `add_signals` was awaited or not.
+
+    The `add_signals` mock is added by each test separately to ensure it is clean (has not
+    been awaited by another test yet). The mock can be reset, but this appears to be the
+    cleaner way.
+
+    For each test, we inject a mock channel with a history of 1 message only (see: `setUp`).
+    """
+
+    def setUp(self):
+        """For each test, ensure `bot.get_channel` returns a channel with 1 arbitrary message."""
+        super().setUp()  # First ensure we get `cog_instance` from parent
+
+        incidents_history = MagicMock(return_value=MockAsyncIterable([MockMessage()]))
+        self.cog_instance.bot.get_channel = MagicMock(return_value=MockTextChannel(history=incidents_history))
+
+    async def test_crawl_incidents_waits_until_cache_ready(self):
+        """
+        The coroutine will await the `wait_until_guild_available` event.
+
+        Since this task is scheduled in the `__init__`, it is critical that it waits for the
+        cache to be ready, so that it can safely get the #incidents channel.
+        """
+        await self.cog_instance.crawl_incidents()
+        self.cog_instance.bot.wait_until_guild_available.assert_awaited()
+
+    @patch("bot.cogs.moderation.incidents.add_signals", AsyncMock())
+    @patch("bot.cogs.moderation.incidents.is_incident", MagicMock(return_value=False))  # Message doesn't qualify
+    @patch("bot.cogs.moderation.incidents.has_signals", MagicMock(return_value=False))
+    async def test_crawl_incidents_noop_if_is_not_incident(self):
+        """Signals are not added for a non-incident message."""
+        await self.cog_instance.crawl_incidents()
+        incidents.add_signals.assert_not_awaited()
+
+    @patch("bot.cogs.moderation.incidents.add_signals", AsyncMock())
+    @patch("bot.cogs.moderation.incidents.is_incident", MagicMock(return_value=True))  # Message qualifies
+    @patch("bot.cogs.moderation.incidents.has_signals", MagicMock(return_value=True))  # But already has signals
+    async def test_crawl_incidents_noop_if_message_already_has_signals(self):
+        """Signals are not added for messages which already have them."""
+        await self.cog_instance.crawl_incidents()
+        incidents.add_signals.assert_not_awaited()
+
+    @patch("bot.cogs.moderation.incidents.add_signals", AsyncMock())
+    @patch("bot.cogs.moderation.incidents.is_incident", MagicMock(return_value=True))  # Message qualifies
+    @patch("bot.cogs.moderation.incidents.has_signals", MagicMock(return_value=False))  # And doesn't have signals
+    async def test_crawl_incidents_add_signals_called(self):
+        """Message has signals added as it does not have them yet and qualifies as an incident."""
+        await self.cog_instance.crawl_incidents()
+        incidents.add_signals.assert_awaited_once()
+
+
+class TestArchive(TestIncidents):
+    """Tests for the `Incidents.archive` coroutine."""
+
+    async def test_archive_webhook_not_found(self):
+        """
+        Method recovers and returns False when the webhook is not found.
+
+        Implicitly, this also tests that the error is handled internally and doesn't
+        propagate out of the method, which is just as important.
+        """ +        self.cog_instance.bot.fetch_webhook = AsyncMock(side_effect=mock_404) +        self.assertFalse( +            await self.cog_instance.archive(incident=MockMessage(), outcome=MagicMock(), actioned_by=MockMember()) +        ) + +    async def test_archive_relays_incident(self): +        """ +        If webhook is found, method relays `incident` properly. + +        This test will assert that the fetched webhook's `send` method is fed the correct arguments, +        and that the `archive` method returns True. +        """ +        webhook = MockAsyncWebhook() +        self.cog_instance.bot.fetch_webhook = AsyncMock(return_value=webhook)  # Patch in our webhook + +        # Define our own `incident` to be archived +        incident = MockMessage( +            content="this is an incident", +            author=MockUser(name="author_name", avatar_url="author_avatar"), +            id=123, +        ) +        built_embed = MagicMock(discord.Embed, id=123)  # We patch `make_embed` to return this + +        with patch("bot.cogs.moderation.incidents.make_embed", AsyncMock(return_value=(built_embed, None))): +            archive_return = await self.cog_instance.archive(incident, MagicMock(value="A"), MockMember()) + +        # Now we check that the webhook was given the correct args, and that `archive` returned True +        webhook.send.assert_called_once_with( +            embed=built_embed, +            username="author_name", +            avatar_url="author_avatar", +            file=None, +        ) +        self.assertTrue(archive_return) + +    async def test_archive_clyde_username(self): +        """ +        The archive webhook username is cleansed using `sub_clyde`. + +        Discord will reject any webhook with "clyde" in the username field, as it impersonates +        the official Clyde bot. Since we do not control what the username will be (the incident +        author name is used), we must ensure the name is cleansed, otherwise the relay may fail. + +        This test assumes the username is passed as a kwarg. If this test fails, please review +        whether the passed argument is being retrieved correctly. +        """ +        webhook = MockAsyncWebhook() +        self.cog_instance.bot.fetch_webhook = AsyncMock(return_value=webhook) + +        message_from_clyde = MockMessage(author=MockUser(name="clyde the great")) +        await self.cog_instance.archive(message_from_clyde, MagicMock(incidents.Signal), MockMember()) + +        self.assertNotIn("clyde", webhook.send.call_args.kwargs["username"]) + + +class TestMakeConfirmationTask(TestIncidents): +    """ +    Tests for the `Incidents.make_confirmation_task` method. + +    Writing tests for this method is difficult, as it mostly just delegates the provided +    information elsewhere. There is very little internal logic. Whether our approach +    works conceptually is difficult to prove using unit tests. +    """ + +    def test_make_confirmation_task_check(self): +        """ +        The internal check will recognize the passed incident. + +        This is a little tricky - we first pass a message with a specific `id` in, and then +        retrieve the built check from the `call_args` of the `wait_for` method. This relies +        on the check being passed as a kwarg. + +        Once the check is retrieved, we assert that it gives True for our incident's `id`, +        and False for any other. + +        If this function begins to fail, first check that `created_check` is being retrieved +        correctly. 
It should be the function that is built locally in the tested method. +        """ +        self.cog_instance.make_confirmation_task(MockMessage(id=123)) + +        self.cog_instance.bot.wait_for.assert_called_once() +        created_check = self.cog_instance.bot.wait_for.call_args.kwargs["check"] + +        # The `message_id` matches the `id` of our incident +        self.assertTrue(created_check(payload=MagicMock(message_id=123))) + +        # This `message_id` does not match +        self.assertFalse(created_check(payload=MagicMock(message_id=0))) + + +@patch("bot.cogs.moderation.incidents.ALLOWED_ROLES", {1, 2}) +@patch("bot.cogs.moderation.incidents.Incidents.make_confirmation_task", AsyncMock())  # Generic awaitable +class TestProcessEvent(TestIncidents): +    """Tests for the `Incidents.process_event` coroutine.""" + +    async def test_process_event_bad_role(self): +        """The reaction is removed when the author lacks all allowed roles.""" +        incident = MockMessage() +        member = MockMember(roles=[MockRole(id=0)])  # Must have role 1 or 2 + +        await self.cog_instance.process_event("reaction", incident, member) +        incident.remove_reaction.assert_called_once_with("reaction", member) + +    async def test_process_event_bad_emoji(self): +        """ +        The reaction is removed when an invalid emoji is used. + +        This requires that we pass in a `member` with valid roles, as we need the role check +        to succeed. +        """ +        incident = MockMessage() +        member = MockMember(roles=[MockRole(id=1)])  # Member has allowed role + +        await self.cog_instance.process_event("invalid_signal", incident, member) +        incident.remove_reaction.assert_called_once_with("invalid_signal", member) + +    async def test_process_event_no_archive_on_investigating(self): +        """Message is not archived on `Signal.INVESTIGATING`.""" +        with patch("bot.cogs.moderation.incidents.Incidents.archive", AsyncMock()) as mocked_archive: +            await self.cog_instance.process_event( +                reaction=incidents.Signal.INVESTIGATING.value, +                incident=MockMessage(), +                member=MockMember(roles=[MockRole(id=1)]), +            ) + +        mocked_archive.assert_not_called() + +    async def test_process_event_no_delete_if_archive_fails(self): +        """ +        Original message is not deleted when `Incidents.archive` returns False. + +        This is the way of signaling that the relay failed, and we should not remove the original, +        as that would result in losing the incident record. 
+        """ +        incident = MockMessage() + +        with patch("bot.cogs.moderation.incidents.Incidents.archive", AsyncMock(return_value=False)): +            await self.cog_instance.process_event( +                reaction=incidents.Signal.ACTIONED.value, +                incident=incident, +                member=MockMember(roles=[MockRole(id=1)]) +            ) + +        incident.delete.assert_not_called() + +    async def test_process_event_confirmation_task_is_awaited(self): +        """Task given by `Incidents.make_confirmation_task` is awaited before method exits.""" +        mock_task = AsyncMock() + +        with patch("bot.cogs.moderation.incidents.Incidents.make_confirmation_task", mock_task): +            await self.cog_instance.process_event( +                reaction=incidents.Signal.ACTIONED.value, +                incident=MockMessage(), +                member=MockMember(roles=[MockRole(id=1)]) +            ) + +        mock_task.assert_awaited() + +    async def test_process_event_confirmation_task_timeout_is_handled(self): +        """ +        Confirmation task `asyncio.TimeoutError` is handled gracefully. + +        We have `make_confirmation_task` return a mock with a side effect, and then catch the +        exception should it propagate out of `process_event`. This is so that we can then manually +        fail the test with a more informative message than just the plain traceback. +        """ +        mock_task = AsyncMock(side_effect=asyncio.TimeoutError()) + +        try: +            with patch("bot.cogs.moderation.incidents.Incidents.make_confirmation_task", mock_task): +                await self.cog_instance.process_event( +                    reaction=incidents.Signal.ACTIONED.value, +                    incident=MockMessage(), +                    member=MockMember(roles=[MockRole(id=1)]) +                ) +        except asyncio.TimeoutError: +            self.fail("TimeoutError was not handled gracefully, and propagated out of `process_event`!") + + +class TestResolveMessage(TestIncidents): +    """Tests for the `Incidents.resolve_message` coroutine.""" + +    async def test_resolve_message_pass_message_id(self): +        """Method will call `_get_message` with the passed `message_id`.""" +        await self.cog_instance.resolve_message(123) +        self.cog_instance.bot._connection._get_message.assert_called_once_with(123) + +    async def test_resolve_message_in_cache(self): +        """ +        No API call is made if the queried message exists in the cache. + +        We mock the `_get_message` return value regardless of input. Whether it finds the message +        internally is considered d.py's responsibility, not ours. +        """ +        cached_message = MockMessage(id=123) +        self.cog_instance.bot._connection._get_message = MagicMock(return_value=cached_message) + +        return_value = await self.cog_instance.resolve_message(123) + +        self.assertIs(return_value, cached_message) +        self.cog_instance.bot.get_channel.assert_not_called()  # The `fetch_message` line was never hit + +    async def test_resolve_message_not_in_cache(self): +        """ +        The message is retrieved from the API if it isn't cached. + +        This is desired behaviour for messages which exist, but were sent before the bot's +        current session. 
+        """ +        self.cog_instance.bot._connection._get_message = MagicMock(return_value=None)  # Cache returns None + +        # API returns our message +        uncached_message = MockMessage() +        fetch_message = AsyncMock(return_value=uncached_message) +        self.cog_instance.bot.get_channel = MagicMock(return_value=MockTextChannel(fetch_message=fetch_message)) + +        retrieved_message = await self.cog_instance.resolve_message(123) +        self.assertIs(retrieved_message, uncached_message) + +    async def test_resolve_message_doesnt_exist(self): +        """ +        If the API returns a 404, the function handles it gracefully and returns None. + +        This is an edge-case happening with racing events - event A will relay the message +        to the archive and delete the original. Once event B acquires the `event_lock`, +        it will not find the message in the cache, and will ask the API. +        """ +        self.cog_instance.bot._connection._get_message = MagicMock(return_value=None)  # Cache returns None + +        fetch_message = AsyncMock(side_effect=mock_404) +        self.cog_instance.bot.get_channel = MagicMock(return_value=MockTextChannel(fetch_message=fetch_message)) + +        self.assertIsNone(await self.cog_instance.resolve_message(123)) + +    async def test_resolve_message_fetch_fails(self): +        """ +        Non-404 errors are handled, logged & None is returned. + +        In contrast with a 404, this should make an error-level log. We assert that at least +        one such log was made - we do not make any assertions about the log's message. +        """ +        self.cog_instance.bot._connection._get_message = MagicMock(return_value=None)  # Cache returns None + +        arbitrary_error = discord.HTTPException( +            response=MagicMock(aiohttp.ClientResponse), +            message="Arbitrary error", +        ) +        fetch_message = AsyncMock(side_effect=arbitrary_error) +        self.cog_instance.bot.get_channel = MagicMock(return_value=MockTextChannel(fetch_message=fetch_message)) + +        with self.assertLogs(logger=incidents.log, level=logging.ERROR): +            self.assertIsNone(await self.cog_instance.resolve_message(123)) + + +@patch("bot.constants.Channels.incidents", 123) +class TestOnRawReactionAdd(TestIncidents): +    """ +    Tests for the `Incidents.on_raw_reaction_add` listener. + +    Writing tests for this listener comes with additional complexity due to the listener +    awaiting the `crawl_task` task. See `asyncSetUp` for further details, which attempts +    to make unit testing this function possible. +    """ + +    def setUp(self): +        """ +        Prepare & assign `payload` attribute. + +        This attribute represents an *ideal* payload which will not be rejected by the +        listener. As each test will receive a fresh instance, it can be mutated to +        observe how the listener's behaviour changes with different attributes on +        the passed payload. +        """ +        super().setUp()  # Ensure `cog_instance` is assigned + +        self.payload = MagicMock( +            discord.RawReactionActionEvent, +            channel_id=123,  # Patched at class level +            message_id=456, +            member=MockMember(bot=False), +            emoji="reaction", +        ) + +    async def asyncSetUp(self):  # noqa: N802 +        """ +        Prepare an empty task and assign it as `crawl_task`. 
+ +        It appears that the `unittest` framework does not provide anything for mocking +        asyncio tasks. An `AsyncMock` instance can be called and then awaited, however, +        it does not provide the `done` method or any other parts of the `asyncio.Task` +        interface. + +        Although we do not need to make any assertions about the task itself while +        testing the listener, the code will still await it and call the `done` method, +        and so we must inject something that will not fail on either action. + +        Note that this is done in an `asyncSetUp`, which runs after `setUp`. +        The justification is that creating an actual task requires the event +        loop to be ready, which is not the case in the `setUp`. +        """ +        mock_task = asyncio.create_task(AsyncMock()())  # Mock async func, then a coro +        self.cog_instance.crawl_task = mock_task + +    async def test_on_raw_reaction_add_wrong_channel(self): +        """ +        Events outside of #incidents will be ignored. + +        We check this by asserting that `resolve_message` was never queried. +        """ +        self.payload.channel_id = 0 +        self.cog_instance.resolve_message = AsyncMock() + +        await self.cog_instance.on_raw_reaction_add(self.payload) +        self.cog_instance.resolve_message.assert_not_called() + +    async def test_on_raw_reaction_add_user_is_bot(self): +        """ +        Events dispatched by bot accounts will be ignored. + +        We check this by asserting that `resolve_message` was never queried. +        """ +        self.payload.member = MockMember(bot=True) +        self.cog_instance.resolve_message = AsyncMock() + +        await self.cog_instance.on_raw_reaction_add(self.payload) +        self.cog_instance.resolve_message.assert_not_called() + +    async def test_on_raw_reaction_add_message_doesnt_exist(self): +        """ +        Listener gracefully handles the case where `resolve_message` gives None. + +        We check this by asserting that `process_event` was never called. +        """ +        self.cog_instance.process_event = AsyncMock() +        self.cog_instance.resolve_message = AsyncMock(return_value=None) + +        await self.cog_instance.on_raw_reaction_add(self.payload) +        self.cog_instance.process_event.assert_not_called() + +    async def test_on_raw_reaction_add_message_is_not_an_incident(self): +        """ +        The event won't be processed if the related message is not an incident. + +        This is an edge-case that can happen if someone manually leaves a reaction +        on a pinned message, or a comment. + +        We check this by asserting that `process_event` was never called. +        """ +        self.cog_instance.process_event = AsyncMock() +        self.cog_instance.resolve_message = AsyncMock(return_value=MockMessage()) + +        with patch("bot.cogs.moderation.incidents.is_incident", MagicMock(return_value=False)): +            await self.cog_instance.on_raw_reaction_add(self.payload) + +        self.cog_instance.process_event.assert_not_called() + +    async def test_on_raw_reaction_add_valid_event_is_processed(self): +        """ +        If the reaction event is valid, it is passed to `process_event`. 
+
+        This is the case when everything goes right:
+            * The reaction was placed in #incidents, and not by a bot
+            * The message was found successfully
+            * The message qualifies as an incident
+
+        Additionally, we check that all arguments were passed as expected.
+        """
+        incident = MockMessage(id=1)
+
+        self.cog_instance.process_event = AsyncMock()
+        self.cog_instance.resolve_message = AsyncMock(return_value=incident)
+
+        with patch("bot.cogs.moderation.incidents.is_incident", MagicMock(return_value=True)):
+            await self.cog_instance.on_raw_reaction_add(self.payload)
+
+        self.cog_instance.process_event.assert_called_with(
+            "reaction",  # Defined in `self.payload`
+            incident,
+            self.payload.member,
+        )
+
+
+class TestOnMessage(TestIncidents):
+    """
+    Tests for the `Incidents.on_message` listener.
+
+    Notice the decorators mocking the `is_incident` return value. The `is_incident`
+    function is tested in `TestIsIncident` - here we do not worry about it.
+    """
+
+    @patch("bot.cogs.moderation.incidents.is_incident", MagicMock(return_value=True))
+    async def test_on_message_incident(self):
+        """Messages qualifying as incidents are passed to `add_signals`."""
+        incident = MockMessage()
+
+        with patch("bot.cogs.moderation.incidents.add_signals", AsyncMock()) as mock_add_signals:
+            await self.cog_instance.on_message(incident)
+
+        mock_add_signals.assert_called_once_with(incident)
+
+    @patch("bot.cogs.moderation.incidents.is_incident", MagicMock(return_value=False))
+    async def test_on_message_non_incident(self):
+        """Messages not qualifying as incidents are ignored."""
+        with patch("bot.cogs.moderation.incidents.add_signals", AsyncMock()) as mock_add_signals:
+            await self.cog_instance.on_message(MockMessage())
+
+        mock_add_signals.assert_not_called()
diff --git a/tests/bot/cogs/moderation/test_silence.py b/tests/bot/cogs/moderation/test_silence.py
index 3fd149f04..ab3d0742a 100644
--- a/tests/bot/cogs/moderation/test_silence.py
+++ b/tests/bot/cogs/moderation/test_silence.py
@@ -127,10 +127,20 @@ class SilenceTests(unittest.IsolatedAsyncioTestCase):
             self.ctx.reset_mock()
     async def test_unsilence_sent_correct_discord_message(self):
-        """Proper reply after a successful unsilence."""
-        with mock.patch.object(self.cog, "_unsilence", return_value=True):
-            await self.cog.unsilence.callback(self.cog, self.ctx)
-            self.ctx.send.assert_called_once_with(f"{Emojis.check_mark} unsilenced current channel.")
+        """Check that the proper message is sent when unsilencing a channel."""
+        test_cases = (
+            (True, f"{Emojis.check_mark} unsilenced current channel."),
+            (False, f"{Emojis.cross_mark} current channel was not silenced.")
+        )
+        for _unsilence_patch_return, result_message in test_cases:
+            with self.subTest(
+                starting_silenced_state=_unsilence_patch_return,
+                result_message=result_message
+            ):
+                with mock.patch.object(self.cog, "_unsilence", return_value=_unsilence_patch_return):
+                    await self.cog.unsilence.callback(self.cog, self.ctx)
+                    self.ctx.send.assert_called_once_with(result_message)
+            self.ctx.reset_mock()
     async def test_silence_private_for_false(self):
  """Permissions are not set and `False` is returned in an already silenced channel.""" diff --git a/tests/bot/cogs/sync/test_cog.py b/tests/bot/cogs/sync/test_cog.py index 14fd909c4..120bc991d 100644 --- a/tests/bot/cogs/sync/test_cog.py +++ b/tests/bot/cogs/sync/test_cog.py @@ -131,6 +131,15 @@ class SyncCogListenerTests(SyncCogTestCase):          super().setUp()          self.cog.patch_user = mock.AsyncMock(spec_set=self.cog.patch_user) +        self.guild_id_patcher = mock.patch("bot.cogs.sync.cog.constants.Guild.id", 5) +        self.guild_id = self.guild_id_patcher.start() + +        self.guild = helpers.MockGuild(id=self.guild_id) +        self.other_guild = helpers.MockGuild(id=0) + +    def tearDown(self): +        self.guild_id_patcher.stop() +      async def test_sync_cog_on_guild_role_create(self):          """A POST request should be sent with the new role's data."""          self.assertTrue(self.cog.on_guild_role_create.__cog_listener__) @@ -142,20 +151,32 @@ class SyncCogListenerTests(SyncCogTestCase):              "permissions": 8,              "position": 23,          } -        role = helpers.MockRole(**role_data) +        role = helpers.MockRole(**role_data, guild=self.guild)          await self.cog.on_guild_role_create(role)          self.bot.api_client.post.assert_called_once_with("bot/roles", json=role_data) +    async def test_sync_cog_on_guild_role_create_ignores_guilds(self): +        """Events from other guilds should be ignored.""" +        role = helpers.MockRole(guild=self.other_guild) +        await self.cog.on_guild_role_create(role) +        self.bot.api_client.post.assert_not_awaited() +      async def test_sync_cog_on_guild_role_delete(self):          """A DELETE request should be sent."""          self.assertTrue(self.cog.on_guild_role_delete.__cog_listener__) -        role = helpers.MockRole(id=99) +        role = helpers.MockRole(id=99, guild=self.guild)          await self.cog.on_guild_role_delete(role)          self.bot.api_client.delete.assert_called_once_with("bot/roles/99") +    async def test_sync_cog_on_guild_role_delete_ignores_guilds(self): +        """Events from other guilds should be ignored.""" +        role = helpers.MockRole(guild=self.other_guild) +        await self.cog.on_guild_role_delete(role) +        self.bot.api_client.delete.assert_not_awaited() +      async def test_sync_cog_on_guild_role_update(self):          """A PUT request should be sent if the colour, name, permissions, or position changes."""          self.assertTrue(self.cog.on_guild_role_update.__cog_listener__) @@ -180,8 +201,8 @@ class SyncCogListenerTests(SyncCogTestCase):                      after_role_data = role_data.copy()                      after_role_data[attribute] = 876 -                    before_role = helpers.MockRole(**role_data) -                    after_role = helpers.MockRole(**after_role_data) +                    before_role = helpers.MockRole(**role_data, guild=self.guild) +                    after_role = helpers.MockRole(**after_role_data, guild=self.guild)                      await self.cog.on_guild_role_update(before_role, after_role) @@ -193,31 +214,43 @@ class SyncCogListenerTests(SyncCogTestCase):                      else:                          self.bot.api_client.put.assert_not_called() +    async def test_sync_cog_on_guild_role_update_ignores_guilds(self): +        """Events from other guilds should be ignored.""" +        role = helpers.MockRole(guild=self.other_guild) +        await self.cog.on_guild_role_update(role, role) +     
   self.bot.api_client.put.assert_not_awaited() +      async def test_sync_cog_on_member_remove(self): -        """Member should patched to set in_guild as False.""" +        """Member should be patched to set in_guild as False."""          self.assertTrue(self.cog.on_member_remove.__cog_listener__) -        member = helpers.MockMember() +        member = helpers.MockMember(guild=self.guild)          await self.cog.on_member_remove(member)          self.cog.patch_user.assert_called_once_with(              member.id, -            updated_information={"in_guild": False} +            json={"in_guild": False}          ) +    async def test_sync_cog_on_member_remove_ignores_guilds(self): +        """Events from other guilds should be ignored.""" +        member = helpers.MockMember(guild=self.other_guild) +        await self.cog.on_member_remove(member) +        self.cog.patch_user.assert_not_awaited() +      async def test_sync_cog_on_member_update_roles(self):          """Members should be patched if their roles have changed."""          self.assertTrue(self.cog.on_member_update.__cog_listener__)          # Roles are intentionally unsorted.          before_roles = [helpers.MockRole(id=12), helpers.MockRole(id=30), helpers.MockRole(id=20)] -        before_member = helpers.MockMember(roles=before_roles) -        after_member = helpers.MockMember(roles=before_roles[1:]) +        before_member = helpers.MockMember(roles=before_roles, guild=self.guild) +        after_member = helpers.MockMember(roles=before_roles[1:], guild=self.guild)          await self.cog.on_member_update(before_member, after_member)          data = {"roles": sorted(role.id for role in after_member.roles)} -        self.cog.patch_user.assert_called_once_with(after_member.id, updated_information=data) +        self.cog.patch_user.assert_called_once_with(after_member.id, json=data)      async def test_sync_cog_on_member_update_other(self):          """Members should not be patched if other attributes have changed.""" @@ -233,13 +266,19 @@ class SyncCogListenerTests(SyncCogTestCase):              with self.subTest(attribute=attribute):                  self.cog.patch_user.reset_mock() -                before_member = helpers.MockMember(**{attribute: old_value}) -                after_member = helpers.MockMember(**{attribute: new_value}) +                before_member = helpers.MockMember(**{attribute: old_value}, guild=self.guild) +                after_member = helpers.MockMember(**{attribute: new_value}, guild=self.guild)                  await self.cog.on_member_update(before_member, after_member)                  self.cog.patch_user.assert_not_called() +    async def test_sync_cog_on_member_update_ignores_guilds(self): +        """Events from other guilds should be ignored.""" +        member = helpers.MockMember(guild=self.other_guild) +        await self.cog.on_member_update(member, member) +        self.cog.patch_user.assert_not_awaited() +      async def test_sync_cog_on_user_update(self):          """A user should be patched only if the name, discriminator, or avatar changes."""          self.assertTrue(self.cog.on_user_update.__cog_listener__) @@ -272,12 +311,15 @@ class SyncCogListenerTests(SyncCogTestCase):                      # Don't care if *all* keys are present; only the changed one is required                      call_args = self.cog.patch_user.call_args -                    self.assertEqual(call_args[0][0], after_user.id) -                    self.assertIn("updated_information", call_args[1]) +                 
   self.assertEqual(call_args.args[0], after_user.id) +                    self.assertIn("json", call_args.kwargs) + +                    self.assertIn("ignore_404", call_args.kwargs) +                    self.assertTrue(call_args.kwargs["ignore_404"]) -                    updated_information = call_args[1]["updated_information"] -                    self.assertIn(api_field, updated_information) -                    self.assertEqual(updated_information[api_field], api_value) +                    json = call_args.kwargs["json"] +                    self.assertIn(api_field, json) +                    self.assertEqual(json[api_field], api_value)                  else:                      self.cog.patch_user.assert_not_called() @@ -290,6 +332,7 @@ class SyncCogListenerTests(SyncCogTestCase):          member = helpers.MockMember(              discriminator="1234",              roles=[helpers.MockRole(id=22), helpers.MockRole(id=12)], +            guild=self.guild,          )          data = { @@ -334,6 +377,13 @@ class SyncCogListenerTests(SyncCogTestCase):          self.bot.api_client.post.assert_not_called() +    async def test_sync_cog_on_member_join_ignores_guilds(self): +        """Events from other guilds should be ignored.""" +        member = helpers.MockMember(guild=self.other_guild) +        await self.cog.on_member_join(member) +        self.bot.api_client.post.assert_not_awaited() +        self.bot.api_client.put.assert_not_awaited() +  class SyncCogCommandTests(SyncCogTestCase, CommandTestCase):      """Tests for the commands in the Sync cog.""" diff --git a/tests/bot/cogs/test_duck_pond.py b/tests/bot/cogs/test_duck_pond.py index a8c0107c6..cfe10aebf 100644 --- a/tests/bot/cogs/test_duck_pond.py +++ b/tests/bot/cogs/test_duck_pond.py @@ -129,38 +129,6 @@ class DuckPondTests(base.LoggingTestsMixin, unittest.IsolatedAsyncioTestCase):              ):                  self.assertEqual(expected_return, actual_return) -    def test_send_webhook_correctly_passes_on_arguments(self): -        """The `send_webhook` method should pass the arguments to the webhook correctly.""" -        self.cog.webhook = helpers.MockAsyncWebhook() - -        content = "fake content" -        username = "fake username" -        avatar_url = "fake avatar_url" -        embed = "fake embed" - -        asyncio.run(self.cog.send_webhook(content, username, avatar_url, embed)) - -        self.cog.webhook.send.assert_called_once_with( -            content=content, -            username=username, -            avatar_url=avatar_url, -            embed=embed -        ) - -    def test_send_webhook_logs_when_sending_message_fails(self): -        """The `send_webhook` method should catch a `discord.HTTPException` and log accordingly.""" -        self.cog.webhook = helpers.MockAsyncWebhook() -        self.cog.webhook.send.side_effect = discord.HTTPException(response=MagicMock(), message="Something failed.") - -        log = logging.getLogger('bot.cogs.duck_pond') -        with self.assertLogs(logger=log, level=logging.ERROR) as log_watcher: -            asyncio.run(self.cog.send_webhook()) - -        self.assertEqual(len(log_watcher.records), 1) - -        record = log_watcher.records[0] -        self.assertEqual(record.levelno, logging.ERROR) -      def _get_reaction(          self,          emoji: typing.Union[str, helpers.MockEmoji], @@ -280,16 +248,20 @@ class DuckPondTests(base.LoggingTestsMixin, unittest.IsolatedAsyncioTestCase):      async def test_relay_message_correctly_relays_content_and_attachments(self):       
   """The `relay_message` method should correctly relay message content and attachments.""" -        send_webhook_path = f"{MODULE_PATH}.DuckPond.send_webhook" +        send_webhook_path = f"{MODULE_PATH}.send_webhook"          send_attachments_path = f"{MODULE_PATH}.send_attachments" +        author = MagicMock( +            display_name="x", +            avatar_url="https://" +        )          self.cog.webhook = helpers.MockAsyncWebhook()          test_values = ( -            (helpers.MockMessage(clean_content="", attachments=[]), False, False), -            (helpers.MockMessage(clean_content="message", attachments=[]), True, False), -            (helpers.MockMessage(clean_content="", attachments=["attachment"]), False, True), -            (helpers.MockMessage(clean_content="message", attachments=["attachment"]), True, True), +            (helpers.MockMessage(author=author, clean_content="", attachments=[]), False, False), +            (helpers.MockMessage(author=author, clean_content="message", attachments=[]), True, False), +            (helpers.MockMessage(author=author, clean_content="", attachments=["attachment"]), False, True), +            (helpers.MockMessage(author=author, clean_content="message", attachments=["attachment"]), True, True),          )          for message, expect_webhook_call, expect_attachment_call in test_values: @@ -314,14 +286,14 @@ class DuckPondTests(base.LoggingTestsMixin, unittest.IsolatedAsyncioTestCase):          for side_effect in side_effects:  # pragma: no cover              send_attachments.side_effect = side_effect -            with patch(f"{MODULE_PATH}.DuckPond.send_webhook", new_callable=AsyncMock) as send_webhook: +            with patch(f"{MODULE_PATH}.send_webhook", new_callable=AsyncMock) as send_webhook:                  with self.subTest(side_effect=type(side_effect).__name__):                      with self.assertNotLogs(logger=log, level=logging.ERROR):                          await self.cog.relay_message(message)                      self.assertEqual(send_webhook.call_count, 2) -    @patch(f"{MODULE_PATH}.DuckPond.send_webhook", new_callable=AsyncMock) +    @patch(f"{MODULE_PATH}.send_webhook", new_callable=AsyncMock)      @patch(f"{MODULE_PATH}.send_attachments", new_callable=AsyncMock)      async def test_relay_message_handles_attachment_http_error(self, send_attachments, send_webhook):          """The `relay_message` method should handle irretrievable attachments.""" @@ -337,6 +309,7 @@ class DuckPondTests(base.LoggingTestsMixin, unittest.IsolatedAsyncioTestCase):                  await self.cog.relay_message(message)              send_webhook.assert_called_once_with( +                webhook=self.cog.webhook,                  content=message.clean_content,                  username=message.author.display_name,                  avatar_url=message.author.avatar_url diff --git a/tests/bot/cogs/test_logging.py b/tests/bot/cogs/test_logging.py new file mode 100644 index 000000000..8a18fdcd6 --- /dev/null +++ b/tests/bot/cogs/test_logging.py @@ -0,0 +1,32 @@ +import unittest +from unittest.mock import patch + +from bot import constants +from bot.cogs.logging import Logging +from tests.helpers import MockBot, MockTextChannel + + +class LoggingTests(unittest.IsolatedAsyncioTestCase): +    """Test cases for connected login.""" + +    def setUp(self): +        self.bot = MockBot() +        self.cog = Logging(self.bot) +        self.dev_log = MockTextChannel(id=1234, name="dev-log") + +    @patch("bot.cogs.logging.DEBUG_MODE", False) +    
async def test_debug_mode_false(self): +        """Should send connected message to dev-log.""" +        self.bot.get_channel.return_value = self.dev_log + +        await self.cog.startup_greeting() +        self.bot.wait_until_guild_available.assert_awaited_once_with() +        self.bot.get_channel.assert_called_once_with(constants.Channels.dev_log) +        self.dev_log.send.assert_awaited_once() + +    @patch("bot.cogs.logging.DEBUG_MODE", True) +    async def test_debug_mode_true(self): +        """Should not send anything to dev-log.""" +        await self.cog.startup_greeting() +        self.bot.wait_until_guild_available.assert_awaited_once_with() +        self.bot.get_channel.assert_not_called() diff --git a/tests/bot/cogs/test_slowmode.py b/tests/bot/cogs/test_slowmode.py new file mode 100644 index 000000000..f442814c8 --- /dev/null +++ b/tests/bot/cogs/test_slowmode.py @@ -0,0 +1,111 @@ +import unittest +from unittest import mock + +from dateutil.relativedelta import relativedelta + +from bot.cogs.moderation.slowmode import Slowmode +from bot.constants import Emojis +from tests.helpers import MockBot, MockContext, MockTextChannel + + +class SlowmodeTests(unittest.IsolatedAsyncioTestCase): + +    def setUp(self) -> None: +        self.bot = MockBot() +        self.cog = Slowmode(self.bot) +        self.ctx = MockContext() + +    async def test_get_slowmode_no_channel(self) -> None: +        """Get slowmode without a given channel.""" +        self.ctx.channel = MockTextChannel(name='python-general', slowmode_delay=5) + +        await self.cog.get_slowmode(self.cog, self.ctx, None) +        self.ctx.send.assert_called_once_with("The slowmode delay for #python-general is 5 seconds.") + +    async def test_get_slowmode_with_channel(self) -> None: +        """Get slowmode with a given channel.""" +        text_channel = MockTextChannel(name='python-language', slowmode_delay=2) + +        await self.cog.get_slowmode(self.cog, self.ctx, text_channel) +        self.ctx.send.assert_called_once_with('The slowmode delay for #python-language is 2 seconds.') + +    async def test_set_slowmode_no_channel(self) -> None: +        """Set slowmode without a given channel.""" +        test_cases = ( +            ('helpers', 23, True, f'{Emojis.check_mark} The slowmode delay for #helpers is now 23 seconds.'), +            ('mods', 76526, False, f'{Emojis.cross_mark} The slowmode delay must be between 0 and 6 hours.'), +            ('admins', 97, True, f'{Emojis.check_mark} The slowmode delay for #admins is now 1 minute and 37 seconds.') +        ) + +        for channel_name, seconds, edited, result_msg in test_cases: +            with self.subTest( +                channel_mention=channel_name, +                seconds=seconds, +                edited=edited, +                result_msg=result_msg +            ): +                self.ctx.channel = MockTextChannel(name=channel_name) + +                await self.cog.set_slowmode(self.cog, self.ctx, None, relativedelta(seconds=seconds)) + +                if edited: +                    self.ctx.channel.edit.assert_awaited_once_with(slowmode_delay=float(seconds)) +                else: +                    self.ctx.channel.edit.assert_not_called() + +                self.ctx.send.assert_called_once_with(result_msg) + +            self.ctx.reset_mock() + +    async def test_set_slowmode_with_channel(self) -> None: +        """Set slowmode with a given channel.""" +        test_cases = ( +            ('bot-commands', 12, True, f'{Emojis.check_mark} 
The slowmode delay for #bot-commands is now 12 seconds.'), +            ('mod-spam', 21, True, f'{Emojis.check_mark} The slowmode delay for #mod-spam is now 21 seconds.'), +            ('admin-spam', 4323598, False, f'{Emojis.cross_mark} The slowmode delay must be between 0 and 6 hours.') +        ) + +        for channel_name, seconds, edited, result_msg in test_cases: +            with self.subTest( +                channel_mention=channel_name, +                seconds=seconds, +                edited=edited, +                result_msg=result_msg +            ): +                text_channel = MockTextChannel(name=channel_name) + +                await self.cog.set_slowmode(self.cog, self.ctx, text_channel, relativedelta(seconds=seconds)) + +                if edited: +                    text_channel.edit.assert_awaited_once_with(slowmode_delay=float(seconds)) +                else: +                    text_channel.edit.assert_not_called() + +                self.ctx.send.assert_called_once_with(result_msg) + +            self.ctx.reset_mock() + +    async def test_reset_slowmode_no_channel(self) -> None: +        """Reset slowmode without a given channel.""" +        self.ctx.channel = MockTextChannel(name='careers', slowmode_delay=6) + +        await self.cog.reset_slowmode(self.cog, self.ctx, None) +        self.ctx.send.assert_called_once_with( +            f'{Emojis.check_mark} The slowmode delay for #careers has been reset to 0 seconds.' +        ) + +    async def test_reset_slowmode_with_channel(self) -> None: +        """Reset slowmode with a given channel.""" +        text_channel = MockTextChannel(name='meta', slowmode_delay=1) + +        await self.cog.reset_slowmode(self.cog, self.ctx, text_channel) +        self.ctx.send.assert_called_once_with( +            f'{Emojis.check_mark} The slowmode delay for #meta has been reset to 0 seconds.' 
+        ) + +    @mock.patch("bot.cogs.moderation.slowmode.with_role_check") +    @mock.patch("bot.cogs.moderation.slowmode.MODERATION_ROLES", new=(1, 2, 3)) +    def test_cog_check(self, role_check): +        """Role check is called with `MODERATION_ROLES`""" +        self.cog.cog_check(self.ctx) +        role_check.assert_called_once_with(self.ctx, *(1, 2, 3)) diff --git a/tests/bot/cogs/test_snekbox.py b/tests/bot/cogs/test_snekbox.py index cf9adbee0..343e37db9 100644 --- a/tests/bot/cogs/test_snekbox.py +++ b/tests/bot/cogs/test_snekbox.py @@ -233,9 +233,13 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):          self.cog.get_status_emoji = MagicMock(return_value=':yay!:')          self.cog.format_output = AsyncMock(return_value=('[No output]', None)) +        mocked_filter_cog = MagicMock() +        mocked_filter_cog.filter_eval = AsyncMock(return_value=False) +        self.bot.get_cog.return_value = mocked_filter_cog +          await self.cog.send_eval(ctx, 'MyAwesomeCode')          ctx.send.assert_called_once_with( -            '@LemonLemonishBeard#0042 :yay!: Return code 0.\n\n```py\n[No output]\n```' +            '@LemonLemonishBeard#0042 :yay!: Return code 0.\n\n```\n[No output]\n```'          )          self.cog.post_eval.assert_called_once_with('MyAwesomeCode')          self.cog.get_status_emoji.assert_called_once_with({'stdout': '', 'returncode': 0}) @@ -254,10 +258,14 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):          self.cog.get_status_emoji = MagicMock(return_value=':yay!:')          self.cog.format_output = AsyncMock(return_value=('Way too long beard', 'lookatmybeard.com')) +        mocked_filter_cog = MagicMock() +        mocked_filter_cog.filter_eval = AsyncMock(return_value=False) +        self.bot.get_cog.return_value = mocked_filter_cog +          await self.cog.send_eval(ctx, 'MyAwesomeCode')          ctx.send.assert_called_once_with(              '@LemonLemonishBeard#0042 :yay!: Return code 0.' 
-            '\n\n```py\nWay too long beard\n```\nFull output: lookatmybeard.com' +            '\n\n```\nWay too long beard\n```\nFull output: lookatmybeard.com'          )          self.cog.post_eval.assert_called_once_with('MyAwesomeCode')          self.cog.get_status_emoji.assert_called_once_with({'stdout': 'Way too long beard', 'returncode': 0}) @@ -275,9 +283,13 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase):          self.cog.get_status_emoji = MagicMock(return_value=':nope!:')          self.cog.format_output = AsyncMock()  # This function isn't called +        mocked_filter_cog = MagicMock() +        mocked_filter_cog.filter_eval = AsyncMock(return_value=False) +        self.bot.get_cog.return_value = mocked_filter_cog +          await self.cog.send_eval(ctx, 'MyAwesomeCode')          ctx.send.assert_called_once_with( -            '@LemonLemonishBeard#0042 :nope!: Return code 127.\n\n```py\nBeard got stuck in the eval\n```' +            '@LemonLemonishBeard#0042 :nope!: Return code 127.\n\n```\nBeard got stuck in the eval\n```'          )          self.cog.post_eval.assert_called_once_with('MyAwesomeCode')          self.cog.get_status_emoji.assert_called_once_with({'stdout': 'ERROR', 'returncode': 127}) diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py index 33d1ec170..3349caa73 100644 --- a/tests/bot/cogs/test_token_remover.py +++ b/tests/bot/cogs/test_token_remover.py @@ -1,56 +1,89 @@ -import asyncio -import logging  import unittest -from unittest.mock import AsyncMock, MagicMock +from re import Match +from unittest import mock +from unittest.mock import MagicMock -from discord import Colour +from discord import Colour, NotFound -from bot.cogs.token_remover import ( -    DELETION_MESSAGE_TEMPLATE, -    TokenRemover, -    setup as setup_cog, -) -from bot.constants import Channels, Colours, Event, Icons -from tests.helpers import MockBot, MockMessage +from bot import constants +from bot.cogs import token_remover +from bot.cogs.moderation import ModLog +from bot.cogs.token_remover import Token, TokenRemover +from tests.helpers import MockBot, MockMessage, autospec -class TokenRemoverTests(unittest.TestCase): +class TokenRemoverTests(unittest.IsolatedAsyncioTestCase):      """Tests the `TokenRemover` cog."""      def setUp(self):          """Adds the cog, a bot, and a message to the instance for usage in tests."""          self.bot = MockBot() -        self.bot.get_cog.return_value = MagicMock() -        self.bot.get_cog.return_value.send_log_message = AsyncMock()          self.cog = TokenRemover(bot=self.bot) -        self.msg = MockMessage(id=555, content='') -        self.msg.author.__str__ = MagicMock() -        self.msg.author.__str__.return_value = 'lemon' -        self.msg.author.bot = False -        self.msg.author.avatar_url_as.return_value = 'picture-lemon.png' -        self.msg.author.id = 42 -        self.msg.author.mention = '@lemon' +        self.msg = MockMessage(id=555, content="hello world")          self.msg.channel.mention = "#lemonade-stand" +        self.msg.author.__str__ = MagicMock(return_value=self.msg.author.name) +        self.msg.author.avatar_url_as.return_value = "picture-lemon.png" -    def test_is_valid_user_id_is_true_for_numeric_content(self): -        """A string decoding to numeric characters is a valid user ID.""" -        # MTIz = base64(123) -        self.assertTrue(TokenRemover.is_valid_user_id('MTIz')) +    def test_is_valid_user_id_valid(self): +        """Should consider user IDs valid if they 
decode entirely to ASCII digits.""" +        ids = ( +            "NDcyMjY1OTQzMDYyNDEzMzMy", +            "NDc1MDczNjI5Mzk5NTQ3OTA0", +            "NDY3MjIzMjMwNjUwNzc3NjQx", +        ) + +        for user_id in ids: +            with self.subTest(user_id=user_id): +                result = TokenRemover.is_valid_user_id(user_id) +                self.assertTrue(result) -    def test_is_valid_user_id_is_false_for_alphabetic_content(self): -        """A string decoding to alphabetic characters is not a valid user ID.""" -        # YWJj = base64(abc) -        self.assertFalse(TokenRemover.is_valid_user_id('YWJj')) +    def test_is_valid_user_id_invalid(self): +        """Should consider non-digit and non-ASCII IDs invalid.""" +        ids = ( +            ("SGVsbG8gd29ybGQ", "non-digit ASCII"), +            ("0J_RgNC40LLQtdGCINC80LjRgA", "cyrillic text"), +            ("4pO14p6L4p6C4pG34p264pGl8J-EiOKSj-KCieKBsA", "Unicode digits"), +            ("4oaA4oaB4oWh4oWi4Lyz4Lyq4Lyr4LG9", "Unicode numerals"), +            ("8J2fjvCdn5nwnZ-k8J2fr_Cdn7rgravvvJngr6c", "Unicode decimals"), +            ("{hello}[world]&(bye!)", "ASCII invalid Base64"), +            ("Þíß-ï§-ňøẗ-våłìÐ", "Unicode invalid Base64"), +        ) -    def test_is_valid_timestamp_is_true_for_valid_timestamps(self): -        """A string decoding to a valid timestamp should be recognized as such.""" -        self.assertTrue(TokenRemover.is_valid_timestamp('DN9r_A')) +        for user_id, msg in ids: +            with self.subTest(msg=msg): +                result = TokenRemover.is_valid_user_id(user_id) +                self.assertFalse(result) -    def test_is_valid_timestamp_is_false_for_invalid_values(self): -        """A string not decoding to a valid timestamp should not be recognized as such.""" -        # MTIz = base64(123) -        self.assertFalse(TokenRemover.is_valid_timestamp('MTIz')) +    def test_is_valid_timestamp_valid(self): +        """Should consider timestamps valid if they're greater than the Discord epoch.""" +        timestamps = ( +            "XsyRkw", +            "Xrim9Q", +            "XsyR-w", +            "XsySD_", +            "Dn9r_A", +        ) + +        for timestamp in timestamps: +            with self.subTest(timestamp=timestamp): +                result = TokenRemover.is_valid_timestamp(timestamp) +                self.assertTrue(result) + +    def test_is_valid_timestamp_invalid(self): +        """Should consider timestamps invalid if they're before Discord epoch or can't be parsed.""" +        timestamps = ( +            ("B4Yffw", "DISCORD_EPOCH - TOKEN_EPOCH - 1"), +            ("ew", "123"), +            ("AoIKgA", "42076800"), +            ("{hello}[world]&(bye!)", "ASCII invalid Base64"), +            ("Þíß-ï§-ňøẗ-våłìÐ", "Unicode invalid Base64"), +        ) + +        for timestamp, msg in timestamps: +            with self.subTest(msg=msg): +                result = TokenRemover.is_valid_timestamp(timestamp) +                self.assertFalse(result)      def test_mod_log_property(self):          """The `mod_log` property should ask the bot to return the `ModLog` cog.""" @@ -58,74 +91,220 @@ class TokenRemoverTests(unittest.TestCase):          self.assertEqual(self.cog.mod_log, self.bot.get_cog.return_value)          self.bot.get_cog.assert_called_once_with('ModLog') -    def test_ignores_bot_messages(self): -        """When the message event handler is called with a bot message, nothing is done.""" -        self.msg.author.bot = True -        coroutine = 
self.cog.on_message(self.msg) -        self.assertIsNone(asyncio.run(coroutine)) - -    def test_ignores_messages_without_tokens(self): -        """Messages without anything looking like a token are ignored.""" -        for content in ('', 'lemon wins'): -            with self.subTest(content=content): -                self.msg.content = content -                coroutine = self.cog.on_message(self.msg) -                self.assertIsNone(asyncio.run(coroutine)) - -    def test_ignores_messages_with_invalid_tokens(self): -        """Messages with values that are invalid tokens are ignored.""" -        for content in ('foo.bar.baz', 'x.y.'): -            with self.subTest(content=content): -                self.msg.content = content -                coroutine = self.cog.on_message(self.msg) -                self.assertIsNone(asyncio.run(coroutine)) - -    def test_censors_valid_tokens(self): -        """Valid tokens are censored.""" -        cases = ( -            # (content, censored_token) -            ('MTIz.DN9R_A.xyz', 'MTIz.DN9R_A.xxx'), +    async def test_on_message_edit_uses_on_message(self): +        """The edit listener should delegate handling of the message to the normal listener.""" +        self.cog.on_message = mock.create_autospec(self.cog.on_message, spec_set=True) + +        await self.cog.on_message_edit(MockMessage(), self.msg) +        self.cog.on_message.assert_awaited_once_with(self.msg) + +    @autospec(TokenRemover, "find_token_in_message", "take_action") +    async def test_on_message_takes_action(self, find_token_in_message, take_action): +        """Should take action if a valid token is found when a message is sent.""" +        cog = TokenRemover(self.bot) +        found_token = "foobar" +        find_token_in_message.return_value = found_token + +        await cog.on_message(self.msg) + +        find_token_in_message.assert_called_once_with(self.msg) +        take_action.assert_awaited_once_with(cog, self.msg, found_token) + +    @autospec(TokenRemover, "find_token_in_message", "take_action") +    async def test_on_message_skips_missing_token(self, find_token_in_message, take_action): +        """Shouldn't take action if a valid token isn't found when a message is sent.""" +        cog = TokenRemover(self.bot) +        find_token_in_message.return_value = False + +        await cog.on_message(self.msg) + +        find_token_in_message.assert_called_once_with(self.msg) +        take_action.assert_not_awaited() + +    @autospec(TokenRemover, "find_token_in_message") +    async def test_on_message_ignores_dms_bots(self, find_token_in_message): +        """Shouldn't parse a message if it is a DM or authored by a bot.""" +        cog = TokenRemover(self.bot) +        dm_msg = MockMessage(guild=None) +        bot_msg = MockMessage(author=MagicMock(bot=True)) + +        for msg in (dm_msg, bot_msg): +            await cog.on_message(msg) +            find_token_in_message.assert_not_called() + +    @autospec("bot.cogs.token_remover", "TOKEN_RE") +    def test_find_token_no_matches(self, token_re): +        """None should be returned if the regex matches no tokens in a message.""" +        token_re.finditer.return_value = () + +        return_value = TokenRemover.find_token_in_message(self.msg) + +        self.assertIsNone(return_value) +        token_re.finditer.assert_called_once_with(self.msg.content) + +    @autospec(TokenRemover, "is_valid_user_id", "is_valid_timestamp") +    @autospec("bot.cogs.token_remover", "Token") +    @autospec("bot.cogs.token_remover", 
"TOKEN_RE") +    def test_find_token_valid_match(self, token_re, token_cls, is_valid_id, is_valid_timestamp): +        """The first match with a valid user ID and timestamp should be returned as a `Token`.""" +        matches = [ +            mock.create_autospec(Match, spec_set=True, instance=True), +            mock.create_autospec(Match, spec_set=True, instance=True), +        ] +        tokens = [ +            mock.create_autospec(Token, spec_set=True, instance=True), +            mock.create_autospec(Token, spec_set=True, instance=True), +        ] + +        token_re.finditer.return_value = matches +        token_cls.side_effect = tokens +        is_valid_id.side_effect = (False, True)  # The 1st match will be invalid, 2nd one valid. +        is_valid_timestamp.return_value = True + +        return_value = TokenRemover.find_token_in_message(self.msg) + +        self.assertEqual(tokens[1], return_value) +        token_re.finditer.assert_called_once_with(self.msg.content) + +    @autospec(TokenRemover, "is_valid_user_id", "is_valid_timestamp") +    @autospec("bot.cogs.token_remover", "Token") +    @autospec("bot.cogs.token_remover", "TOKEN_RE") +    def test_find_token_invalid_matches(self, token_re, token_cls, is_valid_id, is_valid_timestamp): +        """None should be returned if no matches have valid user IDs or timestamps.""" +        token_re.finditer.return_value = [mock.create_autospec(Match, spec_set=True, instance=True)] +        token_cls.return_value = mock.create_autospec(Token, spec_set=True, instance=True) +        is_valid_id.return_value = False +        is_valid_timestamp.return_value = False + +        return_value = TokenRemover.find_token_in_message(self.msg) + +        self.assertIsNone(return_value) +        token_re.finditer.assert_called_once_with(self.msg.content) + +    def test_regex_invalid_tokens(self): +        """Messages without anything looking like a token are not matched.""" +        tokens = ( +            "", +            "lemon wins", +            "..", +            "x.y", +            "x.y.", +            ".y.z", +            ".y.", +            "..z", +            "x..z", +            " . . ", +            "\n.\n.\n", +            "hellö.world.bye", +            "base64.nötbåse64.morebase64", +            "19jd3J.dfkm3d.€víł§tüff", +        ) + +        for token in tokens: +            with self.subTest(token=token): +                results = token_remover.TOKEN_RE.findall(token) +                self.assertEqual(len(results), 0) + +    def test_regex_valid_tokens(self): +        """Messages that look like tokens should be matched.""" +        # Don't worry, these tokens have been invalidated. 
+    def test_regex_invalid_tokens(self):
+        """Messages without anything looking like a token are not matched."""
+        tokens = (
+            "",
+            "lemon wins",
+            "..",
+            "x.y",
+            "x.y.",
+            ".y.z",
+            ".y.",
+            "..z",
+            "x..z",
+            " . . ",
+            "\n.\n.\n",
+            "hellö.world.bye",
+            "base64.nötbåse64.morebase64",
+            "19jd3J.dfkm3d.€víł§tüff",
+        )
+
+        for token in tokens:
+            with self.subTest(token=token):
+                results = token_remover.TOKEN_RE.findall(token)
+                self.assertEqual(len(results), 0)
+
+    def test_regex_valid_tokens(self):
+        """Messages that look like tokens should be matched."""
+        # Don't worry, these tokens have been invalidated.
+        tokens = (
+            "NDcyMjY1OTQzMDYy_DEzMz-y.XsyRkw.VXmErH7j511turNpfURmb0rVNm8",
+            "NDcyMjY1OTQzMDYyNDEzMzMy.Xrim9Q.Ysnu2wacjaKs7qnoo46S8Dm2us8",
+            "NDc1MDczNjI5Mzk5NTQ3OTA0.XsyR-w.sJf6omBPORBPju3WJEIAcwW9Zds",
+            "NDY3MjIzMjMwNjUwNzc3NjQx.XsySD_.s45jqDV_Iisn-symw0yDRrk_jf4",
+        )
+
+        for token in tokens:
+            with self.subTest(token=token):
+                results = token_remover.TOKEN_RE.fullmatch(token)
+                self.assertIsNotNone(results, f"{token} was not matched by the regex")
+
+    def test_regex_matches_multiple_valid(self):
+        """Should support multiple matches in the middle of a string."""
+        token_1 = "NDY3MjIzMjMwNjUwNzc3NjQx.XsyWGg.uFNEQPCc4ePwGh7egG8UicQssz8"
+        token_2 = "NDcyMjY1OTQzMDYyNDEzMzMy.XsyWMw.l8XPnDqb0lp-EiQ2g_0xVFT1pyc"
+        message = f"garbage {token_1} hello {token_2} world"
+
+        results = token_remover.TOKEN_RE.finditer(message)
+        results = [match[0] for match in results]
+        self.assertCountEqual((token_1, token_2), results)
+
+    @autospec("bot.cogs.token_remover", "LOG_MESSAGE")
+    def test_format_log_message(self, log_message):
+        """Should correctly format the log message with info from the message and token."""
+        token = Token("NDY3MjIzMjMwNjUwNzc3NjQx", "XsySD_", "s45jqDV_Iisn-symw0yDRrk_jf4")
+        log_message.format.return_value = "Howdy"
+
+        return_value = TokenRemover.format_log_message(self.msg, token)
+
+        self.assertEqual(return_value, log_message.format.return_value)
+        log_message.format.assert_called_once_with(
+            author=self.msg.author,
+            author_id=self.msg.author.id,
+            channel=self.msg.channel.mention,
+            user_id=token.user_id,
+            timestamp=token.timestamp,
+            hmac="x" * len(token.hmac),
+        )
+
+    @mock.patch.object(TokenRemover, "mod_log", new_callable=mock.PropertyMock)
+    @autospec("bot.cogs.token_remover", "log")
+    @autospec(TokenRemover, "format_log_message")
+    async def test_take_action(self, format_log_message, logger, mod_log_property):
+        """Should delete the message and send a mod log."""
+        cog = TokenRemover(self.bot)
+        mod_log = mock.create_autospec(ModLog, spec_set=True, instance=True)
+        token = mock.create_autospec(Token, spec_set=True, instance=True)
+        log_msg = "testing123"
+
+        mod_log_property.return_value = mod_log
+        format_log_message.return_value = log_msg
+
+        await cog.take_action(self.msg, token)
+
+        self.msg.delete.assert_called_once_with()
+        self.msg.channel.send.assert_called_once_with(
+            token_remover.DELETION_MESSAGE_TEMPLATE.format(mention=self.msg.author.mention)
+        )
+
+        format_log_message.assert_called_once_with(self.msg, token)
+        logger.debug.assert_called_with(log_msg)
+        self.bot.stats.incr.assert_called_once_with("tokens.removed_tokens")
+
+        mod_log.ignore.assert_called_once_with(constants.Event.message_delete, self.msg.id)
+        mod_log.send_log_message.assert_called_once_with(
+            icon_url=constants.Icons.token_removed,
+            colour=Colour(constants.Colours.soft_red),
+            title="Token removed!",
+            text=log_msg,
+            thumbnail=self.msg.author.avatar_url_as.return_value,
+            channel_id=constants.Channels.mod_alerts
         )
-        for content, censored_token in cases:
-            with self.subTest(content=content, censored_token=censored_token):
-                self.msg.content = content
-                coroutine = self.cog.on_message(self.msg)
-                with self.assertLogs(logger='bot.cogs.token_remover', level=logging.DEBUG) as cm:
-                    self.assertIsNone(asyncio.run(coroutine))  # no return value
-
-                [line] = cm.output
-                log_message = (
-                    "Censored a seemingly valid token sent by "
-                    "lemon (`42`) in #lemonade-stand, "
-                    f"token was `{censored_token}`"
-                )
-                self.assertIn(log_message, line)
-
-                self.msg.delete.assert_called_once_with()
-                self.msg.channel.send.assert_called_once_with(
-                    DELETION_MESSAGE_TEMPLATE.format(mention='@lemon')
-                )
-                self.bot.get_cog.assert_called_with('ModLog')
-                self.msg.author.avatar_url_as.assert_called_once_with(static_format='png')
-
-                mod_log = self.bot.get_cog.return_value
-                mod_log.ignore.assert_called_once_with(Event.message_delete, self.msg.id)
-                mod_log.send_log_message.assert_called_once_with(
-                    icon_url=Icons.token_removed,
-                    colour=Colour(Colours.soft_red),
-                    title="Token removed!",
-                    text=log_message,
-                    thumbnail='picture-lemon.png',
-                    channel_id=Channels.mod_alerts
-                )
-
-
-class TokenRemoverSetupTests(unittest.TestCase):
-    """Tests setup of the `TokenRemover` cog."""
-
-    def test_setup(self):
-        """Setup of the extension should call add_cog."""
+    @mock.patch.object(TokenRemover, "mod_log", new_callable=mock.PropertyMock)
+    async def test_take_action_delete_failure(self, mod_log_property):
+        """Shouldn't send any messages if the token message can't be deleted."""
+        cog = TokenRemover(self.bot)
+        mod_log_property.return_value = mock.create_autospec(ModLog, spec_set=True, instance=True)
+        self.msg.delete.side_effect = NotFound(MagicMock(), MagicMock())
+
+        token = mock.create_autospec(Token, spec_set=True, instance=True)
+        await cog.take_action(self.msg, token)
+
+        self.msg.delete.assert_called_once_with()
+        self.msg.channel.send.assert_not_awaited()
+
+
+class TokenRemoverExtensionTests(unittest.TestCase):
+    """Tests for the token_remover extension."""
+
+    @autospec("bot.cogs.token_remover", "TokenRemover")
+    def test_extension_setup(self, cog):
+        """The TokenRemover cog should be added."""
         bot = MockBot()
-        setup_cog(bot)
+        token_remover.setup(bot)
+
+        cog.assert_called_once_with(bot)
         bot.add_cog.assert_called_once()
+        self.assertTrue(isinstance(bot.add_cog.call_args.args[0], TokenRemover))
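The take_action tests above patch the cog's mod_log property with new_callable=mock.PropertyMock. In case the pattern is unfamiliar (again, standard-library behaviour rather than project code), a PropertyMock is attached to the class, and ordinary attribute access then returns its return_value:

    from unittest import mock

    class Cog:
        @property
        def mod_log(self):
            return "the real ModLog cog"

    with mock.patch.object(Cog, "mod_log", new_callable=mock.PropertyMock) as mod_log_property:
        mod_log_property.return_value = "a fake ModLog"
        assert Cog().mod_log == "a fake ModLog"  # attribute access goes through the PropertyMock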
diff --git a/tests/bot/test_pagination.py b/tests/bot/test_pagination.py
index 0a734b505..ce880d457 100644
--- a/tests/bot/test_pagination.py
+++ b/tests/bot/test_pagination.py
@@ -8,17 +8,42 @@ class LinePaginatorTests(TestCase):
     def setUp(self):
         """Create a paginator for the test method."""
-        self.paginator = pagination.LinePaginator(prefix='', suffix='', max_size=30)
-
-    def test_add_line_raises_on_too_long_lines(self):
-        """`add_line` should raise a `RuntimeError` for too long lines."""
-        message = f"Line exceeds maximum page size {self.paginator.max_size - 2}"
-
-        with self.assertRaises(RuntimeError, msg=message):
-            self.paginator.add_line('x' * self.paginator.max_size)
+        self.paginator = pagination.LinePaginator(prefix='', suffix='', max_size=30,
+                                                  scale_to_size=50)
 
     def test_add_line_works_on_small_lines(self):
         """`add_line` should allow small lines to be added."""
         self.paginator.add_line('x' * (self.paginator.max_size - 3))
+        # Note that the page isn't added to _pages until it's full.
+        self.assertEqual(len(self.paginator._pages), 0)
+
+    def test_add_line_works_on_long_lines(self):
+        """Additional lines should go on the next page once `max_size` is exceeded."""
+        self.paginator.add_line('x' * self.paginator.max_size)
+        self.assertEqual(len(self.paginator._pages), 0)
+
+        # Any additional lines should start a new page after `max_size` is exceeded.
+        self.paginator.add_line('x')
+        self.assertEqual(len(self.paginator._pages), 1)
+
+    def test_add_line_continuation(self):
+        """When `scale_to_size` is exceeded, remaining words should be split onto the next page."""
+        self.paginator.add_line('zyz ' * (self.paginator.scale_to_size//4 + 1))
+        self.assertEqual(len(self.paginator._pages), 1)
+
+    def test_add_line_no_continuation(self):
+        """If adding a new line to an existing page would exceed `max_size`, it should start a new
+        page rather than using continuation.
+        """
+        self.paginator.add_line('z' * (self.paginator.max_size - 3))
+        self.paginator.add_line('z')
+        self.assertEqual(len(self.paginator._pages), 1)
+
+    def test_add_line_truncates_very_long_words(self):
+        """`add_line` should truncate if a single long word exceeds `scale_to_size`."""
+        self.paginator.add_line('x' * (self.paginator.scale_to_size + 1))
+        # Note: item at index 1 is the truncated line, index 0 is prefix
+        self.assertEqual(self.paginator._current_page[1], 'x' * self.paginator.scale_to_size)
 
 
 class ImagePaginatorTests(TestCase):
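Taken together, the new pagination tests describe roughly the following behaviour. This sketch only restates what the assertions above imply (the `pagination` import path mirrors the test module) and is not the paginator's implementation:

    from bot import pagination

    p = pagination.LinePaginator(prefix='', suffix='', max_size=30, scale_to_size=50)
    p.add_line('x' * 30)   # exactly max_size: accepted, page not yet closed
    p.add_line('x')        # max_size already reached, so this line opens a second page
    # A single line longer than scale_to_size (50) is either wrapped onto a
    # continuation page (if it contains spaces) or truncated (one giant word).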
diff --git a/tests/bot/utils/test_messages.py b/tests/bot/utils/test_messages.py
new file mode 100644
index 000000000..9c22c9751
--- /dev/null
+++ b/tests/bot/utils/test_messages.py
@@ -0,0 +1,27 @@
+import unittest
+
+from bot.utils import messages
+
+
+class TestMessages(unittest.TestCase):
+    """Tests for functions in the `bot.utils.messages` module."""
+
+    def test_sub_clyde(self):
+        """Uppercase E's and lowercase e's are substituted with their cyrillic counterparts."""
+        sub_e = "\u0435"
+        sub_E = "\u0415"  # noqa: N806: Uppercase E in variable name
+
+        test_cases = (
+            (None, None),
+            ("", ""),
+            ("clyde", f"clyd{sub_e}"),
+            ("CLYDE", f"CLYD{sub_E}"),
+            ("cLyDe", f"cLyD{sub_e}"),
+            ("BIGclyde", f"BIGclyd{sub_e}"),
+            ("small clydeus the unholy", f"small clyd{sub_e}us the unholy"),
+            ("BIGCLYDE, babyclyde", f"BIGCLYD{sub_E}, babyclyd{sub_e}"),
+        )
+
+        for username_in, username_out in test_cases:
+            with self.subTest(input=username_in, expected_output=username_out):
+                self.assertEqual(messages.sub_clyde(username_in), username_out)
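sub_clyde exists because Discord refuses webhook usernames containing the reserved name "clyde"; swapping the trailing e/E for its Cyrillic look-alike sidesteps that check while keeping the name readable. One way such a substitution could be written, consistent with the expected outputs above but not necessarily the code in bot/utils/messages.py:

    import re
    from typing import Optional

    def sub_clyde(username: Optional[str]) -> Optional[str]:
        """Swap the final 'e'/'E' of any 'clyde' for its Cyrillic look-alike (U+0435/U+0415)."""
        def replace(match: re.Match) -> str:
            e = "\u0435" if match[2] == "e" else "\u0415"
            return match[1] + e

        return re.sub(r"(clyd)(e)", replace, username, flags=re.IGNORECASE) if username else username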
diff --git a/tests/bot/utils/test_redis_cache.py b/tests/bot/utils/test_redis_cache.py
index 8c1a40640..a2f0fe55d 100644
--- a/tests/bot/utils/test_redis_cache.py
+++ b/tests/bot/utils/test_redis_cache.py
@@ -44,22 +44,14 @@ class RedisCacheTests(unittest.IsolatedAsyncioTestCase):
         with self.assertRaises(RuntimeError):
             await bad_cache.set("test", "me_up_deadman")
 
-    def test_namespace_collision(self):
-        """Test that we prevent colliding namespaces."""
-        bob_cache_1 = RedisCache()
-        bob_cache_1._set_namespace("BobRoss")
-        self.assertEqual(bob_cache_1._namespace, "BobRoss")
-
-        bob_cache_2 = RedisCache()
-        bob_cache_2._set_namespace("BobRoss")
-        self.assertEqual(bob_cache_2._namespace, "BobRoss_")
-
     async def test_set_get_item(self):
         """Test that users can set and get items from the RedisDict."""
         test_cases = (
             ('favorite_fruit', 'melon'),
             ('favorite_number', 86),
-            ('favorite_fraction', 86.54)
+            ('favorite_fraction', 86.54),
+            ('favorite_boolean', False),
+            ('other_boolean', True),
         )
 
         # Test that we can get and set different types.
diff --git a/tests/helpers.py b/tests/helpers.py
index faa839370..facc4e1af 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -5,7 +5,7 @@ import itertools
 import logging
 import unittest.mock
 from asyncio import AbstractEventLoop
-from typing import Iterable, Optional
+from typing import Callable, Iterable, Optional
 
 import discord
 from aiohttp import ClientSession
@@ -26,6 +26,24 @@ for logger in logging.Logger.manager.loggerDict.values():
     logger.setLevel(logging.CRITICAL)
 
 
+def autospec(target, *attributes: str, **kwargs) -> Callable:
+    """Patch multiple `attributes` of a `target` with autospecced mocks and `spec_set` as True."""
+    # Caller's kwargs should take priority and overwrite the defaults.
+    kwargs = {'spec_set': True, 'autospec': True, **kwargs}
+
+    # Import the target if it's a string.
+    # This is to support both object and string targets like patch.multiple.
+    if type(target) is str:
+        target = unittest.mock._importer(target)
+
+    def decorator(func):
+        for attribute in attributes:
+            patcher = unittest.mock.patch.object(target, attribute, **kwargs)
+            func = patcher(func)
+        return func
+    return decorator
+
+
 class HashableMixin(discord.mixins.EqualityComparable):
     """
     Mixin that provides similar hashing and equality functionality as discord.py's `Hashable` mixin.
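The autospec helper added to tests/helpers.py above is what the token remover tests earlier in this diff use as a decorator: it simply stacks unittest.mock.patch.object(..., autospec=True, spec_set=True) calls, so the patched attributes are injected as extra test arguments starting with the bottom-most decorator and, within a single call, in the order the attribute names are listed. A hypothetical usage sketch (the Thing class is invented for illustration):

    import unittest
    from unittest import mock

    from tests.helpers import autospec

    class Thing:
        def greet(self, name):
            return f"hello {name}"

    class ThingTests(unittest.TestCase):
        # Equivalent to mock.patch.object(Thing, "greet", autospec=True, spec_set=True).
        @autospec(Thing, "greet")
        def test_greet_is_patched(self, greet):
            Thing().greet("lemon")
            # Autospecced methods are unbound, so `self` is recorded too -- the same
            # reason take_action.assert_awaited_once_with(cog, ...) above includes the cog.
            greet.assert_called_once_with(mock.ANY, "lemon")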
