diff options
| author | 2021-01-20 20:21:04 +0300 | |
|---|---|---|
| committer | 2021-01-20 20:21:04 +0300 | |
| commit | 41e861c52c5e8c3f5fb751345661d8f062976d87 (patch) | |
| tree | 3837c8a7bc377aff4b7b6973d810856e57fd414b | |
| parent | Cleans Up Tests (diff) | |
| parent | Replace in_whitelist check with commands.has_any_role check (diff) | |
Merge branch 'master' into fix-voiceban-member-bug
50 files changed, 2007 insertions, 1421 deletions
| diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 642676078..ad813d893 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,6 +1,3 @@ -# Request Dennis for any PR -* @Den4200 -  # Extensions  **/bot/exts/backend/sync/**             @MarkKoz  **/bot/exts/filters/*token_remover.py   @MarkKoz @@ -9,9 +6,11 @@ bot/exts/info/codeblock/**              @MarkKoz  bot/exts/utils/extensions.py            @MarkKoz  bot/exts/utils/snekbox.py               @MarkKoz @Akarys42  bot/exts/help_channels/**               @MarkKoz @Akarys42 -bot/exts/moderation/**                  @Akarys42 @mbaruh -bot/exts/info/**                        @Akarys42 @mbaruh +bot/exts/moderation/**                  @Akarys42 @mbaruh @Den4200 @ks129 +bot/exts/info/**                        @Akarys42 @mbaruh @Den4200  bot/exts/filters/**                     @mbaruh +bot/exts/fun/**                         @ks129 +bot/exts/utils/**                       @ks129  # Utils  bot/utils/extensions.py                 @MarkKoz @@ -26,9 +25,9 @@ tests/bot/exts/test_cogs.py             @MarkKoz  tests/**                                @Akarys42  # CI & Docker -.github/workflows/**                    @MarkKoz @Akarys42 @SebastiaanZ -Dockerfile                              @MarkKoz @Akarys42 -docker-compose.yml                      @MarkKoz @Akarys42 +.github/workflows/**                    @MarkKoz @Akarys42 @SebastiaanZ @Den4200 +Dockerfile                              @MarkKoz @Akarys42 @Den4200 +docker-compose.yml                      @MarkKoz @Akarys42 @Den4200  # Tools  Pipfile*                                @Akarys42 diff --git a/.github/review-policy.yml b/.github/review-policy.yml new file mode 100644 index 000000000..421b30f8a --- /dev/null +++ b/.github/review-policy.yml @@ -0,0 +1,3 @@ +remote: python-discord/.github +path: review-policies/core-developers.yml +ref: main diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 6152f1543..6c97e8784 100644 --- 
a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -55,3 +55,5 @@ jobs:            tags: |              ghcr.io/python-discord/bot:latest              ghcr.io/python-discord/bot:${{ steps.sha_tag.outputs.tag }} +          build-args: | +            git_sha=${{ github.sha }} diff --git a/.github/workflows/lint-test.yml b/.github/workflows/lint-test.yml index 5444fc3de..6fa8e8333 100644 --- a/.github/workflows/lint-test.yml +++ b/.github/workflows/lint-test.yml @@ -113,3 +113,25 @@ jobs:          env:              GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}          run: coveralls + +      # Prepare the Pull Request Payload artifact. If this fails, we +      # we fail silently using the `continue-on-error` option. It's +      # nice if this succeeds, but if it fails for any reason, it +      # does not mean that our lint-test checks failed. +      - name: Prepare Pull Request Payload artifact +        id: prepare-artifact +        if: always() && github.event_name == 'pull_request' +        continue-on-error: true +        run: cat $GITHUB_EVENT_PATH | jq '.pull_request' > pull_request_payload.json + +      # This only makes sense if the previous step succeeded. To +      # get the original outcome of the previous step before the +      # `continue-on-error` conclusion is applied, we use the +      # `.outcome` value. This step also fails silently. 
+      - name: Upload a Build Artifact +        if: always() && steps.prepare-artifact.outcome == 'success' +        continue-on-error: true +        uses: actions/upload-artifact@v2 +        with: +          name: pull-request-payload +          path: pull_request_payload.json diff --git a/.github/workflows/sentry_release.yml b/.github/workflows/sentry_release.yml new file mode 100644 index 000000000..b8d92e90a --- /dev/null +++ b/.github/workflows/sentry_release.yml @@ -0,0 +1,24 @@ +name: Create Sentry release + +on: +  push: +    branches: +      - master + +jobs: +  create_sentry_release: +    runs-on: ubuntu-latest +    steps: +      - name: Checkout code +        uses: actions/checkout@master + +      - name: Create a Sentry.io release +        uses: tclindner/[email protected] +        env: +          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} +          SENTRY_ORG: python-discord +          SENTRY_PROJECT: bot +        with: +          tagName: ${{ github.sha }} +          environment: production +          releaseNamePrefix: bot@ diff --git a/.github/workflows/status_embed.yaml b/.github/workflows/status_embed.yaml new file mode 100644 index 000000000..b6a71b887 --- /dev/null +++ b/.github/workflows/status_embed.yaml @@ -0,0 +1,78 @@ +name: Status Embed + +on: +  workflow_run: +    workflows: +      - Lint & Test +      - Build +      - Deploy +    types: +      - completed + +jobs: +  status_embed: +    # We need to send a status embed whenever the workflow +    # sequence we're running terminates. There are a number +    # of situations in which that happens: +    # +    # 1. We reach the end of the Deploy workflow, without +    #    it being skipped. +    # +    # 2. A `pull_request` triggered a Lint & Test workflow, +    #    as the sequence always terminates with one run. +    # +    # 3. If any workflow ends in failure or was cancelled. 
+    if: >- +      (github.event.workflow_run.name == 'Deploy' && github.event.workflow_run.conclusion != 'skipped') || +      github.event.workflow_run.event == 'pull_request' || +      github.event.workflow_run.conclusion == 'failure' || +      github.event.workflow_run.conclusion == 'cancelled' +    name:  Send Status Embed to Discord +    runs-on: ubuntu-latest + +    steps: +      # A workflow_run event does not contain all the information +      # we need for a PR embed. That's why we upload an artifact +      # with that information in the Lint workflow. +      - name: Get Pull Request Information +        id: pr_info +        if: github.event.workflow_run.event == 'pull_request' +        run: | +          curl -s -H "Authorization: token $GITHUB_TOKEN" ${{ github.event.workflow_run.artifacts_url }} > artifacts.json +          DOWNLOAD_URL=$(cat artifacts.json | jq -r '.artifacts[] | select(.name == "pull-request-payload") | .archive_download_url') +          [ -z "$DOWNLOAD_URL" ] && exit 1 +          wget --quiet --header="Authorization: token $GITHUB_TOKEN" -O pull_request_payload.zip $DOWNLOAD_URL || exit 2 +          unzip -p pull_request_payload.zip > pull_request_payload.json +          [ -s pull_request_payload.json ] || exit 3 +          echo "::set-output name=pr_author_login::$(jq -r '.user.login // empty' pull_request_payload.json)" +          echo "::set-output name=pr_number::$(jq -r '.number // empty' pull_request_payload.json)" +          echo "::set-output name=pr_title::$(jq -r '.title // empty' pull_request_payload.json)" +          echo "::set-output name=pr_source::$(jq -r '.head.label // empty' pull_request_payload.json)" +        env: +          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + +      # Send an informational status embed to Discord instead of the +      # standard embeds that Discord sends. This embed will contain +      # more information and we can fine tune when we actually want +      # to send an embed. 
+      - name: GitHub Actions Status Embed for Discord +        uses: SebastiaanZ/[email protected] +        with: +          # Our GitHub Actions webhook +          webhook_id: '784184528997842985' +          webhook_token: ${{ secrets.GHA_WEBHOOK_TOKEN }} + +          # Workflow information +          workflow_name: ${{ github.event.workflow_run.name }} +          run_id: ${{ github.event.workflow_run.id }} +          run_number: ${{ github.event.workflow_run.run_number }} +          status: ${{ github.event.workflow_run.conclusion }} +          actor: ${{ github.actor }} +          repository:  ${{ github.repository }} +          ref: ${{ github.ref }} +          sha: ${{ github.event.workflow_run.head_sha }} + +          pr_author_login: ${{ steps.pr_info.outputs.pr_author_login }} +          pr_number: ${{ steps.pr_info.outputs.pr_number }} +          pr_title: ${{ steps.pr_info.outputs.pr_title }} +          pr_source: ${{ steps.pr_info.outputs.pr_source }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 876d32b15..1597592ca 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -21,6 +21,6 @@ repos:          name: Flake8          description: This hook runs flake8 within our project's pipenv environment.          
entry: pipenv run flake8 -        language: python +        language: system          types: [python]          require_serial: true diff --git a/Dockerfile b/Dockerfile index 06a538b2a..5d0380b44 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,10 +1,19 @@  FROM python:3.8-slim +# Define Git SHA build argument +ARG git_sha="development" +  # Set pip to have cleaner logs and no saved cache  ENV PIP_NO_CACHE_DIR=false \      PIPENV_HIDE_EMOJIS=1 \      PIPENV_IGNORE_VIRTUALENVS=1 \ -    PIPENV_NOSPIN=1 +    PIPENV_NOSPIN=1 \ +    GIT_SHA=$git_sha + +RUN apt-get -y update \ +    && apt-get install -y \ +        git \ +    && rm -rf /var/lib/apt/lists/*  # Install pipenv  RUN pip install -U pipenv @@ -14,18 +14,19 @@ beautifulsoup4 = "~=4.9"  colorama = {version = "~=0.4.3",sys_platform = "== 'win32'"}  coloredlogs = "~=14.0"  deepdiff = "~=4.0" -"discord.py" = "~=1.5.0" +"discord.py" = "~=1.6.0"  feedparser = "~=5.2"  fuzzywuzzy = "~=0.17"  lxml = "~=4.4" -markdownify = "~=0.4" +markdownify = "==0.5.3"  more_itertools = "~=8.2"  python-dateutil = "~=2.8"  pyyaml = "~=5.1"  requests = "~=2.22" -sentry-sdk = "~=0.14" +sentry-sdk = "~=0.19"  sphinx = "~=2.2"  statsd = "~=3.3" +arrow = "~=0.17"  emoji = "~=0.6"  [dev-packages] diff --git a/Pipfile.lock b/Pipfile.lock index 541db1627..636d07b1a 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@  {      "_meta": {          "hash": { -            "sha256": "3ccb368599709d2970f839fc3721cfeebcd5a2700fed7231b2ce38a080828325" +            "sha256": "26c8089f17d6d6bac11dbed366b1b46818b4546f243af756a106a32af5d9d8f6"          },          "pipfile-spec": 6,          "requires": { @@ -34,22 +34,46 @@          },          "aiohttp": {              "hashes": [ -                "sha256:1a4160579ffbc1b69e88cb6ca8bb0fbd4947dfcbf9fb1e2a4fc4c7a4a986c1fe", -                "sha256:206c0ccfcea46e1bddc91162449c20c72f308aebdcef4977420ef329c8fcc599", -                
"sha256:2ad493de47a8f926386fa6d256832de3095ba285f325db917c7deae0b54a9fc8", -                "sha256:319b490a5e2beaf06891f6711856ea10591cfe84fe9f3e71a721aa8f20a0872a", -                "sha256:470e4c90da36b601676fe50c49a60d34eb8c6593780930b1aa4eea6f508dfa37", -                "sha256:60f4caa3b7f7a477f66ccdd158e06901e1d235d572283906276e3803f6b098f5", -                "sha256:66d64486172b032db19ea8522328b19cfb78a3e1e5b62ab6a0567f93f073dea0", -                "sha256:687461cd974722110d1763b45c5db4d2cdee8d50f57b00c43c7590d1dd77fc5c", -                "sha256:698cd7bc3c7d1b82bb728bae835724a486a8c376647aec336aa21a60113c3645", -                "sha256:797456399ffeef73172945708810f3277f794965eb6ec9bd3a0c007c0476be98", -                "sha256:a885432d3cabc1287bcf88ea94e1826d3aec57fd5da4a586afae4591b061d40d", -                "sha256:c506853ba52e516b264b106321c424d03f3ddef2813246432fa9d1cefd361c81", -                "sha256:fb83326d8295e8840e4ba774edf346e87eca78ba8a89c55d2690352842c15ba5" +                "sha256:0b795072bb1bf87b8620120a6373a3c61bfcb8da7e5c2377f4bb23ff4f0b62c9", +                "sha256:0d438c8ca703b1b714e82ed5b7a4412c82577040dadff479c08405e2a715564f", +                "sha256:16a3cb5df5c56f696234ea9e65e227d1ebe9c18aa774d36ff42f532139066a5f", +                "sha256:1edfd82a98c5161497bbb111b2b70c0813102ad7e0aa81cbeb34e64c93863005", +                "sha256:2406dc1dda01c7f6060ab586e4601f18affb7a6b965c50a8c90ff07569cf782a", +                "sha256:2858b2504c8697beb9357be01dc47ef86438cc1cb36ecb6991796d19475faa3e", +                "sha256:2a7b7640167ab536c3cb90cfc3977c7094f1c5890d7eeede8b273c175c3910fd", +                "sha256:3228b7a51e3ed533f5472f54f70fd0b0a64c48dc1649a0f0e809bec312934d7a", +                "sha256:328b552513d4f95b0a2eea4c8573e112866107227661834652a8984766aa7656", +                "sha256:39f4b0a6ae22a1c567cb0630c30dd082481f95c13ca528dc501a7766b9c718c0", +                
"sha256:3b0036c978cbcc4a4512278e98e3e6d9e6b834dc973206162eddf98b586ef1c6", +                "sha256:3ea8c252d8df5e9166bcf3d9edced2af132f4ead8ac422eac723c5781063709a", +                "sha256:41608c0acbe0899c852281978492f9ce2c6fbfaf60aff0cefc54a7c4516b822c", +                "sha256:59d11674964b74a81b149d4ceaff2b674b3b0e4d0f10f0be1533e49c4a28408b", +                "sha256:5e479df4b2d0f8f02133b7e4430098699450e1b2a826438af6bec9a400530957", +                "sha256:684850fb1e3e55c9220aad007f8386d8e3e477c4ec9211ae54d968ecdca8c6f9", +                "sha256:6ccc43d68b81c424e46192a778f97da94ee0630337c9bbe5b2ecc9b0c1c59001", +                "sha256:6d42debaf55450643146fabe4b6817bb2a55b23698b0434107e892a43117285e", +                "sha256:710376bf67d8ff4500a31d0c207b8941ff4fba5de6890a701d71680474fe2a60", +                "sha256:756ae7efddd68d4ea7d89c636b703e14a0c686688d42f588b90778a3c2fc0564", +                "sha256:77149002d9386fae303a4a162e6bce75cc2161347ad2ba06c2f0182561875d45", +                "sha256:78e2f18a82b88cbc37d22365cf8d2b879a492faedb3f2975adb4ed8dfe994d3a", +                "sha256:7d9b42127a6c0bdcc25c3dcf252bb3ddc70454fac593b1b6933ae091396deb13", +                "sha256:8389d6044ee4e2037dca83e3f6994738550f6ee8cfb746762283fad9b932868f", +                "sha256:9c1a81af067e72261c9cbe33ea792893e83bc6aa987bfbd6fdc1e5e7b22777c4", +                "sha256:c1e0920909d916d3375c7a1fdb0b1c78e46170e8bb42792312b6eb6676b2f87f", +                "sha256:c68fdf21c6f3573ae19c7ee65f9ff185649a060c9a06535e9c3a0ee0bbac9235", +                "sha256:c733ef3bdcfe52a1a75564389bad4064352274036e7e234730526d155f04d914", +                "sha256:c9c58b0b84055d8bc27b7df5a9d141df4ee6ff59821f922dd73155861282f6a3", +                "sha256:d03abec50df423b026a5aa09656bd9d37f1e6a49271f123f31f9b8aed5dc3ea3", +                "sha256:d2cfac21e31e841d60dc28c0ec7d4ec47a35c608cb8906435d47ef83ffb22150", +                
"sha256:dcc119db14757b0c7bce64042158307b9b1c76471e655751a61b57f5a0e4d78e", +                "sha256:df3a7b258cc230a65245167a202dd07320a5af05f3d41da1488ba0fa05bc9347", +                "sha256:df48a623c58180874d7407b4d9ec06a19b84ed47f60a3884345b1a5099c1818b", +                "sha256:e1b95972a0ae3f248a899cdbac92ba2e01d731225f566569311043ce2226f5e7", +                "sha256:f326b3c1bbfda5b9308252ee0dcb30b612ee92b0e105d4abec70335fab5b1245", +                "sha256:f411cb22115cb15452d099fec0ee636b06cf81bfb40ed9c02d30c8dc2bc2e3d1"              ],              "index": "pypi", -            "version": "==3.6.3" +            "version": "==3.7.3"          },          "aioping": {              "hashes": [ @@ -82,6 +106,14 @@              ],              "version": "==0.7.12"          }, +        "arrow": { +            "hashes": [ +                "sha256:e098abbd9af3665aea81bdd6c869e93af4feb078e98468dd351c383af187aac5", +                "sha256:ff08d10cda1d36c68657d6ad20d74fbea493d980f8b2d45344e00d6ed2bf6ed4" +            ], +            "index": "pypi", +            "version": "==0.17.0" +        },          "async-rediscache": {              "extras": [                  "fakeredis" @@ -129,51 +161,51 @@          },          "certifi": {              "hashes": [ -                "sha256:1f422849db327d534e3d0c5f02a263458c3955ec0aae4ff09b95f195c59f4edd", -                "sha256:f05def092c44fbf25834a51509ef6e631dc19765ab8a57b4e7ab85531f0a9cf4" +                "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c", +                "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"              ], -            "version": "==2020.11.8" +            "version": "==2020.12.5"          },          "cffi": {              "hashes": [ -                "sha256:005f2bfe11b6745d726dbb07ace4d53f057de66e336ff92d61b8c7e9c8f4777d", -                "sha256:09e96138280241bd355cd585148dec04dbbedb4f46128f340d696eaafc82dd7b", -                
"sha256:0b1ad452cc824665ddc682400b62c9e4f5b64736a2ba99110712fdee5f2505c4", -                "sha256:0ef488305fdce2580c8b2708f22d7785ae222d9825d3094ab073e22e93dfe51f", -                "sha256:15f351bed09897fbda218e4db5a3d5c06328862f6198d4fb385f3e14e19decb3", -                "sha256:22399ff4870fb4c7ef19fff6eeb20a8bbf15571913c181c78cb361024d574579", -                "sha256:23e5d2040367322824605bc29ae8ee9175200b92cb5483ac7d466927a9b3d537", -                "sha256:2791f68edc5749024b4722500e86303a10d342527e1e3bcac47f35fbd25b764e", -                "sha256:2f9674623ca39c9ebe38afa3da402e9326c245f0f5ceff0623dccdac15023e05", -                "sha256:3363e77a6176afb8823b6e06db78c46dbc4c7813b00a41300a4873b6ba63b171", -                "sha256:33c6cdc071ba5cd6d96769c8969a0531be2d08c2628a0143a10a7dcffa9719ca", -                "sha256:3b8eaf915ddc0709779889c472e553f0d3e8b7bdf62dab764c8921b09bf94522", -                "sha256:3cb3e1b9ec43256c4e0f8d2837267a70b0e1ca8c4f456685508ae6106b1f504c", -                "sha256:3eeeb0405fd145e714f7633a5173318bd88d8bbfc3dd0a5751f8c4f70ae629bc", -                "sha256:44f60519595eaca110f248e5017363d751b12782a6f2bd6a7041cba275215f5d", -                "sha256:4d7c26bfc1ea9f92084a1d75e11999e97b62d63128bcc90c3624d07813c52808", -                "sha256:529c4ed2e10437c205f38f3691a68be66c39197d01062618c55f74294a4a4828", -                "sha256:6642f15ad963b5092d65aed022d033c77763515fdc07095208f15d3563003869", -                "sha256:85ba797e1de5b48aa5a8427b6ba62cf69607c18c5d4eb747604b7302f1ec382d", -                "sha256:8f0f1e499e4000c4c347a124fa6a27d37608ced4fe9f7d45070563b7c4c370c9", -                "sha256:a624fae282e81ad2e4871bdb767e2c914d0539708c0f078b5b355258293c98b0", -                "sha256:b0358e6fefc74a16f745afa366acc89f979040e0cbc4eec55ab26ad1f6a9bfbc", -                "sha256:bbd2f4dfee1079f76943767fce837ade3087b578aeb9f69aec7857d5bf25db15", -                
"sha256:bf39a9e19ce7298f1bd6a9758fa99707e9e5b1ebe5e90f2c3913a47bc548747c", -                "sha256:c11579638288e53fc94ad60022ff1b67865363e730ee41ad5e6f0a17188b327a", -                "sha256:c150eaa3dadbb2b5339675b88d4573c1be3cb6f2c33a6c83387e10cc0bf05bd3", -                "sha256:c53af463f4a40de78c58b8b2710ade243c81cbca641e34debf3396a9640d6ec1", -                "sha256:cb763ceceae04803adcc4e2d80d611ef201c73da32d8f2722e9d0ab0c7f10768", -                "sha256:cc75f58cdaf043fe6a7a6c04b3b5a0e694c6a9e24050967747251fb80d7bce0d", -                "sha256:d80998ed59176e8cba74028762fbd9b9153b9afc71ea118e63bbf5d4d0f9552b", -                "sha256:de31b5164d44ef4943db155b3e8e17929707cac1e5bd2f363e67a56e3af4af6e", -                "sha256:e66399cf0fc07de4dce4f588fc25bfe84a6d1285cc544e67987d22663393926d", -                "sha256:f0620511387790860b249b9241c2f13c3a80e21a73e0b861a2df24e9d6f56730", -                "sha256:f4eae045e6ab2bb54ca279733fe4eb85f1effda392666308250714e01907f394", -                "sha256:f92cdecb618e5fa4658aeb97d5eb3d2f47aa94ac6477c6daf0f306c5a3b9e6b1", -                "sha256:f92f789e4f9241cd262ad7a555ca2c648a98178a953af117ef7fad46aa1d5591" -            ], -            "version": "==1.14.3" +                "sha256:00a1ba5e2e95684448de9b89888ccd02c98d512064b4cb987d48f4b40aa0421e", +                "sha256:00e28066507bfc3fe865a31f325c8391a1ac2916219340f87dfad602c3e48e5d", +                "sha256:045d792900a75e8b1e1b0ab6787dd733a8190ffcf80e8c8ceb2fb10a29ff238a", +                "sha256:0638c3ae1a0edfb77c6765d487fee624d2b1ee1bdfeffc1f0b58c64d149e7eec", +                "sha256:105abaf8a6075dc96c1fe5ae7aae073f4696f2905fde6aeada4c9d2926752362", +                "sha256:155136b51fd733fa94e1c2ea5211dcd4c8879869008fc811648f16541bf99668", +                "sha256:1a465cbe98a7fd391d47dce4b8f7e5b921e6cd805ef421d04f5f66ba8f06086c", +                "sha256:1d2c4994f515e5b485fd6d3a73d05526aa0fcf248eb135996b088d25dfa1865b", +                
"sha256:2c24d61263f511551f740d1a065eb0212db1dbbbbd241db758f5244281590c06", +                "sha256:51a8b381b16ddd370178a65360ebe15fbc1c71cf6f584613a7ea08bfad946698", +                "sha256:594234691ac0e9b770aee9fcdb8fa02c22e43e5c619456efd0d6c2bf276f3eb2", +                "sha256:5cf4be6c304ad0b6602f5c4e90e2f59b47653ac1ed9c662ed379fe48a8f26b0c", +                "sha256:64081b3f8f6f3c3de6191ec89d7dc6c86a8a43911f7ecb422c60e90c70be41c7", +                "sha256:6bc25fc545a6b3d57b5f8618e59fc13d3a3a68431e8ca5fd4c13241cd70d0009", +                "sha256:798caa2a2384b1cbe8a2a139d80734c9db54f9cc155c99d7cc92441a23871c03", +                "sha256:7c6b1dece89874d9541fc974917b631406233ea0440d0bdfbb8e03bf39a49b3b", +                "sha256:840793c68105fe031f34d6a086eaea153a0cd5c491cde82a74b420edd0a2b909", +                "sha256:8d6603078baf4e11edc4168a514c5ce5b3ba6e3e9c374298cb88437957960a53", +                "sha256:9cc46bc107224ff5b6d04369e7c595acb700c3613ad7bcf2e2012f62ece80c35", +                "sha256:9f7a31251289b2ab6d4012f6e83e58bc3b96bd151f5b5262467f4bb6b34a7c26", +                "sha256:9ffb888f19d54a4d4dfd4b3f29bc2c16aa4972f1c2ab9c4ab09b8ab8685b9c2b", +                "sha256:a5ed8c05548b54b998b9498753fb9cadbfd92ee88e884641377d8a8b291bcc01", +                "sha256:a7711edca4dcef1a75257b50a2fbfe92a65187c47dab5a0f1b9b332c5919a3fb", +                "sha256:af5c59122a011049aad5dd87424b8e65a80e4a6477419c0c1015f73fb5ea0293", +                "sha256:b18e0a9ef57d2b41f5c68beefa32317d286c3d6ac0484efd10d6e07491bb95dd", +                "sha256:b4e248d1087abf9f4c10f3c398896c87ce82a9856494a7155823eb45a892395d", +                "sha256:ba4e9e0ae13fc41c6b23299545e5ef73055213e466bd107953e4a013a5ddd7e3", +                "sha256:c6332685306b6417a91b1ff9fae889b3ba65c2292d64bd9245c093b1b284809d", +                "sha256:d5ff0621c88ce83a28a10d2ce719b2ee85635e85c515f12bac99a95306da4b2e", +                
"sha256:d9efd8b7a3ef378dd61a1e77367f1924375befc2eba06168b6ebfa903a5e59ca", +                "sha256:df5169c4396adc04f9b0a05f13c074df878b6052430e03f50e68adf3a57aa28d", +                "sha256:ebb253464a5d0482b191274f1c8bf00e33f7e0b9c66405fbffc61ed2c839c775", +                "sha256:ec80dc47f54e6e9a78181ce05feb71a0353854cc26999db963695f950b5fb375", +                "sha256:f032b34669220030f905152045dfa27741ce1a6db3324a5bc0b96b6c7420c87b", +                "sha256:f60567825f791c6f8a592f3c6e3bd93dd2934e3f9dac189308426bd76b00ef3b", +                "sha256:f803eaa94c2fcda012c047e62bc7a51b0bdabda1cad7a92a522694ea2d76e49f" +            ], +            "version": "==1.14.4"          },          "chardet": {              "hashes": [ @@ -192,11 +224,11 @@          },          "coloredlogs": {              "hashes": [ -                "sha256:346f58aad6afd48444c2468618623638dadab76e4e70d5e10822676f2d32226a", -                "sha256:a1fab193d2053aa6c0a97608c4342d031f1f93a3d1218432c59322441d31a505" +                "sha256:7ef1a7219870c7f02c218a2f2877ce68f2f8e087bb3a55bd6fbaa2a4362b4d52", +                "sha256:e244a892f9d97ffd2c60f15bf1d2582ef7f9ac0f848d132249004184785702b3"              ],              "index": "pypi", -            "version": "==14.0" +            "version": "==14.3"          },          "deepdiff": {              "hashes": [ @@ -208,11 +240,11 @@          },          "discord.py": {              "hashes": [ -                "sha256:2367359e31f6527f8a936751fc20b09d7495dd6a76b28c8fb13d4ca6c55b7563", -                "sha256:def00dc50cf36d21346d71bc89f0cad8f18f9a3522978dc18c7796287d47de8b" +                "sha256:3df148daf6fbcc7ab5b11042368a3cd5f7b730b62f09fb5d3cbceff59bcfbb12", +                "sha256:ba8be99ff1b8c616f7b6dcb700460d0222b29d4c11048e74366954c465fdd05f"              ],              "index": "pypi", -            "version": "==1.5.1" +            "version": "==1.6.0"          },          "docutils": {              "hashes": [ @@ -231,10 
+263,10 @@          },          "fakeredis": {              "hashes": [ -                "sha256:8070b7fce16f828beaef2c757a4354af91698685d5232404f1aeeb233529c7a5", -                "sha256:f8c8ea764d7b6fd801e7f5486e3edd32ca991d506186f1923a01fc072e33c271" +                "sha256:01cb47d2286825a171fb49c0e445b1fa9307087e07cbb3d027ea10dbff108b6a", +                "sha256:2c6041cf0225889bc403f3949838b2c53470a95a9e2d4272422937786f5f8f73"              ], -            "version": "==1.4.4" +            "version": "==1.4.5"          },          "feedparser": {              "hashes": [ @@ -307,11 +339,11 @@          },          "humanfriendly": {              "hashes": [ -                "sha256:bf52ec91244819c780341a3438d5d7b09f431d3f113a475147ac9b7b167a3d12", -                "sha256:e78960b31198511f45fd455534ae7645a6207d33e512d2e842c766d15d9c8080" +                "sha256:066562956639ab21ff2676d1fda0b5987e985c534fc76700a19bd54bcb81121d", +                "sha256:d5c731705114b9ad673754f3317d9fa4c23212f36b29bdc4272a892eafc9bc72"              ],              "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", -            "version": "==8.2" +            "version": "==9.1"          },          "idna": {              "hashes": [ @@ -339,46 +371,46 @@          },          "lxml": {              "hashes": [ -                "sha256:098fb713b31050463751dcc694878e1d39f316b86366fb9fe3fbbe5396ac9fab", -                "sha256:0e89f5d422988c65e6936e4ec0fe54d6f73f3128c80eb7ecc3b87f595523607b", -                "sha256:189ad47203e846a7a4951c17694d845b6ade7917c47c64b29b86526eefc3adf5", -                "sha256:1d87936cb5801c557f3e981c9c193861264c01209cb3ad0964a16310ca1b3301", -                "sha256:211b3bcf5da70c2d4b84d09232534ad1d78320762e2c59dedc73bf01cb1fc45b", -                "sha256:2358809cc64394617f2719147a58ae26dac9e21bae772b45cfb80baa26bfca5d", -                "sha256:23c83112b4dada0b75789d73f949dbb4e8f29a0a3511647024a398ebd023347b", - 
               "sha256:24e811118aab6abe3ce23ff0d7d38932329c513f9cef849d3ee88b0f848f2aa9", -                "sha256:2d5896ddf5389560257bbe89317ca7bcb4e54a02b53a3e572e1ce4226512b51b", -                "sha256:2d6571c48328be4304aee031d2d5046cbc8aed5740c654575613c5a4f5a11311", -                "sha256:2e311a10f3e85250910a615fe194839a04a0f6bc4e8e5bb5cac221344e3a7891", -                "sha256:302160eb6e9764168e01d8c9ec6becddeb87776e81d3fcb0d97954dd51d48e0a", -                "sha256:3a7a380bfecc551cfd67d6e8ad9faa91289173bdf12e9cfafbd2bdec0d7b1ec1", -                "sha256:3d9b2b72eb0dbbdb0e276403873ecfae870599c83ba22cadff2db58541e72856", -                "sha256:475325e037fdf068e0c2140b818518cf6bc4aa72435c407a798b2db9f8e90810", -                "sha256:4b7572145054330c8e324a72d808c8c8fbe12be33368db28c39a255ad5f7fb51", -                "sha256:4fff34721b628cce9eb4538cf9a73d02e0f3da4f35a515773cce6f5fe413b360", -                "sha256:56eff8c6fb7bc4bcca395fdff494c52712b7a57486e4fbde34c31bb9da4c6cc4", -                "sha256:573b2f5496c7e9f4985de70b9bbb4719ffd293d5565513e04ac20e42e6e5583f", -                "sha256:7ecaef52fd9b9535ae5f01a1dd2651f6608e4ec9dc136fc4dfe7ebe3c3ddb230", -                "sha256:803a80d72d1f693aa448566be46ffd70882d1ad8fc689a2e22afe63035eb998a", -                "sha256:8862d1c2c020cb7a03b421a9a7b4fe046a208db30994fc8ff68c627a7915987f", -                "sha256:9b06690224258db5cd39a84e993882a6874676f5de582da57f3df3a82ead9174", -                "sha256:a71400b90b3599eb7bf241f947932e18a066907bf84617d80817998cee81e4bf", -                "sha256:bb252f802f91f59767dcc559744e91efa9df532240a502befd874b54571417bd", -                "sha256:be1ebf9cc25ab5399501c9046a7dcdaa9e911802ed0e12b7d620cd4bbf0518b3", -                "sha256:be7c65e34d1b50ab7093b90427cbc488260e4b3a38ef2435d65b62e9fa3d798a", -                "sha256:c0dac835c1a22621ffa5e5f999d57359c790c52bbd1c687fe514ae6924f65ef5", -                
"sha256:c152b2e93b639d1f36ec5a8ca24cde4a8eefb2b6b83668fcd8e83a67badcb367", -                "sha256:d182eada8ea0de61a45a526aa0ae4bcd222f9673424e65315c35820291ff299c", -                "sha256:d18331ea905a41ae71596502bd4c9a2998902328bbabd29e3d0f5f8569fabad1", -                "sha256:d20d32cbb31d731def4b1502294ca2ee99f9249b63bc80e03e67e8f8e126dea8", -                "sha256:d4ad7fd3269281cb471ad6c7bafca372e69789540d16e3755dd717e9e5c9d82f", -                "sha256:d6f8c23f65a4bfe4300b85f1f40f6c32569822d08901db3b6454ab785d9117cc", -                "sha256:d84d741c6e35c9f3e7406cb7c4c2e08474c2a6441d59322a00dcae65aac6315d", -                "sha256:e65c221b2115a91035b55a593b6eb94aa1206fa3ab374f47c6dc10d364583ff9", -                "sha256:f98b6f256be6cec8dd308a8563976ddaff0bdc18b730720f6f4bee927ffe926f" +                "sha256:0448576c148c129594d890265b1a83b9cd76fd1f0a6a04620753d9a6bcfd0a4d", +                "sha256:127f76864468d6630e1b453d3ffbbd04b024c674f55cf0a30dc2595137892d37", +                "sha256:1471cee35eba321827d7d53d104e7b8c593ea3ad376aa2df89533ce8e1b24a01", +                "sha256:2363c35637d2d9d6f26f60a208819e7eafc4305ce39dc1d5005eccc4593331c2", +                "sha256:2e5cc908fe43fe1aa299e58046ad66981131a66aea3129aac7770c37f590a644", +                "sha256:2e6fd1b8acd005bd71e6c94f30c055594bbd0aa02ef51a22bbfa961ab63b2d75", +                "sha256:366cb750140f221523fa062d641393092813b81e15d0e25d9f7c6025f910ee80", +                "sha256:42ebca24ba2a21065fb546f3e6bd0c58c3fe9ac298f3a320147029a4850f51a2", +                "sha256:4e751e77006da34643ab782e4a5cc21ea7b755551db202bc4d3a423b307db780", +                "sha256:4fb85c447e288df535b17ebdebf0ec1cf3a3f1a8eba7e79169f4f37af43c6b98", +                "sha256:50c348995b47b5a4e330362cf39fc503b4a43b14a91c34c83b955e1805c8e308", +                "sha256:535332fe9d00c3cd455bd3dd7d4bacab86e2d564bdf7606079160fa6251caacf", +                
"sha256:535f067002b0fd1a4e5296a8f1bf88193080ff992a195e66964ef2a6cfec5388", +                "sha256:5be4a2e212bb6aa045e37f7d48e3e1e4b6fd259882ed5a00786f82e8c37ce77d", +                "sha256:60a20bfc3bd234d54d49c388950195d23a5583d4108e1a1d47c9eef8d8c042b3", +                "sha256:648914abafe67f11be7d93c1a546068f8eff3c5fa938e1f94509e4a5d682b2d8", +                "sha256:681d75e1a38a69f1e64ab82fe4b1ed3fd758717bed735fb9aeaa124143f051af", +                "sha256:68a5d77e440df94011214b7db907ec8f19e439507a70c958f750c18d88f995d2", +                "sha256:69a63f83e88138ab7642d8f61418cf3180a4d8cd13995df87725cb8b893e950e", +                "sha256:6e4183800f16f3679076dfa8abf2db3083919d7e30764a069fb66b2b9eff9939", +                "sha256:6fd8d5903c2e53f49e99359b063df27fdf7acb89a52b6a12494208bf61345a03", +                "sha256:791394449e98243839fa822a637177dd42a95f4883ad3dec2a0ce6ac99fb0a9d", +                "sha256:7a7669ff50f41225ca5d6ee0a1ec8413f3a0d8aa2b109f86d540887b7ec0d72a", +                "sha256:7e9eac1e526386df7c70ef253b792a0a12dd86d833b1d329e038c7a235dfceb5", +                "sha256:7ee8af0b9f7de635c61cdd5b8534b76c52cd03536f29f51151b377f76e214a1a", +                "sha256:8246f30ca34dc712ab07e51dc34fea883c00b7ccb0e614651e49da2c49a30711", +                "sha256:8c88b599e226994ad4db29d93bc149aa1aff3dc3a4355dd5757569ba78632bdf", +                "sha256:923963e989ffbceaa210ac37afc9b906acebe945d2723e9679b643513837b089", +                "sha256:94d55bd03d8671686e3f012577d9caa5421a07286dd351dfef64791cf7c6c505", +                "sha256:97db258793d193c7b62d4e2586c6ed98d51086e93f9a3af2b2034af01450a74b", +                "sha256:a9d6bc8642e2c67db33f1247a77c53476f3a166e09067c0474facb045756087f", +                "sha256:cd11c7e8d21af997ee8079037fff88f16fda188a9776eb4b81c7e4c9c0a7d7fc", +                "sha256:d8d3d4713f0c28bdc6c806a278d998546e8efc3498949e3ace6e117462ac0a5e", +                
"sha256:e0bfe9bb028974a481410432dbe1b182e8191d5d40382e5b8ff39cdd2e5c5931", +                "sha256:f4822c0660c3754f1a41a655e37cb4dbbc9be3d35b125a37fab6f82d47674ebc", +                "sha256:f83d281bb2a6217cd806f4cf0ddded436790e66f393e124dfe9731f6b3fb9afe", +                "sha256:fc37870d6716b137e80d19241d0e2cff7a7643b925dfa49b4c8ebd1295eb506e"              ],              "index": "pypi", -            "version": "==4.6.1" +            "version": "==4.6.2"          },          "markdownify": {              "hashes": [ @@ -437,26 +469,46 @@          },          "multidict": {              "hashes": [ -                "sha256:1ece5a3369835c20ed57adadc663400b5525904e53bae59ec854a5d36b39b21a", -                "sha256:275ca32383bc5d1894b6975bb4ca6a7ff16ab76fa622967625baeebcf8079000", -                "sha256:3750f2205b800aac4bb03b5ae48025a64e474d2c6cc79547988ba1d4122a09e2", -                "sha256:4538273208e7294b2659b1602490f4ed3ab1c8cf9dbdd817e0e9db8e64be2507", -                "sha256:5141c13374e6b25fe6bf092052ab55c0c03d21bd66c94a0e3ae371d3e4d865a5", -                "sha256:51a4d210404ac61d32dada00a50ea7ba412e6ea945bbe992e4d7a595276d2ec7", -                "sha256:5cf311a0f5ef80fe73e4f4c0f0998ec08f954a6ec72b746f3c179e37de1d210d", -                "sha256:6513728873f4326999429a8b00fc7ceddb2509b01d5fd3f3be7881a257b8d463", -                "sha256:7388d2ef3c55a8ba80da62ecfafa06a1c097c18032a501ffd4cabbc52d7f2b19", -                "sha256:9456e90649005ad40558f4cf51dbb842e32807df75146c6d940b6f5abb4a78f3", -                "sha256:c026fe9a05130e44157b98fea3ab12969e5b60691a276150db9eda71710cd10b", -                "sha256:d14842362ed4cf63751648e7672f7174c9818459d169231d03c56e84daf90b7c", -                "sha256:e0d072ae0f2a179c375f67e3da300b47e1a83293c554450b29c900e50afaae87", -                "sha256:f07acae137b71af3bb548bd8da720956a3bc9f9a0b87733e0899226a2317aeb7", -                "sha256:fbb77a75e529021e7c4a8d4e823d88ef4d23674a202be4f5addffc72cbb91430", -         
       "sha256:fcfbb44c59af3f8ea984de67ec7c306f618a3ec771c2843804069917a8f2e255", -                "sha256:feed85993dbdb1dbc29102f50bca65bdc68f2c0c8d352468c25b54874f23c39d" +                "sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a", +                "sha256:051012ccee979b2b06be928a6150d237aec75dd6bf2d1eeeb190baf2b05abc93", +                "sha256:05c20b68e512166fddba59a918773ba002fdd77800cad9f55b59790030bab632", +                "sha256:07b42215124aedecc6083f1ce6b7e5ec5b50047afa701f3442054373a6deb656", +                "sha256:0e3c84e6c67eba89c2dbcee08504ba8644ab4284863452450520dad8f1e89b79", +                "sha256:0e929169f9c090dae0646a011c8b058e5e5fb391466016b39d21745b48817fd7", +                "sha256:1ab820665e67373de5802acae069a6a05567ae234ddb129f31d290fc3d1aa56d", +                "sha256:25b4e5f22d3a37ddf3effc0710ba692cfc792c2b9edfb9c05aefe823256e84d5", +                "sha256:2e68965192c4ea61fff1b81c14ff712fc7dc15d2bd120602e4a3494ea6584224", +                "sha256:2f1a132f1c88724674271d636e6b7351477c27722f2ed789f719f9e3545a3d26", +                "sha256:37e5438e1c78931df5d3c0c78ae049092877e5e9c02dd1ff5abb9cf27a5914ea", +                "sha256:3a041b76d13706b7fff23b9fc83117c7b8fe8d5fe9e6be45eee72b9baa75f348", +                "sha256:3a4f32116f8f72ecf2a29dabfb27b23ab7cdc0ba807e8459e59a93a9be9506f6", +                "sha256:46c73e09ad374a6d876c599f2328161bcd95e280f84d2060cf57991dec5cfe76", +                "sha256:46dd362c2f045095c920162e9307de5ffd0a1bfbba0a6e990b344366f55a30c1", +                "sha256:4b186eb7d6ae7c06eb4392411189469e6a820da81447f46c0072a41c748ab73f", +                "sha256:54fd1e83a184e19c598d5e70ba508196fd0bbdd676ce159feb412a4a6664f952", +                "sha256:585fd452dd7782130d112f7ddf3473ffdd521414674c33876187e101b588738a", +                "sha256:5cf3443199b83ed9e955f511b5b241fd3ae004e3cb81c58ec10f4fe47c7dce37", +                
"sha256:6a4d5ce640e37b0efcc8441caeea8f43a06addace2335bd11151bc02d2ee31f9", +                "sha256:7df80d07818b385f3129180369079bd6934cf70469f99daaebfac89dca288359", +                "sha256:806068d4f86cb06af37cd65821554f98240a19ce646d3cd24e1c33587f313eb8", +                "sha256:830f57206cc96ed0ccf68304141fec9481a096c4d2e2831f311bde1c404401da", +                "sha256:929006d3c2d923788ba153ad0de8ed2e5ed39fdbe8e7be21e2f22ed06c6783d3", +                "sha256:9436dc58c123f07b230383083855593550c4d301d2532045a17ccf6eca505f6d", +                "sha256:9dd6e9b1a913d096ac95d0399bd737e00f2af1e1594a787e00f7975778c8b2bf", +                "sha256:ace010325c787c378afd7f7c1ac66b26313b3344628652eacd149bdd23c68841", +                "sha256:b47a43177a5e65b771b80db71e7be76c0ba23cc8aa73eeeb089ed5219cdbe27d", +                "sha256:b797515be8743b771aa868f83563f789bbd4b236659ba52243b735d80b29ed93", +                "sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f", +                "sha256:d5c65bdf4484872c4af3150aeebe101ba560dcfb34488d9a8ff8dbcd21079647", +                "sha256:d81eddcb12d608cc08081fa88d046c78afb1bf8107e6feab5d43503fea74a635", +                "sha256:dc862056f76443a0db4509116c5cd480fe1b6a2d45512a653f9a855cc0517456", +                "sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda", +                "sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5", +                "sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281", +                "sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80"              ], -            "markers": "python_version >= '3.5'", -            "version": "==4.7.6" +            "markers": "python_version >= '3.6'", +            "version": "==5.1.0"          },          "ordered-set": {              "hashes": [ @@ -467,11 +519,11 @@          },          "packaging": {              "hashes": [ -                
"sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8", -                "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181" +                "sha256:24e0da08660a87484d1602c30bb4902d74816b6985b93de36926f5bc95741858", +                "sha256:78598185a7008a470d64526a8059de9aaa449238f280fc9eb6b13ba6c4109093"              ],              "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", -            "version": "==20.4" +            "version": "==20.8"          },          "pamqp": {              "hashes": [ @@ -524,18 +576,18 @@          },          "pygments": {              "hashes": [ -                "sha256:381985fcc551eb9d37c52088a32914e00517e57f4a21609f48141ba08e193fa0", -                "sha256:88a0bbcd659fcb9573703957c6b9cff9fab7295e6e76db54c9d00ae42df32773" +                "sha256:bc9591213a8f0e0ca1a5e68a479b4887fdc3e75d0774e5c71c31920c427de435", +                "sha256:df49d09b498e83c1a73128295860250b0b7edd4c723a32e9bc0d295c7c2ec337"              ],              "markers": "python_version >= '3.5'", -            "version": "==2.7.2" +            "version": "==2.7.4"          },          "pyparsing": {              "hashes": [                  "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1",                  "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"              ], -            "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", +            "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2'",              "version": "==2.4.7"          },          "python-dateutil": { @@ -548,25 +600,25 @@          },          "pytz": {              "hashes": [ -                "sha256:3e6b7dd2d1e0a59084bcee14a17af60c5c562cdc16d828e8eba2e683d3a7e268", -                "sha256:5c55e189b682d420be27c6995ba6edce0c0a77dd67bfbe2ae6607134d5851ffd" +                
"sha256:16962c5fb8db4a8f63a26646d8886e9d769b6c511543557bc84e9569fb9a9cb4", +                "sha256:180befebb1927b16f6b57101720075a984c019ac16b1b7575673bea42c6c3da5"              ], -            "version": "==2020.4" +            "version": "==2020.5"          },          "pyyaml": {              "hashes": [ -                "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", -                "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a", -                "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", +                "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97",                  "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76", +                "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2",                  "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e",                  "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648", -                "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", -                "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", +                "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf",                  "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f", -                "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", -                "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee",                  "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2", +                "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee", +                "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a", +                "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", +                
"sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c",                  "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a"              ],              "index": "pypi", @@ -582,26 +634,26 @@          },          "requests": {              "hashes": [ -                "sha256:7f1a0b932f4a60a1a65caa4263921bb7d9ee911957e0ae4a23a6dd08185ad5f8", -                "sha256:e786fa28d8c9154e6a4de5d46a1d921b8749f8b74e28bde23768e5e16eece998" +                "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804", +                "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"              ],              "index": "pypi", -            "version": "==2.25.0" +            "version": "==2.25.1"          },          "sentry-sdk": {              "hashes": [ -                "sha256:1052f0ed084e532f66cb3e4ba617960d820152aee8b93fc6c05bd53861768c1c", -                "sha256:4c42910a55a6b1fe694d5e4790d5188d105d77b5a6346c1c64cbea8c06c0e8b7" +                "sha256:0a711ec952441c2ec89b8f5d226c33bc697914f46e876b44a4edd3e7864cf4d0", +                "sha256:737a094e49a529dd0fdcaafa9e97cf7c3d5eb964bd229821d640bc77f3502b3f"              ],              "index": "pypi", -            "version": "==0.19.4" +            "version": "==0.19.5"          },          "six": {              "hashes": [                  "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",                  "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"              ], -            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", +            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'",              "version": "==1.15.0"          },          "snowballstemmer": { @@ -620,11 +672,11 @@          },          "soupsieve": {              "hashes": [ -                
"sha256:1634eea42ab371d3d346309b93df7870a88610f0725d47528be902a0d95ecc55", -                "sha256:a59dc181727e95d25f781f0eb4fd1825ff45590ec8ff49eadfd7f1a537cc0232" +                "sha256:4bb21a6ee4707bf43b61230e80740e71bfe56e55d1f1f50924b087bb2975c851", +                "sha256:6dc52924dc0bc710a5d16794e6b3480b2c7c08b07729505feab2b2c16661ff6e"              ],              "markers": "python_version >= '3.0'", -            "version": "==2.0.1" +            "version": "==2.1"          },          "sphinx": {              "hashes": [ @@ -690,6 +742,14 @@              "index": "pypi",              "version": "==3.3.0"          }, +        "typing-extensions": { +            "hashes": [ +                "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918", +                "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c", +                "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f" +            ], +            "version": "==3.7.4.3" +        },          "urllib3": {              "hashes": [                  "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08", @@ -700,26 +760,46 @@          },          "yarl": {              "hashes": [ -                "sha256:040b237f58ff7d800e6e0fd89c8439b841f777dd99b4a9cca04d6935564b9409", -                "sha256:17668ec6722b1b7a3a05cc0167659f6c95b436d25a36c2d52db0eca7d3f72593", -                "sha256:3a584b28086bc93c888a6c2aa5c92ed1ae20932f078c46509a66dce9ea5533f2", -                "sha256:4439be27e4eee76c7632c2427ca5e73703151b22cae23e64adb243a9c2f565d8", -                "sha256:48e918b05850fffb070a496d2b5f97fc31d15d94ca33d3d08a4f86e26d4e7c5d", -                "sha256:9102b59e8337f9874638fcfc9ac3734a0cfadb100e47d55c20d0dc6087fb4692", -                "sha256:9b930776c0ae0c691776f4d2891ebc5362af86f152dd0da463a6614074cb1b02", -                "sha256:b3b9ad80f8b68519cc3372a6ca85ae02cc5a8807723ac366b53c0f089db19e4a", -             
   "sha256:bc2f976c0e918659f723401c4f834deb8a8e7798a71be4382e024bcc3f7e23a8", -                "sha256:c22c75b5f394f3d47105045ea551e08a3e804dc7e01b37800ca35b58f856c3d6", -                "sha256:c52ce2883dc193824989a9b97a76ca86ecd1fa7955b14f87bf367a61b6232511", -                "sha256:ce584af5de8830d8701b8979b18fcf450cef9a382b1a3c8ef189bedc408faf1e", -                "sha256:da456eeec17fa8aa4594d9a9f27c0b1060b6a75f2419fe0c00609587b2695f4a", -                "sha256:db6db0f45d2c63ddb1a9d18d1b9b22f308e52c83638c26b422d520a815c4b3fb", -                "sha256:df89642981b94e7db5596818499c4b2219028f2a528c9c37cc1de45bf2fd3a3f", -                "sha256:f18d68f2be6bf0e89f1521af2b1bb46e66ab0018faafa81d70f358153170a317", -                "sha256:f379b7f83f23fe12823085cd6b906edc49df969eb99757f58ff382349a3303c6" +                "sha256:00d7ad91b6583602eb9c1d085a2cf281ada267e9a197e8b7cae487dadbfa293e", +                "sha256:0355a701b3998dcd832d0dc47cc5dedf3874f966ac7f870e0f3a6788d802d434", +                "sha256:15263c3b0b47968c1d90daa89f21fcc889bb4b1aac5555580d74565de6836366", +                "sha256:2ce4c621d21326a4a5500c25031e102af589edb50c09b321049e388b3934eec3", +                "sha256:31ede6e8c4329fb81c86706ba8f6bf661a924b53ba191b27aa5fcee5714d18ec", +                "sha256:324ba3d3c6fee56e2e0b0d09bf5c73824b9f08234339d2b788af65e60040c959", +                "sha256:329412812ecfc94a57cd37c9d547579510a9e83c516bc069470db5f75684629e", +                "sha256:4736eaee5626db8d9cda9eb5282028cc834e2aeb194e0d8b50217d707e98bb5c", +                "sha256:4953fb0b4fdb7e08b2f3b3be80a00d28c5c8a2056bb066169de00e6501b986b6", +                "sha256:4c5bcfc3ed226bf6419f7a33982fb4b8ec2e45785a0561eb99274ebbf09fdd6a", +                "sha256:547f7665ad50fa8563150ed079f8e805e63dd85def6674c97efd78eed6c224a6", +                "sha256:5b883e458058f8d6099e4420f0cc2567989032b5f34b271c0827de9f1079a424", +                
"sha256:63f90b20ca654b3ecc7a8d62c03ffa46999595f0167d6450fa8383bab252987e", +                "sha256:68dc568889b1c13f1e4745c96b931cc94fdd0defe92a72c2b8ce01091b22e35f", +                "sha256:69ee97c71fee1f63d04c945f56d5d726483c4762845400a6795a3b75d56b6c50", +                "sha256:6d6283d8e0631b617edf0fd726353cb76630b83a089a40933043894e7f6721e2", +                "sha256:72a660bdd24497e3e84f5519e57a9ee9220b6f3ac4d45056961bf22838ce20cc", +                "sha256:73494d5b71099ae8cb8754f1df131c11d433b387efab7b51849e7e1e851f07a4", +                "sha256:7356644cbed76119d0b6bd32ffba704d30d747e0c217109d7979a7bc36c4d970", +                "sha256:8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10", +                "sha256:8aa3decd5e0e852dc68335abf5478a518b41bf2ab2f330fe44916399efedfae0", +                "sha256:97b5bdc450d63c3ba30a127d018b866ea94e65655efaf889ebeabc20f7d12406", +                "sha256:9ede61b0854e267fd565e7527e2f2eb3ef8858b301319be0604177690e1a3896", +                "sha256:b2e9a456c121e26d13c29251f8267541bd75e6a1ccf9e859179701c36a078643", +                "sha256:b5dfc9a40c198334f4f3f55880ecf910adebdcb2a0b9a9c23c9345faa9185721", +                "sha256:bafb450deef6861815ed579c7a6113a879a6ef58aed4c3a4be54400ae8871478", +                "sha256:c49ff66d479d38ab863c50f7bb27dee97c6627c5fe60697de15529da9c3de724", +                "sha256:ce3beb46a72d9f2190f9e1027886bfc513702d748047b548b05dab7dfb584d2e", +                "sha256:d26608cf178efb8faa5ff0f2d2e77c208f471c5a3709e577a7b3fd0445703ac8", +                "sha256:d597767fcd2c3dc49d6eea360c458b65643d1e4dbed91361cf5e36e53c1f8c96", +                "sha256:d5c32c82990e4ac4d8150fd7652b972216b204de4e83a122546dce571c1bdf25", +                "sha256:d8d07d102f17b68966e2de0e07bfd6e139c7c02ef06d3a0f8d2f0f055e13bb76", +                "sha256:e46fba844f4895b36f4c398c5af062a9808d1f26b2999c58909517384d5deda2", +                
"sha256:e6b5460dc5ad42ad2b36cca524491dfcaffbfd9c8df50508bddc354e787b8dc2", +                "sha256:f040bcc6725c821a4c0665f3aa96a4d0805a7aaf2caf266d256b8ed71b9f041c", +                "sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a", +                "sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71"              ], -            "markers": "python_version >= '3.5'", -            "version": "==1.5.1" +            "markers": "python_version >= '3.6'", +            "version": "==1.6.3"          }      },      "develop": { @@ -740,10 +820,10 @@          },          "certifi": {              "hashes": [ -                "sha256:1f422849db327d534e3d0c5f02a263458c3955ec0aae4ff09b95f195c59f4edd", -                "sha256:f05def092c44fbf25834a51509ef6e631dc19765ab8a57b4e7ab85531f0a9cf4" +                "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c", +                "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"              ], -            "version": "==2020.11.8" +            "version": "==2020.12.5"          },          "cfgv": {              "hashes": [ @@ -762,43 +842,58 @@          },          "coverage": {              "hashes": [ -                "sha256:0203acd33d2298e19b57451ebb0bed0ab0c602e5cf5a818591b4918b1f97d516", -                "sha256:0f313707cdecd5cd3e217fc68c78a960b616604b559e9ea60cc16795c4304259", -                "sha256:1c6703094c81fa55b816f5ae542c6ffc625fec769f22b053adb42ad712d086c9", -                "sha256:1d44bb3a652fed01f1f2c10d5477956116e9b391320c94d36c6bf13b088a1097", -                "sha256:280baa8ec489c4f542f8940f9c4c2181f0306a8ee1a54eceba071a449fb870a0", -                "sha256:29a6272fec10623fcbe158fdf9abc7a5fa032048ac1d8631f14b50fbfc10d17f", -                "sha256:2b31f46bf7b31e6aa690d4c7a3d51bb262438c6dcb0d528adde446531d0d3bb7", -                "sha256:2d43af2be93ffbad25dd959899b5b809618a496926146ce98ee0b23683f8c51c", -             
   "sha256:381ead10b9b9af5f64646cd27107fb27b614ee7040bb1226f9c07ba96625cbb5", -                "sha256:47a11bdbd8ada9b7ee628596f9d97fbd3851bd9999d398e9436bd67376dbece7", -                "sha256:4d6a42744139a7fa5b46a264874a781e8694bb32f1d76d8137b68138686f1729", -                "sha256:50691e744714856f03a86df3e2bff847c2acede4c191f9a1da38f088df342978", -                "sha256:530cc8aaf11cc2ac7430f3614b04645662ef20c348dce4167c22d99bec3480e9", -                "sha256:582ddfbe712025448206a5bc45855d16c2e491c2dd102ee9a2841418ac1c629f", -                "sha256:63808c30b41f3bbf65e29f7280bf793c79f54fb807057de7e5238ffc7cc4d7b9", -                "sha256:71b69bd716698fa62cd97137d6f2fdf49f534decb23a2c6fc80813e8b7be6822", -                "sha256:7858847f2d84bf6e64c7f66498e851c54de8ea06a6f96a32a1d192d846734418", -                "sha256:78e93cc3571fd928a39c0b26767c986188a4118edc67bc0695bc7a284da22e82", -                "sha256:7f43286f13d91a34fadf61ae252a51a130223c52bfefb50310d5b2deb062cf0f", -                "sha256:86e9f8cd4b0cdd57b4ae71a9c186717daa4c5a99f3238a8723f416256e0b064d", -                "sha256:8f264ba2701b8c9f815b272ad568d555ef98dfe1576802ab3149c3629a9f2221", -                "sha256:9342dd70a1e151684727c9c91ea003b2fb33523bf19385d4554f7897ca0141d4", -                "sha256:9361de40701666b034c59ad9e317bae95c973b9ff92513dd0eced11c6adf2e21", -                "sha256:9669179786254a2e7e57f0ecf224e978471491d660aaca833f845b72a2df3709", -                "sha256:aac1ba0a253e17889550ddb1b60a2063f7474155465577caa2a3b131224cfd54", -                "sha256:aef72eae10b5e3116bac6957de1df4d75909fc76d1499a53fb6387434b6bcd8d", -                "sha256:bd3166bb3b111e76a4f8e2980fa1addf2920a4ca9b2b8ca36a3bc3dedc618270", -                "sha256:c1b78fb9700fc961f53386ad2fd86d87091e06ede5d118b8a50dea285a071c24", -                "sha256:c3888a051226e676e383de03bf49eb633cd39fc829516e5334e69b8d81aae751", -                
"sha256:c5f17ad25d2c1286436761b462e22b5020d83316f8e8fcb5deb2b3151f8f1d3a", -                "sha256:c851b35fc078389bc16b915a0a7c1d5923e12e2c5aeec58c52f4aa8085ac8237", -                "sha256:cb7df71de0af56000115eafd000b867d1261f786b5eebd88a0ca6360cccfaca7", -                "sha256:cedb2f9e1f990918ea061f28a0f0077a07702e3819602d3507e2ff98c8d20636", -                "sha256:e8caf961e1b1a945db76f1b5fa9c91498d15f545ac0ababbe575cfab185d3bd8" +                "sha256:08b3ba72bd981531fd557f67beee376d6700fba183b167857038997ba30dd297", +                "sha256:2757fa64e11ec12220968f65d086b7a29b6583d16e9a544c889b22ba98555ef1", +                "sha256:3102bb2c206700a7d28181dbe04d66b30780cde1d1c02c5f3c165cf3d2489497", +                "sha256:3498b27d8236057def41de3585f317abae235dd3a11d33e01736ffedb2ef8606", +                "sha256:378ac77af41350a8c6b8801a66021b52da8a05fd77e578b7380e876c0ce4f528", +                "sha256:38f16b1317b8dd82df67ed5daa5f5e7c959e46579840d77a67a4ceb9cef0a50b", +                "sha256:3911c2ef96e5ddc748a3c8b4702c61986628bb719b8378bf1e4a6184bbd48fe4", +                "sha256:3a3c3f8863255f3c31db3889f8055989527173ef6192a283eb6f4db3c579d830", +                "sha256:3b14b1da110ea50c8bcbadc3b82c3933974dbeea1832e814aab93ca1163cd4c1", +                "sha256:535dc1e6e68fad5355f9984d5637c33badbdc987b0c0d303ee95a6c979c9516f", +                "sha256:6f61319e33222591f885c598e3e24f6a4be3533c1d70c19e0dc59e83a71ce27d", +                "sha256:723d22d324e7997a651478e9c5a3120a0ecbc9a7e94071f7e1954562a8806cf3", +                "sha256:76b2775dda7e78680d688daabcb485dc87cf5e3184a0b3e012e1d40e38527cc8", +                "sha256:782a5c7df9f91979a7a21792e09b34a658058896628217ae6362088b123c8500", +                "sha256:7e4d159021c2029b958b2363abec4a11db0ce8cd43abb0d9ce44284cb97217e7", +                "sha256:8dacc4073c359f40fcf73aede8428c35f84639baad7e1b46fce5ab7a8a7be4bb", +                
"sha256:8f33d1156241c43755137288dea619105477961cfa7e47f48dbf96bc2c30720b", +                "sha256:8ffd4b204d7de77b5dd558cdff986a8274796a1e57813ed005b33fd97e29f059", +                "sha256:93a280c9eb736a0dcca19296f3c30c720cb41a71b1f9e617f341f0a8e791a69b", +                "sha256:9a4f66259bdd6964d8cf26142733c81fb562252db74ea367d9beb4f815478e72", +                "sha256:9a9d4ff06804920388aab69c5ea8a77525cf165356db70131616acd269e19b36", +                "sha256:a2070c5affdb3a5e751f24208c5c4f3d5f008fa04d28731416e023c93b275277", +                "sha256:a4857f7e2bc6921dbd487c5c88b84f5633de3e7d416c4dc0bb70256775551a6c", +                "sha256:a607ae05b6c96057ba86c811d9c43423f35e03874ffb03fbdcd45e0637e8b631", +                "sha256:a66ca3bdf21c653e47f726ca57f46ba7fc1f260ad99ba783acc3e58e3ebdb9ff", +                "sha256:ab110c48bc3d97b4d19af41865e14531f300b482da21783fdaacd159251890e8", +                "sha256:b239711e774c8eb910e9b1ac719f02f5ae4bf35fa0420f438cdc3a7e4e7dd6ec", +                "sha256:be0416074d7f253865bb67630cf7210cbc14eb05f4099cc0f82430135aaa7a3b", +                "sha256:c46643970dff9f5c976c6512fd35768c4a3819f01f61169d8cdac3f9290903b7", +                "sha256:c5ec71fd4a43b6d84ddb88c1df94572479d9a26ef3f150cef3dacefecf888105", +                "sha256:c6e5174f8ca585755988bc278c8bb5d02d9dc2e971591ef4a1baabdf2d99589b", +                "sha256:c89b558f8a9a5a6f2cfc923c304d49f0ce629c3bd85cb442ca258ec20366394c", +                "sha256:cc44e3545d908ecf3e5773266c487ad1877be718d9dc65fc7eb6e7d14960985b", +                "sha256:cc6f8246e74dd210d7e2b56c76ceaba1cc52b025cd75dbe96eb48791e0250e98", +                "sha256:cd556c79ad665faeae28020a0ab3bda6cd47d94bec48e36970719b0b86e4dcf4", +                "sha256:ce6f3a147b4b1a8b09aae48517ae91139b1b010c5f36423fa2b866a8b23df879", +                "sha256:ceb499d2b3d1d7b7ba23abe8bf26df5f06ba8c71127f188333dddcf356b4b63f", +                
"sha256:cef06fb382557f66d81d804230c11ab292d94b840b3cb7bf4450778377b592f4", +                "sha256:e448f56cfeae7b1b3b5bcd99bb377cde7c4eb1970a525c770720a352bc4c8044", +                "sha256:e52d3d95df81c8f6b2a1685aabffadf2d2d9ad97203a40f8d61e51b70f191e4e", +                "sha256:ee2f1d1c223c3d2c24e3afbb2dd38be3f03b1a8d6a83ee3d9eb8c36a52bee899", +                "sha256:f2c6888eada180814b8583c3e793f3f343a692fc802546eed45f40a001b1169f", +                "sha256:f51dbba78d68a44e99d484ca8c8f604f17e957c1ca09c3ebc2c7e3bbd9ba0448", +                "sha256:f54de00baf200b4539a5a092a759f000b5f45fd226d6d25a76b0dff71177a714", +                "sha256:fa10fee7e32213f5c7b0d6428ea92e3a3fdd6d725590238a3f92c0de1c78b9d2", +                "sha256:fabeeb121735d47d8eab8671b6b031ce08514c86b7ad8f7d5490a7b6dcd6267d", +                "sha256:fac3c432851038b3e6afe086f777732bcf7f6ebbfd90951fa04ee53db6d0bcdd", +                "sha256:fda29412a66099af6d6de0baa6bd7c52674de177ec2ad2630ca264142d69c6c7", +                "sha256:ff1330e8bc996570221b450e2d539134baa9465f5cb98aff0e0f73f34172e0ae"              ],              "index": "pypi", -            "version": "==5.3" +            "version": "==5.3.1"          },          "coveralls": {              "hashes": [ @@ -838,19 +933,19 @@          },          "flake8-annotations": {              "hashes": [ -                "sha256:0bcebb0792f1f96d617ded674dca7bf64181870bfe5dace353a1483551f8e5f1", -                "sha256:bebd11a850f6987a943ce8cdff4159767e0f5f89b3c88aca64680c2175ee02df" +                "sha256:3a377140556aecf11fa9f3bb18c10db01f5ea56dc79a730e2ec9b4f1f49e2055", +                "sha256:e17947a48a5b9f632fe0c72682fc797c385e451048e7dfb20139f448a074cb3e"              ],              "index": "pypi", -            "version": "==2.4.1" +            "version": "==2.5.0"          },          "flake8-bugbear": {              "hashes": [ -                "sha256:a3ddc03ec28ba2296fc6f89444d1c946a6b76460f859795b35b77d4920a51b63", -          
      "sha256:bd02e4b009fb153fe6072c31c52aeab5b133d508095befb2ffcf3b41c4823162" +                "sha256:528020129fea2dea33a466b9d64ab650aa3e5f9ffc788b70ea4bc6cf18283538", +                "sha256:f35b8135ece7a014bc0aee5b5d485334ac30a6da48494998cc1fabf7ec70d703"              ],              "index": "pypi", -            "version": "==20.1.4" +            "version": "==20.11.1"          },          "flake8-docstrings": {              "hashes": [ @@ -885,11 +980,11 @@          },          "flake8-tidy-imports": {              "hashes": [ -                "sha256:62059ca07d8a4926b561d392cbab7f09ee042350214a25cf12823384a45d27dd", -                "sha256:c30b40337a2e6802ba3bb611c26611154a27e94c53fc45639e3e282169574fd3" +                "sha256:52e5f2f987d3d5597538d5941153409ebcab571635835b78f522c7bf03ca23bc", +                "sha256:76e36fbbfdc8e3c5017f9a216c2855a298be85bc0631e66777f4e6a07a859dc4"              ],              "index": "pypi", -            "version": "==4.1.0" +            "version": "==4.2.1"          },          "flake8-todo": {              "hashes": [ @@ -900,11 +995,11 @@          },          "identify": {              "hashes": [ -                "sha256:5dd84ac64a9a115b8e0b27d1756b244b882ad264c3c423f42af8235a6e71ca12", -                "sha256:c9504ba6a043ee2db0a9d69e43246bc138034895f6338d5aed1b41e4a73b1513" +                "sha256:18994e850ba50c37bcaed4832be8b354d6a06c8fb31f54e0e7ece76d32f69bc8", +                "sha256:892473bf12e655884132a3a32aca737a3cbefaa34a850ff52d501773a45837bc"              ],              "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", -            "version": "==1.5.9" +            "version": "==1.5.12"          },          "idna": {              "hashes": [ @@ -938,11 +1033,11 @@          },          "pre-commit": {              "hashes": [ -                "sha256:22e6aa3bd571debb01eb7d34483f11c01b65237be4eebbf30c3d4fb65762d315", -                
"sha256:905ebc9b534b991baec87e934431f2d0606ba27f2b90f7f652985f5a5b8b6ae6" +                "sha256:6c86d977d00ddc8a60d68eec19f51ef212d9462937acf3ea37c7adec32284ac0", +                "sha256:ee784c11953e6d8badb97d19bc46b997a3a9eded849881ec587accd8608d74a4"              ],              "index": "pypi", -            "version": "==2.8.2" +            "version": "==2.9.3"          },          "pycodestyle": {              "hashes": [ @@ -970,18 +1065,18 @@          },          "pyyaml": {              "hashes": [ -                "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", -                "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a", -                "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", +                "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97",                  "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76", +                "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2",                  "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e",                  "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648", -                "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", -                "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", +                "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf",                  "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f", -                "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", -                "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee",                  "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2", +                "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee", +            
    "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a", +                "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", +                "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c",                  "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a"              ],              "index": "pypi", @@ -989,18 +1084,18 @@          },          "requests": {              "hashes": [ -                "sha256:7f1a0b932f4a60a1a65caa4263921bb7d9ee911957e0ae4a23a6dd08185ad5f8", -                "sha256:e786fa28d8c9154e6a4de5d46a1d921b8749f8b74e28bde23768e5e16eece998" +                "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804", +                "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"              ],              "index": "pypi", -            "version": "==2.25.0" +            "version": "==2.25.1"          },          "six": {              "hashes": [                  "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",                  "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"              ], -            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", +            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'",              "version": "==1.15.0"          },          "snowballstemmer": { @@ -1015,7 +1110,7 @@                  "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b",                  "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"              ], -            "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", +            "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2'",              "version": "==0.10.2"          },          "urllib3": { @@ -1028,11 +1123,11 @@         
 },          "virtualenv": {              "hashes": [ -                "sha256:b0011228208944ce71052987437d3843e05690b2f23d1c7da4263fde104c97a2", -                "sha256:b8d6110f493af256a40d65e29846c69340a947669eec8ce784fcf3dd3af28380" +                "sha256:0c111a2236b191422b37fe8c28b8c828ced39aab4bf5627fa5c331aeffb570d9", +                "sha256:14b34341e742bdca219e10708198e704e8a7064dd32f474fc16aca68ac53a306"              ],              "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", -            "version": "==20.1.0" +            "version": "==20.3.1"          }      }  } diff --git a/bot/api.py b/bot/api.py index 4b8520582..d93f9f2ba 100644 --- a/bot/api.py +++ b/bot/api.py @@ -37,64 +37,27 @@ class APIClient:      session: Optional[aiohttp.ClientSession] = None      loop: asyncio.AbstractEventLoop = None -    def __init__(self, loop: asyncio.AbstractEventLoop, **kwargs): +    def __init__(self, **session_kwargs):          auth_headers = {              'Authorization': f"Token {Keys.site_api}"          } -        if 'headers' in kwargs: -            kwargs['headers'].update(auth_headers) +        if 'headers' in session_kwargs: +            session_kwargs['headers'].update(auth_headers)          else: -            kwargs['headers'] = auth_headers +            session_kwargs['headers'] = auth_headers -        self.session = None -        self.loop = loop - -        self._ready = asyncio.Event(loop=loop) -        self._creation_task = None -        self._default_session_kwargs = kwargs - -        self.recreate() +        # aiohttp will complain if APIClient gets instantiated outside a coroutine. Thankfully, we +        # don't and shouldn't need to do that, so we can avoid scheduling a task to create it. 
+        self.session = aiohttp.ClientSession(**session_kwargs)      @staticmethod      def _url_for(endpoint: str) -> str:          return f"{URLs.site_schema}{URLs.site_api}/{quote_url(endpoint)}" -    async def _create_session(self, **session_kwargs) -> None: -        """ -        Create the aiohttp session with `session_kwargs` and set the ready event. - -        `session_kwargs` is merged with `_default_session_kwargs` and overwrites its values. -        If an open session already exists, it will first be closed. -        """ -        await self.close() -        self.session = aiohttp.ClientSession(**{**self._default_session_kwargs, **session_kwargs}) -        self._ready.set() -      async def close(self) -> None: -        """Close the aiohttp session and unset the ready event.""" -        if self.session: -            await self.session.close() - -        self._ready.clear() - -    def recreate(self, force: bool = False, **session_kwargs) -> None: -        """ -        Schedule the aiohttp session to be created with `session_kwargs` if it's been closed. - -        If `force` is True, the session will be recreated even if an open one exists. If a task to -        create the session is pending, it will be cancelled. - -        `session_kwargs` is merged with the kwargs given when the `APIClient` was created and -        overwrites those default kwargs. -        """ -        if force or self.session is None or self.session.closed: -            if force and self._creation_task: -                self._creation_task.cancel() - -            # Don't schedule a task if one is already in progress. 
-            if force or self._creation_task is None or self._creation_task.done(): -                self._creation_task = self.loop.create_task(self._create_session(**session_kwargs)) +        """Close the aiohttp session.""" +        await self.session.close()      async def maybe_raise_for_status(self, response: aiohttp.ClientResponse, should_raise: bool) -> None:          """Raise ResponseCodeError for non-OK response if an exception should be raised.""" @@ -108,8 +71,6 @@ class APIClient:      async def request(self, method: str, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> dict:          """Send an HTTP request to the site API and return the JSON response.""" -        await self._ready.wait() -          async with self.session.request(method.upper(), self._url_for(endpoint), **kwargs) as resp:              await self.maybe_raise_for_status(resp, raise_for_status)              return await resp.json() @@ -132,25 +93,9 @@ class APIClient:      async def delete(self, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> Optional[dict]:          """Site API DELETE.""" -        await self._ready.wait() -          async with self.session.delete(self._url_for(endpoint), **kwargs) as resp:              if resp.status == 204:                  return None              await self.maybe_raise_for_status(resp, raise_for_status)              return await resp.json() - - -def loop_is_running() -> bool: -    """ -    Determine if there is a running asyncio event loop. - -    This helps enable "call this when event loop is running" logic (see: Twisted's `callWhenRunning`), -    which is currently not provided by asyncio. 
-    """ -    try: -        asyncio.get_running_loop() -    except RuntimeError: -        return False -    return True diff --git a/bot/bot.py b/bot/bot.py index 36cf7d30a..d5f108575 100644 --- a/bot/bot.py +++ b/bot/bot.py @@ -3,7 +3,8 @@ import logging  import socket  import warnings  from collections import defaultdict -from typing import Dict, Optional +from contextlib import suppress +from typing import Dict, List, Optional  import aiohttp  import discord @@ -15,6 +16,7 @@ from bot import api, constants  from bot.async_stats import AsyncStatsClient  log = logging.getLogger('bot') +LOCALHOST = "127.0.0.1"  class Bot(commands.Bot): @@ -31,11 +33,12 @@ class Bot(commands.Bot):          self.http_session: Optional[aiohttp.ClientSession] = None          self.redis_session = redis_session -        self.api_client = api.APIClient(loop=self.loop) +        self.api_client: Optional[api.APIClient] = None          self.filter_list_cache = defaultdict(dict)          self._connector = None          self._resolver = None +        self._statsd_timerhandle: asyncio.TimerHandle = None          self._guild_available = asyncio.Event()          statsd_url = constants.Stats.statsd_host @@ -44,9 +47,32 @@ class Bot(commands.Bot):              # Since statsd is UDP, there are no errors for sending to a down port.              # For this reason, setting the statsd host to 127.0.0.1 for development              # will effectively disable stats. -            statsd_url = "127.0.0.1" +            statsd_url = LOCALHOST -        self.stats = AsyncStatsClient(self.loop, statsd_url, 8125, prefix="bot") +        self.stats = AsyncStatsClient(self.loop, LOCALHOST) +        self._connect_statsd(statsd_url) + +    def _connect_statsd(self, statsd_url: str, retry_after: int = 2, attempt: int = 1) -> None: +        """Callback used to retry a connection to statsd if it should fail.""" +        if attempt >= 8: +            log.error("Reached 8 attempts trying to reconnect AsyncStatsClient. 
Aborting") +            return + +        try: +            self.stats = AsyncStatsClient(self.loop, statsd_url, 8125, prefix="bot") +        except socket.gaierror: +            log.warning(f"Statsd client failed to connect (Attempt(s): {attempt})") +            # Use a fallback strategy for retrying, up to 8 times. +            self._statsd_timerhandle = self.loop.call_later( +                retry_after, +                self._connect_statsd, +                statsd_url, +                retry_after * 2, +                attempt + 1 +            ) + +        # All tasks that need to block closing until finished +        self.closing_tasks: List[asyncio.Task] = []      async def cache_filter_list_data(self) -> None:          """Cache all the data in the FilterList on the site.""" @@ -55,46 +81,6 @@ class Bot(commands.Bot):          for item in full_cache:              self.insert_item_into_filter_list_cache(item) -    def _recreate(self) -> None: -        """Re-create the connector, aiohttp session, the APIClient and the Redis session.""" -        # Use asyncio for DNS resolution instead of threads so threads aren't spammed. -        # Doesn't seem to have any state with regards to being closed, so no need to worry? -        self._resolver = aiohttp.AsyncResolver() - -        # Its __del__ does send a warning but it doesn't always show up for some reason. -        if self._connector and not self._connector._closed: -            log.warning( -                "The previous connector was not closed; it will remain open and be overwritten" -            ) - -        if self.redis_session.closed: -            # If the RedisSession was somehow closed, we try to reconnect it -            # here. Normally, this shouldn't happen. -            self.loop.create_task(self.redis_session.connect()) - -        # Use AF_INET as its socket family to prevent HTTPS related problems both locally -        # and in production. 
-        self._connector = aiohttp.TCPConnector( -            resolver=self._resolver, -            family=socket.AF_INET, -        ) - -        # Client.login() will call HTTPClient.static_login() which will create a session using -        # this connector attribute. -        self.http.connector = self._connector - -        # Its __del__ does send a warning but it doesn't always show up for some reason. -        if self.http_session and not self.http_session.closed: -            log.warning( -                "The previous session was not closed; it will remain open and be overwritten" -            ) - -        self.http_session = aiohttp.ClientSession(connector=self._connector) -        self.api_client.recreate(force=True, connector=self._connector) - -        # Build the FilterList cache -        self.loop.create_task(self.cache_filter_list_data()) -      @classmethod      def create(cls) -> "Bot":          """Create and return an instance of a Bot.""" @@ -158,21 +144,29 @@ class Bot(commands.Bot):          return command      def clear(self) -> None: -        """ -        Clears the internal state of the bot and recreates the connector and sessions. - -        Will cause a DeprecationWarning if called outside a coroutine. -        """ -        # Because discord.py recreates the HTTPClient session, may as well follow suit and recreate -        # our own stuff here too. -        self._recreate() -        super().clear() +        """Not implemented! Re-instantiate the bot instead of attempting to re-use a closed one.""" +        raise NotImplementedError("Re-using a Bot object after closing it is not supported.")      async def close(self) -> None:          """Close the Discord connection and the aiohttp session, connector, statsd client, and resolver.""" +        # Done before super().close() to allow tasks finish before the HTTP session closes. 
+        for ext in list(self.extensions): +            with suppress(Exception): +                self.unload_extension(ext) + +        for cog in list(self.cogs): +            with suppress(Exception): +                self.remove_cog(cog) + +        # Wait until all tasks that have to be completed before bot is closing is done +        log.trace("Waiting for tasks before closing.") +        await asyncio.gather(*self.closing_tasks) + +        # Now actually do full close of bot          await super().close() -        await self.api_client.close() +        if self.api_client: +            await self.api_client.close()          if self.http_session:              await self.http_session.close() @@ -189,6 +183,9 @@ class Bot(commands.Bot):          if self.redis_session:              await self.redis_session.close() +        if self._statsd_timerhandle: +            self._statsd_timerhandle.cancel() +      def insert_item_into_filter_list_cache(self, item: Dict[str, str]) -> None:          """Add an item to the bots filter_list_cache."""          type_ = item["type"] @@ -204,7 +201,31 @@ class Bot(commands.Bot):      async def login(self, *args, **kwargs) -> None:          """Re-create the connector and set up sessions before logging into Discord.""" -        self._recreate() +        # Use asyncio for DNS resolution instead of threads so threads aren't spammed. +        self._resolver = aiohttp.AsyncResolver() + +        # Use AF_INET as its socket family to prevent HTTPS related problems both locally +        # and in production. +        self._connector = aiohttp.TCPConnector( +            resolver=self._resolver, +            family=socket.AF_INET, +        ) + +        # Client.login() will call HTTPClient.static_login() which will create a session using +        # this connector attribute. 
+        self.http.connector = self._connector + +        self.http_session = aiohttp.ClientSession(connector=self._connector) +        self.api_client = api.APIClient(connector=self._connector) + +        if self.redis_session.closed: +            # If the RedisSession was somehow closed, we try to reconnect it +            # here. Normally, this shouldn't happen. +            await self.redis_session.connect() + +        # Build the FilterList cache +        await self.cache_filter_list_data() +          await self.stats.create_socket()          await super().login(*args, **kwargs) diff --git a/bot/constants.py b/bot/constants.py index fb280b042..be8d303f6 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -13,7 +13,7 @@ their default values from `config-default.yml`.  import logging  import os  from collections.abc import Mapping -from enum import Enum +from enum import Enum, IntEnum  from pathlib import Path  from typing import Dict, List, Optional @@ -249,6 +249,9 @@ class Colours(metaclass=YAMLGetter):      soft_green: int      soft_orange: int      bright_green: int +    orange: int +    pink: int +    purple: int  class DuckPond(metaclass=YAMLGetter): @@ -299,6 +302,8 @@ class Emojis(metaclass=YAMLGetter):      comments: str      user: str +    ok_hand: str +  class Icons(metaclass=YAMLGetter):      section = "style" @@ -391,12 +396,15 @@ class Channels(metaclass=YAMLGetter):      admin_announcements: int      admin_spam: int      admins: int +    admins_voice: int      announcements: int      attachment_log: int      big_brother_logs: int      bot_commands: int      change_log: int -    code_help_voice: int +    code_help_chat_1: int +    code_help_chat_2: int +    code_help_voice_1: int      code_help_voice_2: int      cooldown: int      defcon: int @@ -405,6 +413,7 @@ class Channels(metaclass=YAMLGetter):      dev_log: int      dm_log: int      esoteric: int +    general_voice: int      helpers: int      incidents: int      incidents_archive: int @@ 
-425,10 +434,11 @@ class Channels(metaclass=YAMLGetter):      python_news: int      reddit: int      staff_announcements: int +    staff_voice: int +    staff_voice_chat: int      talent_pool: int      user_event_announcements: int      user_log: int -    verification: int      voice_chat: int      voice_gate: int      voice_log: int @@ -465,8 +475,6 @@ class Roles(metaclass=YAMLGetter):      python_community: int      sprinters: int      team_leaders: int -    unverified: int -    verified: int  # This is the Developers role on PyDis, here named verified for readability reasons.      voice_verified: int @@ -487,6 +495,7 @@ class Keys(metaclass=YAMLGetter):      section = "keys"      site_api: Optional[str] +    github: Optional[str]  class URLs(metaclass=YAMLGetter): @@ -587,26 +596,22 @@ class PythonNews(metaclass=YAMLGetter):      webhook: int -class Verification(metaclass=YAMLGetter): -    section = "verification" - -    unverified_after: int -    kicked_after: int -    reminder_frequency: int -    bot_message_delete_delay: int -    kick_confirmation_threshold: float - -  class VoiceGate(metaclass=YAMLGetter):      section = "voice_gate" -    minimum_days_verified: int +    minimum_days_member: int      minimum_messages: int      bot_message_delete_delay: int      minimum_activity_blocks: int      voice_ping_delete_delay: int +class Branding(metaclass=YAMLGetter): +    section = "branding" + +    cycle_frequency: int + +  class Event(Enum):      """      Event names. 
This does not include every event (for example, raw @@ -650,6 +655,9 @@ MODERATION_CHANNELS = Guild.moderation_channels  # Category combinations  MODERATION_CATEGORIES = Guild.moderation_categories +# Git SHA for Sentry +GIT_SHA = os.environ.get("GIT_SHA", "development") +  # Bot replies  NEGATIVE_REPLIES = [      "Noooooo!!", diff --git a/bot/converters.py b/bot/converters.py index 2e118d476..d0a9731d6 100644 --- a/bot/converters.py +++ b/bot/converters.py @@ -549,6 +549,35 @@ def _snowflake_from_regex(pattern: t.Pattern, arg: str) -> int:      return int(match.group(1)) +class Infraction(Converter): +    """ +    Attempts to convert a given infraction ID into an infraction. + +    Alternatively, `l`, `last`, or `recent` can be passed in order to +    obtain the most recent infraction by the actor. +    """ + +    async def convert(self, ctx: Context, arg: str) -> t.Optional[dict]: +        """Attempts to convert `arg` into an infraction `dict`.""" +        if arg in ("l", "last", "recent"): +            params = { +                "actor__id": ctx.author.id, +                "ordering": "-inserted_at" +            } + +            infractions = await ctx.bot.api_client.get("bot/infractions", params=params) + +            if not infractions: +                raise BadArgument( +                    "Couldn't find most recent infraction; you have never given an infraction." 
+                ) +            else: +                return infractions[0] + +        else: +            return await ctx.bot.api_client.get(f"bot/infractions/{arg}") + +  Expiry = t.Union[Duration, ISODateTime]  FetchedMember = t.Union[discord.Member, FetchedUser]  UserMention = partial(_snowflake_from_regex, RE_USER_MENTION) diff --git a/bot/exts/backend/branding/__init__.py b/bot/exts/backend/branding/__init__.py new file mode 100644 index 000000000..81ea3bf49 --- /dev/null +++ b/bot/exts/backend/branding/__init__.py @@ -0,0 +1,7 @@ +from bot.bot import Bot +from bot.exts.backend.branding._cog import BrandingManager + + +def setup(bot: Bot) -> None: +    """Loads BrandingManager cog.""" +    bot.add_cog(BrandingManager(bot)) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py new file mode 100644 index 000000000..20df83a89 --- /dev/null +++ b/bot/exts/backend/branding/_cog.py @@ -0,0 +1,566 @@ +import asyncio +import itertools +import logging +import random +import typing as t +from datetime import datetime, time, timedelta + +import arrow +import async_timeout +import discord +from async_rediscache import RedisCache +from discord.ext import commands + +from bot.bot import Bot +from bot.constants import Branding, Colours, Emojis, Guild, MODERATION_ROLES +from bot.exts.backend.branding import _constants, _decorators, _errors, _seasons + +log = logging.getLogger(__name__) + + +class GitHubFile(t.NamedTuple): +    """ +    Represents a remote file on GitHub. + +    The `sha` hash is kept so that we can determine that a file has changed, +    despite its filename remaining unchanged. +    """ + +    download_url: str +    path: str +    sha: str + + +def pretty_files(files: t.Iterable[GitHubFile]) -> str: +    """Provide a human-friendly representation of `files`.""" +    return "\n".join(file.path for file in files) + + +def time_until_midnight() -> timedelta: +    """ +    Determine amount of time until the next-up UTC midnight. 
+ +    The exact `midnight` moment is actually delayed to 5 seconds after, in order +    to avoid potential problems due to imprecise sleep. +    """ +    now = datetime.utcnow() +    tomorrow = now + timedelta(days=1) +    midnight = datetime.combine(tomorrow, time(second=5)) + +    return midnight - now + + +class BrandingManager(commands.Cog): +    """ +    Manages the guild's branding. + +    The purpose of this cog is to help automate the synchronization of the branding +    repository with the guild. It is capable of discovering assets in the repository +    via GitHub's API, resolving download urls for them, and delegating +    to the `bot` instance to upload them to the guild. + +    BrandingManager is designed to be entirely autonomous. Its `daemon` background task awakens +    once a day (see `time_until_midnight`) to detect new seasons, or to cycle icons within a single +    season. The daemon can be turned on and off via the `daemon` cmd group. The value set via +    its `start` and `stop` commands is persisted across sessions. If turned on, the daemon will +    automatically start on the next bot start-up. Otherwise, it will wait to be started manually. + +    All supported operations, e.g. 
setting seasons, applying the branding, or cycling icons, can +    also be invoked manually, via the following API: + +        branding list +            - Show all available seasons + +        branding set <season_name> +            - Set the cog's internal state to represent `season_name`, if it exists +            - If no `season_name` is given, set chronologically current season +            - This will not automatically apply the season's branding to the guild, +              the cog's state can be detached from the guild +            - Seasons can therefore be 'previewed' using this command + +        branding info +            - View detailed information about resolved assets for current season + +        branding refresh +            - Refresh internal state, i.e. synchronize with branding repository + +        branding apply +            - Apply the current internal state to the guild, i.e. upload the assets + +        branding cycle +            - If there are multiple available icons for current season, randomly pick +              and apply the next one + +    The daemon calls these methods autonomously as appropriate. The use of this cog +    is locked to moderation roles. As it performs media asset uploads, it is prone to +    rate-limits - the `apply` command should be used with caution. The `set` command can, +    however, be used freely to 'preview' seasonal branding and check whether paths have been +    resolved as appropriate. + +    While the bot is in debug mode, it will 'mock' asset uploads by logging the passed +    download urls and pretending that the upload was successful. Make use of this +    to test this cog's behaviour. 
+    """ + +    current_season: t.Type[_seasons.SeasonBase] + +    banner: t.Optional[GitHubFile] + +    available_icons: t.List[GitHubFile] +    remaining_icons: t.List[GitHubFile] + +    days_since_cycle: t.Iterator + +    daemon: t.Optional[asyncio.Task] + +    # Branding configuration +    branding_configuration = RedisCache() + +    def __init__(self, bot: Bot) -> None: +        """ +        Assign safe default values on init. + +        At this point, we don't have information about currently available branding. +        Most of these attributes will be overwritten once the daemon connects, or once +        the `refresh` command is used. +        """ +        self.bot = bot +        self.current_season = _seasons.get_current_season() + +        self.banner = None + +        self.available_icons = [] +        self.remaining_icons = [] + +        self.days_since_cycle = itertools.cycle([None]) + +        self.daemon = None +        self._startup_task = self.bot.loop.create_task(self._initial_start_daemon()) + +    async def _initial_start_daemon(self) -> None: +        """Checks is daemon active and when is, start it at cog load.""" +        if await self.branding_configuration.get("daemon_active"): +            self.daemon = self.bot.loop.create_task(self._daemon_func()) + +    @property +    def _daemon_running(self) -> bool: +        """True if the daemon is currently active, False otherwise.""" +        return self.daemon is not None and not self.daemon.done() + +    async def _daemon_func(self) -> None: +        """ +        Manage all automated behaviour of the BrandingManager cog. 
+ +        Once a day, the daemon will perform the following tasks: +            - Update `current_season` +            - Poll GitHub API to see if the available branding for `current_season` has changed +            - Update assets if changes are detected (banner, guild icon, bot avatar, bot nickname) +            - Check whether it's time to cycle guild icons + +        The internal loop runs once when activated, then periodically at the time +        given by `time_until_midnight`. + +        All method calls in the internal loop are considered safe, i.e. no errors propagate +        to the daemon's loop. The daemon itself does not perform any error handling on its own. +        """ +        await self.bot.wait_until_guild_available() + +        while True: +            self.current_season = _seasons.get_current_season() +            branding_changed = await self.refresh() + +            if branding_changed: +                await self.apply() + +            elif next(self.days_since_cycle) == Branding.cycle_frequency: +                await self.cycle() + +            until_midnight = time_until_midnight() +            await asyncio.sleep(until_midnight.total_seconds()) + +    async def _info_embed(self) -> discord.Embed: +        """Make an informative embed representing current season.""" +        info_embed = discord.Embed(description=self.current_season.description, colour=self.current_season.colour) + +        # If we're in a non-evergreen season, also show active months +        if self.current_season is not _seasons.SeasonBase: +            title = f"{self.current_season.season_name} ({', '.join(str(m) for m in self.current_season.months)})" +        else: +            title = self.current_season.season_name + +        # Use the author field to show the season's name and avatar if available +        info_embed.set_author(name=title) + +        banner = self.banner.path if self.banner is not None else "Unavailable" +        
info_embed.add_field(name="Banner", value=banner, inline=False) + +        icons = pretty_files(self.available_icons) or "Unavailable" +        info_embed.add_field(name="Available icons", value=icons, inline=False) + +        # Only display cycle frequency if we're actually cycling +        if len(self.available_icons) > 1 and Branding.cycle_frequency: +            info_embed.set_footer(text=f"Icon cycle frequency: {Branding.cycle_frequency}") + +        return info_embed + +    async def _reset_remaining_icons(self) -> None: +        """Set `remaining_icons` to a shuffled copy of `available_icons`.""" +        self.remaining_icons = random.sample(self.available_icons, k=len(self.available_icons)) + +    async def _reset_days_since_cycle(self) -> None: +        """ +        Reset the `days_since_cycle` iterator based on configured frequency. + +        If the current season only has 1 icon, or if `Branding.cycle_frequency` is falsey, +        the iterator will always yield None. This signals that the icon shouldn't be cycled. + +        Otherwise, it will yield ints in range [1, `Branding.cycle_frequency`] indefinitely. +        When the iterator yields a value equal to `Branding.cycle_frequency`, it is time to cycle. +        """ +        if len(self.available_icons) > 1 and Branding.cycle_frequency: +            sequence = range(1, Branding.cycle_frequency + 1) +        else: +            sequence = [None] + +        self.days_since_cycle = itertools.cycle(sequence) + +    async def _get_files(self, path: str, include_dirs: bool = False) -> t.Dict[str, GitHubFile]: +        """ +        Get files at `path` in the branding repository. + +        If `include_dirs` is False (default), only returns files at `path`. +        Otherwise, will return both files and directories. Never returns symlinks. + +        Return dict mapping from filename to corresponding `GitHubFile` instance. 
+        This may return an empty dict if the response status is non-200, +        or if the target directory is empty. +        """ +        url = f"{_constants.BRANDING_URL}/{path}" +        async with self.bot.http_session.get( +            url, headers=_constants.HEADERS, params=_constants.PARAMS +        ) as resp: +            # Short-circuit if we get non-200 response +            if resp.status != _constants.STATUS_OK: +                log.error(f"GitHub API returned non-200 response: {resp}") +                return {} +            directory = await resp.json()  # Directory at `path` + +        allowed_types = {"file", "dir"} if include_dirs else {"file"} +        return { +            file["name"]: GitHubFile(file["download_url"], file["path"], file["sha"]) +            for file in directory +            if file["type"] in allowed_types +        } + +    async def refresh(self) -> bool: +        """ +        Synchronize available assets with branding repository. + +        If the current season is not the evergreen, and lacks at least one asset, +        we use the evergreen seasonal dir as fallback for missing assets. + +        Finally, if neither the seasonal nor fallback branding directories contain +        an asset, it will simply be ignored. + +        Return True if the branding has changed. This will be the case when we enter +        a new season, or when something changes in the current seasons's directory +        in the branding repository. 
+        """ +        old_branding = (self.banner, self.available_icons) +        seasonal_dir = await self._get_files(self.current_season.branding_path, include_dirs=True) + +        # Only make a call to the fallback directory if there is something to be gained +        branding_incomplete = any( +            asset not in seasonal_dir +            for asset in (_constants.FILE_BANNER, _constants.FILE_AVATAR, _constants.SERVER_ICONS) +        ) +        if branding_incomplete and self.current_season is not _seasons.SeasonBase: +            fallback_dir = await self._get_files( +                _seasons.SeasonBase.branding_path, include_dirs=True +            ) +        else: +            fallback_dir = {} + +        # Resolve assets in this directory, None is a safe value +        self.banner = ( +            seasonal_dir.get(_constants.FILE_BANNER) +            or fallback_dir.get(_constants.FILE_BANNER) +        ) + +        # Now resolve server icons by making a call to the proper sub-directory +        if _constants.SERVER_ICONS in seasonal_dir: +            icons_dir = await self._get_files( +                f"{self.current_season.branding_path}/{_constants.SERVER_ICONS}" +            ) +            self.available_icons = list(icons_dir.values()) + +        elif _constants.SERVER_ICONS in fallback_dir: +            icons_dir = await self._get_files( +                f"{_seasons.SeasonBase.branding_path}/{_constants.SERVER_ICONS}" +            ) +            self.available_icons = list(icons_dir.values()) + +        else: +            self.available_icons = []  # This should never be the case, but an empty list is a safe value + +        # GitHubFile instances carry a `sha` attr so this will pick up if a file changes +        branding_changed = old_branding != (self.banner, self.available_icons) + +        if branding_changed: +            log.info(f"New branding detected (season: {self.current_season.season_name})") +            await 
self._reset_remaining_icons() +            await self._reset_days_since_cycle() + +        return branding_changed + +    async def cycle(self) -> bool: +        """ +        Apply the next-up server icon. + +        Returns True if an icon is available and successfully gets applied, False otherwise. +        """ +        if not self.available_icons: +            log.info("Cannot cycle: no icons for this season") +            return False + +        if not self.remaining_icons: +            log.info("Reset & shuffle remaining icons") +            await self._reset_remaining_icons() + +        next_up = self.remaining_icons.pop(0) +        success = await self.set_icon(next_up.download_url) + +        return success + +    async def apply(self) -> t.List[str]: +        """ +        Apply current branding to the guild and bot. + +        This delegates to the bot instance to do all the work. We only provide download urls +        for available assets. Assets unavailable in the branding repo will be ignored. + +        Returns a list of names of all failed assets. An asset is considered failed +        if it isn't found in the branding repo, or if something goes wrong while the +        bot is trying to apply it. + +        An empty list denotes that all assets have been applied successfully. 
+        """ +        report = {asset: False for asset in ("banner", "icon")} + +        if self.banner is not None: +            report["banner"] = await self.set_banner(self.banner.download_url) + +        report["icon"] = await self.cycle() + +        failed_assets = [asset for asset, succeeded in report.items() if not succeeded] +        return failed_assets + +    @commands.has_any_role(*MODERATION_ROLES) +    @commands.group(name="branding") +    async def branding_cmds(self, ctx: commands.Context) -> None: +        """Manual branding control.""" +        if not ctx.invoked_subcommand: +            await ctx.send_help(ctx.command) + +    @branding_cmds.command(name="list", aliases=["ls"]) +    async def branding_list(self, ctx: commands.Context) -> None: +        """List all available seasons and branding sources.""" +        embed = discord.Embed(title="Available seasons", colour=Colours.soft_green) + +        for season in _seasons.get_all_seasons(): +            if season is _seasons.SeasonBase: +                active_when = "always" +            else: +                active_when = f"in {', '.join(str(m) for m in season.months)}" + +            description = ( +                f"Active {active_when}\n" +                f"Branding: {season.branding_path}" +            ) +            embed.add_field(name=season.season_name, value=description, inline=False) + +        await ctx.send(embed=embed) + +    @branding_cmds.command(name="set") +    async def branding_set(self, ctx: commands.Context, *, season_name: t.Optional[str] = None) -> None: +        """ +        Manually set season, or reset to current if none given. + +        Season search is a case-less comparison against both seasonal class name, +        and its `season_name` attr. + +        This only pre-loads the cog's internal state to the chosen season, but does not +        automatically apply the branding. 
As that is an expensive operation, the `apply` +        command must be called explicitly after this command finishes. + +        This means that this command can be used to 'preview' a season gathering info +        about its available assets, without applying them to the guild. + +        If the daemon is running, it will automatically reset the season to current when +        it wakes up. The season set via this command can therefore remain 'detached' from +        what it should be - the daemon will make sure that it's set back properly. +        """ +        if season_name is None: +            new_season = _seasons.get_current_season() +        else: +            new_season = _seasons.get_season(season_name) +            if new_season is None: +                raise _errors.BrandingError("No such season exists") + +        if self.current_season is new_season: +            raise _errors.BrandingError(f"Season {self.current_season.season_name} already active") + +        self.current_season = new_season +        await self.branding_refresh(ctx) + +    @branding_cmds.command(name="info", aliases=["status"]) +    async def branding_info(self, ctx: commands.Context) -> None: +        """ +        Show available assets for current season. + +        This can be used to confirm that assets have been resolved properly. +        When `apply` is used, it attempts to upload exactly the assets listed here. +        """ +        await ctx.send(embed=await self._info_embed()) + +    @branding_cmds.command(name="refresh") +    async def branding_refresh(self, ctx: commands.Context) -> None: +        """Sync currently available assets with branding repository.""" +        async with ctx.typing(): +            await self.refresh() +            await self.branding_info(ctx) + +    @branding_cmds.command(name="apply") +    async def branding_apply(self, ctx: commands.Context) -> None: +        """ +        Apply current season's branding to the guild. 
+ +        Use `info` to check which assets will be applied. Shows which assets have +        failed to be applied, if any. +        """ +        async with ctx.typing(): +            failed_assets = await self.apply() +            if failed_assets: +                raise _errors.BrandingError( +                    f"Failed to apply following assets: {', '.join(failed_assets)}" +                ) + +            response = discord.Embed(description=f"All assets applied {Emojis.ok_hand}", colour=Colours.soft_green) +            await ctx.send(embed=response) + +    @branding_cmds.command(name="cycle") +    async def branding_cycle(self, ctx: commands.Context) -> None: +        """ +        Apply the next-up guild icon, if multiple are available. + +        The order is random. +        """ +        async with ctx.typing(): +            success = await self.cycle() +            if not success: +                raise _errors.BrandingError("Failed to cycle icon") + +            response = discord.Embed(description=f"Success {Emojis.ok_hand}", colour=Colours.soft_green) +            await ctx.send(embed=response) + +    @branding_cmds.group(name="daemon", aliases=["d", "task"]) +    async def daemon_group(self, ctx: commands.Context) -> None: +        """Control the background daemon.""" +        if not ctx.invoked_subcommand: +            await ctx.send_help(ctx.command) + +    @daemon_group.command(name="status") +    async def daemon_status(self, ctx: commands.Context) -> None: +        """Check whether daemon is currently active.""" +        if self._daemon_running: +            remaining_time = (arrow.utcnow() + time_until_midnight()).humanize() +            response = discord.Embed(description=f"Daemon running {Emojis.ok_hand}", colour=Colours.soft_green) +            response.set_footer(text=f"Next refresh {remaining_time}") +        else: +            response = discord.Embed(description="Daemon not running", colour=Colours.soft_red) + +        await 
ctx.send(embed=response) + +    @daemon_group.command(name="start") +    async def daemon_start(self, ctx: commands.Context) -> None: +        """If the daemon isn't running, start it.""" +        if self._daemon_running: +            raise _errors.BrandingError("Daemon already running!") + +        self.daemon = self.bot.loop.create_task(self._daemon_func()) +        await self.branding_configuration.set("daemon_active", True) + +        response = discord.Embed(description=f"Daemon started {Emojis.ok_hand}", colour=Colours.soft_green) +        await ctx.send(embed=response) + +    @daemon_group.command(name="stop") +    async def daemon_stop(self, ctx: commands.Context) -> None: +        """If the daemon is running, stop it.""" +        if not self._daemon_running: +            raise _errors.BrandingError("Daemon not running!") + +        self.daemon.cancel() +        await self.branding_configuration.set("daemon_active", False) + +        response = discord.Embed(description=f"Daemon stopped {Emojis.ok_hand}", colour=Colours.soft_green) +        await ctx.send(embed=response) + +    async def _fetch_image(self, url: str) -> bytes: +        """Retrieve and read image from `url`.""" +        log.debug(f"Getting image from: {url}") +        async with self.bot.http_session.get(url) as resp: +            return await resp.read() + +    async def _apply_asset(self, target: discord.Guild, asset: _constants.AssetType, url: str) -> bool: +        """ +        Internal method for applying media assets to the guild. + +        This shouldn't be called directly. The purpose of this method is mainly generic +        error handling to reduce needless code repetition. + +        Return True if upload was successful, False otherwise. 
+        """ +        log.info(f"Attempting to set {asset.name}: {url}") + +        kwargs = {asset.value: await self._fetch_image(url)} +        try: +            async with async_timeout.timeout(5): +                await target.edit(**kwargs) + +        except asyncio.TimeoutError: +            log.info("Asset upload timed out") +            return False + +        except discord.HTTPException as discord_error: +            log.exception("Asset upload failed", exc_info=discord_error) +            return False + +        else: +            log.info("Asset successfully applied") +            return True + +    @_decorators.mock_in_debug(return_value=True) +    async def set_banner(self, url: str) -> bool: +        """Set the guild's banner to image at `url`.""" +        guild = self.bot.get_guild(Guild.id) +        if guild is None: +            log.info("Failed to get guild instance, aborting asset upload") +            return False + +        return await self._apply_asset(guild, _constants.AssetType.BANNER, url) + +    @_decorators.mock_in_debug(return_value=True) +    async def set_icon(self, url: str) -> bool: +        """Sets the guild's icon to image at `url`.""" +        guild = self.bot.get_guild(Guild.id) +        if guild is None: +            log.info("Failed to get guild instance, aborting asset upload") +            return False + +        return await self._apply_asset(guild, _constants.AssetType.SERVER_ICON, url) + +    def cog_unload(self) -> None: +        """Cancels startup and daemon task.""" +        self._startup_task.cancel() +        if self.daemon is not None: +            self.daemon.cancel() diff --git a/bot/exts/backend/branding/_constants.py b/bot/exts/backend/branding/_constants.py new file mode 100644 index 000000000..dbc7615f2 --- /dev/null +++ b/bot/exts/backend/branding/_constants.py @@ -0,0 +1,51 @@ +from enum import Enum, IntEnum + +from bot.constants import Keys + + +class Month(IntEnum): +    """All month constants for 
seasons.""" + +    JANUARY = 1 +    FEBRUARY = 2 +    MARCH = 3 +    APRIL = 4 +    MAY = 5 +    JUNE = 6 +    JULY = 7 +    AUGUST = 8 +    SEPTEMBER = 9 +    OCTOBER = 10 +    NOVEMBER = 11 +    DECEMBER = 12 + +    def __str__(self) -> str: +        return self.name.title() + + +class AssetType(Enum): +    """ +    Discord media assets. + +    The values match exactly the kwarg keys that can be passed to `Guild.edit`. +    """ + +    BANNER = "banner" +    SERVER_ICON = "icon" + + +STATUS_OK = 200  # HTTP status code + +FILE_BANNER = "banner.png" +FILE_AVATAR = "avatar.png" +SERVER_ICONS = "server_icons" + +BRANDING_URL = "https://api.github.com/repos/python-discord/branding/contents" + +PARAMS = {"ref": "master"}  # Target branch +HEADERS = {"Accept": "application/vnd.github.v3+json"}  # Ensure we use API v3 + +# A GitHub token is not necessary for the cog to operate, +# unauthorized requests are however limited to 60 per hour +if Keys.github: +    HEADERS["Authorization"] = f"token {Keys.github}" diff --git a/bot/exts/backend/branding/_decorators.py b/bot/exts/backend/branding/_decorators.py new file mode 100644 index 000000000..6a1e7e869 --- /dev/null +++ b/bot/exts/backend/branding/_decorators.py @@ -0,0 +1,27 @@ +import functools +import logging +import typing as t + +from bot.constants import DEBUG_MODE + +log = logging.getLogger(__name__) + + +def mock_in_debug(return_value: t.Any) -> t.Callable: +    """ +    Short-circuit function execution if in debug mode and return `return_value`. + +    The original function name, and the incoming args and kwargs are DEBUG level logged +    upon each call. This is useful for expensive operations, i.e. media asset uploads +    that are prone to rate-limits but need to be tested extensively. 
+    """ +    def decorator(func: t.Callable) -> t.Callable: +        @functools.wraps(func) +        async def wrapped(*args, **kwargs) -> t.Any: +            """Short-circuit and log if in debug mode.""" +            if DEBUG_MODE: +                log.debug(f"Function {func.__name__} called with args: {args}, kwargs: {kwargs}") +                return return_value +            return await func(*args, **kwargs) +        return wrapped +    return decorator diff --git a/bot/exts/backend/branding/_errors.py b/bot/exts/backend/branding/_errors.py new file mode 100644 index 000000000..7cd271af3 --- /dev/null +++ b/bot/exts/backend/branding/_errors.py @@ -0,0 +1,2 @@ +class BrandingError(Exception): +    """Exception raised by the BrandingManager cog.""" diff --git a/bot/exts/backend/branding/_seasons.py b/bot/exts/backend/branding/_seasons.py new file mode 100644 index 000000000..5f6256b30 --- /dev/null +++ b/bot/exts/backend/branding/_seasons.py @@ -0,0 +1,175 @@ +import logging +import typing as t +from datetime import datetime + +from bot.constants import Colours +from bot.exts.backend.branding._constants import Month +from bot.exts.backend.branding._errors import BrandingError + +log = logging.getLogger(__name__) + + +class SeasonBase: +    """ +    Base for Seasonal classes. + +    This serves as the off-season fallback for when no specific +    seasons are active. + +    Seasons are 'registered' simply by inheriting from `SeasonBase`. +    We discover them by calling `__subclasses__`. +    """ + +    season_name: str = "Evergreen" + +    colour: str = Colours.soft_green +    description: str = "The default season!" + +    branding_path: str = "seasonal/evergreen" + +    months: t.Set[Month] = set(Month) + + +class Christmas(SeasonBase): +    """Branding for December.""" + +    season_name = "Festive season" + +    colour = Colours.soft_red +    description = ( +        "The time is here to get into the festive spirit! 
class Christmas(SeasonBase):
    """Branding for December."""

    season_name = "Festive season"

    colour = Colours.soft_red
    description = (
        "The time is here to get into the festive spirit! No matter who you are, where you are, "
        "or what beliefs you may follow, we hope every one of you enjoy this festive season!"
    )

    branding_path = "seasonal/christmas"

    months = {Month.DECEMBER}


class Easter(SeasonBase):
    """Branding for April."""

    season_name = "Easter"

    colour = Colours.bright_green
    description = (
        "Bunny here, bunny there, bunny everywhere! Here at Python Discord, we celebrate "
        "our version of Easter during the entire month of April."
    )

    branding_path = "seasonal/easter"

    months = {Month.APRIL}


class Halloween(SeasonBase):
    """Branding for October."""

    season_name = "Halloween"

    colour = Colours.orange
    description = "Trick or treat?!"

    branding_path = "seasonal/halloween"

    months = {Month.OCTOBER}


class Pride(SeasonBase):
    """Branding for June."""

    season_name = "Pride"

    colour = Colours.pink
    description = (
        "The month of June is a special month for us at Python Discord. It is very important to us "
        "that everyone feels welcome here, no matter their origin, identity or sexuality. During the "
        "month of June, while some of you are participating in Pride festivals across the world, "
        "we will be celebrating individuality and commemorating the history and challenges "
        "of the LGBTQ+ community with a Pride event of our own!"
    )

    branding_path = "seasonal/pride"

    months = {Month.JUNE}


class Valentines(SeasonBase):
    """Branding for February."""

    season_name = "Valentines"

    colour = Colours.pink
    description = "Love is in the air!"

    branding_path = "seasonal/valentines"

    months = {Month.FEBRUARY}


class Wildcard(SeasonBase):
    """Branding for August."""

    season_name = "Wildcard"

    colour = Colours.purple
    description = "A season full of surprises!"

    # NOTE(review): no `branding_path` is set here, so Wildcard inherits
    # "seasonal/evergreen" from SeasonBase — confirm this is intentional.

    months = {Month.AUGUST}


def get_all_seasons() -> t.List[t.Type[SeasonBase]]:
    """Give all available season classes."""
    return [SeasonBase] + SeasonBase.__subclasses__()


def get_current_season() -> t.Type[SeasonBase]:
    """Give active season, based on current UTC month."""
    current_month = Month(datetime.utcnow().month)

    # `_validate_season_overlap` guarantees at most one match
    for season in SeasonBase.__subclasses__():
        if current_month in season.months:
            return season

    return SeasonBase


def get_season(name: str) -> t.Optional[t.Type[SeasonBase]]:
    """
    Give season such that its class name or its `season_name` attr match `name` (caseless).

    If no such season exists, return None.
    """
    name = name.casefold()

    for season in get_all_seasons():
        if name in (season.__name__.casefold(), season.season_name.casefold()):
            return season

    return None


def _validate_season_overlap() -> None:
    """
    Raise BrandingError if there are any colliding seasons.

    This serves as a local test to ensure that seasons haven't been misconfigured.
    """
    month_to_season = {}

    for season in SeasonBase.__subclasses__():
        for month in season.months:
            colliding_season = month_to_season.get(month)
            if colliding_season:
                raise BrandingError(f"Season {season} collides with {colliding_season} in {month.name}")
            month_to_season[month] = season


_validate_season_overlap()
CommandOnCooldown: send an error message in the invoking context @@ -63,10 +65,9 @@ class ErrorHandler(Cog):          if isinstance(e, errors.CommandNotFound) and not hasattr(ctx, "invoked_from_error_handler"):              if await self.try_silence(ctx):                  return -            if ctx.channel.id != Channels.verification: -                # Try to look for a tag with the command's name -                await self.try_get_tag(ctx) -                return  # Exit early to avoid logging. +            # Try to look for a tag with the command's name +            await self.try_get_tag(ctx) +            return  # Exit early to avoid logging.          elif isinstance(e, errors.UserInputError):              await self.handle_user_input_error(ctx, e)          elif isinstance(e, errors.CheckFailure): @@ -78,6 +79,9 @@ class ErrorHandler(Cog):                  await self.handle_api_error(ctx, e.original)              elif isinstance(e.original, LockedResourceError):                  await ctx.send(f"{e.original} Please wait for it to finish and try again later.") +            elif isinstance(e.original, BrandingError): +                await ctx.send(embed=self._get_error_embed(random.choice(ERROR_REPLIES), str(e.original))) +                return              else:                  await self.handle_unexpected_error(ctx, e.original)              return  # Exit early to avoid logging. 
@@ -156,10 +160,46 @@ class ErrorHandler(Cog):              )          else:              with contextlib.suppress(ResponseCodeError): -                await ctx.invoke(tags_get_command, tag_name=tag_name) +                if await ctx.invoke(tags_get_command, tag_name=tag_name): +                    return + +        if not any(role.id in MODERATION_ROLES for role in ctx.author.roles): +            await self.send_command_suggestion(ctx, ctx.invoked_with) +          # Return to not raise the exception          return +    async def send_command_suggestion(self, ctx: Context, command_name: str) -> None: +        """Sends user similar commands if any can be found.""" +        # No similar tag found, or tag on cooldown - +        # searching for a similar command +        raw_commands = [] +        for cmd in self.bot.walk_commands(): +            if not cmd.hidden: +                raw_commands += (cmd.name, *cmd.aliases) +        if similar_command_data := difflib.get_close_matches(command_name, raw_commands, 1): +            similar_command_name = similar_command_data[0] +            similar_command = self.bot.get_command(similar_command_name) + +            if not similar_command: +                return + +            log_msg = "Cancelling attempt to suggest a command due to failed checks." 
+            try: +                if not await similar_command.can_run(ctx): +                    log.debug(log_msg) +                    return +            except errors.CommandError as cmd_error: +                log.debug(log_msg) +                await self.on_command_error(ctx, cmd_error) +                return + +            misspelled_content = ctx.message.content +            e = Embed() +            e.set_author(name="Did you mean:", icon_url=Icons.questionmark) +            e.description = f"{misspelled_content.replace(command_name, similar_command_name, 1)}" +            await ctx.send(embed=e, delete_after=10.0) +      async def handle_user_input_error(self, ctx: Context, e: errors.UserInputError) -> None:          """          Send an error message in `ctx` for UserInputError, sometimes invoking the help command too. diff --git a/bot/exts/backend/sync/_syncers.py b/bot/exts/backend/sync/_syncers.py index 2eb9f9971..c9f2d2da8 100644 --- a/bot/exts/backend/sync/_syncers.py +++ b/bot/exts/backend/sync/_syncers.py @@ -5,12 +5,15 @@ from collections import namedtuple  from discord import Guild  from discord.ext.commands import Context +from more_itertools import chunked  import bot  from bot.api import ResponseCodeError  log = logging.getLogger(__name__) +CHUNK_SIZE = 1000 +  # These objects are declared as namedtuples because tuples are hashable,  # something that we make use of when diffing site roles against guild roles.  _Role = namedtuple('Role', ('id', 'name', 'colour', 'permissions', 'position')) @@ -207,10 +210,13 @@ class UserSyncer(Syncer):      @staticmethod      async def _sync(diff: _Diff) -> None:          """Synchronise the database with the user cache of `guild`.""" +        # Using asyncio.gather would still consume too many resources on the site.          
log.trace("Syncing created users...")          if diff.created: -            await bot.instance.api_client.post("bot/users", json=diff.created) +            for chunk in chunked(diff.created, CHUNK_SIZE): +                await bot.instance.api_client.post("bot/users", json=chunk)          log.trace("Syncing updated users...")          if diff.updated: -            await bot.instance.api_client.patch("bot/users/bulk_patch", json=diff.updated) +            for chunk in chunked(diff.updated, CHUNK_SIZE): +                await bot.instance.api_client.patch("bot/users/bulk_patch", json=chunk) diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index e22d4663e..983c5d183 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -145,22 +145,17 @@ class HelpChannels(commands.Cog):          Make the current in-use help channel dormant.          Make the channel dormant if the user passes the `dormant_check`, -        delete the message that invoked this, -        and reset the send permissions cooldown for the user who started the session. +        delete the message that invoked this.          """          log.trace("close command invoked; checking if the channel is in-use.") -        if ctx.channel.category == self.in_use_category: -            if await self.dormant_check(ctx): -                await _cooldown.remove_cooldown_role(ctx.author) -                # Ignore missing task when cooldown has passed but the channel still isn't dormant. 
-                if ctx.author.id in self.scheduler: -                    self.scheduler.cancel(ctx.author.id) - -                await self.move_to_dormant(ctx.channel, "command") -                self.scheduler.cancel(ctx.channel.id) -        else: +        if ctx.channel.category != self.in_use_category:              log.debug(f"{ctx.author} invoked command 'dormant' outside an in-use help channel") +            return + +        if await self.dormant_check(ctx): +            await self.move_to_dormant(ctx.channel, "command") +            self.scheduler.cancel(ctx.channel.id)      async def get_available_candidate(self) -> discord.TextChannel:          """ @@ -368,12 +363,13 @@ class HelpChannels(commands.Cog):          """          log.info(f"Moving #{channel} ({channel.id}) to the Dormant category.") -        await _caches.claimants.delete(channel.id)          await self.move_to_bottom_position(              channel=channel,              category_id=constants.Categories.help_dormant,          ) +        await self.unclaim_channel(channel) +          self.bot.stats.incr(f"help.dormant_calls.{caller}")          in_use_time = await _channel.get_in_use_time(channel.id) @@ -397,6 +393,28 @@ class HelpChannels(commands.Cog):          self.channel_queue.put_nowait(channel)          self.report_stats() +    async def unclaim_channel(self, channel: discord.TextChannel) -> None: +        """ +        Mark the channel as unclaimed and remove the cooldown role from the claimant if needed. + +        The role is only removed if they have no claimed channels left once the current one is unclaimed. +        This method also handles canceling the automatic removal of the cooldown role. +        """ +        claimant_id = await _caches.claimants.pop(channel.id) + +        # Ignore missing task when cooldown has passed but the channel still isn't dormant. 
+        if claimant_id in self.scheduler: +            self.scheduler.cancel(claimant_id) + +        claimant = self.bot.get_guild(constants.Guild.id).get_member(claimant_id) +        if claimant is None: +            log.info(f"{claimant_id} left the guild during their help session; the cooldown role won't be removed") +            return + +        # Remove the cooldown role if the claimant has no other channels left +        if not any(claimant.id == user_id for _, user_id in await _caches.claimants.items()): +            await _cooldown.remove_cooldown_role(claimant) +      async def move_to_in_use(self, channel: discord.TextChannel) -> None:          """Make a channel in-use and schedule it to be made dormant."""          log.info(f"Moving #{channel} ({channel.id}) to the In Use category.") diff --git a/bot/exts/info/information.py b/bot/exts/info/information.py index 5aaf85e5a..38e760ee3 100644 --- a/bot/exts/info/information.py +++ b/bot/exts/info/information.py @@ -11,6 +11,7 @@ from discord.abc import GuildChannel  from discord.ext.commands import BucketType, Cog, Context, Paginator, command, group, has_any_role  from bot import constants +from bot.api import ResponseCodeError  from bot.bot import Bot  from bot.converters import FetchedMember  from bot.decorators import in_whitelist @@ -21,7 +22,6 @@ from bot.utils.time import time_since  log = logging.getLogger(__name__) -  STATUS_EMOTES = {      Status.offline: constants.Emojis.status_offline,      Status.dnd: constants.Emojis.status_dnd, @@ -224,13 +224,16 @@ class Information(Cog):              if is_set and (emoji := getattr(constants.Emojis, f"badge_{badge}", None)):                  badges.append(emoji) +        activity = await self.user_messages(user) +          if on_server:              joined = time_since(user.joined_at, max_units=3)              roles = ", ".join(role.mention for role in user.roles[1:]) -            membership = textwrap.dedent(f""" -                             Joined: 
{joined} -                             Roles: {roles or None} -                         """).strip() +            membership = {"Joined": joined, "Verified": not user.pending, "Roles": roles or None} +            if not is_mod_channel(ctx.channel): +                membership.pop("Verified") + +            membership = textwrap.dedent("\n".join([f"{key}: {value}" for key, value in membership.items()]))          else:              roles = None              membership = "The user is not a member of the server" @@ -252,6 +255,8 @@ class Information(Cog):          # Show more verbose output in moderation channels for infractions and nominations          if is_mod_channel(ctx.channel): +            fields.append(activity) +              fields.append(await self.expanded_user_infraction_counts(user))              fields.append(await self.user_nomination_counts(user))          else: @@ -354,6 +359,30 @@ class Information(Cog):          return "Nominations", "\n".join(output) +    async def user_messages(self, user: FetchedMember) -> Tuple[Union[bool, str], Tuple[str, str]]: +        """ +        Gets the amount of messages for `member`. + +        Fetches information from the metricity database that's hosted by the site. +        If the database returns a code besides a 404, then many parts of the bot are broken including this one. 
+        """ +        activity_output = [] + +        try: +            user_activity = await self.bot.api_client.get(f"bot/users/{user.id}/metricity_data") +        except ResponseCodeError as e: +            if e.status == 404: +                activity_output = "No activity" +        else: +            activity_output.append(user_activity["total_messages"] or "No messages") +            activity_output.append(user_activity["activity_blocks"] or "No activity") + +            activity_output = "\n".join( +                f"{name}: {metric}" for name, metric in zip(["Messages", "Activity blocks"], activity_output) +            ) + +        return ("Activity", activity_output) +      def format_fields(self, mapping: Mapping[str, Any], field_width: Optional[int] = None) -> str:          """Format a mapping to be readable to a human."""          # sorting is technically superfluous but nice if you want to look for a specific field @@ -390,10 +419,14 @@ class Information(Cog):          return out.rstrip()      @cooldown_with_role_bypass(2, 60 * 3, BucketType.member, bypass_roles=constants.STAFF_ROLES) -    @group(invoke_without_command=True, enabled=False) +    @group(invoke_without_command=True)      @in_whitelist(channels=(constants.Channels.bot_commands,), roles=constants.STAFF_ROLES)      async def raw(self, ctx: Context, *, message: Message, json: bool = False) -> None:          """Shows information about the raw API response.""" +        if ctx.author not in message.channel.members: +            await ctx.send(":x: You do not have permissions to see the channel this message is in.") +            return +          # I *guess* it could be deleted right as the command is invoked but I felt like it wasn't worth handling          # doing this extra request is also much easier than trying to convert everything back into a dictionary again          raw_data = await ctx.bot.http.get_message(message.channel.id, message.id) @@ -425,7 +458,7 @@ class Information(Cog):       
   for page in paginator.pages:              await ctx.send(page) -    @raw.command(enabled=False) +    @raw.command()      async def json(self, ctx: Context, message: Message) -> None:          """Shows information about the raw API response in a copy-pasteable Python format."""          await ctx.invoke(self.raw, message=message, json=True) diff --git a/bot/exts/info/pep.py b/bot/exts/info/pep.py new file mode 100644 index 000000000..8ac96bbdb --- /dev/null +++ b/bot/exts/info/pep.py @@ -0,0 +1,164 @@ +import logging +from datetime import datetime, timedelta +from email.parser import HeaderParser +from io import StringIO +from typing import Dict, Optional, Tuple + +from discord import Colour, Embed +from discord.ext.commands import Cog, Context, command + +from bot.bot import Bot +from bot.constants import Keys +from bot.utils.cache import AsyncCache + +log = logging.getLogger(__name__) + +ICON_URL = "https://www.python.org/static/opengraph-icon-200x200.png" +BASE_PEP_URL = "http://www.python.org/dev/peps/pep-" +PEPS_LISTING_API_URL = "https://api.github.com/repos/python/peps/contents?ref=master" + +pep_cache = AsyncCache() + +GITHUB_API_HEADERS = {} +if Keys.github: +    GITHUB_API_HEADERS["Authorization"] = f"token {Keys.github}" + + +class PythonEnhancementProposals(Cog): +    """Cog for displaying information about PEPs.""" + +    def __init__(self, bot: Bot): +        self.bot = bot +        self.peps: Dict[int, str] = {} +        # To avoid situations where we don't have last datetime, set this to now. 
+        self.last_refreshed_peps: datetime = datetime.now() +        self.bot.loop.create_task(self.refresh_peps_urls()) + +    async def refresh_peps_urls(self) -> None: +        """Refresh PEP URLs listing in every 3 hours.""" +        # Wait until HTTP client is available +        await self.bot.wait_until_ready() +        log.trace("Started refreshing PEP URLs.") +        self.last_refreshed_peps = datetime.now() + +        async with self.bot.http_session.get( +            PEPS_LISTING_API_URL, +            headers=GITHUB_API_HEADERS +        ) as resp: +            if resp.status != 200: +                log.warning(f"Fetching PEP URLs from GitHub API failed with code {resp.status}") +                return + +            listing = await resp.json() + +        log.trace("Got PEP URLs listing from GitHub API") + +        for file in listing: +            name = file["name"] +            if name.startswith("pep-") and name.endswith((".rst", ".txt")): +                pep_number = name.replace("pep-", "").split(".")[0] +                self.peps[int(pep_number)] = file["download_url"] + +        log.info("Successfully refreshed PEP URLs listing.") + +    @staticmethod +    def get_pep_zero_embed() -> Embed: +        """Get information embed about PEP 0.""" +        pep_embed = Embed( +            title="**PEP 0 - Index of Python Enhancement Proposals (PEPs)**", +            url="https://www.python.org/dev/peps/" +        ) +        pep_embed.set_thumbnail(url=ICON_URL) +        pep_embed.add_field(name="Status", value="Active") +        pep_embed.add_field(name="Created", value="13-Jul-2000") +        pep_embed.add_field(name="Type", value="Informational") + +        return pep_embed + +    async def validate_pep_number(self, pep_nr: int) -> Optional[Embed]: +        """Validate is PEP number valid. 
When it isn't, return error embed, otherwise None.""" +        if ( +            pep_nr not in self.peps +            and (self.last_refreshed_peps + timedelta(minutes=30)) <= datetime.now() +            and len(str(pep_nr)) < 5 +        ): +            await self.refresh_peps_urls() + +        if pep_nr not in self.peps: +            log.trace(f"PEP {pep_nr} was not found") +            return Embed( +                title="PEP not found", +                description=f"PEP {pep_nr} does not exist.", +                colour=Colour.red() +            ) + +        return None + +    def generate_pep_embed(self, pep_header: Dict, pep_nr: int) -> Embed: +        """Generate PEP embed based on PEP headers data.""" +        # Assemble the embed +        pep_embed = Embed( +            title=f"**PEP {pep_nr} - {pep_header['Title']}**", +            description=f"[Link]({BASE_PEP_URL}{pep_nr:04})", +        ) + +        pep_embed.set_thumbnail(url=ICON_URL) + +        # Add the interesting information +        fields_to_check = ("Status", "Python-Version", "Created", "Type") +        for field in fields_to_check: +            # Check for a PEP metadata field that is present but has an empty value +            # embed field values can't contain an empty string +            if pep_header.get(field, ""): +                pep_embed.add_field(name=field, value=pep_header[field]) + +        return pep_embed + +    @pep_cache(arg_offset=1) +    async def get_pep_embed(self, pep_nr: int) -> Tuple[Embed, bool]: +        """Fetch, generate and return PEP embed. 
Second item of return tuple show does getting success.""" +        response = await self.bot.http_session.get(self.peps[pep_nr]) + +        if response.status == 200: +            log.trace(f"PEP {pep_nr} found") +            pep_content = await response.text() + +            # Taken from https://github.com/python/peps/blob/master/pep0/pep.py#L179 +            pep_header = HeaderParser().parse(StringIO(pep_content)) +            return self.generate_pep_embed(pep_header, pep_nr), True +        else: +            log.trace( +                f"The user requested PEP {pep_nr}, but the response had an unexpected status code: {response.status}." +            ) +            return Embed( +                title="Unexpected error", +                description="Unexpected HTTP error during PEP search. Please let us know.", +                colour=Colour.red() +            ), False + +    @command(name='pep', aliases=('get_pep', 'p')) +    async def pep_command(self, ctx: Context, pep_number: int) -> None: +        """Fetches information about a PEP and sends it to the channel.""" +        # Trigger typing in chat to show users that bot is responding +        await ctx.trigger_typing() + +        # Handle PEP 0 directly because it's not in .rst or .txt so it can't be accessed like other PEPs. +        if pep_number == 0: +            pep_embed = self.get_pep_zero_embed() +            success = True +        else: +            success = False +            if not (pep_embed := await self.validate_pep_number(pep_number)): +                pep_embed, success = await self.get_pep_embed(pep_number) + +        await ctx.send(embed=pep_embed) +        if success: +            log.trace(f"PEP {pep_number} getting and sending finished successfully. Increasing stat.") +            self.bot.stats.incr(f"pep_fetches.{pep_number}") +        else: +            log.trace(f"Getting PEP {pep_number} failed. 
Error embed sent.") + + +def setup(bot: Bot) -> None: +    """Load the PEP cog.""" +    bot.add_cog(PythonEnhancementProposals(bot)) diff --git a/bot/exts/info/reddit.py b/bot/exts/info/reddit.py index bad4c504d..6790be762 100644 --- a/bot/exts/info/reddit.py +++ b/bot/exts/info/reddit.py @@ -45,7 +45,7 @@ class Reddit(Cog):          """Stop the loop task and revoke the access token when the cog is unloaded."""          self.auto_poster_loop.cancel()          if self.access_token and self.access_token.expires_at > datetime.utcnow(): -            asyncio.create_task(self.revoke_access_token()) +            self.bot.closing_tasks.append(asyncio.create_task(self.revoke_access_token()))      async def init_reddit_ready(self) -> None:          """Sets the reddit webhook when the cog is loaded.""" diff --git a/bot/exts/info/tags.py b/bot/exts/info/tags.py index 8f15f932b..00b4d1a78 100644 --- a/bot/exts/info/tags.py +++ b/bot/exts/info/tags.py @@ -46,7 +46,7 @@ class Tags(Cog):                      "embed": {                          "description": file.read_text(encoding="utf8"),                      }, -                    "restricted_to": "developers", +                    "restricted_to": None,                      "location": f"/bot/{file}"                  } @@ -63,7 +63,7 @@ class Tags(Cog):      @staticmethod      def check_accessibility(user: Member, tag: dict) -> bool:          """Check if user can access a tag.""" -        return tag["restricted_to"].lower() in [role.name.lower() for role in user.roles] +        return not tag["restricted_to"] or tag["restricted_to"].lower() in [role.name.lower() for role in user.roles]      @staticmethod      def _fuzzy_search(search: str, target: str) -> float: @@ -182,10 +182,15 @@ class Tags(Cog):          matching_tags = self._get_tags_via_content(any, keywords or 'any', ctx.author)          await self._send_matching_tags(ctx, keywords, matching_tags) -    @tags_group.command(name='get', aliases=('show', 'g')) -    async 
def get_command(self, ctx: Context, *, tag_name: TagNameConverter = None) -> None: -        """Get a specified tag, or a list of all tags if no tag is specified.""" +    async def display_tag(self, ctx: Context, tag_name: str = None) -> bool: +        """ +        If a tag is not found, display similar tag names as suggestions. +        If a tag is not specified, display a paginated embed of all tags. + +        Tags are on cooldowns on a per-tag, per-channel basis. If a tag is on cooldown, display +        nothing and return False. +        """          def _command_on_cooldown(tag_name: str) -> bool:              """              Check if the command is currently on cooldown, on a per-tag, per-channel basis. @@ -212,7 +217,7 @@ class Tags(Cog):                  f"{ctx.author} tried to get the '{tag_name}' tag, but the tag is on cooldown. "                  f"Cooldown ends in {time_left:.1f} seconds."              ) -            return +            return False          if tag_name is not None:              temp_founds = self._get_tag(tag_name) @@ -237,6 +242,7 @@ class Tags(Cog):                      await ctx.send(embed=Embed.from_dict(tag['embed'])),                      [ctx.author.id],                  ) +                return True              elif founds and len(tag_name) >= 3:                  await wait_for_deletion(                      await ctx.send( @@ -247,6 +253,7 @@ class Tags(Cog):                      ),                      [ctx.author.id],                  ) +                return True          else:              tags = self._cache.values() @@ -255,6 +262,7 @@ class Tags(Cog):                      description="**There are no tags in the database!**",                      colour=Colour.red()                  )) +                return True              else:                  embed: Embed = Embed(title="**Current tags**")                  await LinePaginator.paginate( @@ -268,6 +276,18 @@ class Tags(Cog):                      empty=False,       
               max_lines=15                  ) +                return True + +        return False + +    @tags_group.command(name='get', aliases=('show', 'g')) +    async def get_command(self, ctx: Context, *, tag_name: TagNameConverter = None) -> bool: +        """ +        Get a specified tag, or a list of all tags if no tag is specified. + +        Returns False if a tag is on cooldown, or if no matches are found. +        """ +        return await self.display_tag(ctx, tag_name)  def setup(bot: Bot) -> None: diff --git a/bot/exts/moderation/infraction/_scheduler.py b/bot/exts/moderation/infraction/_scheduler.py index c062ae7f8..242b2d30f 100644 --- a/bot/exts/moderation/infraction/_scheduler.py +++ b/bot/exts/moderation/infraction/_scheduler.py @@ -74,8 +74,21 @@ class InfractionScheduler:              return          # Allowing mod log since this is a passive action that should be logged. -        await apply_coro -        log.info(f"Re-applied {infraction['type']} to user {infraction['user']} upon rejoining.") +        try: +            await apply_coro +        except discord.HTTPException as e: +            # When user joined and then right after this left again before action completed, this can't apply roles +            if e.code == 10007 or e.status == 404: +                log.info( +                    f"Can't reapply {infraction['type']} to user {infraction['user']} because user left the guild." +                ) +            else: +                log.exception( +                    f"Got unexpected HTTPException (HTTP {e.status}, Discord code {e.code})" +                    f"when awaiting {infraction['type']} coroutine for {infraction['user']}." 
+                ) +        else: +            log.info(f"Re-applied {infraction['type']} to user {infraction['user']} upon rejoining.")      async def apply_infraction(          self, @@ -178,6 +191,10 @@ class InfractionScheduler:                  log_msg = f"Failed to apply {' '.join(infr_type.split('_'))} infraction #{id_} to {user}"                  if isinstance(e, discord.Forbidden):                      log.warning(f"{log_msg}: bot lacks permissions.") +                elif e.code == 10007 or e.status == 404: +                    log.info( +                        f"Can't apply {infraction['type']} to user {infraction['user']} because user left from guild." +                    )                  else:                      log.exception(log_msg)                  failed = True @@ -352,9 +369,16 @@ class InfractionScheduler:              log_text["Failure"] = "The bot lacks permissions to do this (role hierarchy?)"              log_content = mod_role.mention          except discord.HTTPException as e: -            log.exception(f"Failed to deactivate infraction #{id_} ({type_})") -            log_text["Failure"] = f"HTTPException with status {e.status} and code {e.code}." -            log_content = mod_role.mention +            if e.code == 10007 or e.status == 404: +                log.info( +                    f"Can't pardon {infraction['type']} for user {infraction['user']} because user left the guild." +                ) +                log_text["Failure"] = "User left the guild." +                log_content = mod_role.mention +            else: +                log.exception(f"Failed to deactivate infraction #{id_} ({type_})") +                log_text["Failure"] = f"HTTPException with status {e.status} and code {e.code}." +                log_content = mod_role.mention          # Check if the user is currently being watched by Big Brother.          
try: diff --git a/bot/exts/moderation/infraction/management.py b/bot/exts/moderation/infraction/management.py index 4cd7d15bf..b3783cd60 100644 --- a/bot/exts/moderation/infraction/management.py +++ b/bot/exts/moderation/infraction/management.py @@ -10,7 +10,7 @@ from discord.utils import escape_markdown  from bot import constants  from bot.bot import Bot -from bot.converters import Expiry, Snowflake, UserMention, allowed_strings, proxy_user +from bot.converters import Expiry, Infraction, Snowflake, UserMention, allowed_strings, proxy_user  from bot.exts.moderation.infraction.infractions import Infractions  from bot.exts.moderation.modlog import ModLog  from bot.pagination import LinePaginator @@ -45,11 +45,50 @@ class ModManagement(commands.Cog):          """Infraction manipulation commands."""          await ctx.send_help(ctx.command) +    @infraction_group.command(name="append", aliases=("amend", "add", "a")) +    async def infraction_append( +        self, +        ctx: Context, +        infraction: Infraction, +        duration: t.Union[Expiry, allowed_strings("p", "permanent"), None],   # noqa: F821 +        *, +        reason: str = None +    ) -> None: +        """ +        Append text and/or edit the duration of an infraction. + +        Durations are relative to the time of updating and should be appended with a unit of time. +        Units (∗case-sensitive): +        \u2003`y` - years +        \u2003`m` - months∗ +        \u2003`w` - weeks +        \u2003`d` - days +        \u2003`h` - hours +        \u2003`M` - minutes∗ +        \u2003`s` - seconds + +        Use "l", "last", or "recent" as the infraction ID to specify that the most recent infraction +        authored by the command invoker should be edited. + +        Use "p" or "permanent" to mark the infraction as permanent. Alternatively, an ISO 8601 +        timestamp can be provided for the duration. 
+ +        If a previous infraction reason does not end with an ending punctuation mark, this automatically +        adds a period before the amended reason. +        """ +        old_reason = infraction["reason"] + +        if old_reason is not None: +            add_period = not old_reason.endswith((".", "!", "?")) +            reason = old_reason + (". " if add_period else " ") + reason + +        await self.infraction_edit(ctx, infraction, duration, reason=reason) +      @infraction_group.command(name='edit', aliases=('e',))      async def infraction_edit(          self,          ctx: Context, -        infraction_id: t.Union[int, allowed_strings("l", "last", "recent")],  # noqa: F821 +        infraction: Infraction,          duration: t.Union[Expiry, allowed_strings("p", "permanent"), None],   # noqa: F821          *,          reason: str = None @@ -77,30 +116,13 @@ class ModManagement(commands.Cog):              # Unlike UserInputError, the error handler will show a specified message for BadArgument              raise commands.BadArgument("Neither a new expiry nor a new reason was specified.") -        # Retrieve the previous infraction for its information. -        if isinstance(infraction_id, str): -            params = { -                "actor__id": ctx.author.id, -                "ordering": "-inserted_at" -            } -            infractions = await self.bot.api_client.get("bot/infractions", params=params) - -            if infractions: -                old_infraction = infractions[0] -                infraction_id = old_infraction["id"] -            else: -                await ctx.send( -                    ":x: Couldn't find most recent infraction; you have never given an infraction." 
-                ) -                return -        else: -            old_infraction = await self.bot.api_client.get(f"bot/infractions/{infraction_id}") +        infraction_id = infraction["id"]          request_data = {}          confirm_messages = []          log_text = "" -        if duration is not None and not old_infraction['active']: +        if duration is not None and not infraction['active']:              if reason is None:                  await ctx.send(":x: Cannot edit the expiration of an expired infraction.")                  return @@ -119,7 +141,7 @@ class ModManagement(commands.Cog):              request_data['reason'] = reason              confirm_messages.append("set a new reason")              log_text += f""" -                Previous reason: {old_infraction['reason']} +                Previous reason: {infraction['reason']}                  New reason: {reason}              """.rstrip()          else: @@ -134,7 +156,7 @@ class ModManagement(commands.Cog):          # Re-schedule infraction if the expiration has been updated          if 'expires_at' in request_data:              # A scheduled task should only exist if the old infraction wasn't permanent -            if old_infraction['expires_at']: +            if infraction['expires_at']:                  self.infractions_cog.scheduler.cancel(new_infraction['id'])              # If the infraction was not marked as permanent, schedule a new expiration task @@ -142,7 +164,7 @@ class ModManagement(commands.Cog):                  self.infractions_cog.schedule_expiration(new_infraction)              log_text += f""" -                Previous expiry: {old_infraction['expires_at'] or "Permanent"} +                Previous expiry: {infraction['expires_at'] or "Permanent"}                  New expiry: {new_infraction['expires_at'] or "Permanent"}              """.rstrip() @@ -175,7 +197,7 @@ class ModManagement(commands.Cog):      # endregion      # region: Search infractions -    
@infraction_group.group(name="search", invoke_without_command=True) +    @infraction_group.group(name="search", aliases=('s',), invoke_without_command=True)      async def infraction_search_group(self, ctx: Context, query: t.Union[UserMention, Snowflake, str]) -> None:          """Searches for infractions in the database."""          if isinstance(query, int): diff --git a/bot/exts/moderation/infraction/superstarify.py b/bot/exts/moderation/infraction/superstarify.py index 96dfb562f..ffc470c54 100644 --- a/bot/exts/moderation/infraction/superstarify.py +++ b/bot/exts/moderation/infraction/superstarify.py @@ -104,14 +104,14 @@ class Superstarify(InfractionScheduler, Cog):              await self.reapply_infraction(infraction, action) -    @command(name="superstarify", aliases=("force_nick", "star")) +    @command(name="superstarify", aliases=("force_nick", "star", "starify"))      async def superstarify(          self,          ctx: Context,          member: Member,          duration: Expiry,          *, -        reason: str = None, +        reason: str = '',      ) -> None:          """          Temporarily force a random superstar name (like Taylor Swift) to be the user's nickname. @@ -128,16 +128,16 @@ class Superstarify(InfractionScheduler, Cog):          Alternatively, an ISO 8601 timestamp can be provided for the duration. -        An optional reason can be provided. If no reason is given, the original name will be shown -        in a generated reason. +        An optional reason can be provided, which would be added to a message stating their old nickname +        and linking to the nickname policy.          
"""          if await _utils.get_active_infraction(ctx, member, "superstar"):              return          # Post the infraction to the API          old_nick = member.display_name -        reason = reason or f"old nick: {old_nick}" -        infraction = await _utils.post_infraction(ctx, member, "superstar", reason, duration, active=True) +        infraction_reason = f'Old nickname: {old_nick}. {reason}' +        infraction = await _utils.post_infraction(ctx, member, "superstar", infraction_reason, duration, active=True)          id_ = infraction["id"]          forced_nick = self.get_nick(id_, member.id) @@ -152,37 +152,38 @@ class Superstarify(InfractionScheduler, Cog):          old_nick = escape_markdown(old_nick)          forced_nick = escape_markdown(forced_nick) -        superstar_reason = f"Your nickname didn't comply with our [nickname policy]({NICKNAME_POLICY_URL})."          nickname_info = textwrap.dedent(f"""              Old nickname: `{old_nick}`              New nickname: `{forced_nick}`          """).strip() +        user_message = ( +            f"Your previous nickname, **{old_nick}**, " +            f"was so bad that we have decided to change it. " +            f"Your new nickname will be **{forced_nick}**.\n\n" +            "{reason}" +            f"You will be unable to change your nickname until **{expiry_str}**. " +            "If you're confused by this, please read our " +            f"[official nickname policy]({NICKNAME_POLICY_URL})." +        ).format +          successful = await self.apply_infraction(              ctx, infraction, member, action(), -            user_reason=superstar_reason, +            user_reason=user_message(reason=f'**Additional details:** {reason}\n\n' if reason else ''),              additional_info=nickname_info          ) -        # Send an embed with the infraction information to the invoking context if -        # superstar was successful. 
+        # Send an embed with to the invoking context if superstar was successful.          if successful:              log.trace(f"Sending superstar #{id_} embed.")              embed = Embed( -                title="Congratulations!", +                title="Superstarified!",                  colour=constants.Colours.soft_orange, -                description=( -                    f"Your previous nickname, **{old_nick}**, " -                    f"was so bad that we have decided to change it. " -                    f"Your new nickname will be **{forced_nick}**.\n\n" -                    f"You will be unable to change your nickname until **{expiry_str}**.\n\n" -                    "If you're confused by this, please read our " -                    f"[official nickname policy]({NICKNAME_POLICY_URL})." -                ) +                description=user_message(reason='')              )              await ctx.send(embed=embed) -    @command(name="unsuperstarify", aliases=("release_nick", "unstar")) +    @command(name="unsuperstarify", aliases=("release_nick", "unstar", "unstarify"))      async def unsuperstarify(self, ctx: Context, member: Member) -> None:          """Remove the superstarify infraction and allow the user to change their nickname."""          await self.pardon_infraction(ctx, "superstar", member) diff --git a/bot/exts/moderation/silence.py b/bot/exts/moderation/silence.py index e6712b3b6..2a7ca932e 100644 --- a/bot/exts/moderation/silence.py +++ b/bot/exts/moderation/silence.py @@ -72,7 +72,7 @@ class SilenceNotifier(tasks.Loop):  class Silence(commands.Cog): -    """Commands for stopping channel messages for `verified` role in a channel.""" +    """Commands for stopping channel messages for `everyone` role in a channel."""      # Maps muted channel IDs to their previous overwrites for send_message and add_reactions.      # Overwrites are stored as JSON. 
@@ -93,7 +93,7 @@ class Silence(commands.Cog):          await self.bot.wait_until_guild_available()          guild = self.bot.get_guild(Guild.id) -        self._verified_role = guild.get_role(Roles.verified) +        self._everyone_role = guild.default_role          self._mod_alerts_channel = self.bot.get_channel(Channels.mod_alerts)          self.notifier = SilenceNotifier(self.bot.get_channel(Channels.mod_log))          await self._reschedule() @@ -142,7 +142,7 @@ class Silence(commands.Cog):      async def _unsilence_wrapper(self, channel: TextChannel) -> None:          """Unsilence `channel` and send a success/failure message."""          if not await self._unsilence(channel): -            overwrite = channel.overwrites_for(self._verified_role) +            overwrite = channel.overwrites_for(self._everyone_role)              if overwrite.send_messages is False or overwrite.add_reactions is False:                  await channel.send(MSG_UNSILENCE_MANUAL)              else: @@ -152,14 +152,14 @@ class Silence(commands.Cog):      async def _set_silence_overwrites(self, channel: TextChannel) -> bool:          """Set silence permission overwrites for `channel` and return True if successful.""" -        overwrite = channel.overwrites_for(self._verified_role) +        overwrite = channel.overwrites_for(self._everyone_role)          prev_overwrites = dict(send_messages=overwrite.send_messages, add_reactions=overwrite.add_reactions)          if channel.id in self.scheduler or all(val is False for val in prev_overwrites.values()):              return False          overwrite.update(send_messages=False, add_reactions=False) -        await channel.set_permissions(self._verified_role, overwrite=overwrite) +        await channel.set_permissions(self._everyone_role, overwrite=overwrite)          await self.previous_overwrites.set(channel.id, json.dumps(prev_overwrites))          return True @@ -188,14 +188,14 @@ class Silence(commands.Cog):              log.info(f"Tried to 
unsilence channel #{channel} ({channel.id}) but the channel was not silenced.")              return False -        overwrite = channel.overwrites_for(self._verified_role) +        overwrite = channel.overwrites_for(self._everyone_role)          if prev_overwrites is None:              log.info(f"Missing previous overwrites for #{channel} ({channel.id}); defaulting to None.")              overwrite.update(send_messages=None, add_reactions=None)          else:              overwrite.update(**json.loads(prev_overwrites)) -        await channel.set_permissions(self._verified_role, overwrite=overwrite) +        await channel.set_permissions(self._everyone_role, overwrite=overwrite)          log.info(f"Unsilenced channel #{channel} ({channel.id}).")          self.scheduler.cancel(channel.id) @@ -207,7 +207,7 @@ class Silence(commands.Cog):              await self._mod_alerts_channel.send(                  f"<@&{Roles.admins}> Restored overwrites with default values after unsilencing "                  f"{channel.mention}. Please check that the `Send Messages` and `Add Reactions` " -                f"overwrites for {self._verified_role.mention} are at their desired values." +                f"overwrites for {self._everyone_role.mention} are at their desired values."              
)          return True diff --git a/bot/exts/moderation/verification.py b/bot/exts/moderation/verification.py index c599156d0..bfe9b74b4 100644 --- a/bot/exts/moderation/verification.py +++ b/bot/exts/moderation/verification.py @@ -1,27 +1,18 @@ -import asyncio  import logging  import typing as t -from contextlib import suppress -from datetime import datetime, timedelta  import discord -from async_rediscache import RedisCache -from discord.ext import tasks -from discord.ext.commands import Cog, Context, command, group, has_any_role -from discord.utils import snowflake_time +from discord.ext.commands import Cog, Context, command, has_any_role  from bot import constants -from bot.api import ResponseCodeError  from bot.bot import Bot -from bot.decorators import has_no_roles, in_whitelist -from bot.exts.moderation.modlog import ModLog -from bot.utils.checks import InWhitelistCheckFailure, has_no_roles_check -from bot.utils.messages import format_user +from bot.decorators import in_whitelist +from bot.utils.checks import InWhitelistCheckFailure  log = logging.getLogger(__name__)  # Sent via DMs once user joins the guild -ON_JOIN_MESSAGE = f""" +ON_JOIN_MESSAGE = """  Welcome to Python Discord!  To show you what kind of community we are, we've created this video: @@ -29,33 +20,10 @@ https://youtu.be/ZH26PuX3re0  As a new user, you have read-only access to a few select channels to give you a taste of what our server is like. \  In order to see the rest of the channels and to send messages, you first have to accept our rules. - -Please visit <#{constants.Channels.verification}> to get started. Thank you!  """ -# Sent via DMs once user verifies  VERIFIED_MESSAGE = f""" -Thanks for verifying yourself! 
- -For your records, these are the documents you accepted: - -`1)` Our rules, here: <https://pythondiscord.com/pages/rules> -`2)` Our privacy policy, here: <https://pythondiscord.com/pages/privacy> - you can find information on how to have \ -your information removed here as well. - -Feel free to review them at any point! - -Additionally, if you'd like to receive notifications for the announcements \ -we post in <#{constants.Channels.announcements}> -from time to time, you can send `!subscribe` to <#{constants.Channels.bot_commands}> at any time \ -to assign yourself the **Announcements** role. We'll mention this role every time we make an announcement. - -If you'd like to unsubscribe from the announcement notifications, simply send `!unsubscribe` to \ -<#{constants.Channels.bot_commands}>. -""" - -ALTERNATE_VERIFIED_MESSAGE = f""" -Thanks for accepting our rules! +You are now verified!  You can find a copy of our rules for reference at <https://pythondiscord.com/pages/rules>. @@ -71,61 +39,6 @@ To introduce you to our community, we've made the following video:  https://youtu.be/ZH26PuX3re0  """ -# Sent via DMs to users kicked for failing to verify -KICKED_MESSAGE = f""" -Hi! You have been automatically kicked from Python Discord as you have failed to accept our rules \ -within `{constants.Verification.kicked_after}` days. If this was an accident, please feel free to join us again! - -{constants.Guild.invite} -""" - -# Sent periodically in the verification channel -REMINDER_MESSAGE = f""" -<@&{constants.Roles.unverified}> - -Welcome to Python Discord! Please read the documents mentioned above and type `!accept` to gain permissions \ -to send messages in the community! - -You will be kicked if you don't verify within `{constants.Verification.kicked_after}` days. 
-""".strip() - -# An async function taking a Member param -Request = t.Callable[[discord.Member], t.Awaitable] - - -class StopExecution(Exception): -    """Signals that a task should halt immediately & alert admins.""" - -    def __init__(self, reason: discord.HTTPException) -> None: -        super().__init__() -        self.reason = reason - - -class Limit(t.NamedTuple): -    """Composition over config for throttling requests.""" - -    batch_size: int  # Amount of requests after which to pause -    sleep_secs: int  # Sleep this many seconds after each batch - - -def mention_role(role_id: int) -> discord.AllowedMentions: -    """Construct an allowed mentions instance that allows pinging `role_id`.""" -    return discord.AllowedMentions(roles=[discord.Object(role_id)]) - - -def is_verified(member: discord.Member) -> bool: -    """ -    Check whether `member` is considered verified. - -    Members are considered verified if they have at least 1 role other than -    the default role (@everyone) and the @Unverified role. -    """ -    unverified_roles = { -        member.guild.get_role(constants.Roles.unverified), -        member.guild.default_role, -    } -    return len(set(member.roles) - unverified_roles) > 0 -  async def safe_dm(coro: t.Coroutine) -> None:      """ @@ -150,411 +63,16 @@ class Verification(Cog):      """      User verification and role management. -    There are two internal tasks in this cog: - -    * `update_unverified_members` -        * Unverified members are given the @Unverified role after configured `unverified_after` days -        * Unverified members are kicked after configured `kicked_after` days -    * `ping_unverified` -        * Periodically ping the @Unverified role in the verification channel -      Statistics are collected in the 'verification.' namespace. -    Moderators+ can use the `verification` command group to start or stop both internal -    tasks, if necessary. Settings are persisted in Redis across sessions. 
- -    Additionally, this cog offers the !accept, !subscribe and !unsubscribe commands, -    and keeps the verification channel clean by deleting messages. +    Additionally, this cog offers the !subscribe and !unsubscribe commands,      """ -    # Persist task settings & last sent `REMINDER_MESSAGE` id -    # RedisCache[ -    #   "tasks_running": int (0 or 1), -    #   "last_reminder": int (discord.Message.id), -    # ] -    task_cache = RedisCache() - -    # Create a cache for storing recipients of the alternate welcome DM. -    member_gating_cache = RedisCache() -      def __init__(self, bot: Bot) -> None:          """Start internal tasks."""          self.bot = bot -        self.bot.loop.create_task(self._maybe_start_tasks()) - -    def cog_unload(self) -> None: -        """ -        Cancel internal tasks. - -        This is necessary, as tasks are not automatically cancelled on cog unload. -        """ -        self._stop_tasks(gracefully=False) - -    @property -    def mod_log(self) -> ModLog: -        """Get currently loaded ModLog cog instance.""" -        return self.bot.get_cog("ModLog") - -    async def _maybe_start_tasks(self) -> None: -        """ -        Poll Redis to check whether internal tasks should start. - -        Redis must be interfaced with from an async function. -        """ -        log.trace("Checking whether background tasks should begin") -        setting: t.Optional[int] = await self.task_cache.get("tasks_running")  # This can be None if never set - -        if setting: -            log.trace("Background tasks will be started") -            self.update_unverified_members.start() -            self.ping_unverified.start() - -    def _stop_tasks(self, *, gracefully: bool) -> None: -        """ -        Stop the update users & ping @Unverified tasks. - -        If `gracefully` is True, the tasks will be able to finish their current iteration. -        Otherwise, they are cancelled immediately. 
-        """ -        log.info(f"Stopping internal tasks ({gracefully=})") -        if gracefully: -            self.update_unverified_members.stop() -            self.ping_unverified.stop() -        else: -            self.update_unverified_members.cancel() -            self.ping_unverified.cancel() - -    # region: automatically update unverified users - -    async def _verify_kick(self, n_members: int) -> bool: -        """ -        Determine whether `n_members` is a reasonable amount of members to kick. - -        First, `n_members` is checked against the size of the PyDis guild. If `n_members` are -        more than the configured `kick_confirmation_threshold` of the guild, the operation -        must be confirmed by staff in #core-dev. Otherwise, the operation is seen as safe. -        """ -        log.debug(f"Checking whether {n_members} members are safe to kick") - -        await self.bot.wait_until_guild_available()  # Ensure cache is populated before we grab the guild -        pydis = self.bot.get_guild(constants.Guild.id) - -        percentage = n_members / len(pydis.members) -        if percentage < constants.Verification.kick_confirmation_threshold: -            log.debug(f"Kicking {percentage:.2%} of the guild's population is seen as safe") -            return True - -        # Since `n_members` is a suspiciously large number, we will ask for confirmation -        log.debug("Amount of users is too large, requesting staff confirmation") - -        core_dev_channel = pydis.get_channel(constants.Channels.dev_core) -        core_dev_ping = f"<@&{constants.Roles.core_developers}>" - -        confirmation_msg = await core_dev_channel.send( -            f"{core_dev_ping} Verification determined that `{n_members}` members should be kicked as they haven't " -            f"verified in `{constants.Verification.kicked_after}` days. This is `{percentage:.2%}` of the guild's " -            f"population. 
Proceed?", -            allowed_mentions=mention_role(constants.Roles.core_developers), -        ) - -        options = (constants.Emojis.incident_actioned, constants.Emojis.incident_unactioned) -        for option in options: -            await confirmation_msg.add_reaction(option) - -        core_dev_ids = [member.id for member in pydis.get_role(constants.Roles.core_developers).members] - -        def check(reaction: discord.Reaction, user: discord.User) -> bool: -            """Check whether `reaction` is a valid reaction to `confirmation_msg`.""" -            return ( -                reaction.message.id == confirmation_msg.id  # Reacted to `confirmation_msg` -                and str(reaction.emoji) in options  # With one of `options` -                and user.id in core_dev_ids  # By a core developer -            ) - -        timeout = 60 * 5  # Seconds, i.e. 5 minutes -        try: -            choice, _ = await self.bot.wait_for("reaction_add", check=check, timeout=timeout) -        except asyncio.TimeoutError: -            log.debug("Staff prompt not answered, aborting operation") -            return False -        finally: -            with suppress(discord.HTTPException): -                await confirmation_msg.clear_reactions() - -        result = str(choice) == constants.Emojis.incident_actioned -        log.debug(f"Received answer: {choice}, result: {result}") - -        # Edit the prompt message to reflect the final choice -        if result is True: -            result_msg = f":ok_hand: {core_dev_ping} Request to kick `{n_members}` members was authorized!" -        else: -            result_msg = f":warning: {core_dev_ping} Request to kick `{n_members}` members was denied!" 
- -        with suppress(discord.HTTPException): -            await confirmation_msg.edit(content=result_msg) - -        return result - -    async def _alert_admins(self, exception: discord.HTTPException) -> None: -        """ -        Ping @Admins with information about `exception`. - -        This is used when a critical `exception` caused a verification task to abort. -        """ -        await self.bot.wait_until_guild_available() -        log.info(f"Sending admin alert regarding exception: {exception}") - -        admins_channel = self.bot.get_guild(constants.Guild.id).get_channel(constants.Channels.admins) -        ping = f"<@&{constants.Roles.admins}>" - -        await admins_channel.send( -            f"{ping} Aborted updating unverified users due to the following exception:\n" -            f"```{exception}```\n" -            f"Internal tasks will be stopped.", -            allowed_mentions=mention_role(constants.Roles.admins), -        ) - -    async def _send_requests(self, members: t.Collection[discord.Member], request: Request, limit: Limit) -> int: -        """ -        Pass `members` one by one to `request` handling Discord exceptions. - -        This coroutine serves as a generic `request` executor for kicking members and adding -        roles, as it allows us to define the error handling logic in one place only. - -        Any `request` has the ability to completely abort the execution by raising `StopExecution`. -        In such a case, the @Admins will be alerted of the reason attribute. - -        To avoid rate-limits, pass a `limit` configuring the batch size and the amount of seconds -        to sleep between batches. - -        Returns the amount of successful requests. Failed requests are logged at info level. 
-        """ -        log.trace(f"Sending {len(members)} requests") -        n_success, bad_statuses = 0, set() - -        for progress, member in enumerate(members, start=1): -            if is_verified(member):  # Member could have verified in the meantime -                continue -            try: -                await request(member) -            except StopExecution as stop_execution: -                await self._alert_admins(stop_execution.reason) -                await self.task_cache.set("tasks_running", 0) -                self._stop_tasks(gracefully=True)  # Gracefully finish current iteration, then stop -                break -            except discord.HTTPException as http_exc: -                bad_statuses.add(http_exc.status) -            else: -                n_success += 1 - -            if progress % limit.batch_size == 0: -                log.trace(f"Processed {progress} requests, pausing for {limit.sleep_secs} seconds") -                await asyncio.sleep(limit.sleep_secs) - -        if bad_statuses: -            log.info(f"Failed to send {len(members) - n_success} requests due to following statuses: {bad_statuses}") - -        return n_success - -    async def _add_kick_note(self, member: discord.Member) -> None: -        """ -        Post a note regarding `member` being kicked to site. - -        Allows keeping track of kicked members for auditing purposes. 
-        """ -        payload = { -            "active": False, -            "actor": self.bot.user.id,  # Bot actions this autonomously -            "expires_at": None, -            "hidden": True, -            "reason": "Verification kick", -            "type": "note", -            "user": member.id, -        } - -        log.trace(f"Posting kick note for member {member} ({member.id})") -        try: -            await self.bot.api_client.post("bot/infractions", json=payload) -        except ResponseCodeError as api_exc: -            log.warning("Failed to post kick note", exc_info=api_exc) - -    async def _kick_members(self, members: t.Collection[discord.Member]) -> int: -        """ -        Kick `members` from the PyDis guild. - -        Due to strict ratelimits on sending messages (120 requests / 60 secs), we sleep for a second -        after each 2 requests to allow breathing room for other features. - -        Note that this is a potentially destructive operation. Returns the amount of successful requests. 
-        """ -        log.info(f"Kicking {len(members)} members (not verified after {constants.Verification.kicked_after} days)") - -        async def kick_request(member: discord.Member) -> None: -            """Send `KICKED_MESSAGE` to `member` and kick them from the guild.""" -            try: -                await safe_dm(member.send(KICKED_MESSAGE))  # Suppress disabled DMs -            except discord.HTTPException as suspicious_exception: -                raise StopExecution(reason=suspicious_exception) -            await member.kick(reason=f"User has not verified in {constants.Verification.kicked_after} days") -            await self._add_kick_note(member) - -        n_kicked = await self._send_requests(members, kick_request, Limit(batch_size=2, sleep_secs=1)) -        self.bot.stats.incr("verification.kicked", count=n_kicked) - -        return n_kicked - -    async def _give_role(self, members: t.Collection[discord.Member], role: discord.Role) -> int: -        """ -        Give `role` to all `members`. - -        We pause for a second after batches of 25 requests to ensure ratelimits aren't exceeded. - -        Returns the amount of successful requests. -        """ -        log.info( -            f"Assigning {role} role to {len(members)} members (not verified " -            f"after {constants.Verification.unverified_after} days)" -        ) +        self.pending_members = set() -        async def role_request(member: discord.Member) -> None: -            """Add `role` to `member`.""" -            await member.add_roles(role, reason=f"Not verified after {constants.Verification.unverified_after} days") - -        return await self._send_requests(members, role_request, Limit(batch_size=25, sleep_secs=1)) - -    async def _check_members(self) -> t.Tuple[t.Set[discord.Member], t.Set[discord.Member]]: -        """ -        Check in on the verification status of PyDis members. 
- -        This coroutine finds two sets of users: -        * Not verified after configured `unverified_after` days, should be given the @Unverified role -        * Not verified after configured `kicked_after` days, should be kicked from the guild - -        These sets are always disjoint, i.e. share no common members. -        """ -        await self.bot.wait_until_guild_available()  # Ensure cache is ready -        pydis = self.bot.get_guild(constants.Guild.id) - -        unverified = pydis.get_role(constants.Roles.unverified) -        current_dt = datetime.utcnow()  # Discord timestamps are UTC - -        # Users to be given the @Unverified role, and those to be kicked, these should be entirely disjoint -        for_role, for_kick = set(), set() - -        log.debug("Checking verification status of guild members") -        for member in pydis.members: - -            # Skip verified members, bots, and members for which we do not know their join date, -            # this should be extremely rare but docs mention that it can happen -            if is_verified(member) or member.bot or member.joined_at is None: -                continue - -            # At this point, we know that `member` is an unverified user, and we will decide what -            # to do with them based on time passed since their join date -            since_join = current_dt - member.joined_at - -            if since_join > timedelta(days=constants.Verification.kicked_after): -                for_kick.add(member)  # User should be removed from the guild - -            elif ( -                since_join > timedelta(days=constants.Verification.unverified_after) -                and unverified not in member.roles -            ): -                for_role.add(member)  # User should be given the @Unverified role - -        log.debug(f"Found {len(for_role)} users for {unverified} role, {len(for_kick)} users to be kicked") -        return for_role, for_kick - -    @tasks.loop(minutes=30) -    async def 
update_unverified_members(self) -> None: -        """ -        Periodically call `_check_members` and update unverified members accordingly. - -        After each run, a summary will be sent to the modlog channel. If a suspiciously high -        amount of members to be kicked is found, the operation is guarded by `_verify_kick`. -        """ -        log.info("Updating unverified guild members") - -        await self.bot.wait_until_guild_available() -        unverified = self.bot.get_guild(constants.Guild.id).get_role(constants.Roles.unverified) - -        for_role, for_kick = await self._check_members() - -        if not for_role: -            role_report = f"Found no users to be assigned the {unverified.mention} role." -        else: -            n_roles = await self._give_role(for_role, unverified) -            role_report = f"Assigned {unverified.mention} role to `{n_roles}`/`{len(for_role)}` members." - -        if not for_kick: -            kick_report = "Found no users to be kicked." -        elif not await self._verify_kick(len(for_kick)): -            kick_report = f"Not authorized to kick `{len(for_kick)}` members." -        else: -            n_kicks = await self._kick_members(for_kick) -            kick_report = f"Kicked `{n_kicks}`/`{len(for_kick)}` members from the guild." - -        await self.mod_log.send_log_message( -            icon_url=self.bot.user.avatar_url, -            colour=discord.Colour.blurple(), -            title="Verification system", -            text=f"{kick_report}\n{role_report}", -        ) - -    # endregion -    # region: periodically ping @Unverified - -    @tasks.loop(hours=constants.Verification.reminder_frequency) -    async def ping_unverified(self) -> None: -        """ -        Delete latest `REMINDER_MESSAGE` and send it again. - -        This utilizes RedisCache to persist the latest reminder message id. 
-        """ -        await self.bot.wait_until_guild_available() -        verification = self.bot.get_guild(constants.Guild.id).get_channel(constants.Channels.verification) - -        last_reminder: t.Optional[int] = await self.task_cache.get("last_reminder") - -        if last_reminder is not None: -            log.trace(f"Found verification reminder message in cache, deleting: {last_reminder}") - -            with suppress(discord.HTTPException):  # If something goes wrong, just ignore it -                await self.bot.http.delete_message(verification.id, last_reminder) - -        log.trace("Sending verification reminder") -        new_reminder = await verification.send( -            REMINDER_MESSAGE, allowed_mentions=mention_role(constants.Roles.unverified), -        ) - -        await self.task_cache.set("last_reminder", new_reminder.id) - -    @ping_unverified.before_loop -    async def _before_first_ping(self) -> None: -        """ -        Sleep until `REMINDER_MESSAGE` should be sent again. - -        If latest reminder is not cached, exit instantly. Otherwise, wait wait until the -        configured `reminder_frequency` has passed. 
-        """ -        last_reminder: t.Optional[int] = await self.task_cache.get("last_reminder") - -        if last_reminder is None: -            log.trace("Latest verification reminder message not cached, task will not wait") -            return - -        # Convert cached message id into a timestamp -        time_since = datetime.utcnow() - snowflake_time(last_reminder) -        log.trace(f"Time since latest verification reminder: {time_since}") - -        to_sleep = timedelta(hours=constants.Verification.reminder_frequency) - time_since -        log.trace(f"Time to sleep until next ping: {to_sleep}") - -        # Delta can be negative if `reminder_frequency` has already passed -        secs = max(to_sleep.total_seconds(), 0) -        await asyncio.sleep(secs) - -    # endregion      # region: listeners      @Cog.listener() @@ -563,24 +81,11 @@ class Verification(Cog):          if member.guild.id != constants.Guild.id:              return  # Only listen for PyDis events -        raw_member = await self.bot.http.get_member(member.guild.id, member.id) - -        # If the user has the is_pending flag set, they will be using the alternate +        # If the user has the pending flag set, they will be using the alternate          # gate and will not need a welcome DM with verification instructions.          # We will send them an alternate DM once they verify with the welcome -        # video. -        if raw_member.get("is_pending"): -            await self.member_gating_cache.set(member.id, True) - -            # TODO: Temporary, remove soon after asking joe. -            await self.mod_log.send_log_message( -                icon_url=self.bot.user.avatar_url, -                colour=discord.Colour.blurple(), -                title="New native gated user", -                channel_id=constants.Channels.user_log, -                text=f"<@{member.id}> ({member.id})", -            ) - +        # video when they pass the gate. 
+        if member.pending:              return          log.trace(f"Sending on join message to new member: {member.id}") @@ -592,193 +97,18 @@ class Verification(Cog):      @Cog.listener()      async def on_member_update(self, before: discord.Member, after: discord.Member) -> None:          """Check if we need to send a verification DM to a gated user.""" -        before_roles = [role.id for role in before.roles] -        after_roles = [role.id for role in after.roles] - -        if constants.Roles.verified not in before_roles and constants.Roles.verified in after_roles: -            if await self.member_gating_cache.pop(after.id): -                try: -                    # If the member has not received a DM from our !accept command -                    # and has gone through the alternate gating system we should send -                    # our alternate welcome DM which includes info such as our welcome -                    # video. -                    await safe_dm(after.send(ALTERNATE_VERIFIED_MESSAGE)) -                except discord.HTTPException: -                    log.exception("DM dispatch failed on unexpected error code") - -    @Cog.listener() -    async def on_message(self, message: discord.Message) -> None: -        """Check new message event for messages to the checkpoint channel & process.""" -        if message.channel.id != constants.Channels.verification: -            return  # Only listen for #checkpoint messages - -        if message.content == REMINDER_MESSAGE: -            return  # Ignore bots own verification reminder - -        if message.author.bot: -            # They're a bot, delete their message after the delay. 
-            await message.delete(delay=constants.Verification.bot_message_delete_delay) -            return - -        # if a user mentions a role or guild member -        # alert the mods in mod-alerts channel -        if message.mentions or message.role_mentions: -            log.debug( -                f"{message.author} mentioned one or more users " -                f"and/or roles in {message.channel.name}" -            ) - -            embed_text = ( -                f"{format_user(message.author)} sent a message in " -                f"{message.channel.mention} that contained user and/or role mentions." -                f"\n\n**Original message:**\n>>> {message.content}" -            ) - -            # Send pretty mod log embed to mod-alerts -            await self.mod_log.send_log_message( -                icon_url=constants.Icons.filtering, -                colour=discord.Colour(constants.Colours.soft_red), -                title=f"User/Role mentioned in {message.channel.name}", -                text=embed_text, -                thumbnail=message.author.avatar_url_as(static_format="png"), -                channel_id=constants.Channels.mod_alerts, -            ) - -        ctx: Context = await self.bot.get_context(message) -        if ctx.command is not None and ctx.command.name == "accept": -            return - -        if any(r.id == constants.Roles.verified for r in ctx.author.roles): -            log.info( -                f"{ctx.author} posted '{ctx.message.content}' " -                "in the verification channel, but is already verified." -            ) -            return - -        log.debug( -            f"{ctx.author} posted '{ctx.message.content}' in the verification " -            "channel. We are providing instructions how to verify." 
-        ) -        await ctx.send( -            f"{ctx.author.mention} Please type `!accept` to verify that you accept our rules, " -            f"and gain access to the rest of the server.", -            delete_after=20 -        ) - -        log.trace(f"Deleting the message posted by {ctx.author}") -        with suppress(discord.NotFound): -            await ctx.message.delete() - -    # endregion -    # region: task management commands - -    @has_any_role(*constants.MODERATION_ROLES) -    @group(name="verification") -    async def verification_group(self, ctx: Context) -> None: -        """Manage internal verification tasks.""" -        if ctx.invoked_subcommand is None: -            await ctx.send_help(ctx.command) - -    @verification_group.command(name="status") -    async def status_cmd(self, ctx: Context) -> None: -        """Check whether verification tasks are running.""" -        log.trace("Checking status of verification tasks") - -        if self.update_unverified_members.is_running(): -            update_status = f"{constants.Emojis.incident_actioned} Member update task is running." -        else: -            update_status = f"{constants.Emojis.incident_unactioned} Member update task is **not** running." - -        mention = f"<@&{constants.Roles.unverified}>" -        if self.ping_unverified.is_running(): -            ping_status = f"{constants.Emojis.incident_actioned} Ping {mention} task is running." -        else: -            ping_status = f"{constants.Emojis.incident_unactioned} Ping {mention} task is **not** running." 
- -        embed = discord.Embed( -            title="Verification system", -            description=f"{update_status}\n{ping_status}", -            colour=discord.Colour.blurple(), -        ) -        await ctx.send(embed=embed) - -    @verification_group.command(name="start") -    async def start_cmd(self, ctx: Context) -> None: -        """Start verification tasks if they are not already running.""" -        log.info("Starting verification tasks") - -        if not self.update_unverified_members.is_running(): -            self.update_unverified_members.start() - -        if not self.ping_unverified.is_running(): -            self.ping_unverified.start() - -        await self.task_cache.set("tasks_running", 1) - -        colour = discord.Colour.blurple() -        await ctx.send(embed=discord.Embed(title="Verification system", description="Done. :ok_hand:", colour=colour)) - -    @verification_group.command(name="stop", aliases=["kill"]) -    async def stop_cmd(self, ctx: Context) -> None: -        """Stop verification tasks.""" -        log.info("Stopping verification tasks") - -        self._stop_tasks(gracefully=False) -        await self.task_cache.set("tasks_running", 0) - -        colour = discord.Colour.blurple() -        await ctx.send(embed=discord.Embed(title="Verification system", description="Tasks canceled.", colour=colour)) +        if before.pending is True and after.pending is False: +            try: +                # If the member has not received a DM from our !accept command +                # and has gone through the alternate gating system we should send +                # our alternate welcome DM which includes info such as our welcome +                # video. 
+                await safe_dm(after.send(VERIFIED_MESSAGE)) +            except discord.HTTPException: +                log.exception("DM dispatch failed on unexpected error code")      # endregion -    # region: accept and subscribe commands - -    def _bump_verified_stats(self, verified_member: discord.Member) -> None: -        """ -        Increment verification stats for `verified_member`. - -        Each member falls into one of the three categories: -            * Verified within 24 hours after joining -            * Does not have @Unverified role yet -            * Does have @Unverified role - -        Stats for member kicking are handled separately. -        """ -        if verified_member.joined_at is None:  # Docs mention this can happen -            return - -        if (datetime.utcnow() - verified_member.joined_at) < timedelta(hours=24): -            category = "accepted_on_day_one" -        elif constants.Roles.unverified not in [role.id for role in verified_member.roles]: -            category = "accepted_before_unverified" -        else: -            category = "accepted_after_unverified" - -        log.trace(f"Bumping verification stats in category: {category}") -        self.bot.stats.incr(f"verification.{category}") - -    @command(name='accept', aliases=('verify', 'verified', 'accepted'), hidden=True) -    @has_no_roles(constants.Roles.verified) -    @in_whitelist(channels=(constants.Channels.verification,)) -    async def accept_command(self, ctx: Context, *_) -> None:  # We don't actually care about the args -        """Accept our rules and gain access to the rest of the server.""" -        log.debug(f"{ctx.author} called !accept. 
Assigning the 'Developer' role.") -        await ctx.author.add_roles(discord.Object(constants.Roles.verified), reason="Accepted the rules") - -        self._bump_verified_stats(ctx.author)  # This checks for @Unverified so make sure it's not yet removed - -        if constants.Roles.unverified in [role.id for role in ctx.author.roles]: -            log.debug(f"Removing Unverified role from: {ctx.author}") -            await ctx.author.remove_roles(discord.Object(constants.Roles.unverified)) - -        try: -            await safe_dm(ctx.author.send(VERIFIED_MESSAGE)) -        except discord.HTTPException: -            log.exception(f"Sending welcome message failed for {ctx.author}.") -        finally: -            log.trace(f"Deleting accept message by {ctx.author}.") -            with suppress(discord.NotFound): -                self.mod_log.ignore(constants.Event.message_delete, ctx.message.id) -                await ctx.message.delete() +    # region: subscribe commands      @command(name='subscribe')      @in_whitelist(channels=(constants.Channels.bot_commands,)) @@ -839,14 +169,23 @@ class Verification(Cog):          if isinstance(error, InWhitelistCheckFailure):              error.handled = True -    @staticmethod -    async def bot_check(ctx: Context) -> bool: -        """Block any command within the verification channel that is not !accept.""" -        is_verification = ctx.channel.id == constants.Channels.verification -        if is_verification and await has_no_roles_check(ctx, *constants.MODERATION_ROLES): -            return ctx.command.name == "accept" -        else: -            return True +    @command(name='verify') +    @has_any_role(*constants.MODERATION_ROLES) +    async def perform_manual_verification(self, ctx: Context, user: discord.Member) -> None: +        """Command for moderators to verify any user.""" +        log.trace(f'verify command called by {ctx.author} for {user.id}.') + +        if not user.pending: +            
log.trace(f'{user.id} is already verified, aborting.') +            await ctx.send(f'{constants.Emojis.cross_mark} {user.mention} is already verified.') +            return + +        # Adding a role automatically verifies the user, so we add and remove the Announcements role. +        temporary_role = self.bot.get_guild(constants.Guild.id).get_role(constants.Roles.announcements) +        await user.add_roles(temporary_role) +        await user.remove_roles(temporary_role) +        log.trace(f'{user.id} manually verified.') +        await ctx.send(f'{constants.Emojis.check_mark} {user.mention} is now verified.')      # endregion diff --git a/bot/exts/moderation/voice_gate.py b/bot/exts/moderation/voice_gate.py index 4d48d2c1b..0cbce6a51 100644 --- a/bot/exts/moderation/voice_gate.py +++ b/bot/exts/moderation/voice_gate.py @@ -5,7 +5,6 @@ from datetime import datetime, timedelta  import discord  from async_rediscache import RedisCache -from dateutil import parser  from discord import Colour, Member, VoiceState  from discord.ext.commands import Cog, Context, command @@ -29,7 +28,7 @@ FAILED_MESSAGE = (  )  MESSAGE_FIELD_MAP = { -    "verified_at": f"have been verified for less than {GateConf.minimum_days_verified} days", +    "joined_at": f"have been on the server for less than {GateConf.minimum_days_member} days",      "voice_banned": "have an active voice ban infraction",      "total_messages": f"have sent less than {GateConf.minimum_messages} messages",      "activity_blocks": f"have been active for fewer than {GateConf.minimum_activity_blocks} ten-minute blocks", @@ -149,14 +148,8 @@ class VoiceGate(Cog):              await ctx.author.send(embed=embed)              return -        # Pre-parse this for better code style -        if data["verified_at"] is not None: -            data["verified_at"] = parser.isoparse(data["verified_at"]) -        else: -            data["verified_at"] = datetime.utcnow() - timedelta(days=3) -          checks = { -            
"verified_at": data["verified_at"] > datetime.utcnow() - timedelta(days=GateConf.minimum_days_verified), +            "joined_at": ctx.author.joined_at > datetime.utcnow() - timedelta(days=GateConf.minimum_days_member),              "total_messages": data["total_messages"] < GateConf.minimum_messages,              "voice_banned": data["voice_banned"],              "activity_blocks": data["activity_blocks"] < GateConf.minimum_activity_blocks diff --git a/bot/exts/moderation/watchchannels/_watchchannel.py b/bot/exts/moderation/watchchannels/_watchchannel.py index 7118dee02..f9fc12dc3 100644 --- a/bot/exts/moderation/watchchannels/_watchchannel.py +++ b/bot/exts/moderation/watchchannels/_watchchannel.py @@ -342,11 +342,14 @@ class WatchChannel(metaclass=CogABCMeta):          """Takes care of unloading the cog and canceling the consumption task."""          self.log.trace("Unloading the cog")          if self._consume_task and not self._consume_task.done(): +            def done_callback(task: asyncio.Task) -> None: +                """Send exception when consuming task have been cancelled.""" +                try: +                    task.result() +                except asyncio.CancelledError: +                    self.log.info( +                        f"The consume task of {type(self).__name__} was canceled. Messages may be lost." +                    ) + +            self._consume_task.add_done_callback(done_callback)              self._consume_task.cancel() -            try: -                self._consume_task.result() -            except asyncio.CancelledError as e: -                self.log.exception( -                    "The consume task was canceled. 
Messages may be lost.", -                    exc_info=e -                ) diff --git a/bot/exts/moderation/watchchannels/talentpool.py b/bot/exts/moderation/watchchannels/talentpool.py index a77dbe156..dd3349c3a 100644 --- a/bot/exts/moderation/watchchannels/talentpool.py +++ b/bot/exts/moderation/watchchannels/talentpool.py @@ -64,12 +64,12 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):      @nomination_group.command(name='watch', aliases=('w', 'add', 'a'), root_aliases=("nominate",))      @has_any_role(*STAFF_ROLES) -    async def watch_command(self, ctx: Context, user: FetchedMember, *, reason: str) -> None: +    async def watch_command(self, ctx: Context, user: FetchedMember, *, reason: str = '') -> None:          """          Relay messages sent by the given `user` to the `#talent-pool` channel. -        A `reason` for adding the user to the talent pool is required and will be displayed -        in the header when relaying messages of this user to the channel. +        A `reason` for adding the user to the talent pool is optional. +        If given, it will be displayed in the header when relaying messages of this user to the channel.          """          if user.bot:              await ctx.send(f":x: I'm sorry {ctx.author}, I'm afraid I can't do that. 
I only watch humans.") @@ -122,8 +122,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):          if history:              total = f"({len(history)} previous nominations in total)"              start_reason = f"Watched: {textwrap.shorten(history[0]['reason'], width=500, placeholder='...')}" -            end_reason = f"Unwatched: {textwrap.shorten(history[0]['end_reason'], width=500, placeholder='...')}" -            msg += f"\n\nUser's previous watch reasons {total}:```{start_reason}\n\n{end_reason}```" +            msg += f"\n\nUser's previous watch reasons {total}:```{start_reason}```"          await ctx.send(msg) @@ -202,7 +201,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):              f"{self.api_endpoint}/{nomination_id}",              json={field: reason}          ) - +        await self.fetch_user_cache()  # Update cache.          await ctx.send(f":white_check_mark: Updated the {field} of the nomination!")      @Cog.listener() @@ -243,8 +242,8 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):          actor = guild.get_member(actor_id)          active = nomination_object["active"] -        log.debug(active) -        log.debug(type(nomination_object["inserted_at"])) + +        reason = nomination_object["reason"] or "*None*"          start_date = time.format_infraction(nomination_object["inserted_at"])          if active: @@ -254,7 +253,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):                  Status: **Active**                  Date: {start_date}                  Actor: {actor.mention if actor else actor_id} -                Reason: {nomination_object["reason"]} +                Reason: {reason}                  Nomination ID: `{nomination_object["id"]}`                  ===============                  """ @@ -267,7 +266,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):                  Status: Inactive                  Date: {start_date}                  Actor: {actor.mention if actor else 
actor_id} -                Reason: {nomination_object["reason"]} +                Reason: {reason}                  End date: {end_date}                  Unwatch reason: {nomination_object["end_reason"]} diff --git a/bot/exts/utils/bot.py b/bot/exts/utils/bot.py index 69d623581..a4c828f95 100644 --- a/bot/exts/utils/bot.py +++ b/bot/exts/utils/bot.py @@ -5,7 +5,7 @@ from discord import Embed, TextChannel  from discord.ext.commands import Cog, Context, command, group, has_any_role  from bot.bot import Bot -from bot.constants import Guild, MODERATION_ROLES, Roles, URLs +from bot.constants import Guild, MODERATION_ROLES, URLs  log = logging.getLogger(__name__) @@ -17,13 +17,11 @@ class BotCog(Cog, name="Bot"):          self.bot = bot      @group(invoke_without_command=True, name="bot", hidden=True) -    @has_any_role(Roles.verified)      async def botinfo_group(self, ctx: Context) -> None:          """Bot informational commands."""          await ctx.send_help(ctx.command)      @botinfo_group.command(name='about', aliases=('info',), hidden=True) -    @has_any_role(Roles.verified)      async def about_command(self, ctx: Context) -> None:          """Get information about the bot."""          embed = Embed( diff --git a/bot/exts/utils/clean.py b/bot/exts/utils/clean.py index bf25cb4c2..8acaf9131 100644 --- a/bot/exts/utils/clean.py +++ b/bot/exts/utils/clean.py @@ -191,7 +191,7 @@ class Clean(Cog):              channel_id=Channels.mod_log,          ) -    @group(invoke_without_command=True, name="clean", aliases=["purge"]) +    @group(invoke_without_command=True, name="clean", aliases=["clear", "purge"])      @has_any_role(*MODERATION_ROLES)      async def clean_group(self, ctx: Context) -> None:          """Commands for cleaning messages in channels.""" diff --git a/bot/exts/utils/jams.py b/bot/exts/utils/jams.py index 1c0988343..98fbcb303 100644 --- a/bot/exts/utils/jams.py +++ b/bot/exts/utils/jams.py @@ -93,10 +93,6 @@ class CodeJams(commands.Cog):                  
connect=True              ),              guild.default_role: PermissionOverwrite(read_messages=False, connect=False), -            guild.get_role(Roles.verified): PermissionOverwrite( -                read_messages=False, -                connect=False -            )          }          # Rest of members should just have read_messages diff --git a/bot/exts/utils/utils.py b/bot/exts/utils/utils.py index 6d8d98695..eb92dfca7 100644 --- a/bot/exts/utils/utils.py +++ b/bot/exts/utils/utils.py @@ -2,20 +2,19 @@ import difflib  import logging  import re  import unicodedata -from datetime import datetime, timedelta -from email.parser import HeaderParser -from io import StringIO -from typing import Dict, Optional, Tuple, Union +from typing import Tuple, Union  from discord import Colour, Embed, utils  from discord.ext.commands import BadArgument, Cog, Context, clean_content, command, has_any_role +from discord.utils import snowflake_time  from bot.bot import Bot  from bot.constants import Channels, MODERATION_ROLES, STAFF_ROLES +from bot.converters import Snowflake  from bot.decorators import in_whitelist  from bot.pagination import LinePaginator  from bot.utils import messages -from bot.utils.cache import AsyncCache +from bot.utils.time import time_since  log = logging.getLogger(__name__) @@ -41,23 +40,12 @@ If the implementation is easy to explain, it may be a good idea.  Namespaces are one honking great idea -- let's do more of those!  
""" -ICON_URL = "https://www.python.org/static/opengraph-icon-200x200.png" - -pep_cache = AsyncCache() -  class Utils(Cog):      """A selection of utilities which don't have a clear category.""" -    BASE_PEP_URL = "http://www.python.org/dev/peps/pep-" -    BASE_GITHUB_PEP_URL = "https://raw.githubusercontent.com/python/peps/master/pep-" -    PEPS_LISTING_API_URL = "https://api.github.com/repos/python/peps/contents?ref=master" -      def __init__(self, bot: Bot):          self.bot = bot -        self.peps: Dict[int, str] = {} -        self.last_refreshed_peps: Optional[datetime] = None -        self.bot.loop.create_task(self.refresh_peps_urls())      @command()      @in_whitelist(channels=(Channels.bot_commands,), roles=STAFF_ROLES) @@ -166,6 +154,21 @@ class Utils(Cog):          embed.description = best_match          await ctx.send(embed=embed) +    @command(aliases=("snf", "snfl", "sf")) +    @in_whitelist(channels=(Channels.bot_commands,), roles=STAFF_ROLES) +    async def snowflake(self, ctx: Context, snowflake: Snowflake) -> None: +        """Get Discord snowflake creation time.""" +        created_at = snowflake_time(snowflake) +        embed = Embed( +            description=f"**Created at {created_at}** ({time_since(created_at, max_units=3)}).", +            colour=Colour.blue() +        ) +        embed.set_author( +            name=f"Snowflake: {snowflake}", +            icon_url="https://github.com/twitter/twemoji/blob/master/assets/72x72/2744.png?raw=true" +        ) +        await ctx.send(embed=embed) +      @command(aliases=("poll",))      @has_any_role(*MODERATION_ROLES)      async def vote(self, ctx: Context, title: clean_content(fix_channel_mentions=True), *options: str) -> None: @@ -189,126 +192,6 @@ class Utils(Cog):          for reaction in options:              await message.add_reaction(reaction) -    # region: PEP - -    async def refresh_peps_urls(self) -> None: -        """Refresh PEP URLs listing in every 3 hours.""" -        # Wait 
until HTTP client is available -        await self.bot.wait_until_ready() -        log.trace("Started refreshing PEP URLs.") - -        async with self.bot.http_session.get(self.PEPS_LISTING_API_URL) as resp: -            listing = await resp.json() - -        log.trace("Got PEP URLs listing from GitHub API") - -        for file in listing: -            name = file["name"] -            if name.startswith("pep-") and name.endswith((".rst", ".txt")): -                pep_number = name.replace("pep-", "").split(".")[0] -                self.peps[int(pep_number)] = file["download_url"] - -        self.last_refreshed_peps = datetime.now() -        log.info("Successfully refreshed PEP URLs listing.") - -    @command(name='pep', aliases=('get_pep', 'p')) -    async def pep_command(self, ctx: Context, pep_number: int) -> None: -        """Fetches information about a PEP and sends it to the channel.""" -        # Trigger typing in chat to show users that bot is responding -        await ctx.trigger_typing() - -        # Handle PEP 0 directly because it's not in .rst or .txt so it can't be accessed like other PEPs. -        if pep_number == 0: -            pep_embed = self.get_pep_zero_embed() -            success = True -        else: -            success = False -            if not (pep_embed := await self.validate_pep_number(pep_number)): -                pep_embed, success = await self.get_pep_embed(pep_number) - -        await ctx.send(embed=pep_embed) -        if success: -            log.trace(f"PEP {pep_number} getting and sending finished successfully. Increasing stat.") -            self.bot.stats.incr(f"pep_fetches.{pep_number}") -        else: -            log.trace(f"Getting PEP {pep_number} failed. 
Error embed sent.") - -    @staticmethod -    def get_pep_zero_embed() -> Embed: -        """Get information embed about PEP 0.""" -        pep_embed = Embed( -            title="**PEP 0 - Index of Python Enhancement Proposals (PEPs)**", -            url="https://www.python.org/dev/peps/" -        ) -        pep_embed.set_thumbnail(url=ICON_URL) -        pep_embed.add_field(name="Status", value="Active") -        pep_embed.add_field(name="Created", value="13-Jul-2000") -        pep_embed.add_field(name="Type", value="Informational") - -        return pep_embed - -    async def validate_pep_number(self, pep_nr: int) -> Optional[Embed]: -        """Validate is PEP number valid. When it isn't, return error embed, otherwise None.""" -        if ( -            pep_nr not in self.peps -            and (self.last_refreshed_peps + timedelta(minutes=30)) <= datetime.now() -            and len(str(pep_nr)) < 5 -        ): -            await self.refresh_peps_urls() - -        if pep_nr not in self.peps: -            log.trace(f"PEP {pep_nr} was not found") -            return Embed( -                title="PEP not found", -                description=f"PEP {pep_nr} does not exist.", -                colour=Colour.red() -            ) - -        return None - -    def generate_pep_embed(self, pep_header: Dict, pep_nr: int) -> Embed: -        """Generate PEP embed based on PEP headers data.""" -        # Assemble the embed -        pep_embed = Embed( -            title=f"**PEP {pep_nr} - {pep_header['Title']}**", -            description=f"[Link]({self.BASE_PEP_URL}{pep_nr:04})", -        ) - -        pep_embed.set_thumbnail(url=ICON_URL) - -        # Add the interesting information -        fields_to_check = ("Status", "Python-Version", "Created", "Type") -        for field in fields_to_check: -            # Check for a PEP metadata field that is present but has an empty value -            # embed field values can't contain an empty string -            if 
pep_header.get(field, ""): -                pep_embed.add_field(name=field, value=pep_header[field]) - -        return pep_embed - -    @pep_cache(arg_offset=1) -    async def get_pep_embed(self, pep_nr: int) -> Tuple[Embed, bool]: -        """Fetch, generate and return PEP embed. Second item of return tuple show does getting success.""" -        response = await self.bot.http_session.get(self.peps[pep_nr]) - -        if response.status == 200: -            log.trace(f"PEP {pep_nr} found") -            pep_content = await response.text() - -            # Taken from https://github.com/python/peps/blob/master/pep0/pep.py#L179 -            pep_header = HeaderParser().parse(StringIO(pep_content)) -            return self.generate_pep_embed(pep_header, pep_nr), True -        else: -            log.trace( -                f"The user requested PEP {pep_nr}, but the response had an unexpected status code: {response.status}." -            ) -            return Embed( -                title="Unexpected error", -                description="Unexpected HTTP error during PEP search. 
Please let us know.", -                colour=Colour.red() -            ), False -    # endregion -  def setup(bot: Bot) -> None:      """Load the Utils cog.""" diff --git a/bot/log.py b/bot/log.py index 13141de40..0935666d1 100644 --- a/bot/log.py +++ b/bot/log.py @@ -6,7 +6,6 @@ from pathlib import Path  import coloredlogs  import sentry_sdk -from sentry_sdk.integrations.aiohttp import AioHttpIntegration  from sentry_sdk.integrations.logging import LoggingIntegration  from sentry_sdk.integrations.redis import RedisIntegration @@ -67,9 +66,9 @@ def setup_sentry() -> None:          dsn=constants.Bot.sentry_dsn,          integrations=[              sentry_logging, -            AioHttpIntegration(),              RedisIntegration(), -        ] +        ], +        release=f"bot@{constants.GIT_SHA}"      ) diff --git a/bot/resources/elements.json b/bot/resources/elements.json index 2dc9b6fd6..a3ac5b99f 100644 --- a/bot/resources/elements.json +++ b/bot/resources/elements.json @@ -32,7 +32,6 @@      "gallium",      "germanium",      "arsenic", -    "selenium",      "bromine",      "krypton",      "rubidium", diff --git a/bot/resources/tags/codeblock.md b/bot/resources/tags/codeblock.md index 8d48bdf06..ac64656e5 100644 --- a/bot/resources/tags/codeblock.md +++ b/bot/resources/tags/codeblock.md @@ -1,7 +1,7 @@  Here's how to format Python code on Discord: -\```py +\`\`\`py  print('Hello world!') -\``` +\`\`\`  **These are backticks, not quotes.** Check [this](https://superuser.com/questions/254076/how-do-i-type-the-tick-and-backtick-characters-on-windows/254077#254077) out if you can't find the backtick key. 
diff --git a/bot/rules/burst_shared.py b/bot/rules/burst_shared.py index 0e66df69c..bbe9271b3 100644 --- a/bot/rules/burst_shared.py +++ b/bot/rules/burst_shared.py @@ -2,20 +2,11 @@ from typing import Dict, Iterable, List, Optional, Tuple  from discord import Member, Message -from bot.constants import Channels -  async def apply(      last_message: Message, recent_messages: List[Message], config: Dict[str, int]  ) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]: -    """ -    Detects repeated messages sent by multiple users. - -    This filter never triggers in the verification channel. -    """ -    if last_message.channel.id == Channels.verification: -        return - +    """Detects repeated messages sent by multiple users."""      total_recent = len(recent_messages)      if total_recent > config['max']: diff --git a/config-default.yml b/config-default.yml index 82023aae1..f8368c5d2 100644 --- a/config-default.yml +++ b/config-default.yml @@ -28,6 +28,9 @@ style:          soft_green: 0x68c290          soft_orange: 0xf9cb54          bright_green: 0x01d277 +        orange: 0xe67e22 +        pink: 0xcf84e0 +        purple: 0xb734eb      emojis:          defcon_disabled: "<:defcondisabled:470326273952972810>" @@ -68,6 +71,8 @@ style:          comments:       "<:reddit_comments:755845255001014384>"          user:           "<:reddit_users:755845303822974997>" +        ok_hand: ":ok_hand:" +      icons:          crown_blurple: "https://cdn.discordapp.com/emojis/469964153289965568.png"          crown_green:   "https://cdn.discordapp.com/emojis/469964154719961088.png" @@ -173,7 +178,6 @@ guild:          # Special          bot_commands:       &BOT_CMD        267659945086812160          esoteric:                           470884583684964352 -        verification:                       352442727016693763          voice_gate:                         764802555427029012          # Staff @@ -186,6 +190,8 @@ guild:          mods:               &MODS           
305126844661760000          mod_alerts:                         473092532147060736          mod_spam:           &MOD_SPAM       620607373828030464 +        mod_tools:          &MOD_TOOLS      775413915391098921 +        mod_meta:           &MOD_META       775412552795947058          organisation:       &ORGANISATION   551789653284356126          staff_lounge:       &STAFF_LOUNGE   464905259261755392          duck_pond:          &DUCK_POND      637820308341915648 @@ -195,13 +201,19 @@ guild:          mod_announcements:      &MOD_ANNOUNCEMENTS      372115205867700225          admin_announcements:    &ADMIN_ANNOUNCEMENTS    749736155569848370 -        # Voice -        code_help_voice:                    755154969761677312 -        code_help_voice_2:                  766330079135268884 -        voice_chat:                         412357430186344448 +        # Voice Channels          admins_voice:       &ADMINS_VOICE   500734494840717332 +        code_help_voice_1:                  751592231726481530 +        code_help_voice_2:                  764232549840846858 +        general_voice:                      751591688538947646          staff_voice:        &STAFF_VOICE    412375055910043655 +        # Voice Chat +        code_help_chat_1:                   755154969761677312 +        code_help_chat_2:                   766330079135268884 +        staff_voice_chat:                   541638762007101470 +        voice_chat:                         412357430186344448 +          # Watch          big_brother_logs:   &BB_LOGS        468507907357409333          talent_pool:        &TALENT_POOL    534321732593647616 @@ -213,6 +225,8 @@ guild:      moderation_channels:          - *ADMINS          - *ADMIN_SPAM +        - *MOD_META +        - *MOD_TOOLS          - *MODS          - *MOD_SPAM @@ -238,8 +252,6 @@ guild:          python_community:   &PY_COMMUNITY_ROLE  458226413825294336          sprinters:          &SPRINTERS          758422482289426471 -        unverified:             
                739794855945044069 -        verified:                               352427296948486144  # @Developers on PyDis          voice_verified:                         764802720779337729          # Staff @@ -317,6 +329,7 @@ filter:  keys:      site_api:    !ENV "BOT_API_KEY" +    github:      !ENV "GITHUB_API_KEY"  urls: @@ -482,7 +495,7 @@ redirect_output:  duck_pond: -    threshold: 4 +    threshold: 5      channel_blacklist:          - *ANNOUNCEMENTS          - *PYNEWS_CHANNEL @@ -507,25 +520,17 @@ python_news:      webhook: *PYNEWS_WEBHOOK -verification: -    unverified_after: 3  # Days after which non-Developers receive the @Unverified role -    kicked_after: 30  # Days after which non-Developers get kicked from the guild -    reminder_frequency: 28  # Hours between @Unverified pings -    bot_message_delete_delay: 10  # Seconds before deleting bots response in #verification - -    # Number in range [0, 1] determining the percentage of unverified users that are safe -    # to be kicked from the guild in one batch, any larger amount will require staff confirmation, -    # set this to 0 to require explicit approval for batches of any size -    kick_confirmation_threshold: 0.01  # 1% - -  voice_gate: -    minimum_days_verified: 3  # How many days the user must have been verified for +    minimum_days_member: 3  # How many days the user must have been a member for      minimum_messages: 50  # How many messages a user must have to be eligible for voice      bot_message_delete_delay: 10  # Seconds before deleting bot's response in Voice Gate      minimum_activity_blocks: 3  # Number of 10 minute blocks during which a user must have been active      voice_ping_delete_delay: 60  # Seconds before deleting the bot's ping to user in Voice Gate +branding: +    cycle_frequency: 3  # How many days bot wait before refreshing server icon + +  config:      required_keys: ['bot.token'] diff --git a/tests/bot/exts/backend/sync/test_users.py 
b/tests/bot/exts/backend/sync/test_users.py index 61673e1bb..27932be95 100644 --- a/tests/bot/exts/backend/sync/test_users.py +++ b/tests/bot/exts/backend/sync/test_users.py @@ -188,30 +188,37 @@ class UserSyncerSyncTests(unittest.IsolatedAsyncioTestCase):      """Tests for the API requests that sync users."""      def setUp(self): -        patcher = mock.patch("bot.instance", new=helpers.MockBot()) -        self.bot = patcher.start() -        self.addCleanup(patcher.stop) +        bot_patcher = mock.patch("bot.instance", new=helpers.MockBot()) +        self.bot = bot_patcher.start() +        self.addCleanup(bot_patcher.stop) + +        chunk_patcher = mock.patch("bot.exts.backend.sync._syncers.CHUNK_SIZE", 2) +        self.chunk_size = chunk_patcher.start() +        self.addCleanup(chunk_patcher.stop) + +        self.chunk_count = 2 +        self.users = [fake_user(id=i) for i in range(self.chunk_size * self.chunk_count)]      async def test_sync_created_users(self):          """Only POST requests should be made with the correct payload.""" -        users = [fake_user(id=111), fake_user(id=222)] - -        diff = _Diff(users, [], None) +        diff = _Diff(self.users, [], None)          await UserSyncer._sync(diff) -        self.bot.api_client.post.assert_called_once_with("bot/users", json=diff.created) +        self.bot.api_client.post.assert_any_call("bot/users", json=diff.created[:self.chunk_size]) +        self.bot.api_client.post.assert_any_call("bot/users", json=diff.created[self.chunk_size:]) +        self.assertEqual(self.bot.api_client.post.call_count, self.chunk_count)          self.bot.api_client.put.assert_not_called()          self.bot.api_client.delete.assert_not_called()      async def test_sync_updated_users(self):          """Only PUT requests should be made with the correct payload.""" -        users = [fake_user(id=111), fake_user(id=222)] - -        diff = _Diff([], users, None) +        diff = _Diff([], self.users, None)          await 
UserSyncer._sync(diff) -        self.bot.api_client.patch.assert_called_once_with("bot/users/bulk_patch", json=diff.updated) +        self.bot.api_client.patch.assert_any_call("bot/users/bulk_patch", json=diff.updated[:self.chunk_size]) +        self.bot.api_client.patch.assert_any_call("bot/users/bulk_patch", json=diff.updated[self.chunk_size:]) +        self.assertEqual(self.bot.api_client.patch.call_count, self.chunk_count)          self.bot.api_client.post.assert_not_called()          self.bot.api_client.delete.assert_not_called() diff --git a/tests/bot/exts/info/test_information.py b/tests/bot/exts/info/test_information.py index daede54c5..d077be960 100644 --- a/tests/bot/exts/info/test_information.py +++ b/tests/bot/exts/info/test_information.py @@ -355,6 +355,7 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):          self.assertEqual(              textwrap.dedent(f"""                  Joined: {"1 year ago"} +                Verified: {"True"}                  Roles: &Moderators              """).strip(),              embed.fields[1].value diff --git a/tests/bot/exts/moderation/test_silence.py b/tests/bot/exts/moderation/test_silence.py index 104293d8e..fa5fc9e81 100644 --- a/tests/bot/exts/moderation/test_silence.py +++ b/tests/bot/exts/moderation/test_silence.py @@ -117,15 +117,6 @@ class SilenceCogTests(unittest.IsolatedAsyncioTestCase):          self.bot.get_guild.assert_called_once_with(Guild.id)      @autospec(silence, "SilenceNotifier", pass_mocks=False) -    async def test_async_init_got_role(self): -        """Got `Roles.verified` role from guild.""" -        guild = self.bot.get_guild() -        guild.get_role.side_effect = lambda id_: Mock(id=id_) - -        await self.cog._async_init() -        self.assertEqual(self.cog._verified_role.id, Roles.verified) - -    @autospec(silence, "SilenceNotifier", pass_mocks=False)      async def test_async_init_got_channels(self):          """Got channels from bot."""          
self.bot.get_channel.side_effect = lambda id_: MockTextChannel(id=id_) @@ -302,7 +293,7 @@ class SilenceTests(unittest.IsolatedAsyncioTestCase):          self.assertFalse(self.overwrite.send_messages)          self.assertFalse(self.overwrite.add_reactions)          self.channel.set_permissions.assert_awaited_once_with( -            self.cog._verified_role, +            self.cog._everyone_role,              overwrite=self.overwrite          ) @@ -435,7 +426,7 @@ class UnsilenceTests(unittest.IsolatedAsyncioTestCase):          """Channel's `send_message` and `add_reactions` overwrites were restored."""          await self.cog._unsilence(self.channel)          self.channel.set_permissions.assert_awaited_once_with( -            self.cog._verified_role, +            self.cog._everyone_role,              overwrite=self.overwrite,          ) @@ -449,7 +440,7 @@ class UnsilenceTests(unittest.IsolatedAsyncioTestCase):          await self.cog._unsilence(self.channel)          self.channel.set_permissions.assert_awaited_once_with( -            self.cog._verified_role, +            self.cog._everyone_role,              overwrite=self.overwrite,          ) diff --git a/tests/bot/exts/utils/test_jams.py b/tests/bot/exts/utils/test_jams.py index 45e7b5b51..85d6a1173 100644 --- a/tests/bot/exts/utils/test_jams.py +++ b/tests/bot/exts/utils/test_jams.py @@ -118,11 +118,9 @@ class JamCreateTeamTests(unittest.IsolatedAsyncioTestCase):              self.assertTrue(overwrites[member].read_messages)              self.assertTrue(overwrites[member].connect) -        # Everyone and verified role overwrite +        # Everyone role overwrite          self.assertFalse(overwrites[self.guild.default_role].read_messages)          self.assertFalse(overwrites[self.guild.default_role].connect) -        self.assertFalse(overwrites[self.guild.get_role(Roles.verified)].read_messages) -        self.assertFalse(overwrites[self.guild.get_role(Roles.verified)].connect)      async def 
test_team_channels_creation(self):          """Should create new voice and text channel for team.""" diff --git a/tests/bot/test_api.py b/tests/bot/test_api.py index 99e942813..76bcb481d 100644 --- a/tests/bot/test_api.py +++ b/tests/bot/test_api.py @@ -13,14 +13,6 @@ class APIClientTests(unittest.IsolatedAsyncioTestCase):          cls.error_api_response = MagicMock()          cls.error_api_response.status = 999 -    def test_loop_is_not_running_by_default(self): -        """The event loop should not be running by default.""" -        self.assertFalse(api.loop_is_running()) - -    async def test_loop_is_running_in_async_context(self): -        """The event loop should be running in an async context.""" -        self.assertTrue(api.loop_is_running()) -      def test_response_code_error_default_initialization(self):          """Test the default initialization of `ResponseCodeError` without `text` or `json`"""          error = api.ResponseCodeError(response=self.error_api_response) diff --git a/tests/helpers.py b/tests/helpers.py index 870f66197..496363ae3 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -230,7 +230,7 @@ class MockMember(CustomMockMixin, unittest.mock.Mock, ColourMixin, HashableMixin      spec_set = member_instance      def __init__(self, roles: Optional[Iterable[MockRole]] = None, **kwargs) -> None: -        default_kwargs = {'name': 'member', 'id': next(self.discord_id), 'bot': False} +        default_kwargs = {'name': 'member', 'id': next(self.discord_id), 'bot': False, "pending": False}          super().__init__(**collections.ChainMap(kwargs, default_kwargs))          self.roles = [MockRole(name="@everyone", position=1, id=0)] | 
