| author | 2021-01-02 20:23:17 -0800 |
|---|---|
| committer | 2021-01-02 20:23:17 -0800 |
| commit | 01406f86ad9b83d5b378c3e264c5d8b3e767ac4c (patch) |
| tree | 046895a131aaba337f1489485478f6678eec696e |
| parent | Merge branch 'feat/F4zi/CommandSuggestion' of https://github.com/python-disco... (diff) |
| parent | Merge pull request #1334 from python-discord/bug/precommit-pycharm (diff) |
Rebased after a long time of being abandoned
Since the cogs folder has been removed, the error_handler and tag cogs had to be removed and transferred into their respective places in the exts folder.
249 files changed, 19057 insertions, 8062 deletions
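For readers unfamiliar with the repository layout, the sketch below shows the general shape of an extension module living under `bot/exts/` in discord.py 1.x, the version this commit pins. It is a minimal illustration only; the module, class, and command names are placeholders and not the actual `error_handler` or `tag` cog code moved in this commit.

```py
# Illustrative extension module, e.g. bot/exts/utils/example.py.
# Names here are placeholders, not code from this commit.
from discord.ext import commands


class Example(commands.Cog):
    """A tiny cog with a single command."""

    def __init__(self, bot: commands.Bot) -> None:
        self.bot = bot

    @commands.command(name="ping")
    async def ping(self, ctx: commands.Context) -> None:
        """Respond to confirm the cog is loaded."""
        await ctx.send("pong")


def setup(bot: commands.Bot) -> None:
    """Called by discord.py when the extension is loaded via load_extension."""
    bot.add_cog(Example(bot))
```

With this layout, relocating a cog between folders mostly means updating the dotted path passed to `bot.load_extension` (for example `bot.exts.utils.example`), since the `setup` entry point stays the same.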
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index cf5f1590d..ad813d893 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1 +1,37 @@ -* @python-discord/core-developers +# Extensions +**/bot/exts/backend/sync/** @MarkKoz +**/bot/exts/filters/*token_remover.py @MarkKoz +**/bot/exts/moderation/*silence.py @MarkKoz +bot/exts/info/codeblock/** @MarkKoz +bot/exts/utils/extensions.py @MarkKoz +bot/exts/utils/snekbox.py @MarkKoz @Akarys42 +bot/exts/help_channels/** @MarkKoz @Akarys42 +bot/exts/moderation/** @Akarys42 @mbaruh @Den4200 @ks129 +bot/exts/info/** @Akarys42 @mbaruh @Den4200 +bot/exts/filters/** @mbaruh +bot/exts/fun/** @ks129 +bot/exts/utils/** @ks129 + +# Utils +bot/utils/extensions.py @MarkKoz +bot/utils/function.py @MarkKoz +bot/utils/lock.py @MarkKoz +bot/utils/regex.py @Akarys42 +bot/utils/scheduling.py @MarkKoz + +# Tests +tests/_autospec.py @MarkKoz +tests/bot/exts/test_cogs.py @MarkKoz +tests/** @Akarys42 + +# CI & Docker +.github/workflows/** @MarkKoz @Akarys42 @SebastiaanZ @Den4200 +Dockerfile @MarkKoz @Akarys42 @Den4200 +docker-compose.yml @MarkKoz @Akarys42 @Den4200 + +# Tools +Pipfile* @Akarys42 + +# Statistics +bot/async_stats.py @jb3 +bot/exts/info/stats.py @jb3 diff --git a/.github/review-policy.yml b/.github/review-policy.yml new file mode 100644 index 000000000..421b30f8a --- /dev/null +++ b/.github/review-policy.yml @@ -0,0 +1,3 @@ +remote: python-discord/.github +path: review-policies/core-developers.yml +ref: main diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 000000000..6c97e8784 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,59 @@ +name: Build + +on: + workflow_run: + workflows: ["Lint & Test"] + branches: + - master + types: + - completed + +jobs: + build: + if: github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.event == 'push' + name: Build & Push + runs-on: ubuntu-latest + + steps: + # Create a commit SHA-based tag for the container repositories + - name: Create SHA Container Tag + id: sha_tag + run: | + tag=$(cut -c 1-7 <<< $GITHUB_SHA) + echo "::set-output name=tag::$tag" + + - name: Checkout code + uses: actions/checkout@v2 + + # The current version (v2) of Docker's build-push action uses + # buildx, which comes with BuildKit features that help us speed + # up our builds using additional cache features. Buildx also + # has a lot of other features that are not as relevant to us. + # + # See https://github.com/docker/build-push-action + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + + - name: Login to Github Container Registry + uses: docker/login-action@v1 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GHCR_TOKEN }} + + # Build and push the container to the GitHub Container + # Repository. The container will be tagged as "latest" + # and with the short SHA of the commit. + - name: Build and push + uses: docker/build-push-action@v2 + with: + context: . 
+ file: ./Dockerfile + push: true + cache-from: type=registry,ref=ghcr.io/python-discord/bot:latest + cache-to: type=inline + tags: | + ghcr.io/python-discord/bot:latest + ghcr.io/python-discord/bot:${{ steps.sha_tag.outputs.tag }} + build-args: | + git_sha=${{ github.sha }} diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml new file mode 100644 index 000000000..5a4aede30 --- /dev/null +++ b/.github/workflows/deploy.yml @@ -0,0 +1,42 @@ +name: Deploy + +on: + workflow_run: + workflows: ["Build"] + branches: + - master + types: + - completed + +jobs: + build: + if: github.event.workflow_run.conclusion == 'success' + name: Build & Push + runs-on: ubuntu-latest + + steps: + - name: Create SHA Container Tag + id: sha_tag + run: | + tag=$(cut -c 1-7 <<< $GITHUB_SHA) + echo "::set-output name=tag::$tag" + + - name: Checkout code + uses: actions/checkout@v2 + with: + repository: python-discord/kubernetes + token: ${{ secrets.REPO_TOKEN }} + + - name: Authenticate with Kubernetes + uses: azure/k8s-set-context@v1 + with: + method: kubeconfig + kubeconfig: ${{ secrets.KUBECONFIG }} + + - name: Deploy to Kubernetes + uses: Azure/k8s-deploy@v1 + with: + manifests: | + bot/deployment.yaml + images: 'ghcr.io/python-discord/bot:${{ steps.sha_tag.outputs.tag }}' + kubectl-version: 'latest' diff --git a/.github/workflows/lint-test.yml b/.github/workflows/lint-test.yml new file mode 100644 index 000000000..6fa8e8333 --- /dev/null +++ b/.github/workflows/lint-test.yml @@ -0,0 +1,137 @@ +name: Lint & Test + +on: + push: + branches: + - master + pull_request: + + +jobs: + lint-test: + runs-on: ubuntu-latest + env: + # Dummy values for required bot environment variables + BOT_API_KEY: foo + BOT_SENTRY_DSN: blah + BOT_TOKEN: bar + REDDIT_CLIENT_ID: spam + REDDIT_SECRET: ham + REDIS_PASSWORD: '' + + # Configure pip to cache dependencies and do a user install + PIP_NO_CACHE_DIR: false + PIP_USER: 1 + + # Hide the graphical elements from pipenv's output + PIPENV_HIDE_EMOJIS: 1 + PIPENV_NOSPIN: 1 + + # Make sure pipenv does not try reuse an environment it's running in + PIPENV_IGNORE_VIRTUALENVS: 1 + + # Specify explicit paths for python dependencies and the pre-commit + # environment so we know which directories to cache + PYTHONUSERBASE: ${{ github.workspace }}/.cache/py-user-base + PRE_COMMIT_HOME: ${{ github.workspace }}/.cache/pre-commit-cache + + steps: + - name: Add custom PYTHONUSERBASE to PATH + run: echo '${{ env.PYTHONUSERBASE }}/bin/' >> $GITHUB_PATH + + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Setup python + id: python + uses: actions/setup-python@v2 + with: + python-version: '3.8' + + # This step caches our Python dependencies. To make sure we + # only restore a cache when the dependencies, the python version, + # the runner operating system, and the dependency location haven't + # changed, we create a cache key that is a composite of those states. + # + # Only when the context is exactly the same, we will restore the cache. 
+ - name: Python Dependency Caching + uses: actions/cache@v2 + id: python_cache + with: + path: ${{ env.PYTHONUSERBASE }} + key: "python-0-${{ runner.os }}-${{ env.PYTHONUSERBASE }}-\ + ${{ steps.python.outputs.python-version }}-\ + ${{ hashFiles('./Pipfile', './Pipfile.lock') }}" + + # Install our dependencies if we did not restore a dependency cache + - name: Install dependencies using pipenv + if: steps.python_cache.outputs.cache-hit != 'true' + run: | + pip install pipenv + pipenv install --dev --deploy --system + + # This step caches our pre-commit environment. To make sure we + # do create a new environment when our pre-commit setup changes, + # we create a cache key based on relevant factors. + - name: Pre-commit Environment Caching + uses: actions/cache@v2 + with: + path: ${{ env.PRE_COMMIT_HOME }} + key: "precommit-0-${{ runner.os }}-${{ env.PRE_COMMIT_HOME }}-\ + ${{ steps.python.outputs.python-version }}-\ + ${{ hashFiles('./.pre-commit-config.yaml') }}" + + # We will not run `flake8` here, as we will use a separate flake8 + # action. As pre-commit does not support user installs, we set + # PIP_USER=0 to not do a user install. + - name: Run pre-commit hooks + run: export PIP_USER=0; SKIP=flake8 pre-commit run --all-files + + # Run flake8 and have it format the linting errors in the format of + # the GitHub Workflow command to register error annotations. This + # means that our flake8 output is automatically added as an error + # annotation to both the run result and in the "Files" tab of a + # pull request. + # + # Format used: + # ::error file={filename},line={line},col={col}::{message} + - name: Run flake8 + run: "flake8 \ + --format='::error file=%(path)s,line=%(row)d,col=%(col)d::\ + [flake8] %(code)s: %(text)s'" + + # We run `coverage` using the `python` command so we can suppress + # irrelevant warnings in our CI output. + - name: Run tests and generate coverage report + run: | + python -Wignore -m coverage run -m unittest + coverage report -m + + # This step will publish the coverage reports coveralls.io and + # print a "job" link in the output of the GitHub Action + - name: Publish coverage report to coveralls.io + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: coveralls + + # Prepare the Pull Request Payload artifact. If this fails, we + # we fail silently using the `continue-on-error` option. It's + # nice if this succeeds, but if it fails for any reason, it + # does not mean that our lint-test checks failed. + - name: Prepare Pull Request Payload artifact + id: prepare-artifact + if: always() && github.event_name == 'pull_request' + continue-on-error: true + run: cat $GITHUB_EVENT_PATH | jq '.pull_request' > pull_request_payload.json + + # This only makes sense if the previous step succeeded. To + # get the original outcome of the previous step before the + # `continue-on-error` conclusion is applied, we use the + # `.outcome` value. This step also fails silently. 
+ - name: Upload a Build Artifact + if: always() && steps.prepare-artifact.outcome == 'success' + continue-on-error: true + uses: actions/upload-artifact@v2 + with: + name: pull-request-payload + path: pull_request_payload.json diff --git a/.github/workflows/sentry_release.yml b/.github/workflows/sentry_release.yml new file mode 100644 index 000000000..b8d92e90a --- /dev/null +++ b/.github/workflows/sentry_release.yml @@ -0,0 +1,24 @@ +name: Create Sentry release + +on: + push: + branches: + - master + +jobs: + create_sentry_release: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@master + + - name: Create a Sentry.io release + uses: tclindner/[email protected] + env: + SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} + SENTRY_ORG: python-discord + SENTRY_PROJECT: bot + with: + tagName: ${{ github.sha }} + environment: production + releaseNamePrefix: bot@ diff --git a/.github/workflows/status_embed.yaml b/.github/workflows/status_embed.yaml new file mode 100644 index 000000000..b6a71b887 --- /dev/null +++ b/.github/workflows/status_embed.yaml @@ -0,0 +1,78 @@ +name: Status Embed + +on: + workflow_run: + workflows: + - Lint & Test + - Build + - Deploy + types: + - completed + +jobs: + status_embed: + # We need to send a status embed whenever the workflow + # sequence we're running terminates. There are a number + # of situations in which that happens: + # + # 1. We reach the end of the Deploy workflow, without + # it being skipped. + # + # 2. A `pull_request` triggered a Lint & Test workflow, + # as the sequence always terminates with one run. + # + # 3. If any workflow ends in failure or was cancelled. + if: >- + (github.event.workflow_run.name == 'Deploy' && github.event.workflow_run.conclusion != 'skipped') || + github.event.workflow_run.event == 'pull_request' || + github.event.workflow_run.conclusion == 'failure' || + github.event.workflow_run.conclusion == 'cancelled' + name: Send Status Embed to Discord + runs-on: ubuntu-latest + + steps: + # A workflow_run event does not contain all the information + # we need for a PR embed. That's why we upload an artifact + # with that information in the Lint workflow. + - name: Get Pull Request Information + id: pr_info + if: github.event.workflow_run.event == 'pull_request' + run: | + curl -s -H "Authorization: token $GITHUB_TOKEN" ${{ github.event.workflow_run.artifacts_url }} > artifacts.json + DOWNLOAD_URL=$(cat artifacts.json | jq -r '.artifacts[] | select(.name == "pull-request-payload") | .archive_download_url') + [ -z "$DOWNLOAD_URL" ] && exit 1 + wget --quiet --header="Authorization: token $GITHUB_TOKEN" -O pull_request_payload.zip $DOWNLOAD_URL || exit 2 + unzip -p pull_request_payload.zip > pull_request_payload.json + [ -s pull_request_payload.json ] || exit 3 + echo "::set-output name=pr_author_login::$(jq -r '.user.login // empty' pull_request_payload.json)" + echo "::set-output name=pr_number::$(jq -r '.number // empty' pull_request_payload.json)" + echo "::set-output name=pr_title::$(jq -r '.title // empty' pull_request_payload.json)" + echo "::set-output name=pr_source::$(jq -r '.head.label // empty' pull_request_payload.json)" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + # Send an informational status embed to Discord instead of the + # standard embeds that Discord sends. This embed will contain + # more information and we can fine tune when we actually want + # to send an embed. 
+ - name: GitHub Actions Status Embed for Discord + uses: SebastiaanZ/[email protected] + with: + # Our GitHub Actions webhook + webhook_id: '784184528997842985' + webhook_token: ${{ secrets.GHA_WEBHOOK_TOKEN }} + + # Workflow information + workflow_name: ${{ github.event.workflow_run.name }} + run_id: ${{ github.event.workflow_run.id }} + run_number: ${{ github.event.workflow_run.run_number }} + status: ${{ github.event.workflow_run.conclusion }} + actor: ${{ github.actor }} + repository: ${{ github.repository }} + ref: ${{ github.ref }} + sha: ${{ github.event.workflow_run.head_sha }} + + pr_author_login: ${{ steps.pr_info.outputs.pr_author_login }} + pr_number: ${{ steps.pr_info.outputs.pr_number }} + pr_title: ${{ steps.pr_info.outputs.pr_title }} + pr_source: ${{ steps.pr_info.outputs.pr_source }} diff --git a/.gitignore b/.gitignore index fb3156ab1..9186dbe06 100644 --- a/.gitignore +++ b/.gitignore @@ -110,6 +110,8 @@ ENV/ # Logfiles log.* +*.log.* +!log.py # Custom user configuration config.yml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 860357868..1597592ca 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,10 +1,26 @@ repos: -- repo: local + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v2.5.0 hooks: - - id: flake8 + - id: check-merge-conflict + - id: check-toml + - id: check-yaml + args: [--unsafe] # Required due to custom constructors (e.g. !ENV) + - id: end-of-file-fixer + - id: mixed-line-ending + args: [--fix=lf] + - id: trailing-whitespace + args: [--markdown-linebreak-ext=md] + - repo: https://github.com/pre-commit/pygrep-hooks + rev: v1.5.1 + hooks: + - id: python-check-blanket-noqa + - repo: local + hooks: + - id: flake8 name: Flake8 description: This hook runs flake8 within our project's pipenv environment. - entry: pipenv run lint - language: python + entry: pipenv run flake8 + language: system types: [python] - require_serial: true
\ No newline at end of file + require_serial: true diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 39f76c7b4..be591d17e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,4 +1,4 @@ -# Contributing to one of our projects +# Contributing to one of Our Projects Our projects are open-source and are automatically deployed whenever commits are pushed to the `master` branch on each repository, so we've created a set of guidelines in order to keep everything clean and in working order. @@ -10,12 +10,12 @@ Note that contributions may be rejected on the basis of a contributor failing to 2. If you have direct access to the repository, **create a branch for your changes** and create a pull request for that branch. If not, create a branch on a fork of the repository and create a pull request from there. * It's common practice for a repository to reject direct pushes to `master`, so make branching a habit! * If PRing from your own fork, **ensure that "Allow edits from maintainers" is checked**. This gives permission for maintainers to commit changes directly to your fork, speeding up the review process. -3. **Adhere to the prevailing code style**, which we enforce using [flake8](http://flake8.pycqa.org/en/latest/index.html). - * Run `flake8` against your code **before** you push it. Your commit will be rejected by the build server if it fails to lint. - * [Git Hooks](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks) are a powerful tool that can be a daunting to set up. Fortunately, [`pre-commit`](https://github.com/pre-commit/pre-commit) abstracts this process away from you and is provided as a dev dependency for this project. Run `pipenv run precommit` when setting up the project and you'll never have to worry about breaking the build for linting errors. +3. **Adhere to the prevailing code style**, which we enforce using [`flake8`](http://flake8.pycqa.org/en/latest/index.html) and [`pre-commit`](https://pre-commit.com/). + * Run `flake8` and `pre-commit` against your code [**before** you push it](https://soundcloud.com/lemonsaurusrex/lint-before-you-push). Your commit will be rejected by the build server if it fails to lint. + * [Git Hooks](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks) are a powerful git feature for executing custom scripts when certain important git actions occur. The pre-commit hook is the first hook executed during the commit process and can be used to check the code being committed & abort the commit if issues, such as linting failures, are detected. While git hooks can seem daunting to configure, the `pre-commit` framework abstracts this process away from you and is provided as a dev dependency for this project. Run `pipenv run precommit` when setting up the project and you'll never have to worry about committing code that fails linting. 4. **Make great commits**. A well structured git log is key to a project's maintainability; it efficiently provides insight into when and *why* things were done for future maintainers of the project. * Commits should be as narrow in scope as possible. Commits that span hundreds of lines across multiple unrelated functions and/or files are very hard for maintainers to follow. After about a week they'll probably be hard for you to follow too. - * Try to avoid making minor commits for fixing typos or linting errors. Since you've already set up a pre-commit hook to run `flake8` before a commit, you shouldn't be committing linting issues anyway. + * Avoid making minor commits for fixing typos or linting errors. 
Since you've already set up a `pre-commit` hook to run the linting pipeline before a commit, you shouldn't be committing linting issues anyway. * A more in-depth guide to writing great commit messages can be found in Chris Beam's [*How to Write a Git Commit Message*](https://chris.beams.io/posts/git-commit/) 5. **Avoid frequent pushes to the main repository**. This goes for PRs opened against your fork as well. Our test build pipelines are triggered every time a push to the repository (or PR) is made. Try to batch your commits until you've finished working for that session, or you've reached a point where collaborators need your commits to continue their own work. This also provides you the opportunity to amend commits for minor changes rather than having to commit them on their own because you've already pushed. * This includes merging master into your branch. Try to leave merging from master for after your PR passes review; a maintainer will bring your PR up to date before merging. Exceptions to this include: resolving merge conflicts, needing something that was pushed to master for your branch, or something was pushed to master that could potentionally affect the functionality of what you're writing. @@ -24,13 +24,12 @@ Note that contributions may be rejected on the basis of a contributor failing to * One option is to fork the other contributor's repository and submit your changes to their branch with your own pull request. We suggest following these guidelines when interacting with their repository as well. * The author(s) of inactive PRs and claimed issues will be be pinged after a week of inactivity for an update. Continued inactivity may result in the issue being released back to the community and/or PR closure. 8. **Work as a team** and collaborate wherever possible. Keep things friendly and help each other out - these are shared projects and nobody likes to have their feet trodden on. -9. **Internal projects are internal**. As a contributor, you have access to information that the rest of the server does not. With this trust comes responsibility - do not release any information you have learned as a result of your contributor position. We are very strict about announcing things at specific times, and many staff members will not appreciate a disruption of the announcement schedule. -10. All static content, such as images or audio, **must be licensed for open public use**. +9. All static content, such as images or audio, **must be licensed for open public use**. * Static content must be hosted by a service designed to do so. Failing to do so is known as "leeching" and is frowned upon, as it generates extra bandwidth costs to the host without providing benefit. It would be best if appropriately licensed content is added to the repository itself so it can be served by PyDis' infrastructure. -Above all, the needs of our community should come before the wants of an individual. Work together, build solutions to problems and try to do so in a way that people can learn from easily. Abuse of our trust may result in the loss of your Contributor role, especially in relation to Rule 7. +Above all, the needs of our community should come before the wants of an individual. Work together, build solutions to problems and try to do so in a way that people can learn from easily. Abuse of our trust may result in the loss of your Contributor role. -## Changes to this arrangement +## Changes to this Arrangement All projects evolve over time, and this contribution guide is no different. 
This document is open to pull requests or changes by contributors. If you believe you have something valuable to add or change, please don't hesitate to do so in a PR. @@ -43,15 +42,19 @@ To provide a standalone development environment for this project, docker compose When pulling down changes from GitHub, remember to sync your environment using `pipenv sync --dev` to ensure you're using the most up-to-date versions the project's dependencies. ### Type Hinting -[PEP 484](https://www.python.org/dev/peps/pep-0484/) formally specifies type hints for Python functions, added to the Python Standard Library in version 3.5. Type hints are recognized by most modern code editing tools and provide useful insight into both the input and output types of a function, preventing the user from having to go through the codebase to determine these types. +[PEP 484](https://www.python.org/dev/peps/pep-0484/) formally specifies type hints for Python functions, added to the Python Standard Library in version 3.5. Type hints are recognized by most modern code editing tools and provide useful insight into both the input and output types of a function, preventing the user from having to go through the codebase to determine these types. For example: ```py -def foo(input_1: int, input_2: dict) -> bool: +import typing as t + + +def foo(input_1: int, input_2: t.Dict[str, str]) -> bool: + ... ``` -Tells us that `foo` accepts an `int` and a `dict` and returns a `bool`. +Tells us that `foo` accepts an `int` and a `dict`, with `str` keys and values, and returns a `bool`. All function declarations should be type hinted in code contributed to the PyDis organization. @@ -63,15 +66,19 @@ Many documentation packages provide support for automatic documentation generati For example: ```py -def foo(bar: int, baz: dict=None) -> bool: +import typing as t + + +def foo(bar: int, baz: t.Optional[t.Dict[str, str]] = None) -> bool: """ Does some things with some stuff. :param bar: Some input - :param baz: Optional, some other input + :param baz: Optional, some dictionary with string keys and values :return: Some boolean """ + ... ``` Since PyDis does not utilize automatic documentation generation, use of this syntax should not be used in code contributed to the organization. Should the purpose and type of the input variables not be easily discernable from the variable name and type annotation, a prose explanation can be used. Explicit references to variables, functions, classes, etc. should be wrapped with backticks (`` ` ``). @@ -79,25 +86,33 @@ Since PyDis does not utilize automatic documentation generation, use of this syn For example, the above docstring would become: ```py -def foo(bar: int, baz: dict=None) -> bool: +import typing as t + + +def foo(bar: int, baz: t.Optional[t.Dict[str, str]] = None) -> bool: """ Does some things with some stuff. This function takes an index, `bar` and checks for its presence in the database `baz`, passed as a dictionary. Returns `False` if `baz` is not passed. """ + ... ``` ### Logging Levels -The project currently defines [`logging`](https://docs.python.org/3/library/logging.html) levels as follows: -* **TRACE:** Use this for tracing every step of a complex process. That way we can see which step of the process failed. Err on the side of verbose. **Note:** This is a PyDis-implemented logging level. -* **DEBUG:** Someone is interacting with the application, and the application is behaving as expected. -* **INFO:** Something completely ordinary happened. Like a cog loading during startup. 
-* **WARNING:** Someone is interacting with the application in an unexpected way or the application is responding in an unexpected way, but without causing an error. -* **ERROR:** An error that affects the specific part that is being interacted with -* **CRITICAL:** An error that affects the whole application. +The project currently defines [`logging`](https://docs.python.org/3/library/logging.html) levels as follows, from lowest to highest severity: +* **TRACE:** These events should be used to provide a *verbose* trace of every step of a complex process. This is essentially the `logging` equivalent of sprinkling `print` statements throughout the code. + * **Note:** This is a PyDis-implemented logging level. +* **DEBUG:** These events should add context to what's happening in a development setup to make it easier to follow what's going while working on a project. This is in the same vein as **TRACE** logging but at a much lower level of verbosity. +* **INFO:** These events are normal and don't need direct attention but are worth keeping track of in production, like checking which cogs were loaded during a start-up. +* **WARNING:** These events are out of the ordinary and should be fixed, but have not caused a failure. + * **NOTE:** Events at this logging level and higher should be reserved for events that require the attention of the DevOps team. +* **ERROR:** These events have caused a failure in a specific part of the application and require urgent attention. +* **CRITICAL:** These events have caused the whole application to fail and require immediate intervention. + +Ensure that log messages are succinct. Should you want to pass additional useful information that would otherwise make the log message overly verbose the `logging` module accepts an `extra` kwarg, which can be used to pass a dictionary. This is used to populate the `__dict__` of the `LogRecord` created for the logging event with user-defined attributes that can be accessed by a log handler. Additional information and caveats may be found [in Python's `logging` documentation](https://docs.python.org/3/library/logging.html#logging.Logger.debug). ### Work in Progress (WIP) PRs -Github [has introduced a new PR feature](https://github.blog/2019-02-14-introducing-draft-pull-requests/) that allows the PR author to mark it as a WIP. This provides both a visual and functional indicator that the contents of the PR are in a draft state and not yet ready for formal review. +Github [provides a PR feature](https://github.blog/2019-02-14-introducing-draft-pull-requests/) that allows the PR author to mark it as a WIP. This provides both a visual and functional indicator that the contents of the PR are in a draft state and not yet ready for formal review. This feature should be utilized in place of the traditional method of prepending `[WIP]` to the PR title. diff --git a/Dockerfile b/Dockerfile index 271c25050..5d0380b44 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,20 +1,32 @@ -FROM python:3.7-slim +FROM python:3.8-slim + +# Define Git SHA build argument +ARG git_sha="development" # Set pip to have cleaner logs and no saved cache ENV PIP_NO_CACHE_DIR=false \ PIPENV_HIDE_EMOJIS=1 \ PIPENV_IGNORE_VIRTUALENVS=1 \ - PIPENV_NOSPIN=1 + PIPENV_NOSPIN=1 \ + GIT_SHA=$git_sha + +RUN apt-get -y update \ + && apt-get install -y \ + git \ + && rm -rf /var/lib/apt/lists/* # Install pipenv RUN pip install -U pipenv -# Copy project files into working directory +# Create the working directory WORKDIR /bot -COPY . . 
# Install project dependencies +COPY Pipfile* ./ RUN pipenv install --system --deploy +# Copy the source code in last to optimize rebuilding the image +COPY . . + ENTRYPOINT ["python3"] CMD ["-m", "bot"] diff --git a/LICENSE-THIRD-PARTY b/LICENSE-THIRD-PARTY new file mode 100644 index 000000000..eacd9b952 --- /dev/null +++ b/LICENSE-THIRD-PARTY @@ -0,0 +1,88 @@ +--------------------------------------------------------------------------------------------------- + BSD 3-Clause License +Applies to: + - Copyright (c) 2008-Present, IPython Development Team + Copyright (c) 2001-2007, Fernando Perez <[email protected]> + Copyright (c) 2001, Janko Hauser <[email protected]> + Copyright (c) 2001, Nathaniel Gray <[email protected]> + All rights reserved. + - bot/exts/info/codeblock/_parsing.py: _RE_PYTHON_REPL and portions of _RE_IPYTHON_REPL +--------------------------------------------------------------------------------------------------- + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +--------------------------------------------------------------------------------------------------- + PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +Applies to: + - Copyright © 2001-2020 Python Software Foundation. All rights reserved. + - tests/_autospec.py: _decoration_helper +--------------------------------------------------------------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. 
Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation; +All Rights Reserved" are retained in Python alone or in any derivative version +prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. 
@@ -4,45 +4,53 @@ verify_ssl = true name = "pypi" [packages] -discord-py = "~=1.3.1" +aio-pika = "~=6.1" aiodns = "~=2.0" aiohttp = "~=3.5" -sphinx = "~=2.2" -markdownify = "~=0.4" -lxml = "~=4.4" -pyyaml = "~=5.1" +aioping = "~=0.3.1" +aioredis = "~=1.3.1" +"async-rediscache[fakeredis]" = "~=0.1.2" +beautifulsoup4 = "~=4.9" +colorama = {version = "~=0.4.3",sys_platform = "== 'win32'"} +coloredlogs = "~=14.0" +deepdiff = "~=4.0" +"discord.py" = {git = "https://github.com/Rapptz/discord.py.git", ref = "93f102ca907af6722ee03638766afd53dfe93a7f"} +feedparser = "~=5.2" fuzzywuzzy = "~=0.17" -aio-pika = "~=6.1" +lxml = "~=4.4" +markdownify = "==0.5.3" +more_itertools = "~=8.2" python-dateutil = "~=2.8" -deepdiff = "~=4.0" +pyyaml = "~=5.1" requests = "~=2.22" -more_itertools = "~=7.2" -urllib3 = ">=1.24.2,<1.25" -sentry-sdk = "~=0.14" +sentry-sdk = "~=0.19" +sphinx = "~=2.2" +statsd = "~=3.3" +emoji = "~=0.6" [dev-packages] -coverage = "~=4.5" -flake8 = "~=3.7" +coverage = "~=5.0" +flake8 = "~=3.8" flake8-annotations = "~=2.0" -flake8-bugbear = "~=19.8" +flake8-bugbear = "~=20.1" flake8-docstrings = "~=1.4" flake8-import-order = "~=0.18" flake8-string-format = "~=0.2" -flake8-tidy-imports = "~=2.0" +flake8-tidy-imports = "~=4.0" flake8-todo = "~=0.7" -pre-commit = "~=1.18" -safety = "~=1.8" -unittest-xml-reporting = "~=2.5" -dodgy = "~=0.1" +pep8-naming = "~=0.9" +pre-commit = "~=2.1" +coveralls = "~=2.1" [requires] -python_version = "3.7" +python_version = "3.8" [scripts] start = "python -m bot" -lint = "python -m flake8" +lint = "pre-commit run --all-files" precommit = "pre-commit install" -build = "docker build -t pythondiscord/bot:latest -f Dockerfile ." -push = "docker push pythondiscord/bot:latest" +build = "docker build -t ghcr.io/python-discord/bot:latest -f Dockerfile ." 
+push = "docker push ghcr.io/python-discord/bot:latest" test = "coverage run -m unittest" +html = "coverage html" report = "coverage report" diff --git a/Pipfile.lock b/Pipfile.lock index 6a8a982a4..6606a3791 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,11 +1,11 @@ { "_meta": { "hash": { - "sha256": "c7706a61eb96c06d073898018ea2dbcf5bd3b15d007496e2d60120a65647f31e" + "sha256": "1ba637e521c654a23bcc82950e155f5366219eae00bbf809170a371122961a4f" }, "pipfile-spec": 6, "requires": { - "python_version": "3.7" + "python_version": "3.8" }, "sources": [ { @@ -18,11 +18,11 @@ "default": { "aio-pika": { "hashes": [ - "sha256:4199122a450dffd8303b7857a9d82657bf1487fe329e489520833b40fbe92406", - "sha256:fe85c7456e5c060bce4eb9cffab5b2c4d3c563cb72177977b3556c54c8e3aeb6" + "sha256:9773440a89840941ac3099a7720bf9d51e8764a484066b82ede4d395660ff430", + "sha256:a8065be3c722eb8f9fff8c0e7590729e7782202cdb9363d9830d7d5d47b45c7c" ], "index": "pypi", - "version": "==6.5.2" + "version": "==6.7.1" }, "aiodns": { "hashes": [ @@ -34,28 +34,70 @@ }, "aiohttp": { "hashes": [ - "sha256:1e984191d1ec186881ffaed4581092ba04f7c61582a177b187d3a2f07ed9719e", - "sha256:259ab809ff0727d0e834ac5e8a283dc5e3e0ecc30c4d80b3cd17a4139ce1f326", - "sha256:2f4d1a4fdce595c947162333353d4a44952a724fba9ca3205a3df99a33d1307a", - "sha256:32e5f3b7e511aa850829fbe5aa32eb455e5534eaa4b1ce93231d00e2f76e5654", - "sha256:344c780466b73095a72c616fac5ea9c4665add7fc129f285fbdbca3cccf4612a", - "sha256:460bd4237d2dbecc3b5ed57e122992f60188afe46e7319116da5eb8a9dfedba4", - "sha256:4c6efd824d44ae697814a2a85604d8e992b875462c6655da161ff18fd4f29f17", - "sha256:50aaad128e6ac62e7bf7bd1f0c0a24bc968a0c0590a726d5a955af193544bcec", - "sha256:6206a135d072f88da3e71cc501c59d5abffa9d0bb43269a6dcd28d66bfafdbdd", - "sha256:65f31b622af739a802ca6fd1a3076fd0ae523f8485c52924a89561ba10c49b48", - "sha256:ae55bac364c405caa23a4f2d6cfecc6a0daada500274ffca4a9230e7129eac59", - "sha256:b778ce0c909a2653741cb4b1ac7015b5c130ab9c897611df43ae6a58523cb965" + "sha256:0b795072bb1bf87b8620120a6373a3c61bfcb8da7e5c2377f4bb23ff4f0b62c9", + "sha256:0d438c8ca703b1b714e82ed5b7a4412c82577040dadff479c08405e2a715564f", + "sha256:16a3cb5df5c56f696234ea9e65e227d1ebe9c18aa774d36ff42f532139066a5f", + "sha256:1edfd82a98c5161497bbb111b2b70c0813102ad7e0aa81cbeb34e64c93863005", + "sha256:2406dc1dda01c7f6060ab586e4601f18affb7a6b965c50a8c90ff07569cf782a", + "sha256:2858b2504c8697beb9357be01dc47ef86438cc1cb36ecb6991796d19475faa3e", + "sha256:2a7b7640167ab536c3cb90cfc3977c7094f1c5890d7eeede8b273c175c3910fd", + "sha256:3228b7a51e3ed533f5472f54f70fd0b0a64c48dc1649a0f0e809bec312934d7a", + "sha256:328b552513d4f95b0a2eea4c8573e112866107227661834652a8984766aa7656", + "sha256:39f4b0a6ae22a1c567cb0630c30dd082481f95c13ca528dc501a7766b9c718c0", + "sha256:3b0036c978cbcc4a4512278e98e3e6d9e6b834dc973206162eddf98b586ef1c6", + "sha256:3ea8c252d8df5e9166bcf3d9edced2af132f4ead8ac422eac723c5781063709a", + "sha256:41608c0acbe0899c852281978492f9ce2c6fbfaf60aff0cefc54a7c4516b822c", + "sha256:59d11674964b74a81b149d4ceaff2b674b3b0e4d0f10f0be1533e49c4a28408b", + "sha256:5e479df4b2d0f8f02133b7e4430098699450e1b2a826438af6bec9a400530957", + "sha256:684850fb1e3e55c9220aad007f8386d8e3e477c4ec9211ae54d968ecdca8c6f9", + "sha256:6ccc43d68b81c424e46192a778f97da94ee0630337c9bbe5b2ecc9b0c1c59001", + "sha256:6d42debaf55450643146fabe4b6817bb2a55b23698b0434107e892a43117285e", + "sha256:710376bf67d8ff4500a31d0c207b8941ff4fba5de6890a701d71680474fe2a60", + "sha256:756ae7efddd68d4ea7d89c636b703e14a0c686688d42f588b90778a3c2fc0564", + 
"sha256:77149002d9386fae303a4a162e6bce75cc2161347ad2ba06c2f0182561875d45", + "sha256:78e2f18a82b88cbc37d22365cf8d2b879a492faedb3f2975adb4ed8dfe994d3a", + "sha256:7d9b42127a6c0bdcc25c3dcf252bb3ddc70454fac593b1b6933ae091396deb13", + "sha256:8389d6044ee4e2037dca83e3f6994738550f6ee8cfb746762283fad9b932868f", + "sha256:9c1a81af067e72261c9cbe33ea792893e83bc6aa987bfbd6fdc1e5e7b22777c4", + "sha256:c1e0920909d916d3375c7a1fdb0b1c78e46170e8bb42792312b6eb6676b2f87f", + "sha256:c68fdf21c6f3573ae19c7ee65f9ff185649a060c9a06535e9c3a0ee0bbac9235", + "sha256:c733ef3bdcfe52a1a75564389bad4064352274036e7e234730526d155f04d914", + "sha256:c9c58b0b84055d8bc27b7df5a9d141df4ee6ff59821f922dd73155861282f6a3", + "sha256:d03abec50df423b026a5aa09656bd9d37f1e6a49271f123f31f9b8aed5dc3ea3", + "sha256:d2cfac21e31e841d60dc28c0ec7d4ec47a35c608cb8906435d47ef83ffb22150", + "sha256:dcc119db14757b0c7bce64042158307b9b1c76471e655751a61b57f5a0e4d78e", + "sha256:df3a7b258cc230a65245167a202dd07320a5af05f3d41da1488ba0fa05bc9347", + "sha256:df48a623c58180874d7407b4d9ec06a19b84ed47f60a3884345b1a5099c1818b", + "sha256:e1b95972a0ae3f248a899cdbac92ba2e01d731225f566569311043ce2226f5e7", + "sha256:f326b3c1bbfda5b9308252ee0dcb30b612ee92b0e105d4abec70335fab5b1245", + "sha256:f411cb22115cb15452d099fec0ee636b06cf81bfb40ed9c02d30c8dc2bc2e3d1" ], "index": "pypi", - "version": "==3.6.2" + "version": "==3.7.3" + }, + "aioping": { + "hashes": [ + "sha256:8900ef2f5a589ba0c12aaa9c2d586f5371820d468d21b374ddb47ef5fc8f297c", + "sha256:f983d86acab3a04c322731ce88d42c55d04d2842565fc8532fe10c838abfd275" + ], + "index": "pypi", + "version": "==0.3.1" + }, + "aioredis": { + "hashes": [ + "sha256:15f8af30b044c771aee6787e5ec24694c048184c7b9e54c3b60c750a4b93273a", + "sha256:b61808d7e97b7cd5a92ed574937a079c9387fdadd22bfbfa7ad2fd319ecc26e3" + ], + "index": "pypi", + "version": "==1.3.1" }, "aiormq": { "hashes": [ - "sha256:286e0b0772075580466e45f98f051b9728a9316b9c36f0c14c7bc1409be375b0", - "sha256:7ed7d6df6b57af7f8bce7d1ebcbdfc32b676192e46703e81e9e217316e56b5bd" + "sha256:8218dd9f7198d6e7935855468326bbacf0089f926c70baa8dd92944cb2496573", + "sha256:e584dac13a242589aaf42470fd3006cb0dc5aed6506cbd20357c7ec8bbe4a89e" ], - "version": "==3.2.1" + "markers": "python_version >= '3.6'", + "version": "==3.3.1" }, "alabaster": { "hashes": [ @@ -64,74 +106,98 @@ ], "version": "==0.7.12" }, + "async-rediscache": { + "extras": [ + "fakeredis" + ], + "hashes": [ + "sha256:6be8a657d724ccbcfb1946d29a80c3478c5f9ecd2f78a0a26d2f4013a622258f", + "sha256:c25e4fff73f64d20645254783c3224a4c49e083e3fab67c44f17af944c5e26af" + ], + "index": "pypi", + "markers": "python_version ~= '3.7'", + "version": "==0.1.4" + }, "async-timeout": { "hashes": [ "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f", "sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3" ], + "markers": "python_full_version >= '3.5.3'", "version": "==3.0.1" }, "attrs": { "hashes": [ - "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", - "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" + "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", + "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" ], - "version": "==19.3.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==20.3.0" }, "babel": { "hashes": [ - "sha256:1aac2ae2d0d8ea368fa90906567f5c08463d98ade155c0c4bfedd6a0f7160e38", - 
"sha256:d670ea0b10f8b723672d3a6abeb87b565b244da220d76b4dba1b66269ec152d4" + "sha256:9d35c22fcc79893c3ecc85ac4a56cde1ecf3f19c540bba0922308a6c06ca6fa5", + "sha256:da031ab54472314f210b0adcff1588ee5d1d1d0ba4dbd07b94dba82bde791e05" ], - "version": "==2.8.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.9.0" }, "beautifulsoup4": { "hashes": [ - "sha256:05fd825eb01c290877657a56df4c6e4c311b3965bda790c613a3d6fb01a5462a", - "sha256:9fbb4d6e48ecd30bcacc5b63b94088192dcda178513b2ae3c394229f8911b887", - "sha256:e1505eeed31b0f4ce2dbb3bc8eb256c04cc2b3b72af7d551a4ab6efd5cbe5dae" + "sha256:4c98143716ef1cb40bf7f39a8e3eec8f8b009509e74904ba3a7b315431577e35", + "sha256:84729e322ad1d5b4d25f805bfa05b902dd96450f43842c4e99067d5e1369eb25", + "sha256:fff47e031e34ec82bf17e00da8f592fe7de69aeea38be00523c04623c04fb666" ], - "version": "==4.8.2" + "index": "pypi", + "version": "==4.9.3" }, "certifi": { "hashes": [ - "sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3", - "sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f" + "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c", + "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830" ], - "version": "==2019.11.28" + "version": "==2020.12.5" }, "cffi": { "hashes": [ - "sha256:001bf3242a1bb04d985d63e138230802c6c8d4db3668fb545fb5005ddf5bb5ff", - "sha256:00789914be39dffba161cfc5be31b55775de5ba2235fe49aa28c148236c4e06b", - "sha256:028a579fc9aed3af38f4892bdcc7390508adabc30c6af4a6e4f611b0c680e6ac", - "sha256:14491a910663bf9f13ddf2bc8f60562d6bc5315c1f09c704937ef17293fb85b0", - "sha256:1cae98a7054b5c9391eb3249b86e0e99ab1e02bb0cc0575da191aedadbdf4384", - "sha256:2089ed025da3919d2e75a4d963d008330c96751127dd6f73c8dc0c65041b4c26", - "sha256:2d384f4a127a15ba701207f7639d94106693b6cd64173d6c8988e2c25f3ac2b6", - "sha256:337d448e5a725bba2d8293c48d9353fc68d0e9e4088d62a9571def317797522b", - "sha256:399aed636c7d3749bbed55bc907c3288cb43c65c4389964ad5ff849b6370603e", - "sha256:3b911c2dbd4f423b4c4fcca138cadde747abdb20d196c4a48708b8a2d32b16dd", - "sha256:3d311bcc4a41408cf5854f06ef2c5cab88f9fded37a3b95936c9879c1640d4c2", - "sha256:62ae9af2d069ea2698bf536dcfe1e4eed9090211dbaafeeedf5cb6c41b352f66", - "sha256:66e41db66b47d0d8672d8ed2708ba91b2f2524ece3dee48b5dfb36be8c2f21dc", - "sha256:675686925a9fb403edba0114db74e741d8181683dcf216be697d208857e04ca8", - "sha256:7e63cbcf2429a8dbfe48dcc2322d5f2220b77b2e17b7ba023d6166d84655da55", - "sha256:8a6c688fefb4e1cd56feb6c511984a6c4f7ec7d2a1ff31a10254f3c817054ae4", - "sha256:8c0ffc886aea5df6a1762d0019e9cb05f825d0eec1f520c51be9d198701daee5", - "sha256:95cd16d3dee553f882540c1ffe331d085c9e629499ceadfbda4d4fde635f4b7d", - "sha256:99f748a7e71ff382613b4e1acc0ac83bf7ad167fb3802e35e90d9763daba4d78", - "sha256:b8c78301cefcf5fd914aad35d3c04c2b21ce8629b5e4f4e45ae6812e461910fa", - "sha256:c420917b188a5582a56d8b93bdd8e0f6eca08c84ff623a4c16e809152cd35793", - "sha256:c43866529f2f06fe0edc6246eb4faa34f03fe88b64a0a9a942561c8e22f4b71f", - "sha256:cab50b8c2250b46fe738c77dbd25ce017d5e6fb35d3407606e7a4180656a5a6a", - "sha256:cef128cb4d5e0b3493f058f10ce32365972c554572ff821e175dbc6f8ff6924f", - "sha256:cf16e3cf6c0a5fdd9bc10c21687e19d29ad1fe863372b5543deaec1039581a30", - "sha256:e56c744aa6ff427a607763346e4170629caf7e48ead6921745986db3692f987f", - "sha256:e577934fc5f8779c554639376beeaa5657d54349096ef24abe8c74c5d9c117c3", - "sha256:f2b0fa0c01d8a0c7483afd9f31d7ecf2d71760ca24499c8697aeb5ca37dc090c" - ], - "version": "==1.14.0" + 
"sha256:00a1ba5e2e95684448de9b89888ccd02c98d512064b4cb987d48f4b40aa0421e", + "sha256:00e28066507bfc3fe865a31f325c8391a1ac2916219340f87dfad602c3e48e5d", + "sha256:045d792900a75e8b1e1b0ab6787dd733a8190ffcf80e8c8ceb2fb10a29ff238a", + "sha256:0638c3ae1a0edfb77c6765d487fee624d2b1ee1bdfeffc1f0b58c64d149e7eec", + "sha256:105abaf8a6075dc96c1fe5ae7aae073f4696f2905fde6aeada4c9d2926752362", + "sha256:155136b51fd733fa94e1c2ea5211dcd4c8879869008fc811648f16541bf99668", + "sha256:1a465cbe98a7fd391d47dce4b8f7e5b921e6cd805ef421d04f5f66ba8f06086c", + "sha256:1d2c4994f515e5b485fd6d3a73d05526aa0fcf248eb135996b088d25dfa1865b", + "sha256:2c24d61263f511551f740d1a065eb0212db1dbbbbd241db758f5244281590c06", + "sha256:51a8b381b16ddd370178a65360ebe15fbc1c71cf6f584613a7ea08bfad946698", + "sha256:594234691ac0e9b770aee9fcdb8fa02c22e43e5c619456efd0d6c2bf276f3eb2", + "sha256:5cf4be6c304ad0b6602f5c4e90e2f59b47653ac1ed9c662ed379fe48a8f26b0c", + "sha256:64081b3f8f6f3c3de6191ec89d7dc6c86a8a43911f7ecb422c60e90c70be41c7", + "sha256:6bc25fc545a6b3d57b5f8618e59fc13d3a3a68431e8ca5fd4c13241cd70d0009", + "sha256:798caa2a2384b1cbe8a2a139d80734c9db54f9cc155c99d7cc92441a23871c03", + "sha256:7c6b1dece89874d9541fc974917b631406233ea0440d0bdfbb8e03bf39a49b3b", + "sha256:840793c68105fe031f34d6a086eaea153a0cd5c491cde82a74b420edd0a2b909", + "sha256:8d6603078baf4e11edc4168a514c5ce5b3ba6e3e9c374298cb88437957960a53", + "sha256:9cc46bc107224ff5b6d04369e7c595acb700c3613ad7bcf2e2012f62ece80c35", + "sha256:9f7a31251289b2ab6d4012f6e83e58bc3b96bd151f5b5262467f4bb6b34a7c26", + "sha256:9ffb888f19d54a4d4dfd4b3f29bc2c16aa4972f1c2ab9c4ab09b8ab8685b9c2b", + "sha256:a5ed8c05548b54b998b9498753fb9cadbfd92ee88e884641377d8a8b291bcc01", + "sha256:a7711edca4dcef1a75257b50a2fbfe92a65187c47dab5a0f1b9b332c5919a3fb", + "sha256:af5c59122a011049aad5dd87424b8e65a80e4a6477419c0c1015f73fb5ea0293", + "sha256:b18e0a9ef57d2b41f5c68beefa32317d286c3d6ac0484efd10d6e07491bb95dd", + "sha256:b4e248d1087abf9f4c10f3c398896c87ce82a9856494a7155823eb45a892395d", + "sha256:ba4e9e0ae13fc41c6b23299545e5ef73055213e466bd107953e4a013a5ddd7e3", + "sha256:c6332685306b6417a91b1ff9fae889b3ba65c2292d64bd9245c093b1b284809d", + "sha256:d5ff0621c88ce83a28a10d2ce719b2ee85635e85c515f12bac99a95306da4b2e", + "sha256:d9efd8b7a3ef378dd61a1e77367f1924375befc2eba06168b6ebfa903a5e59ca", + "sha256:df5169c4396adc04f9b0a05f13c074df878b6052430e03f50e68adf3a57aa28d", + "sha256:ebb253464a5d0482b191274f1c8bf00e33f7e0b9c66405fbffc61ed2c839c775", + "sha256:ec80dc47f54e6e9a78181ce05feb71a0353854cc26999db963695f950b5fb375", + "sha256:f032b34669220030f905152045dfa27741ce1a6db3324a5bc0b96b6c7420c87b", + "sha256:f60567825f791c6f8a592f3c6e3bd93dd2934e3f9dac189308426bd76b00ef3b", + "sha256:f803eaa94c2fcda012c047e62bc7a51b0bdabda1cad7a92a522694ea2d76e49f" + ], + "version": "==1.14.4" }, "chardet": { "hashes": [ @@ -140,28 +206,69 @@ ], "version": "==3.0.4" }, - "deepdiff": { + "colorama": { "hashes": [ - "sha256:b3fa588d1eac7fa318ec1fb4f2004568e04cb120a1989feda8e5e7164bcbf07a", - "sha256:ed7342d3ed3c0c2058a3fb05b477c943c9959ef62223dca9baa3375718a25d87" + "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b", + "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2" + ], + "markers": "sys_platform == 'win32'", + "version": "==0.4.4" + }, + "coloredlogs": { + "hashes": [ + "sha256:7ef1a7219870c7f02c218a2f2877ce68f2f8e087bb3a55bd6fbaa2a4362b4d52", + "sha256:e244a892f9d97ffd2c60f15bf1d2582ef7f9ac0f848d132249004184785702b3" ], "index": "pypi", - "version": "==4.2.0" + "version": "==14.3" }, - 
"discord-py": { + "deepdiff": { "hashes": [ - "sha256:8bfe5628d31771744000f19135c386c74ac337479d7282c26cc1627b9d31f360" + "sha256:59fc1e3e7a28dd0147b0f2b00e3e27181f0f0ef4286b251d5f214a5bcd9a9bc4", + "sha256:91360be1d9d93b1d9c13ae9c5048fa83d9cff17a88eb30afaa0d7ff2d0fee17d" ], "index": "pypi", - "version": "==1.3.1" + "version": "==4.3.2" + }, + "discord-py": { + "git": "https://github.com/Rapptz/discord.py.git", + "ref": "93f102ca907af6722ee03638766afd53dfe93a7f" + }, + "discord.py": { + "git": "https://github.com/Rapptz/discord.py.git", + "ref": "93f102ca907af6722ee03638766afd53dfe93a7f" }, "docutils": { "hashes": [ "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af", "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==0.16" }, + "emoji": { + "hashes": [ + "sha256:e42da4f8d648f8ef10691bc246f682a1ec6b18373abfd9be10ec0b398823bd11" + ], + "index": "pypi", + "version": "==0.6.0" + }, + "fakeredis": { + "hashes": [ + "sha256:01cb47d2286825a171fb49c0e445b1fa9307087e07cbb3d027ea10dbff108b6a", + "sha256:2c6041cf0225889bc403f3949838b2c53470a95a9e2d4272422937786f5f8f73" + ], + "version": "==1.4.5" + }, + "feedparser": { + "hashes": [ + "sha256:bd030652c2d08532c034c27fcd7c85868e7fa3cb2b17f230a44a6bbc92519bf9", + "sha256:cd2485472e41471632ed3029d44033ee420ad0b57111db95c240c9160a85831c", + "sha256:ce875495c90ebd74b179855449040003a1beb40cd13d5f037a0654251e260b02" + ], + "index": "pypi", + "version": "==5.2.1" + }, "fuzzywuzzy": { "hashes": [ "sha256:45016e92264780e58972dca1b3d939ac864b78437422beecebb3095f8efd00e8", @@ -170,66 +277,140 @@ "index": "pypi", "version": "==0.18.0" }, + "hiredis": { + "hashes": [ + "sha256:06a039208f83744a702279b894c8cf24c14fd63c59cd917dcde168b79eef0680", + "sha256:0a909bf501459062aa1552be1461456518f367379fdc9fdb1f2ca5e4a1fdd7c0", + "sha256:18402d9e54fb278cb9a8c638df6f1550aca36a009d47ecf5aa263a38600f35b0", + "sha256:1e4cbbc3858ec7e680006e5ca590d89a5e083235988f26a004acf7244389ac01", + "sha256:23344e3c2177baf6975fbfa361ed92eb7d36d08f454636e5054b3faa7c2aff8a", + "sha256:289b31885b4996ce04cadfd5fc03d034dce8e2a8234479f7c9e23b9e245db06b", + "sha256:2c1c570ae7bf1bab304f29427e2475fe1856814312c4a1cf1cd0ee133f07a3c6", + "sha256:2c227c0ed371771ffda256034427320870e8ea2e4fd0c0a618c766e7c49aad73", + "sha256:3bb9b63d319402cead8bbd9dd55dca3b667d2997e9a0d8a1f9b6cc274db4baee", + "sha256:3ef2183de67b59930d2db8b8e8d4d58e00a50fcc5e92f4f678f6eed7a1c72d55", + "sha256:43b8ed3dbfd9171e44c554cb4acf4ee4505caa84c5e341858b50ea27dd2b6e12", + "sha256:47bcf3c5e6c1e87ceb86cdda2ee983fa0fe56a999e6185099b3c93a223f2fa9b", + "sha256:5263db1e2e1e8ae30500cdd75a979ff99dcc184201e6b4b820d0de74834d2323", + "sha256:5b1451727f02e7acbdf6aae4e06d75f66ee82966ff9114550381c3271a90f56c", + "sha256:6996883a8a6ff9117cbb3d6f5b0dcbbae6fb9e31e1a3e4e2f95e0214d9a1c655", + "sha256:6c96f64a54f030366657a54bb90b3093afc9c16c8e0dfa29fc0d6dbe169103a5", + "sha256:7332d5c3e35154cd234fd79573736ddcf7a0ade7a986db35b6196b9171493e75", + "sha256:7885b6f32c4a898e825bb7f56f36a02781ac4a951c63e4169f0afcf9c8c30dfb", + "sha256:7b0f63f10a166583ab744a58baad04e0f52cfea1ac27bfa1b0c21a48d1003c23", + "sha256:819f95d4eba3f9e484dd115ab7ab72845cf766b84286a00d4ecf76d33f1edca1", + "sha256:8968eeaa4d37a38f8ca1f9dbe53526b69628edc9c42229a5b2f56d98bb828c1f", + "sha256:89ebf69cb19a33d625db72d2ac589d26e936b8f7628531269accf4a3196e7872", + "sha256:8daecd778c1da45b8bd54fd41ffcd471a86beed3d8e57a43acf7a8d63bba4058", + 
"sha256:955ba8ea73cf3ed8bd2f963b4cb9f8f0dcb27becd2f4b3dd536fd24c45533454", + "sha256:964f18a59f5a64c0170f684c417f4fe3e695a536612e13074c4dd5d1c6d7c882", + "sha256:969843fbdfbf56cdb71da6f0bdf50f9985b8b8aeb630102945306cf10a9c6af2", + "sha256:996021ef33e0f50b97ff2d6b5f422a0fe5577de21a8873b58a779a5ddd1c3132", + "sha256:9e9c9078a7ce07e6fce366bd818be89365a35d2e4b163268f0ca9ba7e13bb2f6", + "sha256:a04901757cb0fb0f5602ac11dda48f5510f94372144d06c2563ba56c480b467c", + "sha256:a7bf1492429f18d205f3a818da3ff1f242f60aa59006e53dee00b4ef592a3363", + "sha256:aa0af2deb166a5e26e0d554b824605e660039b161e37ed4f01b8d04beec184f3", + "sha256:abfb15a6a7822f0fae681785cb38860e7a2cb1616a708d53df557b3d76c5bfd4", + "sha256:b253fe4df2afea4dfa6b1fa8c5fef212aff8bcaaeb4207e81eed05cb5e4a7919", + "sha256:b27f082f47d23cffc4cf1388b84fdc45c4ef6015f906cd7e0d988d9e35d36349", + "sha256:b33aea449e7f46738811fbc6f0b3177c6777a572207412bbbf6f525ffed001ae", + "sha256:b44f9421c4505c548435244d74037618f452844c5d3c67719d8a55e2613549da", + "sha256:bcc371151d1512201d0214c36c0c150b1dc64f19c2b1a8c9cb1d7c7c15ebd93f", + "sha256:c2851deeabd96d3f6283e9c6b26e0bfed4de2dc6fb15edf913e78b79fc5909ed", + "sha256:cdfd501c7ac5b198c15df800a3a34c38345f5182e5f80770caf362bccca65628", + "sha256:d2c0caffa47606d6d7c8af94ba42547bd2a441f06c74fd90a1ffe328524a6c64", + "sha256:dcb2db95e629962db5a355047fb8aefb012df6c8ae608930d391619dbd96fd86", + "sha256:e0eeb9c112fec2031927a1745788a181d0eecbacbed941fc5c4f7bc3f7b273bf", + "sha256:e154891263306200260d7f3051982774d7b9ef35af3509d5adbbe539afd2610c", + "sha256:e2e023a42dcbab8ed31f97c2bcdb980b7fbe0ada34037d87ba9d799664b58ded", + "sha256:e64be68255234bb489a574c4f2f8df7029c98c81ec4d160d6cd836e7f0679390", + "sha256:e82d6b930e02e80e5109b678c663a9ed210680ded81c1abaf54635d88d1da298" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.1.0" + }, + "humanfriendly": { + "hashes": [ + "sha256:066562956639ab21ff2676d1fda0b5987e985c534fc76700a19bd54bcb81121d", + "sha256:d5c731705114b9ad673754f3317d9fa4c23212f36b29bdc4272a892eafc9bc72" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==9.1" + }, "idna": { "hashes": [ - "sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb", - "sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa" + "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", + "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" ], - "version": "==2.9" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.10" }, "imagesize": { "hashes": [ "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1", "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.2.0" }, "jinja2": { "hashes": [ - "sha256:93187ffbc7808079673ef52771baa950426fd664d3aad1d0fa3e95644360e250", - "sha256:b0eaf100007721b5c16c1fc1eecb87409464edc10469ddc9a22a27a99123be49" + "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0", + "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035" ], - "version": "==2.11.1" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==2.11.2" }, "lxml": { "hashes": [ - "sha256:06d4e0bbb1d62e38ae6118406d7cdb4693a3fa34ee3762238bcb96c9e36a93cd", - 
"sha256:0701f7965903a1c3f6f09328c1278ac0eee8f56f244e66af79cb224b7ef3801c", - "sha256:1f2c4ec372bf1c4a2c7e4bb20845e8bcf8050365189d86806bad1e3ae473d081", - "sha256:4235bc124fdcf611d02047d7034164897ade13046bda967768836629bc62784f", - "sha256:5828c7f3e615f3975d48f40d4fe66e8a7b25f16b5e5705ffe1d22e43fb1f6261", - "sha256:585c0869f75577ac7a8ff38d08f7aac9033da2c41c11352ebf86a04652758b7a", - "sha256:5d467ce9c5d35b3bcc7172c06320dddb275fea6ac2037f72f0a4d7472035cea9", - "sha256:63dbc21efd7e822c11d5ddbedbbb08cd11a41e0032e382a0fd59b0b08e405a3a", - "sha256:7bc1b221e7867f2e7ff1933165c0cec7153dce93d0cdba6554b42a8beb687bdb", - "sha256:8620ce80f50d023d414183bf90cc2576c2837b88e00bea3f33ad2630133bbb60", - "sha256:8a0ebda56ebca1a83eb2d1ac266649b80af8dd4b4a3502b2c1e09ac2f88fe128", - "sha256:90ed0e36455a81b25b7034038e40880189169c308a3df360861ad74da7b68c1a", - "sha256:95e67224815ef86924fbc2b71a9dbd1f7262384bca4bc4793645794ac4200717", - "sha256:afdb34b715daf814d1abea0317b6d672476b498472f1e5aacbadc34ebbc26e89", - "sha256:b4b2c63cc7963aedd08a5f5a454c9f67251b1ac9e22fd9d72836206c42dc2a72", - "sha256:d068f55bda3c2c3fcaec24bd083d9e2eede32c583faf084d6e4b9daaea77dde8", - "sha256:d5b3c4b7edd2e770375a01139be11307f04341ec709cf724e0f26ebb1eef12c3", - "sha256:deadf4df349d1dcd7b2853a2c8796593cc346600726eff680ed8ed11812382a7", - "sha256:df533af6f88080419c5a604d0d63b2c33b1c0c4409aba7d0cb6de305147ea8c8", - "sha256:e4aa948eb15018a657702fee0b9db47e908491c64d36b4a90f59a64741516e77", - "sha256:e5d842c73e4ef6ed8c1bd77806bf84a7cb535f9c0cf9b2c74d02ebda310070e1", - "sha256:ebec08091a22c2be870890913bdadd86fcd8e9f0f22bcb398abd3af914690c15", - "sha256:edc15fcfd77395e24543be48871c251f38132bb834d9fdfdad756adb6ea37679", - "sha256:f2b74784ed7e0bc2d02bd53e48ad6ba523c9b36c194260b7a5045071abbb1012", - "sha256:fa071559f14bd1e92077b1b5f6c22cf09756c6de7139370249eb372854ce51e6", - "sha256:fd52e796fee7171c4361d441796b64df1acfceb51f29e545e812f16d023c4bbc", - "sha256:fe976a0f1ef09b3638778024ab9fb8cde3118f203364212c198f71341c0715ca" - ], - "index": "pypi", - "version": "==4.5.0" + "sha256:0448576c148c129594d890265b1a83b9cd76fd1f0a6a04620753d9a6bcfd0a4d", + "sha256:127f76864468d6630e1b453d3ffbbd04b024c674f55cf0a30dc2595137892d37", + "sha256:1471cee35eba321827d7d53d104e7b8c593ea3ad376aa2df89533ce8e1b24a01", + "sha256:2363c35637d2d9d6f26f60a208819e7eafc4305ce39dc1d5005eccc4593331c2", + "sha256:2e5cc908fe43fe1aa299e58046ad66981131a66aea3129aac7770c37f590a644", + "sha256:2e6fd1b8acd005bd71e6c94f30c055594bbd0aa02ef51a22bbfa961ab63b2d75", + "sha256:366cb750140f221523fa062d641393092813b81e15d0e25d9f7c6025f910ee80", + "sha256:42ebca24ba2a21065fb546f3e6bd0c58c3fe9ac298f3a320147029a4850f51a2", + "sha256:4e751e77006da34643ab782e4a5cc21ea7b755551db202bc4d3a423b307db780", + "sha256:4fb85c447e288df535b17ebdebf0ec1cf3a3f1a8eba7e79169f4f37af43c6b98", + "sha256:50c348995b47b5a4e330362cf39fc503b4a43b14a91c34c83b955e1805c8e308", + "sha256:535332fe9d00c3cd455bd3dd7d4bacab86e2d564bdf7606079160fa6251caacf", + "sha256:535f067002b0fd1a4e5296a8f1bf88193080ff992a195e66964ef2a6cfec5388", + "sha256:5be4a2e212bb6aa045e37f7d48e3e1e4b6fd259882ed5a00786f82e8c37ce77d", + "sha256:60a20bfc3bd234d54d49c388950195d23a5583d4108e1a1d47c9eef8d8c042b3", + "sha256:648914abafe67f11be7d93c1a546068f8eff3c5fa938e1f94509e4a5d682b2d8", + "sha256:681d75e1a38a69f1e64ab82fe4b1ed3fd758717bed735fb9aeaa124143f051af", + "sha256:68a5d77e440df94011214b7db907ec8f19e439507a70c958f750c18d88f995d2", + "sha256:69a63f83e88138ab7642d8f61418cf3180a4d8cd13995df87725cb8b893e950e", + 
"sha256:6e4183800f16f3679076dfa8abf2db3083919d7e30764a069fb66b2b9eff9939", + "sha256:6fd8d5903c2e53f49e99359b063df27fdf7acb89a52b6a12494208bf61345a03", + "sha256:791394449e98243839fa822a637177dd42a95f4883ad3dec2a0ce6ac99fb0a9d", + "sha256:7a7669ff50f41225ca5d6ee0a1ec8413f3a0d8aa2b109f86d540887b7ec0d72a", + "sha256:7e9eac1e526386df7c70ef253b792a0a12dd86d833b1d329e038c7a235dfceb5", + "sha256:7ee8af0b9f7de635c61cdd5b8534b76c52cd03536f29f51151b377f76e214a1a", + "sha256:8246f30ca34dc712ab07e51dc34fea883c00b7ccb0e614651e49da2c49a30711", + "sha256:8c88b599e226994ad4db29d93bc149aa1aff3dc3a4355dd5757569ba78632bdf", + "sha256:923963e989ffbceaa210ac37afc9b906acebe945d2723e9679b643513837b089", + "sha256:94d55bd03d8671686e3f012577d9caa5421a07286dd351dfef64791cf7c6c505", + "sha256:97db258793d193c7b62d4e2586c6ed98d51086e93f9a3af2b2034af01450a74b", + "sha256:a9d6bc8642e2c67db33f1247a77c53476f3a166e09067c0474facb045756087f", + "sha256:cd11c7e8d21af997ee8079037fff88f16fda188a9776eb4b81c7e4c9c0a7d7fc", + "sha256:d8d3d4713f0c28bdc6c806a278d998546e8efc3498949e3ace6e117462ac0a5e", + "sha256:e0bfe9bb028974a481410432dbe1b182e8191d5d40382e5b8ff39cdd2e5c5931", + "sha256:f4822c0660c3754f1a41a655e37cb4dbbc9be3d35b125a37fab6f82d47674ebc", + "sha256:f83d281bb2a6217cd806f4cf0ddded436790e66f393e124dfe9731f6b3fb9afe", + "sha256:fc37870d6716b137e80d19241d0e2cff7a7643b925dfa49b4c8ebd1295eb506e" + ], + "index": "pypi", + "version": "==4.6.2" }, "markdownify": { "hashes": [ - "sha256:28ce67d1888e4908faaab7b04d2193cda70ea4f902f156a21d0aaea55e63e0a1" + "sha256:30be8340724e706c9e811c27fe8c1542cf74a15b46827924fff5c54b40dd9b0d", + "sha256:a69588194fd76634f0139d6801b820fd652dc5eeba9530e90d323dfdc0155252" ], "index": "pypi", - "version": "==0.4.1" + "version": "==0.5.3" }, "markupsafe": { "hashes": [ @@ -267,50 +448,74 @@ "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7", "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.1" }, "more-itertools": { "hashes": [ - "sha256:409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832", - "sha256:92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4" + "sha256:8e1a2a43b2f2727425f2b5839587ae37093f19153dc26c0927d1048ff6557330", + "sha256:b3a9005928e5bed54076e6e549c792b306fddfe72b2d1d22dd63d42d5d3899cf" ], "index": "pypi", - "version": "==7.2.0" + "version": "==8.6.0" }, "multidict": { "hashes": [ - "sha256:317f96bc0950d249e96d8d29ab556d01dd38888fbe68324f46fd834b430169f1", - "sha256:42f56542166040b4474c0c608ed051732033cd821126493cf25b6c276df7dd35", - "sha256:4b7df040fb5fe826d689204f9b544af469593fb3ff3a069a6ad3409f742f5928", - "sha256:544fae9261232a97102e27a926019100a9db75bec7b37feedd74b3aa82f29969", - "sha256:620b37c3fea181dab09267cd5a84b0f23fa043beb8bc50d8474dd9694de1fa6e", - "sha256:6e6fef114741c4d7ca46da8449038ec8b1e880bbe68674c01ceeb1ac8a648e78", - "sha256:7774e9f6c9af3f12f296131453f7b81dabb7ebdb948483362f5afcaac8a826f1", - "sha256:85cb26c38c96f76b7ff38b86c9d560dea10cf3459bb5f4caf72fc1bb932c7136", - "sha256:a326f4240123a2ac66bb163eeba99578e9d63a8654a59f4688a79198f9aa10f8", - "sha256:ae402f43604e3b2bc41e8ea8b8526c7fa7139ed76b0d64fc48e28125925275b2", - "sha256:aee283c49601fa4c13adc64c09c978838a7e812f85377ae130a24d7198c0331e", - "sha256:b51249fdd2923739cd3efc95a3d6c363b67bbf779208e9f37fd5e68540d1a4d4", - "sha256:bb519becc46275c594410c6c28a8a0adc66fe24fef154a9addea54c1adb006f5", - 
"sha256:c2c37185fb0af79d5c117b8d2764f4321eeb12ba8c141a95d0aa8c2c1d0a11dd", - "sha256:dc561313279f9d05a3d0ffa89cd15ae477528ea37aa9795c4654588a3287a9ab", - "sha256:e439c9a10a95cb32abd708bb8be83b2134fa93790a4fb0535ca36db3dda94d20", - "sha256:fc3b4adc2ee8474cb3cd2a155305d5f8eda0a9c91320f83e55748e1fcb68f8e3" - ], - "version": "==4.7.5" + "sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a", + "sha256:051012ccee979b2b06be928a6150d237aec75dd6bf2d1eeeb190baf2b05abc93", + "sha256:05c20b68e512166fddba59a918773ba002fdd77800cad9f55b59790030bab632", + "sha256:07b42215124aedecc6083f1ce6b7e5ec5b50047afa701f3442054373a6deb656", + "sha256:0e3c84e6c67eba89c2dbcee08504ba8644ab4284863452450520dad8f1e89b79", + "sha256:0e929169f9c090dae0646a011c8b058e5e5fb391466016b39d21745b48817fd7", + "sha256:1ab820665e67373de5802acae069a6a05567ae234ddb129f31d290fc3d1aa56d", + "sha256:25b4e5f22d3a37ddf3effc0710ba692cfc792c2b9edfb9c05aefe823256e84d5", + "sha256:2e68965192c4ea61fff1b81c14ff712fc7dc15d2bd120602e4a3494ea6584224", + "sha256:2f1a132f1c88724674271d636e6b7351477c27722f2ed789f719f9e3545a3d26", + "sha256:37e5438e1c78931df5d3c0c78ae049092877e5e9c02dd1ff5abb9cf27a5914ea", + "sha256:3a041b76d13706b7fff23b9fc83117c7b8fe8d5fe9e6be45eee72b9baa75f348", + "sha256:3a4f32116f8f72ecf2a29dabfb27b23ab7cdc0ba807e8459e59a93a9be9506f6", + "sha256:46c73e09ad374a6d876c599f2328161bcd95e280f84d2060cf57991dec5cfe76", + "sha256:46dd362c2f045095c920162e9307de5ffd0a1bfbba0a6e990b344366f55a30c1", + "sha256:4b186eb7d6ae7c06eb4392411189469e6a820da81447f46c0072a41c748ab73f", + "sha256:54fd1e83a184e19c598d5e70ba508196fd0bbdd676ce159feb412a4a6664f952", + "sha256:585fd452dd7782130d112f7ddf3473ffdd521414674c33876187e101b588738a", + "sha256:5cf3443199b83ed9e955f511b5b241fd3ae004e3cb81c58ec10f4fe47c7dce37", + "sha256:6a4d5ce640e37b0efcc8441caeea8f43a06addace2335bd11151bc02d2ee31f9", + "sha256:7df80d07818b385f3129180369079bd6934cf70469f99daaebfac89dca288359", + "sha256:806068d4f86cb06af37cd65821554f98240a19ce646d3cd24e1c33587f313eb8", + "sha256:830f57206cc96ed0ccf68304141fec9481a096c4d2e2831f311bde1c404401da", + "sha256:929006d3c2d923788ba153ad0de8ed2e5ed39fdbe8e7be21e2f22ed06c6783d3", + "sha256:9436dc58c123f07b230383083855593550c4d301d2532045a17ccf6eca505f6d", + "sha256:9dd6e9b1a913d096ac95d0399bd737e00f2af1e1594a787e00f7975778c8b2bf", + "sha256:ace010325c787c378afd7f7c1ac66b26313b3344628652eacd149bdd23c68841", + "sha256:b47a43177a5e65b771b80db71e7be76c0ba23cc8aa73eeeb089ed5219cdbe27d", + "sha256:b797515be8743b771aa868f83563f789bbd4b236659ba52243b735d80b29ed93", + "sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f", + "sha256:d5c65bdf4484872c4af3150aeebe101ba560dcfb34488d9a8ff8dbcd21079647", + "sha256:d81eddcb12d608cc08081fa88d046c78afb1bf8107e6feab5d43503fea74a635", + "sha256:dc862056f76443a0db4509116c5cd480fe1b6a2d45512a653f9a855cc0517456", + "sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda", + "sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5", + "sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281", + "sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80" + ], + "markers": "python_version >= '3.6'", + "version": "==5.1.0" }, "ordered-set": { "hashes": [ - "sha256:a7bfa858748c73b096e43db14eb23e2bc714a503f990c89fac8fab9b0ee79724" + "sha256:ba93b2df055bca202116ec44b9bead3df33ea63a7d5827ff8e16738b97f33a95" ], - "version": "==3.1.1" + "markers": "python_version >= '3.5'", + "version": "==4.0.2" }, "packaging": { "hashes": [ - 
"sha256:170748228214b70b672c581a3dd610ee51f733018650740e98c7df862a583f73", - "sha256:e665345f9eef0c621aa0bf2f8d78cf6d21904eef16a93f020240b704a57f1334" + "sha256:24e0da08660a87484d1602c30bb4902d74816b6985b93de36926f5bc95741858", + "sha256:78598185a7008a470d64526a8059de9aaa449238f280fc9eb6b13ba6c4109093" ], - "version": "==20.1" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==20.8" }, "pamqp": { "hashes": [ @@ -355,23 +560,27 @@ }, "pycparser": { "hashes": [ - "sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3" + "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0", + "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705" ], - "version": "==2.19" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.20" }, "pygments": { "hashes": [ - "sha256:2a3fe295e54a20164a9df49c75fa58526d3be48e14aceba6d6b1e8ac0bfd6f1b", - "sha256:98c8aa5a9f778fcd1026a17361ddaf7330d1b7c62ae97c3bb0ae73e0b9b6b0fe" + "sha256:ccf3acacf3782cbed4a989426012f1c535c9a90d3a7fc3f16d231b9372d2b716", + "sha256:f275b6c0909e5dafd2d6269a656aa90fa58ebf4a74f8fcf9053195d226b24a08" ], - "version": "==2.5.2" + "markers": "python_version >= '3.5'", + "version": "==2.7.3" }, "pyparsing": { "hashes": [ - "sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f", - "sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec" + "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", + "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" ], - "version": "==2.4.6" + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2'", + "version": "==2.4.7" }, "python-dateutil": { "hashes": [ @@ -383,50 +592,61 @@ }, "pytz": { "hashes": [ - "sha256:1c557d7d0e871de1f5ccd5833f60fb2550652da6be2693c1e02300743d21500d", - "sha256:b02c06db6cf09c12dd25137e563b31700d3b80fcc4ad23abb7a315f2789819be" + "sha256:16962c5fb8db4a8f63a26646d8886e9d769b6c511543557bc84e9569fb9a9cb4", + "sha256:180befebb1927b16f6b57101720075a984c019ac16b1b7575673bea42c6c3da5" ], - "version": "==2019.3" + "version": "==2020.5" }, "pyyaml": { "hashes": [ - "sha256:059b2ee3194d718896c0ad077dd8c043e5e909d9180f387ce42012662a4946d6", - "sha256:1cf708e2ac57f3aabc87405f04b86354f66799c8e62c28c5fc5f88b5521b2dbf", - "sha256:24521fa2890642614558b492b473bee0ac1f8057a7263156b02e8b14c88ce6f5", - "sha256:4fee71aa5bc6ed9d5f116327c04273e25ae31a3020386916905767ec4fc5317e", - "sha256:70024e02197337533eef7b85b068212420f950319cc8c580261963aefc75f811", - "sha256:74782fbd4d4f87ff04159e986886931456a1894c61229be9eaf4de6f6e44b99e", - "sha256:940532b111b1952befd7db542c370887a8611660d2b9becff75d39355303d82d", - "sha256:cb1f2f5e426dc9f07a7681419fe39cee823bb74f723f36f70399123f439e9b20", - "sha256:dbbb2379c19ed6042e8f11f2a2c66d39cceb8aeace421bfc29d085d93eda3689", - "sha256:e3a057b7a64f1222b56e47bcff5e4b94c4f61faac04c7c4ecb1985e18caa3994", - "sha256:e9f45bd5b92c7974e59bcd2dcc8631a6b6cc380a904725fce7bc08872e691615" + "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", + "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76", + "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", + "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e", + "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648", + "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", + 
"sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f", + "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2", + "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee", + "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a", + "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", + "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", + "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a" ], "index": "pypi", - "version": "==5.3" + "version": "==5.3.1" + }, + "redis": { + "hashes": [ + "sha256:0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2", + "sha256:432b788c4530cfe16d8d943a09d40ca6c16149727e4afe8c2c9d5580c59d9f24" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==3.5.3" }, "requests": { "hashes": [ - "sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee", - "sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6" + "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804", + "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e" ], "index": "pypi", - "version": "==2.23.0" + "version": "==2.25.1" }, "sentry-sdk": { "hashes": [ - "sha256:b06dd27391fd11fb32f84fe054e6a64736c469514a718a99fb5ce1dff95d6b28", - "sha256:e023da07cfbead3868e1e2ba994160517885a32dfd994fc455b118e37989479b" + "sha256:0a711ec952441c2ec89b8f5d226c33bc697914f46e876b44a4edd3e7864cf4d0", + "sha256:737a094e49a529dd0fdcaafa9e97cf7c3d5eb964bd229821d640bc77f3502b3f" ], "index": "pypi", - "version": "==0.14.1" + "version": "==0.19.5" }, "six": { "hashes": [ - "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a", - "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c" + "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", + "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], - "version": "==1.14.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", + "version": "==1.15.0" }, "snowballstemmer": { "hashes": [ @@ -435,40 +655,51 @@ ], "version": "==2.0.0" }, + "sortedcontainers": { + "hashes": [ + "sha256:37257a32add0a3ee490bb170b599e93095eed89a55da91fa9f48753ea12fd73f", + "sha256:59cc937650cf60d677c16775597c89a960658a09cf7c1a668f86e1e4464b10a1" + ], + "version": "==2.3.0" + }, "soupsieve": { "hashes": [ - "sha256:bdb0d917b03a1369ce964056fc195cfdff8819c40de04695a80bc813c3cfa1f5", - "sha256:e2c1c5dee4a1c36bcb790e0fabd5492d874b8ebd4617622c4f6a731701060dda" + "sha256:4bb21a6ee4707bf43b61230e80740e71bfe56e55d1f1f50924b087bb2975c851", + "sha256:6dc52924dc0bc710a5d16794e6b3480b2c7c08b07729505feab2b2c16661ff6e" ], - "version": "==1.9.5" + "markers": "python_version >= '3.0'", + "version": "==2.1" }, "sphinx": { "hashes": [ - "sha256:776ff8333181138fae52df65be733127539623bb46cc692e7fa0fcfc80d7aa88", - "sha256:ca762da97c3b5107cbf0ab9e11d3ec7ab8d3c31377266fd613b962ed971df709" + "sha256:b4c750d546ab6d7e05bdff6ac24db8ae3e8b8253a3569b754e445110a0a12b66", + "sha256:fc312670b56cb54920d6cc2ced455a22a547910de10b3142276495ced49231cb" ], "index": "pypi", - "version": "==2.4.3" + "version": "==2.4.4" }, "sphinxcontrib-applehelp": { "hashes": [ - "sha256:edaa0ab2b2bc74403149cb0209d6775c96de797dfd5b5e2a71981309efab3897", - "sha256:fb8dee85af95e5c30c91f10e7eb3c8967308518e0f7488a2828ef7bc191d0d5d" + 
"sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a", + "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58" ], - "version": "==1.0.1" + "markers": "python_version >= '3.5'", + "version": "==1.0.2" }, "sphinxcontrib-devhelp": { "hashes": [ - "sha256:6c64b077937330a9128a4da74586e8c2130262f014689b4b89e2d08ee7294a34", - "sha256:9512ecb00a2b0821a146736b39f7aeb90759834b07e81e8cc23a9c70bacb9981" + "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e", + "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4" ], - "version": "==1.0.1" + "markers": "python_version >= '3.5'", + "version": "==1.0.2" }, "sphinxcontrib-htmlhelp": { "hashes": [ "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f", "sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b" ], + "markers": "python_version >= '3.5'", "version": "==1.0.3" }, "sphinxcontrib-jsmath": { @@ -476,115 +707,123 @@ "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8" ], + "markers": "python_version >= '3.5'", "version": "==1.0.1" }, "sphinxcontrib-qthelp": { "hashes": [ - "sha256:513049b93031beb1f57d4daea74068a4feb77aa5630f856fcff2e50de14e9a20", - "sha256:79465ce11ae5694ff165becda529a600c754f4bc459778778c7017374d4d406f" + "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72", + "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6" ], - "version": "==1.0.2" + "markers": "python_version >= '3.5'", + "version": "==1.0.3" }, "sphinxcontrib-serializinghtml": { "hashes": [ - "sha256:c0efb33f8052c04fd7a26c0a07f1678e8512e0faec19f4aa8f2473a8b81d5227", - "sha256:db6615af393650bf1151a6cd39120c29abaf93cc60db8c48eb2dddbfdc3a9768" + "sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc", + "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a" ], - "version": "==1.1.3" + "markers": "python_version >= '3.5'", + "version": "==1.1.4" + }, + "statsd": { + "hashes": [ + "sha256:c610fb80347fca0ef62666d241bce64184bd7cc1efe582f9690e045c25535eaa", + "sha256:e3e6db4c246f7c59003e51c9720a51a7f39a396541cb9b147ff4b14d15b5dd1f" + ], + "index": "pypi", + "version": "==3.3.0" + }, + "typing-extensions": { + "hashes": [ + "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918", + "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c", + "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f" + ], + "version": "==3.7.4.3" }, "urllib3": { "hashes": [ - "sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4", - "sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb" - ], - "index": "pypi", - "version": "==1.24.3" - }, - "websockets": { - "hashes": [ - "sha256:0e4fb4de42701340bd2353bb2eee45314651caa6ccee80dbd5f5d5978888fed5", - "sha256:1d3f1bf059d04a4e0eb4985a887d49195e15ebabc42364f4eb564b1d065793f5", - "sha256:20891f0dddade307ffddf593c733a3fdb6b83e6f9eef85908113e628fa5a8308", - "sha256:295359a2cc78736737dd88c343cd0747546b2174b5e1adc223824bcaf3e164cb", - "sha256:2db62a9142e88535038a6bcfea70ef9447696ea77891aebb730a333a51ed559a", - "sha256:3762791ab8b38948f0c4d281c8b2ddfa99b7e510e46bd8dfa942a5fff621068c", - "sha256:3db87421956f1b0779a7564915875ba774295cc86e81bc671631379371af1170", - "sha256:3ef56fcc7b1ff90de46ccd5a687bbd13a3180132268c4254fc0fa44ecf4fc422", - 
"sha256:4f9f7d28ce1d8f1295717c2c25b732c2bc0645db3215cf757551c392177d7cb8", - "sha256:5c01fd846263a75bc8a2b9542606927cfad57e7282965d96b93c387622487485", - "sha256:5c65d2da8c6bce0fca2528f69f44b2f977e06954c8512a952222cea50dad430f", - "sha256:751a556205d8245ff94aeef23546a1113b1dd4f6e4d102ded66c39b99c2ce6c8", - "sha256:7ff46d441db78241f4c6c27b3868c9ae71473fe03341340d2dfdbe8d79310acc", - "sha256:965889d9f0e2a75edd81a07592d0ced54daa5b0785f57dc429c378edbcffe779", - "sha256:9b248ba3dd8a03b1a10b19efe7d4f7fa41d158fdaa95e2cf65af5a7b95a4f989", - "sha256:9bef37ee224e104a413f0780e29adb3e514a5b698aabe0d969a6ba426b8435d1", - "sha256:c1ec8db4fac31850286b7cd3b9c0e1b944204668b8eb721674916d4e28744092", - "sha256:c8a116feafdb1f84607cb3b14aa1418424ae71fee131642fc568d21423b51824", - "sha256:ce85b06a10fc65e6143518b96d3dca27b081a740bae261c2fb20375801a9d56d", - "sha256:d705f8aeecdf3262379644e4b55107a3b55860eb812b673b28d0fbc347a60c55", - "sha256:e898a0863421650f0bebac8ba40840fc02258ef4714cb7e1fd76b6a6354bda36", - "sha256:f8a7bff6e8664afc4e6c28b983845c5bc14965030e3fb98789734d416af77c4b" - ], - "version": "==8.1" + "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08", + "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", + "version": "==1.26.2" }, "yarl": { "hashes": [ - "sha256:0c2ab325d33f1b824734b3ef51d4d54a54e0e7a23d13b86974507602334c2cce", - "sha256:0ca2f395591bbd85ddd50a82eb1fde9c1066fafe888c5c7cc1d810cf03fd3cc6", - "sha256:2098a4b4b9d75ee352807a95cdf5f10180db903bc5b7270715c6bbe2551f64ce", - "sha256:25e66e5e2007c7a39541ca13b559cd8ebc2ad8fe00ea94a2aad28a9b1e44e5ae", - "sha256:26d7c90cb04dee1665282a5d1a998defc1a9e012fdca0f33396f81508f49696d", - "sha256:308b98b0c8cd1dfef1a0311dc5e38ae8f9b58349226aa0533f15a16717ad702f", - "sha256:3ce3d4f7c6b69c4e4f0704b32eca8123b9c58ae91af740481aa57d7857b5e41b", - "sha256:58cd9c469eced558cd81aa3f484b2924e8897049e06889e8ff2510435b7ef74b", - "sha256:5b10eb0e7f044cf0b035112446b26a3a2946bca9d7d7edb5e54a2ad2f6652abb", - "sha256:6faa19d3824c21bcbfdfce5171e193c8b4ddafdf0ac3f129ccf0cdfcb083e462", - "sha256:944494be42fa630134bf907714d40207e646fd5a94423c90d5b514f7b0713fea", - "sha256:a161de7e50224e8e3de6e184707476b5a989037dcb24292b391a3d66ff158e70", - "sha256:a4844ebb2be14768f7994f2017f70aca39d658a96c786211be5ddbe1c68794c1", - "sha256:c2b509ac3d4b988ae8769901c66345425e361d518aecbe4acbfc2567e416626a", - "sha256:c9959d49a77b0e07559e579f38b2f3711c2b8716b8410b320bf9713013215a1b", - "sha256:d8cdee92bc930d8b09d8bd2043cedd544d9c8bd7436a77678dd602467a993080", - "sha256:e15199cdb423316e15f108f51249e44eb156ae5dba232cb73be555324a1d49c2" - ], - "version": "==1.4.2" + "sha256:00d7ad91b6583602eb9c1d085a2cf281ada267e9a197e8b7cae487dadbfa293e", + "sha256:0355a701b3998dcd832d0dc47cc5dedf3874f966ac7f870e0f3a6788d802d434", + "sha256:15263c3b0b47968c1d90daa89f21fcc889bb4b1aac5555580d74565de6836366", + "sha256:2ce4c621d21326a4a5500c25031e102af589edb50c09b321049e388b3934eec3", + "sha256:31ede6e8c4329fb81c86706ba8f6bf661a924b53ba191b27aa5fcee5714d18ec", + "sha256:324ba3d3c6fee56e2e0b0d09bf5c73824b9f08234339d2b788af65e60040c959", + "sha256:329412812ecfc94a57cd37c9d547579510a9e83c516bc069470db5f75684629e", + "sha256:4736eaee5626db8d9cda9eb5282028cc834e2aeb194e0d8b50217d707e98bb5c", + "sha256:4953fb0b4fdb7e08b2f3b3be80a00d28c5c8a2056bb066169de00e6501b986b6", + "sha256:4c5bcfc3ed226bf6419f7a33982fb4b8ec2e45785a0561eb99274ebbf09fdd6a", + 
"sha256:547f7665ad50fa8563150ed079f8e805e63dd85def6674c97efd78eed6c224a6", + "sha256:5b883e458058f8d6099e4420f0cc2567989032b5f34b271c0827de9f1079a424", + "sha256:63f90b20ca654b3ecc7a8d62c03ffa46999595f0167d6450fa8383bab252987e", + "sha256:68dc568889b1c13f1e4745c96b931cc94fdd0defe92a72c2b8ce01091b22e35f", + "sha256:69ee97c71fee1f63d04c945f56d5d726483c4762845400a6795a3b75d56b6c50", + "sha256:6d6283d8e0631b617edf0fd726353cb76630b83a089a40933043894e7f6721e2", + "sha256:72a660bdd24497e3e84f5519e57a9ee9220b6f3ac4d45056961bf22838ce20cc", + "sha256:73494d5b71099ae8cb8754f1df131c11d433b387efab7b51849e7e1e851f07a4", + "sha256:7356644cbed76119d0b6bd32ffba704d30d747e0c217109d7979a7bc36c4d970", + "sha256:8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10", + "sha256:8aa3decd5e0e852dc68335abf5478a518b41bf2ab2f330fe44916399efedfae0", + "sha256:97b5bdc450d63c3ba30a127d018b866ea94e65655efaf889ebeabc20f7d12406", + "sha256:9ede61b0854e267fd565e7527e2f2eb3ef8858b301319be0604177690e1a3896", + "sha256:b2e9a456c121e26d13c29251f8267541bd75e6a1ccf9e859179701c36a078643", + "sha256:b5dfc9a40c198334f4f3f55880ecf910adebdcb2a0b9a9c23c9345faa9185721", + "sha256:bafb450deef6861815ed579c7a6113a879a6ef58aed4c3a4be54400ae8871478", + "sha256:c49ff66d479d38ab863c50f7bb27dee97c6627c5fe60697de15529da9c3de724", + "sha256:ce3beb46a72d9f2190f9e1027886bfc513702d748047b548b05dab7dfb584d2e", + "sha256:d26608cf178efb8faa5ff0f2d2e77c208f471c5a3709e577a7b3fd0445703ac8", + "sha256:d597767fcd2c3dc49d6eea360c458b65643d1e4dbed91361cf5e36e53c1f8c96", + "sha256:d5c32c82990e4ac4d8150fd7652b972216b204de4e83a122546dce571c1bdf25", + "sha256:d8d07d102f17b68966e2de0e07bfd6e139c7c02ef06d3a0f8d2f0f055e13bb76", + "sha256:e46fba844f4895b36f4c398c5af062a9808d1f26b2999c58909517384d5deda2", + "sha256:e6b5460dc5ad42ad2b36cca524491dfcaffbfd9c8df50508bddc354e787b8dc2", + "sha256:f040bcc6725c821a4c0665f3aa96a4d0805a7aaf2caf266d256b8ed71b9f041c", + "sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a", + "sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71" + ], + "markers": "python_version >= '3.6'", + "version": "==1.6.3" } }, "develop": { "appdirs": { "hashes": [ - "sha256:9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92", - "sha256:d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e" - ], - "version": "==1.4.3" - }, - "aspy.yaml": { - "hashes": [ - "sha256:463372c043f70160a9ec950c3f1e4c3a82db5fca01d334b6bc89c7164d744bdc", - "sha256:e7c742382eff2caed61f87a39d13f99109088e5e93f04d76eb8d4b28aa143f45" + "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", + "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128" ], - "version": "==1.3.0" + "version": "==1.4.4" }, "attrs": { "hashes": [ - "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", - "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" + "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", + "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" ], - "version": "==19.3.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==20.3.0" }, "certifi": { "hashes": [ - "sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3", - "sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f" + "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c", + 
"sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830" ], - "version": "==2019.11.28" + "version": "==2020.12.5" }, "cfgv": { "hashes": [ - "sha256:04b093b14ddf9fd4d17c53ebfd55582d27b76ed30050193c14e560770c5360eb", - "sha256:f22b426ed59cd2ab2b54ff96608d846c33dfb8766a67f0b4a6ce130ce244414f" + "sha256:32e43d604bbe7896fe7c248a9c2276447dbef840feb28fe20494f62af110211d", + "sha256:cf22deb93d4bcf92f345a5c3cd39d3d41d6340adc60c78bbbd6588c384fda6a1" ], - "version": "==3.0.0" + "markers": "python_full_version >= '3.6.1'", + "version": "==3.2.0" }, "chardet": { "hashes": [ @@ -593,79 +832,81 @@ ], "version": "==3.0.4" }, - "click": { - "hashes": [ - "sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13", - "sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7" - ], - "version": "==7.0" - }, "coverage": { "hashes": [ - "sha256:08907593569fe59baca0bf152c43f3863201efb6113ecb38ce7e97ce339805a6", - "sha256:0be0f1ed45fc0c185cfd4ecc19a1d6532d72f86a2bac9de7e24541febad72650", - "sha256:141f08ed3c4b1847015e2cd62ec06d35e67a3ac185c26f7635f4406b90afa9c5", - "sha256:19e4df788a0581238e9390c85a7a09af39c7b539b29f25c89209e6c3e371270d", - "sha256:23cc09ed395b03424d1ae30dcc292615c1372bfba7141eb85e11e50efaa6b351", - "sha256:245388cda02af78276b479f299bbf3783ef0a6a6273037d7c60dc73b8d8d7755", - "sha256:331cb5115673a20fb131dadd22f5bcaf7677ef758741312bee4937d71a14b2ef", - "sha256:386e2e4090f0bc5df274e720105c342263423e77ee8826002dcffe0c9533dbca", - "sha256:3a794ce50daee01c74a494919d5ebdc23d58873747fa0e288318728533a3e1ca", - "sha256:60851187677b24c6085248f0a0b9b98d49cba7ecc7ec60ba6b9d2e5574ac1ee9", - "sha256:63a9a5fc43b58735f65ed63d2cf43508f462dc49857da70b8980ad78d41d52fc", - "sha256:6b62544bb68106e3f00b21c8930e83e584fdca005d4fffd29bb39fb3ffa03cb5", - "sha256:6ba744056423ef8d450cf627289166da65903885272055fb4b5e113137cfa14f", - "sha256:7494b0b0274c5072bddbfd5b4a6c6f18fbbe1ab1d22a41e99cd2d00c8f96ecfe", - "sha256:826f32b9547c8091679ff292a82aca9c7b9650f9fda3e2ca6bf2ac905b7ce888", - "sha256:93715dffbcd0678057f947f496484e906bf9509f5c1c38fc9ba3922893cda5f5", - "sha256:9a334d6c83dfeadae576b4d633a71620d40d1c379129d587faa42ee3e2a85cce", - "sha256:af7ed8a8aa6957aac47b4268631fa1df984643f07ef00acd374e456364b373f5", - "sha256:bf0a7aed7f5521c7ca67febd57db473af4762b9622254291fbcbb8cd0ba5e33e", - "sha256:bf1ef9eb901113a9805287e090452c05547578eaab1b62e4ad456fcc049a9b7e", - "sha256:c0afd27bc0e307a1ffc04ca5ec010a290e49e3afbe841c5cafc5c5a80ecd81c9", - "sha256:dd579709a87092c6dbee09d1b7cfa81831040705ffa12a1b248935274aee0437", - "sha256:df6712284b2e44a065097846488f66840445eb987eb81b3cc6e4149e7b6982e1", - "sha256:e07d9f1a23e9e93ab5c62902833bf3e4b1f65502927379148b6622686223125c", - "sha256:e2ede7c1d45e65e209d6093b762e98e8318ddeff95317d07a27a2140b80cfd24", - "sha256:e4ef9c164eb55123c62411f5936b5c2e521b12356037b6e1c2617cef45523d47", - "sha256:eca2b7343524e7ba246cab8ff00cab47a2d6d54ada3b02772e908a45675722e2", - "sha256:eee64c616adeff7db37cc37da4180a3a5b6177f5c46b187894e633f088fb5b28", - "sha256:ef824cad1f980d27f26166f86856efe11eff9912c4fed97d3804820d43fa550c", - "sha256:efc89291bd5a08855829a3c522df16d856455297cf35ae827a37edac45f466a7", - "sha256:fa964bae817babece5aa2e8c1af841bebb6d0b9add8e637548809d040443fee0", - "sha256:ff37757e068ae606659c28c3bd0d923f9d29a85de79bf25b2b34b148473b5025" - ], - "index": "pypi", - "version": "==4.5.4" - }, - "distlib": { - "hashes": [ - "sha256:2e166e231a26b36d6dfe35a48c4464346620f8645ed0ace01ee31822b288de21", - 
"sha256:3db50260f17a3479465fe376211c0816e6b0d1503a6c71caebe80360cab04828" + "sha256:08b3ba72bd981531fd557f67beee376d6700fba183b167857038997ba30dd297", + "sha256:2757fa64e11ec12220968f65d086b7a29b6583d16e9a544c889b22ba98555ef1", + "sha256:3102bb2c206700a7d28181dbe04d66b30780cde1d1c02c5f3c165cf3d2489497", + "sha256:3498b27d8236057def41de3585f317abae235dd3a11d33e01736ffedb2ef8606", + "sha256:378ac77af41350a8c6b8801a66021b52da8a05fd77e578b7380e876c0ce4f528", + "sha256:38f16b1317b8dd82df67ed5daa5f5e7c959e46579840d77a67a4ceb9cef0a50b", + "sha256:3911c2ef96e5ddc748a3c8b4702c61986628bb719b8378bf1e4a6184bbd48fe4", + "sha256:3a3c3f8863255f3c31db3889f8055989527173ef6192a283eb6f4db3c579d830", + "sha256:3b14b1da110ea50c8bcbadc3b82c3933974dbeea1832e814aab93ca1163cd4c1", + "sha256:535dc1e6e68fad5355f9984d5637c33badbdc987b0c0d303ee95a6c979c9516f", + "sha256:6f61319e33222591f885c598e3e24f6a4be3533c1d70c19e0dc59e83a71ce27d", + "sha256:723d22d324e7997a651478e9c5a3120a0ecbc9a7e94071f7e1954562a8806cf3", + "sha256:76b2775dda7e78680d688daabcb485dc87cf5e3184a0b3e012e1d40e38527cc8", + "sha256:782a5c7df9f91979a7a21792e09b34a658058896628217ae6362088b123c8500", + "sha256:7e4d159021c2029b958b2363abec4a11db0ce8cd43abb0d9ce44284cb97217e7", + "sha256:8dacc4073c359f40fcf73aede8428c35f84639baad7e1b46fce5ab7a8a7be4bb", + "sha256:8f33d1156241c43755137288dea619105477961cfa7e47f48dbf96bc2c30720b", + "sha256:8ffd4b204d7de77b5dd558cdff986a8274796a1e57813ed005b33fd97e29f059", + "sha256:93a280c9eb736a0dcca19296f3c30c720cb41a71b1f9e617f341f0a8e791a69b", + "sha256:9a4f66259bdd6964d8cf26142733c81fb562252db74ea367d9beb4f815478e72", + "sha256:9a9d4ff06804920388aab69c5ea8a77525cf165356db70131616acd269e19b36", + "sha256:a2070c5affdb3a5e751f24208c5c4f3d5f008fa04d28731416e023c93b275277", + "sha256:a4857f7e2bc6921dbd487c5c88b84f5633de3e7d416c4dc0bb70256775551a6c", + "sha256:a607ae05b6c96057ba86c811d9c43423f35e03874ffb03fbdcd45e0637e8b631", + "sha256:a66ca3bdf21c653e47f726ca57f46ba7fc1f260ad99ba783acc3e58e3ebdb9ff", + "sha256:ab110c48bc3d97b4d19af41865e14531f300b482da21783fdaacd159251890e8", + "sha256:b239711e774c8eb910e9b1ac719f02f5ae4bf35fa0420f438cdc3a7e4e7dd6ec", + "sha256:be0416074d7f253865bb67630cf7210cbc14eb05f4099cc0f82430135aaa7a3b", + "sha256:c46643970dff9f5c976c6512fd35768c4a3819f01f61169d8cdac3f9290903b7", + "sha256:c5ec71fd4a43b6d84ddb88c1df94572479d9a26ef3f150cef3dacefecf888105", + "sha256:c6e5174f8ca585755988bc278c8bb5d02d9dc2e971591ef4a1baabdf2d99589b", + "sha256:c89b558f8a9a5a6f2cfc923c304d49f0ce629c3bd85cb442ca258ec20366394c", + "sha256:cc44e3545d908ecf3e5773266c487ad1877be718d9dc65fc7eb6e7d14960985b", + "sha256:cc6f8246e74dd210d7e2b56c76ceaba1cc52b025cd75dbe96eb48791e0250e98", + "sha256:cd556c79ad665faeae28020a0ab3bda6cd47d94bec48e36970719b0b86e4dcf4", + "sha256:ce6f3a147b4b1a8b09aae48517ae91139b1b010c5f36423fa2b866a8b23df879", + "sha256:ceb499d2b3d1d7b7ba23abe8bf26df5f06ba8c71127f188333dddcf356b4b63f", + "sha256:cef06fb382557f66d81d804230c11ab292d94b840b3cb7bf4450778377b592f4", + "sha256:e448f56cfeae7b1b3b5bcd99bb377cde7c4eb1970a525c770720a352bc4c8044", + "sha256:e52d3d95df81c8f6b2a1685aabffadf2d2d9ad97203a40f8d61e51b70f191e4e", + "sha256:ee2f1d1c223c3d2c24e3afbb2dd38be3f03b1a8d6a83ee3d9eb8c36a52bee899", + "sha256:f2c6888eada180814b8583c3e793f3f343a692fc802546eed45f40a001b1169f", + "sha256:f51dbba78d68a44e99d484ca8c8f604f17e957c1ca09c3ebc2c7e3bbd9ba0448", + "sha256:f54de00baf200b4539a5a092a759f000b5f45fd226d6d25a76b0dff71177a714", + "sha256:fa10fee7e32213f5c7b0d6428ea92e3a3fdd6d725590238a3f92c0de1c78b9d2", + 
"sha256:fabeeb121735d47d8eab8671b6b031ce08514c86b7ad8f7d5490a7b6dcd6267d", + "sha256:fac3c432851038b3e6afe086f777732bcf7f6ebbfd90951fa04ee53db6d0bcdd", + "sha256:fda29412a66099af6d6de0baa6bd7c52674de177ec2ad2630ca264142d69c6c7", + "sha256:ff1330e8bc996570221b450e2d539134baa9465f5cb98aff0e0f73f34172e0ae" ], - "version": "==0.3.0" + "index": "pypi", + "version": "==5.3.1" }, - "dodgy": { + "coveralls": { "hashes": [ - "sha256:28323cbfc9352139fdd3d316fa17f325cc0e9ac74438cbba51d70f9b48f86c3a", - "sha256:51f54c0fd886fa3854387f354b19f429d38c04f984f38bc572558b703c0542a6" + "sha256:2301a19500b06649d2ec4f2858f9c69638d7699a4c63027c5d53daba666147cc", + "sha256:b990ba1f7bc4288e63340be0433698c1efe8217f78c689d254c2540af3d38617" ], "index": "pypi", - "version": "==0.2.1" + "version": "==2.2.0" }, - "dparse": { + "distlib": { "hashes": [ - "sha256:00a5fdfa900629e5159bf3600d44905b333f4059a3366f28e0dbd13eeab17b19", - "sha256:cef95156fa0adedaf042cd42f9990974bec76f25dfeca4dc01f381a243d5aa5b" + "sha256:8c09de2c67b3e7deef7184574fc060ab8a793e7adbb183d942c389c8b13c52fb", + "sha256:edf6116872c863e1aa9d5bb7cb5e05a022c519a4594dc703843343a9ddd9bff1" ], - "version": "==0.4.1" + "version": "==0.3.1" }, - "entrypoints": { + "docopt": { "hashes": [ - "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19", - "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451" + "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491" ], - "version": "==0.3" + "version": "==0.6.2" }, "filelock": { "hashes": [ @@ -676,27 +917,27 @@ }, "flake8": { "hashes": [ - "sha256:45681a117ecc81e870cbf1262835ae4af5e7a8b08e40b944a8a6e6b895914cfb", - "sha256:49356e766643ad15072a789a20915d3c91dc89fd313ccd71802303fd67e4deca" + "sha256:749dbbd6bfd0cf1318af27bf97a14e28e5ff548ef8e5b1566ccfb25a11e7c839", + "sha256:aadae8761ec651813c24be05c6f7b4680857ef6afaae4651a4eccaef97ce6c3b" ], "index": "pypi", - "version": "==3.7.9" + "version": "==3.8.4" }, "flake8-annotations": { "hashes": [ - "sha256:19a6637a5da1bb7ea7948483ca9e2b9e15b213e687e7bf5ff8c1bfc91c185006", - "sha256:bb033b72cdd3a2b0a530bbdf2081f12fbea7d70baeaaebb5899723a45f424b8e" + "sha256:0bcebb0792f1f96d617ded674dca7bf64181870bfe5dace353a1483551f8e5f1", + "sha256:bebd11a850f6987a943ce8cdff4159767e0f5f89b3c88aca64680c2175ee02df" ], "index": "pypi", - "version": "==2.0.0" + "version": "==2.4.1" }, "flake8-bugbear": { "hashes": [ - "sha256:d8c466ea79d5020cb20bf9f11cf349026e09517a42264f313d3f6fddb83e0571", - "sha256:ded4d282778969b5ab5530ceba7aa1a9f1b86fa7618fc96a19a1d512331640f8" + "sha256:528020129fea2dea33a466b9d64ab650aa3e5f9ffc788b70ea4bc6cf18283538", + "sha256:f35b8135ece7a014bc0aee5b5d485334ac30a6da48494998cc1fabf7ec70d703" ], "index": "pypi", - "version": "==19.8.0" + "version": "==20.11.1" }, "flake8-docstrings": { "hashes": [ @@ -714,6 +955,13 @@ "index": "pypi", "version": "==0.18.1" }, + "flake8-polyfill": { + "hashes": [ + "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9", + "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda" + ], + "version": "==1.0.2" + }, "flake8-string-format": { "hashes": [ "sha256:65f3da786a1461ef77fca3780b314edb2853c377f2e35069723348c8917deaa2", @@ -724,11 +972,11 @@ }, "flake8-tidy-imports": { "hashes": [ - "sha256:1c476aabc6e8db26dc75278464a3a392dba0ea80562777c5f13fd5cdf2646154", - "sha256:b3f5b96affd0f57cacb6621ed28286ce67edaca807757b51227043ebf7b136a1" + "sha256:52e5f2f987d3d5597538d5941153409ebcab571635835b78f522c7bf03ca23bc", + 
"sha256:76e36fbbfdc8e3c5017f9a216c2855a298be85bc0631e66777f4e6a07a859dc4" ], "index": "pypi", - "version": "==2.0.0" + "version": "==4.2.1" }, "flake8-todo": { "hashes": [ @@ -739,25 +987,19 @@ }, "identify": { "hashes": [ - "sha256:1222b648251bdcb8deb240b294f450fbf704c7984e08baa92507e4ea10b436d5", - "sha256:d824ebe21f38325c771c41b08a95a761db1982f1fc0eee37c6c97df3f1636b96" + "sha256:7aef7a5104d6254c162990e54a203cdc0fd202046b6c415bd5d636472f6565c4", + "sha256:b2c71bf9f5c482c389cef816f3a15f1c9d7429ad70f497d4a2e522442d80c6de" ], - "version": "==1.4.11" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.5.11" }, "idna": { "hashes": [ - "sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb", - "sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa" + "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", + "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" ], - "version": "==2.9" - }, - "importlib-metadata": { - "hashes": [ - "sha256:06f5b3a99029c7134207dd882428a66992a9de2bef7c2b699b5641f9886c3302", - "sha256:b97607a1a18a5100839aec1dc26a1ea17ee0d93b20b0f008d80a5a050afb200b" - ], - "markers": "python_version < '3.8'", - "version": "==1.5.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.10" }, "mccabe": { "hashes": [ @@ -768,92 +1010,85 @@ }, "nodeenv": { "hashes": [ - "sha256:5b2438f2e42af54ca968dd1b374d14a1194848955187b0e5e4be1f73813a5212" + "sha256:5304d424c529c997bc888453aeaa6362d242b6b4631e90f3d4bf1b290f1c84a9", + "sha256:ab45090ae383b716c4ef89e690c41ff8c2b257b85b309f01f3654df3d084bd7c" ], - "version": "==1.3.5" + "version": "==1.5.0" }, - "packaging": { + "pep8-naming": { "hashes": [ - "sha256:170748228214b70b672c581a3dd610ee51f733018650740e98c7df862a583f73", - "sha256:e665345f9eef0c621aa0bf2f8d78cf6d21904eef16a93f020240b704a57f1334" + "sha256:a1dd47dd243adfe8a83616e27cf03164960b507530f155db94e10b36a6cd6724", + "sha256:f43bfe3eea7e0d73e8b5d07d6407ab47f2476ccaeff6937c84275cd30b016738" ], - "version": "==20.1" + "index": "pypi", + "version": "==0.11.1" }, "pre-commit": { "hashes": [ - "sha256:8f48d8637bdae6fa70cc97db9c1dd5aa7c5c8bf71968932a380628c25978b850", - "sha256:f92a359477f3252452ae2e8d3029de77aec59415c16ae4189bcfba40b757e029" + "sha256:6c86d977d00ddc8a60d68eec19f51ef212d9462937acf3ea37c7adec32284ac0", + "sha256:ee784c11953e6d8badb97d19bc46b997a3a9eded849881ec587accd8608d74a4" ], "index": "pypi", - "version": "==1.21.0" + "version": "==2.9.3" }, "pycodestyle": { "hashes": [ - "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56", - "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c" + "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367", + "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e" ], - "version": "==2.5.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.6.0" }, "pydocstyle": { "hashes": [ - "sha256:da7831660b7355307b32778c4a0dbfb137d89254ef31a2b2978f50fc0b4d7586", - "sha256:f4f5d210610c2d153fae39093d44224c17429e2ad7da12a8b419aba5c2f614b5" + "sha256:19b86fa8617ed916776a11cd8bc0197e5b9856d5433b777f51a3defe13075325", + "sha256:aca749e190a01726a4fb472dd4ef23b5c9da7b9205c0a7857c06533de13fd678" ], - "version": "==5.0.2" + "markers": "python_version >= '3.5'", + "version": "==5.1.1" }, "pyflakes": { "hashes": [ - 
"sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0", - "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2" + "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92", + "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8" ], - "version": "==2.1.1" - }, - "pyparsing": { - "hashes": [ - "sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f", - "sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec" - ], - "version": "==2.4.6" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.2.0" }, "pyyaml": { "hashes": [ - "sha256:059b2ee3194d718896c0ad077dd8c043e5e909d9180f387ce42012662a4946d6", - "sha256:1cf708e2ac57f3aabc87405f04b86354f66799c8e62c28c5fc5f88b5521b2dbf", - "sha256:24521fa2890642614558b492b473bee0ac1f8057a7263156b02e8b14c88ce6f5", - "sha256:4fee71aa5bc6ed9d5f116327c04273e25ae31a3020386916905767ec4fc5317e", - "sha256:70024e02197337533eef7b85b068212420f950319cc8c580261963aefc75f811", - "sha256:74782fbd4d4f87ff04159e986886931456a1894c61229be9eaf4de6f6e44b99e", - "sha256:940532b111b1952befd7db542c370887a8611660d2b9becff75d39355303d82d", - "sha256:cb1f2f5e426dc9f07a7681419fe39cee823bb74f723f36f70399123f439e9b20", - "sha256:dbbb2379c19ed6042e8f11f2a2c66d39cceb8aeace421bfc29d085d93eda3689", - "sha256:e3a057b7a64f1222b56e47bcff5e4b94c4f61faac04c7c4ecb1985e18caa3994", - "sha256:e9f45bd5b92c7974e59bcd2dcc8631a6b6cc380a904725fce7bc08872e691615" + "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", + "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76", + "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", + "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e", + "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648", + "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", + "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f", + "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2", + "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee", + "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a", + "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", + "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", + "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a" ], "index": "pypi", - "version": "==5.3" + "version": "==5.3.1" }, "requests": { "hashes": [ - "sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee", - "sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6" - ], - "index": "pypi", - "version": "==2.23.0" - }, - "safety": { - "hashes": [ - "sha256:0a3a8a178a9c96242b224f033ee8d1d130c0448b0e6622d12deaf37f6c3b4e59", - "sha256:5059f3ffab3648330548ea9c7403405bbfaf085b11235770825d14c58f24cb78" + "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804", + "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e" ], "index": "pypi", - "version": "==1.8.5" + "version": "==2.25.1" }, "six": { "hashes": [ - "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a", - "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c" + "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", + 
"sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], - "version": "==1.14.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", + "version": "==1.15.0" }, "snowballstemmer": { "hashes": [ @@ -864,67 +1099,27 @@ }, "toml": { "hashes": [ - "sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c", - "sha256:235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e" + "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", + "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" ], - "version": "==0.10.0" - }, - "typed-ast": { - "hashes": [ - "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355", - "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919", - "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa", - "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652", - "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75", - "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01", - "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d", - "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1", - "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907", - "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c", - "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3", - "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b", - "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614", - "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb", - "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b", - "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41", - "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6", - "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34", - "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe", - "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4", - "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7" - ], - "markers": "python_version < '3.8'", - "version": "==1.4.1" - }, - "unittest-xml-reporting": { - "hashes": [ - "sha256:358bbdaf24a26d904cc1c26ef3078bca7fc81541e0a54c8961693cc96a6f35e0", - "sha256:9d28ddf6524cf0ff9293f61bd12e792de298f8561a5c945acea63fb437789e0e" - ], - "index": "pypi", - "version": "==2.5.2" + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2'", + "version": "==0.10.2" }, "urllib3": { "hashes": [ - "sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4", - "sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb" + "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08", + "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473" ], - "index": "pypi", - "version": "==1.24.3" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", + "version": "==1.26.2" }, "virtualenv": { "hashes": [ - "sha256:531b142e300d405bb9faedad4adbeb82b4098b918e35209af2adef3129274aae", - "sha256:5dd42a9f56307542bddc446cfd10ef6576f11910366a07609fe8d0d88fa8fb7e" - ], - "version": "==20.0.5" - }, - "zipp": { - "hashes": [ - "sha256:12248a63bbdf7548f89cb4c7cda4681e537031eda29c02ea29674bc6854460c2", - 
"sha256:7c0f8e91abc0dc07a5068f315c52cb30c66bfbc581e5b50704c8a2f6ebae794a" + "sha256:54b05fc737ea9c9ee9f8340f579e5da5b09fb64fd010ab5757eb90268616907c", + "sha256:b7a8ec323ee02fb2312f098b6b4c9de99559b462775bc8fe3627a73706603c1b" ], - "version": "==3.0.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==20.2.2" } } } @@ -1,9 +1,10 @@ # Python Utility Bot -[](https://discord.gg/2B963hn) -[](https://dev.azure.com/python-discord/Python%20Discord/_build/latest?definitionId=1&branchName=master) -[](https://dev.azure.com/python-discord/Python%20Discord/_apis/build/status/Bot?branchName=master) -[](https://dev.azure.com/python-discord/Python%20Discord/_apis/build/status/Bot?branchName=master) +[![Discord][7]][8] +[![Lint & Test][1]][2] +[![Build][3]][4] +[![Deploy][5]][6] +[](https://coveralls.io/github/python-discord/bot) [](LICENSE) [](https://pythondiscord.com) @@ -11,3 +12,12 @@ This project is a Discord bot specifically for use with the Python Discord serve and other tools to help keep the server running like a well-oiled machine. Read the [Contributing Guide](https://pythondiscord.com/pages/contributing/bot/) on our website if you're interested in helping out. + +[1]: https://github.com/python-discord/bot/workflows/Lint%20&%20Test/badge.svg?branch=master +[2]: https://github.com/python-discord/bot/actions?query=workflow%3A%22Lint+%26+Test%22+branch%3Amaster +[3]: https://github.com/python-discord/bot/workflows/Build/badge.svg?branch=master +[4]: https://github.com/python-discord/bot/actions?query=workflow%3ABuild+branch%3Amaster +[5]: https://github.com/python-discord/bot/workflows/Deploy/badge.svg?branch=master +[6]: https://github.com/python-discord/bot/actions?query=workflow%3ADeploy+branch%3Amaster +[7]: https://img.shields.io/static/v1?label=Python%20Discord&logo=discord&message=%3E100k%20members&color=%237289DA&logoColor=white +[8]: https://discord.gg/2B963hn diff --git a/azure-pipelines.yml b/azure-pipelines.yml deleted file mode 100644 index 874364a6f..000000000 --- a/azure-pipelines.yml +++ /dev/null @@ -1,67 +0,0 @@ -# https://aka.ms/yaml - -variables: - PIPENV_HIDE_EMOJIS: 1 - PIPENV_IGNORE_VIRTUALENVS: 1 - PIPENV_NOSPIN: 1 - -jobs: - - job: test - displayName: 'Lint & Test' - pool: - vmImage: ubuntu-16.04 - - variables: - PIP_CACHE_DIR: ".cache/pip" - - steps: - - task: UsePythonVersion@0 - displayName: 'Set Python version' - inputs: - versionSpec: '3.7.x' - addToPath: true - - - script: pip install pipenv - displayName: 'Install pipenv' - - - script: pipenv install --dev --deploy --system - displayName: 'Install project using pipenv' - - - script: python -m flake8 - displayName: 'Run linter' - - - script: BOT_API_KEY=foo BOT_SENTRY_DSN=blah BOT_TOKEN=bar WOLFRAM_API_KEY=baz REDDIT_CLIENT_ID=spam REDDIT_SECRET=ham coverage run -m xmlrunner - displayName: Run tests - - - script: coverage report -m && coverage xml -o coverage.xml - displayName: Generate test coverage report - - - task: PublishCodeCoverageResults@1 - displayName: 'Publish Coverage Results' - condition: succeededOrFailed() - inputs: - codeCoverageTool: Cobertura - summaryFileLocation: coverage.xml - - - task: PublishTestResults@2 - condition: succeededOrFailed() - displayName: 'Publish Test Results' - inputs: - testResultsFiles: '**/TEST-*.xml' - testRunTitle: 'Bot Test Results' - - - job: build - displayName: 'Build & Push Container' - dependsOn: 'test' - condition: and(succeeded(), ne(variables['Build.Reason'], 'PullRequest'), eq(variables['Build.SourceBranch'], 
'refs/heads/master')) - - steps: - - task: Docker@2 - displayName: 'Build & Push Container' - inputs: - containerRegistry: 'DockerHub' - repository: 'pythondiscord/bot' - command: 'buildAndPush' - Dockerfile: 'Dockerfile' - buildContext: '.' - tags: 'latest' diff --git a/bot/__init__.py b/bot/__init__.py index 90ab3c348..8f880b8e6 100644 --- a/bot/__init__.py +++ b/bot/__init__.py @@ -1,45 +1,25 @@ -import logging +import asyncio import os -import sys -from logging import Logger, StreamHandler, handlers -from pathlib import Path +from functools import partial, partialmethod +from typing import TYPE_CHECKING -TRACE_LEVEL = logging.TRACE = 5 -logging.addLevelName(TRACE_LEVEL, "TRACE") +from discord.ext import commands +from bot import log +from bot.command import Command -def monkeypatch_trace(self: logging.Logger, msg: str, *args, **kwargs) -> None: - """ - Log 'msg % args' with severity 'TRACE'. +if TYPE_CHECKING: + from bot.bot import Bot - To pass exception information, use the keyword argument exc_info with - a true value, e.g. +log.setup() - logger.trace("Houston, we have an %s", "interesting problem", exc_info=1) - """ - if self.isEnabledFor(TRACE_LEVEL): - self._log(TRACE_LEVEL, msg, args, **kwargs) +# On Windows, the selector event loop is required for aiodns. +if os.name == "nt": + asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) +# Monkey-patch discord.py decorators to use the Command subclass which supports root aliases. +# Must be patched before any cogs are added. +commands.command = partial(commands.command, cls=Command) +commands.GroupMixin.command = partialmethod(commands.GroupMixin.command, cls=Command) -Logger.trace = monkeypatch_trace - -DEBUG_MODE = 'local' in os.environ.get("SITE_URL", "local") - -log_format = logging.Formatter("%(asctime)s | %(name)s | %(levelname)s | %(message)s") - -stream_handler = StreamHandler(stream=sys.stdout) -stream_handler.setFormatter(log_format) - -log_file = Path("logs", "bot.log") -log_file.parent.mkdir(exist_ok=True) -file_handler = handlers.RotatingFileHandler(log_file, maxBytes=5242880, backupCount=7) -file_handler.setFormatter(log_format) - -root_log = logging.getLogger() -root_log.setLevel(TRACE_LEVEL if DEBUG_MODE else logging.INFO) -root_log.addHandler(stream_handler) -root_log.addHandler(file_handler) - -logging.getLogger("discord").setLevel(logging.WARNING) -logging.getLogger("websockets").setLevel(logging.WARNING) -logging.getLogger(__name__).setLevel(TRACE_LEVEL) +instance: "Bot" = None # Global Bot instance. 
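The bot/__init__.py hunk above monkey-patches discord.py's command decorators so that every command is built from a Command subclass that "supports root aliases". That subclass lives in bot/command.py, which is not part of this hunk, so the sketch below is only an illustration of the technique under the assumption that the subclass merely pops a `root_aliases` keyword before delegating to discord.py; the names and validation details are not the repository's exact code.

```python
# Minimal sketch, assuming the Command subclass referenced above only needs to
# strip a custom `root_aliases` keyword (illustrative, not the project's exact code).
from functools import partial, partialmethod

from discord.ext import commands


class Command(commands.Command):
    """A Command that also records aliases intended to work at the root level."""

    def __init__(self, *args, **kwargs):
        # Pop the custom keyword first; discord.py's Command would reject it otherwise.
        self.root_aliases = tuple(kwargs.pop("root_aliases", ()))
        super().__init__(*args, **kwargs)


# Patch the decorators before any cogs are loaded, as bot/__init__.py does above,
# so every @commands.command(...) and @group.command(...) produces this subclass.
commands.command = partial(commands.command, cls=Command)
commands.GroupMixin.command = partialmethod(commands.GroupMixin.command, cls=Command)
```

Because the decorators are patched this early, cogs defined later can pass `root_aliases=...` without importing the subclass themselves; whatever consumes the stored aliases (for example a custom help or alias system) is outside this diff.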
diff --git a/bot/__main__.py b/bot/__main__.py index 490163739..257216fa7 100644 --- a/bot/__main__.py +++ b/bot/__main__.py @@ -1,73 +1,10 @@ -import logging - -import discord -import sentry_sdk -from discord.ext.commands import when_mentioned_or -from sentry_sdk.integrations.logging import LoggingIntegration - -from bot import patches +import bot +from bot import constants from bot.bot import Bot -from bot.constants import Bot as BotConfig, DEBUG_MODE - -sentry_logging = LoggingIntegration( - level=logging.TRACE, - event_level=logging.WARNING -) - -sentry_sdk.init( - dsn=BotConfig.sentry_dsn, - integrations=[sentry_logging] -) - -bot = Bot( - command_prefix=when_mentioned_or(BotConfig.prefix), - activity=discord.Game(name="Commands: !help"), - case_insensitive=True, - max_messages=10_000, -) - -# Internal/debug -bot.load_extension("bot.cogs.error_handler") -bot.load_extension("bot.cogs.filtering") -bot.load_extension("bot.cogs.logging") -bot.load_extension("bot.cogs.security") - -# Commands, etc -bot.load_extension("bot.cogs.antimalware") -bot.load_extension("bot.cogs.antispam") -bot.load_extension("bot.cogs.bot") -bot.load_extension("bot.cogs.clean") -bot.load_extension("bot.cogs.extensions") -bot.load_extension("bot.cogs.help") - -# Only load this in production -if not DEBUG_MODE: - bot.load_extension("bot.cogs.doc") - bot.load_extension("bot.cogs.verification") - -# Feature cogs -bot.load_extension("bot.cogs.alias") -bot.load_extension("bot.cogs.defcon") -bot.load_extension("bot.cogs.eval") -bot.load_extension("bot.cogs.duck_pond") -bot.load_extension("bot.cogs.free") -bot.load_extension("bot.cogs.information") -bot.load_extension("bot.cogs.jams") -bot.load_extension("bot.cogs.moderation") -bot.load_extension("bot.cogs.off_topic_names") -bot.load_extension("bot.cogs.reddit") -bot.load_extension("bot.cogs.reminders") -bot.load_extension("bot.cogs.site") -bot.load_extension("bot.cogs.snekbox") -bot.load_extension("bot.cogs.sync") -bot.load_extension("bot.cogs.tags") -bot.load_extension("bot.cogs.token_remover") -bot.load_extension("bot.cogs.utils") -bot.load_extension("bot.cogs.watchchannels") -bot.load_extension("bot.cogs.wolfram") +from bot.log import setup_sentry -# Apply `message_edited_at` patch if discord.py did not yet release a bug fix. -if not hasattr(discord.message.Message, '_handle_edited_timestamp'): - patches.message_edited_at.apply_patch() +setup_sentry() -bot.run(BotConfig.token) +bot.instance = Bot.create() +bot.instance.load_extensions() +bot.instance.run(constants.Bot.token) diff --git a/bot/api.py b/bot/api.py index fb126b384..d93f9f2ba 100644 --- a/bot/api.py +++ b/bot/api.py @@ -32,48 +32,32 @@ class ResponseCodeError(ValueError): class APIClient: """Django Site API wrapper.""" - def __init__(self, loop: asyncio.AbstractEventLoop, **kwargs): + # These are class attributes so they can be seen when being mocked for tests. + # See commit 22a55534ef13990815a6f69d361e2a12693075d5 for details. 
+ session: Optional[aiohttp.ClientSession] = None + loop: asyncio.AbstractEventLoop = None + + def __init__(self, **session_kwargs): auth_headers = { 'Authorization': f"Token {Keys.site_api}" } - if 'headers' in kwargs: - kwargs['headers'].update(auth_headers) + if 'headers' in session_kwargs: + session_kwargs['headers'].update(auth_headers) else: - kwargs['headers'] = auth_headers - - self.session: Optional[aiohttp.ClientSession] = None - self.loop = loop - - self._ready = asyncio.Event(loop=loop) - self._creation_task = None - self._session_args = kwargs + session_kwargs['headers'] = auth_headers - self.recreate() + # aiohttp will complain if APIClient gets instantiated outside a coroutine. Thankfully, we + # don't and shouldn't need to do that, so we can avoid scheduling a task to create it. + self.session = aiohttp.ClientSession(**session_kwargs) @staticmethod def _url_for(endpoint: str) -> str: return f"{URLs.site_schema}{URLs.site_api}/{quote_url(endpoint)}" - async def _create_session(self) -> None: - """Create the aiohttp session and set the ready event.""" - self.session = aiohttp.ClientSession(**self._session_args) - self._ready.set() - async def close(self) -> None: - """Close the aiohttp session and unset the ready event.""" - if not self._ready.is_set(): - return - + """Close the aiohttp session.""" await self.session.close() - self._ready.clear() - - def recreate(self) -> None: - """Schedule the aiohttp session to be created if it's been closed.""" - if self.session is None or self.session.closed: - # Don't schedule a task if one is already in progress. - if self._creation_task is None or self._creation_task.done(): - self._creation_task = self.loop.create_task(self._create_session()) async def maybe_raise_for_status(self, response: aiohttp.ClientResponse, should_raise: bool) -> None: """Raise ResponseCodeError for non-OK response if an exception should be raised.""" @@ -85,59 +69,33 @@ class APIClient: response_text = await response.text() raise ResponseCodeError(response=response, response_text=response_text) - async def get(self, endpoint: str, *args, raise_for_status: bool = True, **kwargs) -> dict: - """Site API GET.""" - await self._ready.wait() - - async with self.session.get(self._url_for(endpoint), *args, **kwargs) as resp: + async def request(self, method: str, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> dict: + """Send an HTTP request to the site API and return the JSON response.""" + async with self.session.request(method.upper(), self._url_for(endpoint), **kwargs) as resp: await self.maybe_raise_for_status(resp, raise_for_status) return await resp.json() - async def patch(self, endpoint: str, *args, raise_for_status: bool = True, **kwargs) -> dict: - """Site API PATCH.""" - await self._ready.wait() + async def get(self, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> dict: + """Site API GET.""" + return await self.request("GET", endpoint, raise_for_status=raise_for_status, **kwargs) - async with self.session.patch(self._url_for(endpoint), *args, **kwargs) as resp: - await self.maybe_raise_for_status(resp, raise_for_status) - return await resp.json() + async def patch(self, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> dict: + """Site API PATCH.""" + return await self.request("PATCH", endpoint, raise_for_status=raise_for_status, **kwargs) - async def post(self, endpoint: str, *args, raise_for_status: bool = True, **kwargs) -> dict: + async def post(self, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> 
dict: """Site API POST.""" - await self._ready.wait() - - async with self.session.post(self._url_for(endpoint), *args, **kwargs) as resp: - await self.maybe_raise_for_status(resp, raise_for_status) - return await resp.json() + return await self.request("POST", endpoint, raise_for_status=raise_for_status, **kwargs) - async def put(self, endpoint: str, *args, raise_for_status: bool = True, **kwargs) -> dict: + async def put(self, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> dict: """Site API PUT.""" - await self._ready.wait() + return await self.request("PUT", endpoint, raise_for_status=raise_for_status, **kwargs) - async with self.session.put(self._url_for(endpoint), *args, **kwargs) as resp: - await self.maybe_raise_for_status(resp, raise_for_status) - return await resp.json() - - async def delete(self, endpoint: str, *args, raise_for_status: bool = True, **kwargs) -> Optional[dict]: + async def delete(self, endpoint: str, *, raise_for_status: bool = True, **kwargs) -> Optional[dict]: """Site API DELETE.""" - await self._ready.wait() - - async with self.session.delete(self._url_for(endpoint), *args, **kwargs) as resp: + async with self.session.delete(self._url_for(endpoint), **kwargs) as resp: if resp.status == 204: return None await self.maybe_raise_for_status(resp, raise_for_status) return await resp.json() - - -def loop_is_running() -> bool: - """ - Determine if there is a running asyncio event loop. - - This helps enable "call this when event loop is running" logic (see: Twisted's `callWhenRunning`), - which is currently not provided by asyncio. - """ - try: - asyncio.get_running_loop() - except RuntimeError: - return False - return True diff --git a/bot/async_stats.py b/bot/async_stats.py new file mode 100644 index 000000000..58a80f528 --- /dev/null +++ b/bot/async_stats.py @@ -0,0 +1,39 @@ +import asyncio +import socket + +from statsd.client.base import StatsClientBase + + +class AsyncStatsClient(StatsClientBase): + """An async transport method for statsd communication.""" + + def __init__( + self, + loop: asyncio.AbstractEventLoop, + host: str = 'localhost', + port: int = 8125, + prefix: str = None + ): + """Create a new client.""" + family, _, _, _, addr = socket.getaddrinfo( + host, port, socket.AF_INET, socket.SOCK_DGRAM)[0] + self._addr = addr + self._prefix = prefix + self._loop = loop + self._transport = None + + async def create_socket(self) -> None: + """Use the loop.create_datagram_endpoint method to create a socket.""" + self._transport, _ = await self._loop.create_datagram_endpoint( + asyncio.DatagramProtocol, + family=socket.AF_INET, + remote_addr=self._addr + ) + + def _send(self, data: str) -> None: + """Start an async task to send data to statsd.""" + self._loop.create_task(self._async_send(data)) + + async def _async_send(self, data: str) -> None: + """Send data to the statsd server using the async transport.""" + self._transport.sendto(data.encode('ascii'), self._addr) diff --git a/bot/bot.py b/bot/bot.py index cecee7b68..d5f108575 100644 --- a/bot/bot.py +++ b/bot/bot.py @@ -1,51 +1,322 @@ +import asyncio import logging import socket -from typing import Optional +import warnings +from collections import defaultdict +from contextlib import suppress +from typing import Dict, List, Optional import aiohttp +import discord +from async_rediscache import RedisSession from discord.ext import commands +from sentry_sdk import push_scope -from bot import api +from bot import api, constants +from bot.async_stats import AsyncStatsClient log = 
logging.getLogger('bot') +LOCALHOST = "127.0.0.1" class Bot(commands.Bot): """A subclass of `discord.ext.commands.Bot` with an aiohttp session and an API client.""" - def __init__(self, *args, **kwargs): - # Use asyncio for DNS resolution instead of threads so threads aren't spammed. - # Use AF_INET as its socket family to prevent HTTPS related problems both locally - # and in production. - self.connector = aiohttp.TCPConnector( - resolver=aiohttp.AsyncResolver(), - family=socket.AF_INET, - ) + def __init__(self, *args, redis_session: RedisSession, **kwargs): + if "connector" in kwargs: + warnings.warn( + "If login() is called (or the bot is started), the connector will be overwritten " + "with an internal one" + ) - super().__init__(*args, connector=self.connector, **kwargs) + super().__init__(*args, **kwargs) self.http_session: Optional[aiohttp.ClientSession] = None - self.api_client = api.APIClient(loop=self.loop, connector=self.connector) + self.redis_session = redis_session + self.api_client: Optional[api.APIClient] = None + self.filter_list_cache = defaultdict(dict) + + self._connector = None + self._resolver = None + self._statsd_timerhandle: asyncio.TimerHandle = None + self._guild_available = asyncio.Event() + + statsd_url = constants.Stats.statsd_host + + if constants.DEBUG_MODE: + # Since statsd is UDP, there are no errors for sending to a down port. + # For this reason, setting the statsd host to 127.0.0.1 for development + # will effectively disable stats. + statsd_url = LOCALHOST + + self.stats = AsyncStatsClient(self.loop, LOCALHOST) + self._connect_statsd(statsd_url) + + def _connect_statsd(self, statsd_url: str, retry_after: int = 2, attempt: int = 1) -> None: + """Callback used to retry a connection to statsd if it should fail.""" + if attempt >= 8: + log.error("Reached 8 attempts trying to reconnect AsyncStatsClient. Aborting") + return + + try: + self.stats = AsyncStatsClient(self.loop, statsd_url, 8125, prefix="bot") + except socket.gaierror: + log.warning(f"Statsd client failed to connect (Attempt(s): {attempt})") + # Use a fallback strategy for retrying, up to 8 times. + self._statsd_timerhandle = self.loop.call_later( + retry_after, + self._connect_statsd, + statsd_url, + retry_after * 2, + attempt + 1 + ) + + # All tasks that need to block closing until finished + self.closing_tasks: List[asyncio.Task] = [] + + async def cache_filter_list_data(self) -> None: + """Cache all the data in the FilterList on the site.""" + full_cache = await self.api_client.get('bot/filter-lists') + + for item in full_cache: + self.insert_item_into_filter_list_cache(item) + + @classmethod + def create(cls) -> "Bot": + """Create and return an instance of a Bot.""" + loop = asyncio.get_event_loop() + allowed_roles = [discord.Object(id_) for id_ in constants.MODERATION_ROLES] + + intents = discord.Intents().all() + intents.presences = False + intents.dm_typing = False + intents.dm_reactions = False + intents.invites = False + intents.webhooks = False + intents.integrations = False + + return cls( + redis_session=_create_redis_session(loop), + loop=loop, + command_prefix=commands.when_mentioned_or(constants.Bot.prefix), + activity=discord.Game(name=f"Commands: {constants.Bot.prefix}help"), + case_insensitive=True, + max_messages=10_000, + allowed_mentions=discord.AllowedMentions(everyone=False, roles=allowed_roles), + intents=intents, + ) + + def load_extensions(self) -> None: + """Load all enabled extensions.""" + # Must be done here to avoid a circular import. 
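The _connect_statsd callback above retries a failed statsd connection with a doubling delay scheduled on the event loop via loop.call_later, giving up after eight attempts. The same back-off shape in isolation (flaky_connect below is a stand-in for constructing an AsyncStatsClient):

import asyncio


def connect_with_backoff(loop, connect, retry_after=2.0, attempt=1, max_attempts=8):
    """Retry connect() with a doubling delay; illustrative helper, not the bot's API."""
    if attempt > max_attempts:
        print(f"Giving up after {max_attempts} attempts")
        return
    try:
        connect()
    except OSError:
        loop.call_later(
            retry_after, connect_with_backoff,
            loop, connect, retry_after * 2, attempt + 1, max_attempts,
        )
    else:
        print(f"Connected on attempt {attempt}")


async def main():
    attempts = 0

    def flaky_connect():
        nonlocal attempts
        attempts += 1
        if attempts < 3:
            raise OSError("statsd host not resolvable yet")

    connect_with_backoff(asyncio.get_running_loop(), flaky_connect, retry_after=0.01)
    await asyncio.sleep(0.1)  # let the scheduled retries run


asyncio.run(main())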
+ from bot.utils.extensions import EXTENSIONS + + extensions = set(EXTENSIONS) # Create a mutable copy. + if not constants.HelpChannels.enable: + extensions.remove("bot.exts.help_channels") + + for extension in extensions: + self.load_extension(extension) def add_cog(self, cog: commands.Cog) -> None: """Adds a "cog" to the bot and logs the operation.""" super().add_cog(cog) log.info(f"Cog loaded: {cog.qualified_name}") + def add_command(self, command: commands.Command) -> None: + """Add `command` as normal and then add its root aliases to the bot.""" + super().add_command(command) + self._add_root_aliases(command) + + def remove_command(self, name: str) -> Optional[commands.Command]: + """ + Remove a command/alias as normal and then remove its root aliases from the bot. + + Individual root aliases cannot be removed by this function. + To remove them, either remove the entire command or manually edit `bot.all_commands`. + """ + command = super().remove_command(name) + if command is None: + # Even if it's a root alias, there's no way to get the Bot instance to remove the alias. + return + + self._remove_root_aliases(command) + return command + def clear(self) -> None: - """Clears the internal state of the bot and resets the API client.""" - super().clear() - self.api_client.recreate() + """Not implemented! Re-instantiate the bot instead of attempting to re-use a closed one.""" + raise NotImplementedError("Re-using a Bot object after closing it is not supported.") async def close(self) -> None: - """Close the aiohttp session after closing the Discord connection.""" + """Close the Discord connection and the aiohttp session, connector, statsd client, and resolver.""" + # Done before super().close() to allow tasks finish before the HTTP session closes. + for ext in list(self.extensions): + with suppress(Exception): + self.unload_extension(ext) + + for cog in list(self.cogs): + with suppress(Exception): + self.remove_cog(cog) + + # Wait until all tasks that have to be completed before bot is closing is done + log.trace("Waiting for tasks before closing.") + await asyncio.gather(*self.closing_tasks) + + # Now actually do full close of bot await super().close() - await self.http_session.close() - await self.api_client.close() + if self.api_client: + await self.api_client.close() + + if self.http_session: + await self.http_session.close() + + if self._connector: + await self._connector.close() + + if self._resolver: + await self._resolver.close() + + if self.stats._transport: + self.stats._transport.close() + + if self.redis_session: + await self.redis_session.close() + + if self._statsd_timerhandle: + self._statsd_timerhandle.cancel() + + def insert_item_into_filter_list_cache(self, item: Dict[str, str]) -> None: + """Add an item to the bots filter_list_cache.""" + type_ = item["type"] + allowed = item["allowed"] + content = item["content"] + + self.filter_list_cache[f"{type_}.{allowed}"][content] = { + "id": item["id"], + "comment": item["comment"], + "created_at": item["created_at"], + "updated_at": item["updated_at"], + } + + async def login(self, *args, **kwargs) -> None: + """Re-create the connector and set up sessions before logging into Discord.""" + # Use asyncio for DNS resolution instead of threads so threads aren't spammed. + self._resolver = aiohttp.AsyncResolver() + + # Use AF_INET as its socket family to prevent HTTPS related problems both locally + # and in production. 
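insert_item_into_filter_list_cache above keys the cache by "{type}.{allowed}" so that allow-lists and deny-lists of the same kind live in separate buckets, each mapping content to its metadata. A reduced sketch of that layout (the sample item is hypothetical; real entries come from the bot/filter-lists endpoint):

from collections import defaultdict

filter_list_cache = defaultdict(dict)


def insert_item(item: dict) -> None:
    """Mirror of the cache-insert shape above, trimmed to the essentials."""
    key = f"{item['type']}.{item['allowed']}"
    filter_list_cache[key][item["content"]] = {
        "id": item["id"],
        "comment": item.get("comment"),
        "created_at": item.get("created_at"),
        "updated_at": item.get("updated_at"),
    }


insert_item({
    "type": "GUILD_INVITE", "allowed": True, "content": "example-invite",
    "id": 1, "comment": None, "created_at": None, "updated_at": None,
})
assert "example-invite" in filter_list_cache["GUILD_INVITE.True"]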
+ self._connector = aiohttp.TCPConnector( + resolver=self._resolver, + family=socket.AF_INET, + ) + + # Client.login() will call HTTPClient.static_login() which will create a session using + # this connector attribute. + self.http.connector = self._connector + + self.http_session = aiohttp.ClientSession(connector=self._connector) + self.api_client = api.APIClient(connector=self._connector) + + if self.redis_session.closed: + # If the RedisSession was somehow closed, we try to reconnect it + # here. Normally, this shouldn't happen. + await self.redis_session.connect() + + # Build the FilterList cache + await self.cache_filter_list_data() + + await self.stats.create_socket() + await super().login(*args, **kwargs) + + async def on_guild_available(self, guild: discord.Guild) -> None: + """ + Set the internal guild available event when constants.Guild.id becomes available. + + If the cache appears to still be empty (no members, no channels, or no roles), the event + will not be set. + """ + if guild.id != constants.Guild.id: + return + + if not guild.roles or not guild.members or not guild.channels: + msg = "Guild available event was dispatched but the cache appears to still be empty!" + log.warning(msg) + + try: + webhook = await self.fetch_webhook(constants.Webhooks.dev_log) + except discord.HTTPException as e: + log.error(f"Failed to fetch webhook to send empty cache warning: status {e.status}") + else: + await webhook.send(f"<@&{constants.Roles.admin}> {msg}") + + return + + self._guild_available.set() + + async def on_guild_unavailable(self, guild: discord.Guild) -> None: + """Clear the internal guild available event when constants.Guild.id becomes unavailable.""" + if guild.id != constants.Guild.id: + return + + self._guild_available.clear() + + async def wait_until_guild_available(self) -> None: + """ + Wait until the constants.Guild.id guild is available (and the cache is ready). + + The on_ready event is inadequate because it only waits 2 seconds for a GUILD_CREATE + gateway event before giving up and thus not populating the cache for unavailable guilds. 
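on_guild_available, on_guild_unavailable and wait_until_guild_available above gate start-up work on a single asyncio.Event so cogs never touch an unpopulated guild cache. Stripped of the Discord specifics, the pattern is:

import asyncio

guild_available = asyncio.Event()


async def worker() -> str:
    # Cogs await the equivalent of wait_until_guild_available() before doing guild work.
    await guild_available.wait()
    return "guild cache is ready"


async def main() -> None:
    # Simulate the GUILD_AVAILABLE gateway event arriving shortly after start-up.
    asyncio.get_running_loop().call_later(0.05, guild_available.set)
    print(await worker())


asyncio.run(main())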
+ """ + await self._guild_available.wait() + + async def on_error(self, event: str, *args, **kwargs) -> None: + """Log errors raised in event listeners rather than printing them to stderr.""" + self.stats.incr(f"errors.event.{event}") + + with push_scope() as scope: + scope.set_tag("event", event) + scope.set_extra("args", args) + scope.set_extra("kwargs", kwargs) + + log.exception(f"Unhandled exception in {event}.") + + def _add_root_aliases(self, command: commands.Command) -> None: + """Recursively add root aliases for `command` and any of its subcommands.""" + if isinstance(command, commands.Group): + for subcommand in command.commands: + self._add_root_aliases(subcommand) + + for alias in getattr(command, "root_aliases", ()): + if alias in self.all_commands: + raise commands.CommandRegistrationError(alias, alias_conflict=True) + + self.all_commands[alias] = command + + def _remove_root_aliases(self, command: commands.Command) -> None: + """Recursively remove root aliases for `command` and any of its subcommands.""" + if isinstance(command, commands.Group): + for subcommand in command.commands: + self._remove_root_aliases(subcommand) + + for alias in getattr(command, "root_aliases", ()): + self.all_commands.pop(alias, None) + - async def start(self, *args, **kwargs) -> None: - """Open an aiohttp session before logging in and connecting to Discord.""" - self.http_session = aiohttp.ClientSession(connector=self.connector) +def _create_redis_session(loop: asyncio.AbstractEventLoop) -> RedisSession: + """ + Create and connect to a redis session. - await super().start(*args, **kwargs) + Ensure the connection is established before returning to prevent race conditions. + `loop` is the event loop on which to connect. The Bot should use this same event loop. 
+ """ + redis_session = RedisSession( + address=(constants.Redis.host, constants.Redis.port), + password=constants.Redis.password, + minsize=1, + maxsize=20, + use_fakeredis=constants.Redis.use_fakeredis, + global_namespace="bot", + ) + loop.run_until_complete(redis_session.connect()) + return redis_session diff --git a/bot/cogs/alias.py b/bot/cogs/alias.py deleted file mode 100644 index 0b800575f..000000000 --- a/bot/cogs/alias.py +++ /dev/null @@ -1,153 +0,0 @@ -import inspect -import logging - -from discord import Colour, Embed -from discord.ext.commands import ( - Cog, Command, Context, Greedy, - clean_content, command, group, -) - -from bot.bot import Bot -from bot.cogs.extensions import Extension -from bot.converters import FetchedMember, TagNameConverter -from bot.pagination import LinePaginator - -log = logging.getLogger(__name__) - - -class Alias (Cog): - """Aliases for commonly used commands.""" - - def __init__(self, bot: Bot): - self.bot = bot - - async def invoke(self, ctx: Context, cmd_name: str, *args, **kwargs) -> None: - """Invokes a command with args and kwargs.""" - log.debug(f"{cmd_name} was invoked through an alias") - cmd = self.bot.get_command(cmd_name) - if not cmd: - return log.warning(f'Did not find command "{cmd_name}" to invoke.') - elif not await cmd.can_run(ctx): - return log.warning( - f'{str(ctx.author)} tried to run the command "{cmd_name}"' - ) - - await ctx.invoke(cmd, *args, **kwargs) - - @command(name='aliases') - async def aliases_command(self, ctx: Context) -> None: - """Show configured aliases on the bot.""" - embed = Embed( - title='Configured aliases', - colour=Colour.blue() - ) - await LinePaginator.paginate( - ( - f"• `{ctx.prefix}{value.name}` " - f"=> `{ctx.prefix}{name[:-len('_alias')].replace('_', ' ')}`" - for name, value in inspect.getmembers(self) - if isinstance(value, Command) and name.endswith('_alias') - ), - ctx, embed, empty=False, max_lines=20 - ) - - @command(name="resources", aliases=("resource",), hidden=True) - async def site_resources_alias(self, ctx: Context) -> None: - """Alias for invoking <prefix>site resources.""" - await self.invoke(ctx, "site resources") - - @command(name="tools", hidden=True) - async def site_tools_alias(self, ctx: Context) -> None: - """Alias for invoking <prefix>site tools.""" - await self.invoke(ctx, "site tools") - - @command(name="watch", hidden=True) - async def bigbrother_watch_alias(self, ctx: Context, user: FetchedMember, *, reason: str) -> None: - """Alias for invoking <prefix>bigbrother watch [user] [reason].""" - await self.invoke(ctx, "bigbrother watch", user, reason=reason) - - @command(name="unwatch", hidden=True) - async def bigbrother_unwatch_alias(self, ctx: Context, user: FetchedMember, *, reason: str) -> None: - """Alias for invoking <prefix>bigbrother unwatch [user] [reason].""" - await self.invoke(ctx, "bigbrother unwatch", user, reason=reason) - - @command(name="home", hidden=True) - async def site_home_alias(self, ctx: Context) -> None: - """Alias for invoking <prefix>site home.""" - await self.invoke(ctx, "site home") - - @command(name="faq", hidden=True) - async def site_faq_alias(self, ctx: Context) -> None: - """Alias for invoking <prefix>site faq.""" - await self.invoke(ctx, "site faq") - - @command(name="rules", aliases=("rule",), hidden=True) - async def site_rules_alias(self, ctx: Context, rules: Greedy[int], *_: str) -> None: - """Alias for invoking <prefix>site rules.""" - await self.invoke(ctx, "site rules", *rules) - - @command(name="reload", hidden=True) - async def 
extensions_reload_alias(self, ctx: Context, *extensions: Extension) -> None: - """Alias for invoking <prefix>extensions reload [extensions...].""" - await self.invoke(ctx, "extensions reload", *extensions) - - @command(name="defon", hidden=True) - async def defcon_enable_alias(self, ctx: Context) -> None: - """Alias for invoking <prefix>defcon enable.""" - await self.invoke(ctx, "defcon enable") - - @command(name="defoff", hidden=True) - async def defcon_disable_alias(self, ctx: Context) -> None: - """Alias for invoking <prefix>defcon disable.""" - await self.invoke(ctx, "defcon disable") - - @command(name="exception", hidden=True) - async def tags_get_traceback_alias(self, ctx: Context) -> None: - """Alias for invoking <prefix>tags get traceback.""" - await self.invoke(ctx, "tags get", tag_name="traceback") - - @group(name="get", - aliases=("show", "g"), - hidden=True, - invoke_without_command=True) - async def get_group_alias(self, ctx: Context) -> None: - """Group for reverse aliases for commands like `tags get`, allowing for `get tags` or `get docs`.""" - pass - - @get_group_alias.command(name="tags", aliases=("tag", "t"), hidden=True) - async def tags_get_alias( - self, ctx: Context, *, tag_name: TagNameConverter = None - ) -> None: - """ - Alias for invoking <prefix>tags get [tag_name]. - - tag_name: str - tag to be viewed. - """ - await self.invoke(ctx, "tags get", tag_name=tag_name) - - @get_group_alias.command(name="docs", aliases=("doc", "d"), hidden=True) - async def docs_get_alias( - self, ctx: Context, symbol: clean_content = None - ) -> None: - """Alias for invoking <prefix>docs get [symbol].""" - await self.invoke(ctx, "docs get", symbol) - - @command(name="nominate", hidden=True) - async def nomination_add_alias(self, ctx: Context, user: FetchedMember, *, reason: str) -> None: - """Alias for invoking <prefix>talentpool add [user] [reason].""" - await self.invoke(ctx, "talentpool add", user, reason=reason) - - @command(name="unnominate", hidden=True) - async def nomination_end_alias(self, ctx: Context, user: FetchedMember, *, reason: str) -> None: - """Alias for invoking <prefix>nomination end [user] [reason].""" - await self.invoke(ctx, "nomination end", user, reason=reason) - - @command(name="nominees", hidden=True) - async def nominees_alias(self, ctx: Context) -> None: - """Alias for invoking <prefix>tp watched.""" - await self.invoke(ctx, "talentpool watched") - - -def setup(bot: Bot) -> None: - """Load the Alias cog.""" - bot.add_cog(Alias(bot)) diff --git a/bot/cogs/antimalware.py b/bot/cogs/antimalware.py deleted file mode 100644 index 28e3e5d96..000000000 --- a/bot/cogs/antimalware.py +++ /dev/null @@ -1,54 +0,0 @@ -import logging - -from discord import Embed, Message, NotFound -from discord.ext.commands import Cog - -from bot.bot import Bot -from bot.constants import AntiMalware as AntiMalwareConfig, Channels, URLs - -log = logging.getLogger(__name__) - - -class AntiMalware(Cog): - """Delete messages which contain attachments with non-whitelisted file extensions.""" - - def __init__(self, bot: Bot): - self.bot = bot - - @Cog.listener() - async def on_message(self, message: Message) -> None: - """Identify messages with prohibited attachments.""" - if not message.attachments: - return - - embed = Embed() - for attachment in message.attachments: - filename = attachment.filename.lower() - if filename.endswith('.py'): - embed.description = ( - f"It looks like you tried to attach a Python file - please " - f"use a code-pasting service such as 
{URLs.site_schema}{URLs.site_paste}" - ) - break # Other detections irrelevant because we prioritize the .py message. - if not filename.endswith(tuple(AntiMalwareConfig.whitelist)): - whitelisted_types = ', '.join(AntiMalwareConfig.whitelist) - meta_channel = self.bot.get_channel(Channels.meta) - embed.description = ( - f"It looks like you tried to attach a file type that we " - f"do not allow. We currently allow the following file " - f"types: **{whitelisted_types}**. \n\n Feel free to ask " - f"in {meta_channel.mention} if you think this is a mistake." - ) - if embed.description: - await message.channel.send(f"Hey {message.author.mention}!", embed=embed) - - # Delete the offending message: - try: - await message.delete() - except NotFound: - log.info(f"Tried to delete message `{message.id}`, but message could not be found.") - - -def setup(bot: Bot) -> None: - """Load the AntiMalware cog.""" - bot.add_cog(AntiMalware(bot)) diff --git a/bot/cogs/bot.py b/bot/cogs/bot.py deleted file mode 100644 index 73b1e8f41..000000000 --- a/bot/cogs/bot.py +++ /dev/null @@ -1,384 +0,0 @@ -import ast -import logging -import re -import time -from typing import Optional, Tuple - -from discord import Embed, Message, RawMessageUpdateEvent, TextChannel -from discord.ext.commands import Cog, Context, command, group - -from bot.bot import Bot -from bot.cogs.token_remover import TokenRemover -from bot.constants import Channels, DEBUG_MODE, Guild, MODERATION_ROLES, Roles, URLs -from bot.decorators import with_role -from bot.utils.messages import wait_for_deletion - -log = logging.getLogger(__name__) - -RE_MARKDOWN = re.compile(r'([*_~`|>])') - - -class BotCog(Cog, name="Bot"): - """Bot information commands.""" - - def __init__(self, bot: Bot): - self.bot = bot - - # Stores allowed channels plus epoch time since last call. - self.channel_cooldowns = { - Channels.help_0: 0, - Channels.help_1: 0, - Channels.help_2: 0, - Channels.help_3: 0, - Channels.help_4: 0, - Channels.help_5: 0, - Channels.help_6: 0, - Channels.help_7: 0, - Channels.python: 0, - } - - # These channels will also work, but will not be subject to cooldown - self.channel_whitelist = ( - Channels.bot, - Channels.devtest, - ) - - # Stores improperly formatted Python codeblock message ids and the corresponding bot message - self.codeblock_message_ids = {} - - @group(invoke_without_command=True, name="bot", hidden=True) - @with_role(Roles.verified) - async def botinfo_group(self, ctx: Context) -> None: - """Bot informational commands.""" - await ctx.invoke(self.bot.get_command("help"), "bot") - - @botinfo_group.command(name='about', aliases=('info',), hidden=True) - @with_role(Roles.verified) - async def about_command(self, ctx: Context) -> None: - """Get information about the bot.""" - embed = Embed( - description="A utility bot designed just for the Python server! Try `!help` for more info.", - url="https://github.com/python-discord/bot" - ) - - embed.add_field(name="Total Users", value=str(len(self.bot.get_guild(Guild.id).members))) - embed.set_author( - name="Python Bot", - url="https://github.com/python-discord/bot", - icon_url=URLs.bot_avatar - ) - - log.info(f"{ctx.author} called !about. 
Returning information about the bot.") - await ctx.send(embed=embed) - - @command(name='echo', aliases=('print',)) - @with_role(*MODERATION_ROLES) - async def echo_command(self, ctx: Context, channel: Optional[TextChannel], *, text: str) -> None: - """Repeat the given message in either a specified channel or the current channel.""" - if channel is None: - await ctx.send(text) - else: - await channel.send(text) - - @command(name='embed') - @with_role(*MODERATION_ROLES) - async def embed_command(self, ctx: Context, *, text: str) -> None: - """Send the input within an embed to the current channel.""" - embed = Embed(description=text) - await ctx.send(embed=embed) - - def codeblock_stripping(self, msg: str, bad_ticks: bool) -> Optional[Tuple[Tuple[str, ...], str]]: - """ - Strip msg in order to find Python code. - - Tries to strip out Python code out of msg and returns the stripped block or - None if the block is a valid Python codeblock. - """ - if msg.count("\n") >= 3: - # Filtering valid Python codeblocks and exiting if a valid Python codeblock is found. - if re.search("```(?:py|python)\n(.*?)```", msg, re.IGNORECASE | re.DOTALL) and not bad_ticks: - log.trace( - "Someone wrote a message that was already a " - "valid Python syntax highlighted code block. No action taken." - ) - return None - - else: - # Stripping backticks from every line of the message. - log.trace(f"Stripping backticks from message.\n\n{msg}\n\n") - content = "" - for line in msg.splitlines(keepends=True): - content += line.strip("`") - - content = content.strip() - - # Remove "Python" or "Py" from start of the message if it exists. - log.trace(f"Removing 'py' or 'python' from message.\n\n{content}\n\n") - pycode = False - if content.lower().startswith("python"): - content = content[6:] - pycode = True - elif content.lower().startswith("py"): - content = content[2:] - pycode = True - - if pycode: - content = content.splitlines(keepends=True) - - # Check if there might be code in the first line, and preserve it. - first_line = content[0] - if " " in content[0]: - first_space = first_line.index(" ") - content[0] = first_line[first_space:] - content = "".join(content) - - # If there's no code we can just get rid of the first line. - else: - content = "".join(content[1:]) - - # Strip it again to remove any leading whitespace. This is neccessary - # if the first line of the message looked like ```python <code> - old = content.strip() - - # Strips REPL code out of the message if there is any. - content, repl_code = self.repl_stripping(old) - if old != content: - return (content, old), repl_code - - # Try to apply indentation fixes to the code. - content = self.fix_indentation(content) - - # Check if the code contains backticks, if it does ignore the message. - if "`" in content: - log.trace("Detected ` inside the code, won't reply") - return None - else: - log.trace(f"Returning message.\n\n{content}\n\n") - return (content,), repl_code - - def fix_indentation(self, msg: str) -> str: - """Attempts to fix badly indented code.""" - def unindent(code: str, skip_spaces: int = 0) -> str: - """Unindents all code down to the number of spaces given in skip_spaces.""" - final = "" - current = code[0] - leading_spaces = 0 - - # Get numbers of spaces before code in the first line. - while current == " ": - current = code[leading_spaces + 1] - leading_spaces += 1 - leading_spaces -= skip_spaces - - # If there are any, remove that number of spaces from every line. 
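The hand-rolled unindent helper inside fix_indentation here approximates what textwrap.dedent does for uniformly over-indented code: strip the whitespace common to every line. For comparison (dedent has no skip_spaces knob for the trailing-colon case handled further down):

import textwrap

over_indented = (
    "    for i in range(3):\n"
    "        print(i)\n"
)
print(textwrap.dedent(over_indented))
# for i in range(3):
#     print(i)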
- if leading_spaces > 0: - for line in code.splitlines(keepends=True): - line = line[leading_spaces:] - final += line - return final - else: - return code - - # Apply fix for "all lines are overindented" case. - msg = unindent(msg) - - # If the first line does not end with a colon, we can be - # certain the next line will be on the same indentation level. - # - # If it does end with a colon, we will need to indent all successive - # lines one additional level. - first_line = msg.splitlines()[0] - code = "".join(msg.splitlines(keepends=True)[1:]) - if not first_line.endswith(":"): - msg = f"{first_line}\n{unindent(code)}" - else: - msg = f"{first_line}\n{unindent(code, 4)}" - return msg - - def repl_stripping(self, msg: str) -> Tuple[str, bool]: - """ - Strip msg in order to extract Python code out of REPL output. - - Tries to strip out REPL Python code out of msg and returns the stripped msg. - - Returns True for the boolean if REPL code was found in the input msg. - """ - final = "" - for line in msg.splitlines(keepends=True): - if line.startswith(">>>") or line.startswith("..."): - final += line[4:] - log.trace(f"Formatted: \n\n{msg}\n\n to \n\n{final}\n\n") - if not final: - log.trace(f"Found no REPL code in \n\n{msg}\n\n") - return msg, False - else: - log.trace(f"Found REPL code in \n\n{msg}\n\n") - return final.rstrip(), True - - def has_bad_ticks(self, msg: Message) -> bool: - """Check to see if msg contains ticks that aren't '`'.""" - not_backticks = [ - "'''", '"""', "\u00b4\u00b4\u00b4", "\u2018\u2018\u2018", "\u2019\u2019\u2019", - "\u2032\u2032\u2032", "\u201c\u201c\u201c", "\u201d\u201d\u201d", "\u2033\u2033\u2033", - "\u3003\u3003\u3003" - ] - - return msg.content[:3] in not_backticks - - @Cog.listener() - async def on_message(self, msg: Message) -> None: - """ - Detect poorly formatted Python code in new messages. - - If poorly formatted code is detected, send the user a helpful message explaining how to do - properly formatted Python syntax highlighting codeblocks. - """ - parse_codeblock = ( - ( - msg.channel.id in self.channel_cooldowns - or msg.channel.id in self.channel_whitelist - ) - and not msg.author.bot - and len(msg.content.splitlines()) > 3 - and not TokenRemover.is_token_in_message(msg) - ) - - if parse_codeblock: # no token in the msg - on_cooldown = (time.time() - self.channel_cooldowns.get(msg.channel.id, 0)) < 300 - if not on_cooldown or DEBUG_MODE: - try: - if self.has_bad_ticks(msg): - ticks = msg.content[:3] - content = self.codeblock_stripping(f"```{msg.content[3:-3]}```", True) - if content is None: - return - - content, repl_code = content - - if len(content) == 2: - content = content[1] - else: - content = content[0] - - space_left = 204 - if len(content) >= space_left: - current_length = 0 - lines_walked = 0 - for line in content.splitlines(keepends=True): - if current_length + len(line) > space_left or lines_walked == 10: - break - current_length += len(line) - lines_walked += 1 - content = content[:current_length] + "#..." - content_escaped_markdown = RE_MARKDOWN.sub(r'\\\1', content) - howto = ( - "It looks like you are trying to paste code into this channel.\n\n" - "You seem to be using the wrong symbols to indicate where the codeblock should start. 
" - f"The correct symbols would be \\`\\`\\`, not `{ticks}`.\n\n" - "**Here is an example of how it should look:**\n" - f"\\`\\`\\`python\n{content_escaped_markdown}\n\\`\\`\\`\n\n" - "**This will result in the following:**\n" - f"```python\n{content}\n```" - ) - - else: - howto = "" - content = self.codeblock_stripping(msg.content, False) - if content is None: - return - - content, repl_code = content - # Attempts to parse the message into an AST node. - # Invalid Python code will raise a SyntaxError. - tree = ast.parse(content[0]) - - # Multiple lines of single words could be interpreted as expressions. - # This check is to avoid all nodes being parsed as expressions. - # (e.g. words over multiple lines) - if not all(isinstance(node, ast.Expr) for node in tree.body) or repl_code: - # Shorten the code to 10 lines and/or 204 characters. - space_left = 204 - if content and repl_code: - content = content[1] - else: - content = content[0] - - if len(content) >= space_left: - current_length = 0 - lines_walked = 0 - for line in content.splitlines(keepends=True): - if current_length + len(line) > space_left or lines_walked == 10: - break - current_length += len(line) - lines_walked += 1 - content = content[:current_length] + "#..." - - content_escaped_markdown = RE_MARKDOWN.sub(r'\\\1', content) - howto += ( - "It looks like you're trying to paste code into this channel.\n\n" - "Discord has support for Markdown, which allows you to post code with full " - "syntax highlighting. Please use these whenever you paste code, as this " - "helps improve the legibility and makes it easier for us to help you.\n\n" - f"**To do this, use the following method:**\n" - f"\\`\\`\\`python\n{content_escaped_markdown}\n\\`\\`\\`\n\n" - "**This will result in the following:**\n" - f"```python\n{content}\n```" - ) - - log.debug(f"{msg.author} posted something that needed to be put inside python code " - "blocks. Sending the user some instructions.") - else: - log.trace("The code consists only of expressions, not sending instructions") - - if howto != "": - howto_embed = Embed(description=howto) - bot_message = await msg.channel.send(f"Hey {msg.author.mention}!", embed=howto_embed) - self.codeblock_message_ids[msg.id] = bot_message.id - - self.bot.loop.create_task( - wait_for_deletion(bot_message, user_ids=(msg.author.id,), client=self.bot) - ) - else: - return - - if msg.channel.id not in self.channel_whitelist: - self.channel_cooldowns[msg.channel.id] = time.time() - - except SyntaxError: - log.trace( - f"{msg.author} posted in a help channel, and when we tried to parse it as Python code, " - "ast.parse raised a SyntaxError. This probably just means it wasn't Python code. " - f"The message that was posted was:\n\n{msg.content}\n\n" - ) - - @Cog.listener() - async def on_raw_message_edit(self, payload: RawMessageUpdateEvent) -> None: - """Check to see if an edited message (previously called out) still contains poorly formatted code.""" - if ( - # Checks to see if the message was called out by the bot - payload.message_id not in self.codeblock_message_ids - # Makes sure that there is content in the message - or payload.data.get("content") is None - # Makes sure there's a channel id in the message payload - or payload.data.get("channel_id") is None - ): - return - - # Retrieve channel and message objects for use later - channel = self.bot.get_channel(int(payload.data.get("channel_id"))) - user_message = await channel.fetch_message(payload.message_id) - - # Checks to see if the user has corrected their codeblock. 
If it's fixed, has_fixed_codeblock will be None - has_fixed_codeblock = self.codeblock_stripping(payload.data.get("content"), self.has_bad_ticks(user_message)) - - # If the message is fixed, delete the bot message and the entry from the id dictionary - if has_fixed_codeblock is None: - bot_message = await channel.fetch_message(self.codeblock_message_ids[payload.message_id]) - await bot_message.delete() - del self.codeblock_message_ids[payload.message_id] - log.trace("User's incorrect code block has been fixed. Removing bot formatting message.") - - -def setup(bot: Bot) -> None: - """Load the Bot cog.""" - bot.add_cog(BotCog(bot)) diff --git a/bot/cogs/error_handler.py b/bot/cogs/error_handler.py deleted file mode 100644 index dab3102cf..000000000 --- a/bot/cogs/error_handler.py +++ /dev/null @@ -1,206 +0,0 @@ -import difflib -import logging - -from discord import Embed -from discord.ext.commands import ( - BadArgument, - BotMissingPermissions, - CheckFailure, - CommandError, - CommandInvokeError, - CommandNotFound, - CommandOnCooldown, - DisabledCommand, - MissingPermissions, - NoPrivateMessage, - UserInputError, -) -from discord.ext.commands import Cog, Context -from sentry_sdk import push_scope - -from bot.api import ResponseCodeError -from bot.bot import Bot -from bot.constants import Channels, Icons -from bot.decorators import InChannelCheckFailure - -log = logging.getLogger(__name__) - - -class ErrorHandler(Cog): - """Handles errors emitted from commands.""" - - def __init__(self, bot: Bot): - self.bot = bot - - @Cog.listener() - async def on_command_error(self, ctx: Context, e: CommandError) -> None: - """ - Provide generic command error handling. - - Error handling is deferred to any local error handler, if present. - - Error handling emits a single error response, prioritized as follows: - 1. If the name fails to match a command but matches a tag, the tag is invoked - 2. Send a BadArgument error message to the invoking context & invoke the command's help - 3. Send a UserInputError error message to the invoking context & invoke the command's help - 4. Send a NoPrivateMessage error message to the invoking context - 5. Send a BotMissingPermissions error message to the invoking context - 6. Log a MissingPermissions error, no message is sent - 7. Send a InChannelCheckFailure error message to the invoking context - 8. Log CheckFailure, CommandOnCooldown, and DisabledCommand errors, no message is sent - 9. For CommandInvokeErrors, response is based on the type of error: - * 404: Error message is sent to the invoking context - * 400: Log the resopnse JSON, no message is sent - * 500 <= status <= 600: Error message is sent to the invoking context - 10. Otherwise, handling is deferred to `handle_unexpected_error` - """ - command = ctx.command - parent = None - - if command is not None: - parent = command.parent - - # Retrieve the help command for the invoked command. - if parent and command: - help_command = (self.bot.get_command("help"), parent.name, command.name) - elif command: - help_command = (self.bot.get_command("help"), command.name) - else: - help_command = (self.bot.get_command("help"),) - - if hasattr(e, "handled"): - log.trace(f"Command {command} had its error already handled locally; ignoring.") - return - - # Try to look for a tag with the command's name if the command isn't found. 
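When the tag fallback that follows finds nothing, the handler collects every visible command name and alias and asks difflib.get_close_matches for the single closest hit to build its "Did you mean" embed. The lookup on its own (the command pool is hypothetical):

import difflib

known_commands = ["help", "tags get", "source", "reminders", "defcon", "resources"]

suggestion = difflib.get_close_matches("halp", known_commands, n=1)
print(suggestion)  # ['help']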
- if isinstance(e, CommandNotFound) and not hasattr(ctx, "invoked_from_error_handler"): - if not ctx.channel.id == Channels.verification: - tags_cog = self.bot.get_cog("Tags") - tags_get_command = self.bot.get_command("tags get") - if not tags_cog or not tags_get_command: - return - - ctx.invoked_from_error_handler = True - command_name = ctx.invoked_with - log_msg = "Cancelling attempt to fall back to a tag due to failed checks." - try: - if not await tags_get_command.can_run(ctx): - log.debug(log_msg) - return - except CommandError as tag_error: - log.debug(log_msg) - await self.on_command_error(ctx, tag_error) - return - - sent = await tags_cog.display_tag(ctx, command_name) - if sent: - return - - # No similar tag found, or tag on cooldown - - # searching for a similar command - raw_commands = [] - for cmd in self.bot.walk_commands(): - if not cmd.hidden: - raw_commands += (cmd.name, *cmd.aliases) - similar_command_data = difflib.get_close_matches(command_name, raw_commands, 1) - similar_command_name = similar_command_data[0] - similar_command = self.bot.get_command(similar_command_name) - - log_msg = "Cancelling attempt to suggest a command due to failed checks." - try: - if not await similar_command.can_run(ctx): - log.debug(log_msg) - return - except CommandError as cmd_error: - log.debug(log_msg) - await self.on_command_error(ctx, cmd_error) - return - - misspelled_content = ctx.message.content - e = Embed() - e.set_author(name="Did you mean:", icon_url=Icons.questionmark) - e.description = f"{misspelled_content.replace(command_name, similar_command_name, 1)}" - await ctx.send(embed=e, delete_after=7.0) - - elif isinstance(e, BadArgument): - await ctx.send(f"Bad argument: {e}\n") - await ctx.invoke(*help_command) - elif isinstance(e, UserInputError): - await ctx.send("Something about your input seems off. 
Check the arguments:") - await ctx.invoke(*help_command) - log.debug( - f"Command {command} invoked by {ctx.message.author} with error " - f"{e.__class__.__name__}: {e}" - ) - elif isinstance(e, NoPrivateMessage): - await ctx.send("Sorry, this command can't be used in a private message!") - elif isinstance(e, BotMissingPermissions): - await ctx.send(f"Sorry, it looks like I don't have the permissions I need to do that.") - log.warning( - f"The bot is missing permissions to execute command {command}: {e.missing_perms}" - ) - elif isinstance(e, MissingPermissions): - log.debug( - f"{ctx.message.author} is missing permissions to invoke command {command}: " - f"{e.missing_perms}" - ) - elif isinstance(e, InChannelCheckFailure): - await ctx.send(e) - elif isinstance(e, (CheckFailure, CommandOnCooldown, DisabledCommand)): - log.debug( - f"Command {command} invoked by {ctx.message.author} with error " - f"{e.__class__.__name__}: {e}" - ) - elif isinstance(e, CommandInvokeError): - if isinstance(e.original, ResponseCodeError): - status = e.original.response.status - - if status == 404: - await ctx.send("There does not seem to be anything matching your query.") - elif status == 400: - content = await e.original.response.json() - log.debug(f"API responded with 400 for command {command}: %r.", content) - await ctx.send("According to the API, your request is malformed.") - elif 500 <= status < 600: - await ctx.send("Sorry, there seems to be an internal issue with the API.") - log.warning(f"API responded with {status} for command {command}") - else: - await ctx.send(f"Got an unexpected status code from the API (`{status}`).") - log.warning(f"Unexpected API response for command {command}: {status}") - else: - await self.handle_unexpected_error(ctx, e.original) - else: - await self.handle_unexpected_error(ctx, e) - - @staticmethod - async def handle_unexpected_error(ctx: Context, e: CommandError) -> None: - """Generic handler for errors without an explicit handler.""" - await ctx.send( - f"Sorry, an unexpected error occurred. 
Please let us know!\n\n" - f"```{e.__class__.__name__}: {e}```" - ) - - with push_scope() as scope: - scope.user = { - "id": ctx.author.id, - "username": str(ctx.author) - } - - scope.set_tag("command", ctx.command.qualified_name) - scope.set_tag("message_id", ctx.message.id) - scope.set_tag("channel_id", ctx.channel.id) - - scope.set_extra("full_message", ctx.message.content) - - if ctx.guild is not None: - scope.set_extra( - "jump_to", - f"https://discordapp.com/channels/{ctx.guild.id}/{ctx.channel.id}/{ctx.message.id}" - ) - - log.error(f"Error executing command invoked by {ctx.message.author}: {ctx.message.content}", exc_info=e) - - -def setup(bot: Bot) -> None: - """Load the ErrorHandler cog.""" - bot.add_cog(ErrorHandler(bot)) diff --git a/bot/cogs/filtering.py b/bot/cogs/filtering.py deleted file mode 100644 index 74538542a..000000000 --- a/bot/cogs/filtering.py +++ /dev/null @@ -1,375 +0,0 @@ -import logging -import re -from typing import Optional, Union - -import discord.errors -from dateutil.relativedelta import relativedelta -from discord import Colour, DMChannel, Member, Message, TextChannel -from discord.ext.commands import Cog - -from bot.bot import Bot -from bot.cogs.moderation import ModLog -from bot.constants import ( - Channels, Colours, - Filter, Icons, URLs -) - -log = logging.getLogger(__name__) - -INVITE_RE = re.compile( - r"(?:discord(?:[\.,]|dot)gg|" # Could be discord.gg/ - r"discord(?:[\.,]|dot)com(?:\/|slash)invite|" # or discord.com/invite/ - r"discordapp(?:[\.,]|dot)com(?:\/|slash)invite|" # or discordapp.com/invite/ - r"discord(?:[\.,]|dot)me|" # or discord.me - r"discord(?:[\.,]|dot)io" # or discord.io. - r")(?:[\/]|slash)" # / or 'slash' - r"([a-zA-Z0-9]+)", # the invite code itself - flags=re.IGNORECASE -) - -URL_RE = re.compile(r"(https?://[^\s]+)", flags=re.IGNORECASE) -ZALGO_RE = re.compile(r"[\u0300-\u036F\u0489]") - -WORD_WATCHLIST_PATTERNS = [ - re.compile(fr'\b{expression}\b', flags=re.IGNORECASE) for expression in Filter.word_watchlist -] -TOKEN_WATCHLIST_PATTERNS = [ - re.compile(fr'{expression}', flags=re.IGNORECASE) for expression in Filter.token_watchlist -] - - -class Filtering(Cog): - """Filtering out invites, blacklisting domains, and warning us of certain regular expressions.""" - - def __init__(self, bot: Bot): - self.bot = bot - - staff_mistake_str = "If you believe this was a mistake, please let staff know!" - self.filters = { - "filter_zalgo": { - "enabled": Filter.filter_zalgo, - "function": self._has_zalgo, - "type": "filter", - "content_only": True, - "user_notification": Filter.notify_user_zalgo, - "notification_msg": ( - "Your post has been removed for abusing Unicode character rendering (aka Zalgo text). " - f"{staff_mistake_str}" - ) - }, - "filter_invites": { - "enabled": Filter.filter_invites, - "function": self._has_invites, - "type": "filter", - "content_only": True, - "user_notification": Filter.notify_user_invites, - "notification_msg": ( - f"Per Rule 6, your invite link has been removed. {staff_mistake_str}\n\n" - r"Our server rules can be found here: <https://pythondiscord.com/pages/rules>" - ) - }, - "filter_domains": { - "enabled": Filter.filter_domains, - "function": self._has_urls, - "type": "filter", - "content_only": True, - "user_notification": Filter.notify_user_domains, - "notification_msg": ( - f"Your URL has been removed because it matched a blacklisted domain. 
{staff_mistake_str}" - ) - }, - "watch_rich_embeds": { - "enabled": Filter.watch_rich_embeds, - "function": self._has_rich_embed, - "type": "watchlist", - "content_only": False, - }, - "watch_words": { - "enabled": Filter.watch_words, - "function": self._has_watchlist_words, - "type": "watchlist", - "content_only": True, - }, - "watch_tokens": { - "enabled": Filter.watch_tokens, - "function": self._has_watchlist_tokens, - "type": "watchlist", - "content_only": True, - }, - } - - @property - def mod_log(self) -> ModLog: - """Get currently loaded ModLog cog instance.""" - return self.bot.get_cog("ModLog") - - @Cog.listener() - async def on_message(self, msg: Message) -> None: - """Invoke message filter for new messages.""" - await self._filter_message(msg) - - @Cog.listener() - async def on_message_edit(self, before: Message, after: Message) -> None: - """ - Invoke message filter for message edits. - - If there have been multiple edits, calculate the time delta from the previous edit. - """ - if not before.edited_at: - delta = relativedelta(after.edited_at, before.created_at).microseconds - else: - delta = relativedelta(after.edited_at, before.edited_at).microseconds - await self._filter_message(after, delta) - - async def _filter_message(self, msg: Message, delta: Optional[int] = None) -> None: - """Filter the input message to see if it violates any of our rules, and then respond accordingly.""" - # Should we filter this message? - role_whitelisted = False - - if type(msg.author) is Member: # Only Member has roles, not User. - for role in msg.author.roles: - if role.id in Filter.role_whitelist: - role_whitelisted = True - - filter_message = ( - msg.channel.id not in Filter.channel_whitelist # Channel not in whitelist - and not role_whitelisted # Role not in whitelist - and not msg.author.bot # Author not a bot - ) - - # If none of the above, we can start filtering. - if filter_message: - for filter_name, _filter in self.filters.items(): - # Is this specific filter enabled in the config? - if _filter["enabled"]: - # Double trigger check for the embeds filter - if filter_name == "watch_rich_embeds": - # If the edit delta is less than 0.001 seconds, then we're probably dealing - # with a double filter trigger. - if delta is not None and delta < 100: - continue - - # Does the filter only need the message content or the full message? - if _filter["content_only"]: - match = await _filter["function"](msg.content) - else: - match = await _filter["function"](msg) - - if match: - # If this is a filter (not a watchlist), we should delete the message. - if _filter["type"] == "filter": - try: - # Embeds (can?) trigger both the `on_message` and `on_message_edit` - # event handlers, triggering filtering twice for the same message. - # - # If `on_message`-triggered filtering already deleted the message - # then `on_message_edit`-triggered filtering will raise exception - # since the message no longer exists. - # - # In addition, to avoid sending two notifications to the user, the - # logs, and mod_alert, we return if the message no longer exists. 
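The self.filters table above drives everything in _filter_message: each entry supplies an enabled flag, a predicate, whether that predicate wants the raw content or the full message, and whether a hit is a deleting "filter" or an alert-only "watchlist". A toy version of that dispatch (the watch word is a placeholder; the real predicates are async and config-driven):

import re

ZALGO_RE = re.compile(r"[\u0300-\u036F\u0489]")

FILTERS = {
    "filter_zalgo": {"enabled": True, "function": lambda text: bool(ZALGO_RE.search(text)), "type": "filter"},
    "watch_words": {"enabled": True, "function": lambda text: "example-bad-word" in text.lower(), "type": "watchlist"},
}


def run_filters(text: str):
    """Return (name, type) of the first enabled filter that matches, else None."""
    for name, config in FILTERS.items():
        if config["enabled"] and config["function"](text):
            return name, config["type"]  # "filter" => delete; "watchlist" => alert staff only
    return None


print(run_filters("this mentions an example-bad-word somewhere"))  # ('watch_words', 'watchlist')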
- await msg.delete() - except discord.errors.NotFound: - return - - # Notify the user if the filter specifies - if _filter["user_notification"]: - await self.notify_member(msg.author, _filter["notification_msg"], msg.channel) - - if isinstance(msg.channel, DMChannel): - channel_str = "via DM" - else: - channel_str = f"in {msg.channel.mention}" - - # Word and match stats for watch_words and watch_tokens - if filter_name in ("watch_words", "watch_tokens"): - surroundings = match.string[max(match.start() - 10, 0): match.end() + 10] - message_content = ( - f"**Match:** '{match[0]}'\n" - f"**Location:** '...{surroundings}...'\n" - f"\n**Original Message:**\n{msg.content}" - ) - else: # Use content of discord Message - message_content = msg.content - - message = ( - f"The {filter_name} {_filter['type']} was triggered " - f"by **{msg.author}** " - f"(`{msg.author.id}`) {channel_str} with [the " - f"following message]({msg.jump_url}):\n\n" - f"{message_content}" - ) - - log.debug(message) - - additional_embeds = None - additional_embeds_msg = None - - if filter_name == "filter_invites": - additional_embeds = [] - for invite, data in match.items(): - embed = discord.Embed(description=( - f"**Members:**\n{data['members']}\n" - f"**Active:**\n{data['active']}" - )) - embed.set_author(name=data["name"]) - embed.set_thumbnail(url=data["icon"]) - embed.set_footer(text=f"Guild Invite Code: {invite}") - additional_embeds.append(embed) - additional_embeds_msg = "For the following guild(s):" - - elif filter_name == "watch_rich_embeds": - additional_embeds = msg.embeds - additional_embeds_msg = "With the following embed(s):" - - # Send pretty mod log embed to mod-alerts - await self.mod_log.send_log_message( - icon_url=Icons.filtering, - colour=Colour(Colours.soft_red), - title=f"{_filter['type'].title()} triggered!", - text=message, - thumbnail=msg.author.avatar_url_as(static_format="png"), - channel_id=Channels.mod_alerts, - ping_everyone=Filter.ping_everyone, - additional_embeds=additional_embeds, - additional_embeds_msg=additional_embeds_msg - ) - - break # We don't want multiple filters to trigger - - @staticmethod - async def _has_watchlist_words(text: str) -> Union[bool, re.Match]: - """ - Returns True if the text contains one of the regular expressions from the word_watchlist in our filter config. - - Only matches words with boundaries before and after the expression. - """ - for regex_pattern in WORD_WATCHLIST_PATTERNS: - match = regex_pattern.search(text) - if match: - return match # match objects always have a boolean value of True - - return False - - @staticmethod - async def _has_watchlist_tokens(text: str) -> Union[bool, re.Match]: - """ - Returns True if the text contains one of the regular expressions from the token_watchlist in our filter config. - - This will match the expression even if it does not have boundaries before and after. 
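The word and token watchlist helpers here differ only in the \b word boundaries baked into the compiled patterns: word patterns must match a standalone word, while token patterns match anywhere in the text. In isolation, with a placeholder watch entry:

import re

watch = "spam"  # placeholder; real entries come from the Filter config
word_pattern = re.compile(rf"\b{re.escape(watch)}\b", re.IGNORECASE)
token_pattern = re.compile(re.escape(watch), re.IGNORECASE)

assert word_pattern.search("buy spam today")   # standalone word: both match
assert token_pattern.search("buy spam today")
assert not word_pattern.search("spambot")      # embedded: only the token pattern hits
assert token_pattern.search("spambot")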
- """ - for regex_pattern in TOKEN_WATCHLIST_PATTERNS: - match = regex_pattern.search(text) - if match: - - # Make sure it's not a URL - if not URL_RE.search(text): - return match # match objects always have a boolean value of True - - return False - - @staticmethod - async def _has_urls(text: str) -> bool: - """Returns True if the text contains one of the blacklisted URLs from the config file.""" - if not URL_RE.search(text): - return False - - text = text.lower() - - for url in Filter.domain_blacklist: - if url.lower() in text: - return True - - return False - - @staticmethod - async def _has_zalgo(text: str) -> bool: - """ - Returns True if the text contains zalgo characters. - - Zalgo range is \u0300 – \u036F and \u0489. - """ - return bool(ZALGO_RE.search(text)) - - async def _has_invites(self, text: str) -> Union[dict, bool]: - """ - Checks if there's any invites in the text content that aren't in the guild whitelist. - - If any are detected, a dictionary of invite data is returned, with a key per invite. - If none are detected, False is returned. - - Attempts to catch some of common ways to try to cheat the system. - """ - # Remove backslashes to prevent escape character aroundfuckery like - # discord\.gg/gdudes-pony-farm - text = text.replace("\\", "") - - invites = INVITE_RE.findall(text) - invite_data = dict() - for invite in invites: - if invite in invite_data: - continue - - response = await self.bot.http_session.get( - f"{URLs.discord_invite_api}/{invite}", params={"with_counts": "true"} - ) - response = await response.json() - guild = response.get("guild") - if guild is None: - # Lack of a "guild" key in the JSON response indicates either an group DM invite, an - # expired invite, or an invalid invite. The API does not currently differentiate - # between invalid and expired invites - return True - - guild_id = int(guild.get("id")) - - if guild_id not in Filter.guild_invite_whitelist: - guild_icon_hash = guild["icon"] - guild_icon = ( - "https://cdn.discordapp.com/icons/" - f"{guild_id}/{guild_icon_hash}.png?size=512" - ) - - invite_data[invite] = { - "name": guild["name"], - "icon": guild_icon, - "members": response["approximate_member_count"], - "active": response["approximate_presence_count"] - } - - return invite_data if invite_data else False - - @staticmethod - async def _has_rich_embed(msg: Message) -> bool: - """Determines if `msg` contains any rich embeds not auto-generated from a URL.""" - if msg.embeds: - for embed in msg.embeds: - if embed.type == "rich": - urls = URL_RE.findall(msg.content) - if not embed.url or embed.url not in urls: - # If `embed.url` does not exist or if `embed.url` is not part of the content - # of the message, it's unlikely to be an auto-generated embed by Discord. - return True - else: - log.trace( - "Found a rich embed sent by a regular user account, " - "but it was likely just an automatic URL embed." - ) - return False - return False - - async def notify_member(self, filtered_member: Member, reason: str, channel: TextChannel) -> None: - """ - Notify filtered_member about a moderation action with the reason str. 
- - First attempts to DM the user, fall back to in-channel notification if user has DMs disabled - """ - try: - await filtered_member.send(reason) - except discord.errors.Forbidden: - await channel.send(f"{filtered_member.mention} {reason}") - - -def setup(bot: Bot) -> None: - """Load the Filtering cog.""" - bot.add_cog(Filtering(bot)) diff --git a/bot/cogs/free.py b/bot/cogs/free.py deleted file mode 100644 index 49cab6172..000000000 --- a/bot/cogs/free.py +++ /dev/null @@ -1,103 +0,0 @@ -import logging -from datetime import datetime -from operator import itemgetter - -from discord import Colour, Embed, Member, utils -from discord.ext.commands import Cog, Context, command - -from bot.bot import Bot -from bot.constants import Categories, Channels, Free, STAFF_ROLES -from bot.decorators import redirect_output - -log = logging.getLogger(__name__) - -TIMEOUT = Free.activity_timeout -RATE = Free.cooldown_rate -PER = Free.cooldown_per - - -class Free(Cog): - """Tries to figure out which help channels are free.""" - - PYTHON_HELP_ID = Categories.python_help - - @command(name="free", aliases=('f',)) - @redirect_output(destination_channel=Channels.bot, bypass_roles=STAFF_ROLES) - async def free(self, ctx: Context, user: Member = None, seek: int = 2) -> None: - """ - Lists free help channels by likeliness of availability. - - seek is used only when this command is invoked in a help channel. - You cannot override seek without mentioning a user first. - - When seek is 2, we are avoiding considering the last active message - in a channel to be the one that invoked this command. - - When seek is 3 or more, a user has been mentioned on the assumption - that they asked if the channel is free or they asked their question - in an active channel, and we want the message before that happened. 
- """ - free_channels = [] - python_help = utils.get(ctx.guild.categories, id=self.PYTHON_HELP_ID) - - if user is not None and seek == 2: - seek = 3 - elif not 0 < seek < 10: - seek = 3 - - # Iterate through all the help channels - # to check latest activity - for channel in python_help.channels: - # Seek further back in the help channel - # the command was invoked in - if channel.id == ctx.channel.id: - messages = await channel.history(limit=seek).flatten() - msg = messages[seek - 1] - # Otherwise get last message - else: - msg = await channel.history(limit=1).next() # noqa (False positive) - - inactive = (datetime.utcnow() - msg.created_at).seconds - if inactive > TIMEOUT: - free_channels.append((inactive, channel)) - - embed = Embed() - embed.colour = Colour.blurple() - embed.title = "**Looking for a free help channel?**" - - if user is not None: - embed.description = f"**Hey {user.mention}!**\n\n" - else: - embed.description = "" - - # Display all potentially inactive channels - # in descending order of inactivity - if free_channels: - # Sort channels in descending order by seconds - # Get position in list, inactivity, and channel object - # For each channel, add to embed.description - sorted_channels = sorted(free_channels, key=itemgetter(0), reverse=True) - - for (inactive, channel) in sorted_channels[:3]: - minutes, seconds = divmod(inactive, 60) - if minutes > 59: - hours, minutes = divmod(minutes, 60) - embed.description += f"{channel.mention} **{hours}h {minutes}m {seconds}s** inactive\n" - else: - embed.description += f"{channel.mention} **{minutes}m {seconds}s** inactive\n" - - embed.set_footer(text="Please confirm these channels are free before posting") - else: - embed.description = ( - "Doesn't look like any channels are available right now. " - "You're welcome to check for yourself to be sure. " - "If all channels are truly busy, please be patient " - "as one will likely be available soon." - ) - - await ctx.send(embed=embed) - - -def setup(bot: Bot) -> None: - """Load the Free cog.""" - bot.add_cog(Free()) diff --git a/bot/cogs/help.py b/bot/cogs/help.py deleted file mode 100644 index fd5bbc3ca..000000000 --- a/bot/cogs/help.py +++ /dev/null @@ -1,568 +0,0 @@ -import asyncio -import itertools -from collections import namedtuple -from contextlib import suppress -from typing import Union - -from discord import Colour, Embed, HTTPException, Message, Reaction, User -from discord.ext import commands -from discord.ext.commands import CheckFailure, Cog as DiscordCog, Command, Context -from fuzzywuzzy import fuzz, process - -from bot import constants -from bot.bot import Bot -from bot.constants import Channels, Emojis, STAFF_ROLES -from bot.decorators import redirect_output -from bot.pagination import ( - FIRST_EMOJI, LAST_EMOJI, - LEFT_EMOJI, LinePaginator, RIGHT_EMOJI, -) - -DELETE_EMOJI = Emojis.trashcan - -REACTIONS = { - FIRST_EMOJI: 'first', - LEFT_EMOJI: 'back', - RIGHT_EMOJI: 'next', - LAST_EMOJI: 'end', - DELETE_EMOJI: 'stop', -} - -Cog = namedtuple('Cog', ['name', 'description', 'commands']) - - -class HelpQueryNotFound(ValueError): - """ - Raised when a HelpSession Query doesn't match a command or cog. - - Contains the custom attribute of ``possible_matches``. - - Instances of this object contain a dictionary of any command(s) that were close to matching the - query, where keys are the possible matched command names and values are the likeness match scores. 
- """ - - def __init__(self, arg: str, possible_matches: dict = None): - super().__init__(arg) - self.possible_matches = possible_matches - - -class HelpSession: - """ - An interactive session for bot and command help output. - - Expected attributes include: - * title: str - The title of the help message. - * query: Union[discord.ext.commands.Bot, discord.ext.commands.Command] - * description: str - The description of the query. - * pages: list[str] - A list of the help content split into manageable pages. - * message: `discord.Message` - The message object that's showing the help contents. - * destination: `discord.abc.Messageable` - Where the help message is to be sent to. - - Cogs can be grouped into custom categories. All cogs with the same category will be displayed - under a single category name in the help output. Custom categories are defined inside the cogs - as a class attribute named `category`. A description can also be specified with the attribute - `category_description`. If a description is not found in at least one cog, the default will be - the regular description (class docstring) of the first cog found in the category. - """ - - def __init__( - self, - ctx: Context, - *command, - cleanup: bool = False, - only_can_run: bool = True, - show_hidden: bool = False, - max_lines: int = 15 - ): - """Creates an instance of the HelpSession class.""" - self._ctx = ctx - self._bot = ctx.bot - self.title = "Command Help" - - # set the query details for the session - if command: - query_str = ' '.join(command) - self.query = self._get_query(query_str) - self.description = self.query.description or self.query.help - else: - self.query = ctx.bot - self.description = self.query.description - self.author = ctx.author - self.destination = ctx.channel - - # set the config for the session - self._cleanup = cleanup - self._only_can_run = only_can_run - self._show_hidden = show_hidden - self._max_lines = max_lines - - # init session states - self._pages = None - self._current_page = 0 - self.message = None - self._timeout_task = None - self.reset_timeout() - - def _get_query(self, query: str) -> Union[Command, Cog]: - """Attempts to match the provided query with a valid command or cog.""" - command = self._bot.get_command(query) - if command: - return command - - # Find all cog categories that match. - cog_matches = [] - description = None - for cog in self._bot.cogs.values(): - if hasattr(cog, "category") and cog.category == query: - cog_matches.append(cog) - if hasattr(cog, "category_description"): - description = cog.category_description - - # Try to search by cog name if no categories match. - if not cog_matches: - cog = self._bot.cogs.get(query) - - # Don't consider it a match if the cog has a category. - if cog and not hasattr(cog, "category"): - cog_matches = [cog] - - if cog_matches: - cog = cog_matches[0] - cmds = (cog.get_commands() for cog in cog_matches) # Commands of all cogs - - return Cog( - name=cog.category if hasattr(cog, "category") else cog.qualified_name, - description=description or cog.description, - commands=tuple(itertools.chain.from_iterable(cmds)) # Flatten the list - ) - - self._handle_not_found(query) - - def _handle_not_found(self, query: str) -> None: - """ - Handles when a query does not match a valid command or cog. - - Will pass on possible close matches along with the `HelpQueryNotFound` exception. 
- """ - # Combine command and cog names - choices = list(self._bot.all_commands) + list(self._bot.cogs) - - result = process.extractBests(query, choices, scorer=fuzz.ratio, score_cutoff=90) - - raise HelpQueryNotFound(f'Query "{query}" not found.', dict(result)) - - async def timeout(self, seconds: int = 30) -> None: - """Waits for a set number of seconds, then stops the help session.""" - await asyncio.sleep(seconds) - await self.stop() - - def reset_timeout(self) -> None: - """Cancels the original timeout task and sets it again from the start.""" - # cancel original if it exists - if self._timeout_task: - if not self._timeout_task.cancelled(): - self._timeout_task.cancel() - - # recreate the timeout task - self._timeout_task = self._bot.loop.create_task(self.timeout()) - - async def on_reaction_add(self, reaction: Reaction, user: User) -> None: - """Event handler for when reactions are added on the help message.""" - # ensure it was the relevant session message - if reaction.message.id != self.message.id: - return - - # ensure it was the session author who reacted - if user.id != self.author.id: - return - - emoji = str(reaction.emoji) - - # check if valid action - if emoji not in REACTIONS: - return - - self.reset_timeout() - - # Run relevant action method - action = getattr(self, f'do_{REACTIONS[emoji]}', None) - if action: - await action() - - # remove the added reaction to prep for re-use - with suppress(HTTPException): - await self.message.remove_reaction(reaction, user) - - async def on_message_delete(self, message: Message) -> None: - """Closes the help session when the help message is deleted.""" - if message.id == self.message.id: - await self.stop() - - async def prepare(self) -> None: - """Sets up the help session pages, events, message and reactions.""" - # create paginated content - await self.build_pages() - - # setup listeners - self._bot.add_listener(self.on_reaction_add) - self._bot.add_listener(self.on_message_delete) - - # Send the help message - await self.update_page() - self.add_reactions() - - def add_reactions(self) -> None: - """Adds the relevant reactions to the help message based on if pagination is required.""" - # if paginating - if len(self._pages) > 1: - for reaction in REACTIONS: - self._bot.loop.create_task(self.message.add_reaction(reaction)) - - # if single-page - else: - self._bot.loop.create_task(self.message.add_reaction(DELETE_EMOJI)) - - def _category_key(self, cmd: Command) -> str: - """ - Returns a cog name of a given command for use as a key for `sorted` and `groupby`. - - A zero width space is used as a prefix for results with no cogs to force them last in ordering. - """ - if cmd.cog: - try: - if cmd.cog.category: - return f'**{cmd.cog.category}**' - except AttributeError: - pass - - return f'**{cmd.cog_name}**' - else: - return "**\u200bNo Category:**" - - def _get_command_params(self, cmd: Command) -> str: - """ - Returns the command usage signature. - - This is a custom implementation of `command.signature` in order to format the command - signature without aliases. 
- """ - results = [] - for name, param in cmd.clean_params.items(): - - # if argument has a default value - if param.default is not param.empty: - - if isinstance(param.default, str): - show_default = param.default - else: - show_default = param.default is not None - - # if default is not an empty string or None - if show_default: - results.append(f'[{name}={param.default}]') - else: - results.append(f'[{name}]') - - # if variable length argument - elif param.kind == param.VAR_POSITIONAL: - results.append(f'[{name}...]') - - # if required - else: - results.append(f'<{name}>') - - return f"{cmd.name} {' '.join(results)}" - - async def build_pages(self) -> None: - """Builds the list of content pages to be paginated through in the help message, as a list of str.""" - # Use LinePaginator to restrict embed line height - paginator = LinePaginator(prefix='', suffix='', max_lines=self._max_lines) - - prefix = constants.Bot.prefix - - # show signature if query is a command - if isinstance(self.query, commands.Command): - signature = self._get_command_params(self.query) - parent = self.query.full_parent_name + ' ' if self.query.parent else '' - paginator.add_line(f'**```{prefix}{parent}{signature}```**') - - # show command aliases - aliases = ', '.join(f'`{a}`' for a in self.query.aliases) - if aliases: - paginator.add_line(f'**Can also use:** {aliases}\n') - - if not await self.query.can_run(self._ctx): - paginator.add_line('***You cannot run this command.***\n') - - # show name if query is a cog - if isinstance(self.query, Cog): - paginator.add_line(f'**{self.query.name}**') - - if self.description: - paginator.add_line(f'*{self.description}*') - - # list all children commands of the queried object - if isinstance(self.query, (commands.GroupMixin, Cog)): - - # remove hidden commands if session is not wanting hiddens - if not self._show_hidden: - filtered = [c for c in self.query.commands if not c.hidden] - else: - filtered = self.query.commands - - # if after filter there are no commands, finish up - if not filtered: - self._pages = paginator.pages - return - - # set category to Commands if cog - if isinstance(self.query, Cog): - grouped = (('**Commands:**', self.query.commands),) - - # set category to Subcommands if command - elif isinstance(self.query, commands.Command): - grouped = (('**Subcommands:**', self.query.commands),) - - # don't show prefix for subcommands - prefix = '' - - # otherwise sort and organise all commands into categories - else: - cat_sort = sorted(filtered, key=self._category_key) - grouped = itertools.groupby(cat_sort, key=self._category_key) - - # process each category - for category, cmds in grouped: - cmds = sorted(cmds, key=lambda c: c.name) - - # if there are no commands, skip category - if len(cmds) == 0: - continue - - cat_cmds = [] - - # format details for each child command - for command in cmds: - - # skip if hidden and hide if session is set to - if command.hidden and not self._show_hidden: - continue - - # see if the user can run the command - strikeout = '' - - # Patch to make the !help command work outside of #bot-commands again - # This probably needs a proper rewrite, but this will make it work in - # the mean time. 
- try: - can_run = await command.can_run(self._ctx) - except CheckFailure: - can_run = False - - if not can_run: - # skip if we don't show commands they can't run - if self._only_can_run: - continue - strikeout = '~~' - - signature = self._get_command_params(command) - info = f"{strikeout}**`{prefix}{signature}`**{strikeout}" - - # handle if the command has no docstring - if command.short_doc: - cat_cmds.append(f'{info}\n*{command.short_doc}*') - else: - cat_cmds.append(f'{info}\n*No details provided.*') - - # state var for if the category should be added next - print_cat = 1 - new_page = True - - for details in cat_cmds: - - # keep details together, paginating early if it won't fit - lines_adding = len(details.split('\n')) + print_cat - if paginator._linecount + lines_adding > self._max_lines: - paginator._linecount = 0 - new_page = True - paginator.close_page() - - # new page so print category title again - print_cat = 1 - - if print_cat: - if new_page: - paginator.add_line('') - paginator.add_line(category) - print_cat = 0 - - paginator.add_line(details) - - # save organised pages to session - self._pages = paginator.pages - - def embed_page(self, page_number: int = 0) -> Embed: - """Returns an Embed with the requested page formatted within.""" - embed = Embed() - - # if command or cog, add query to title for pages other than first - if isinstance(self.query, (commands.Command, Cog)) and page_number > 0: - title = f'Command Help | "{self.query.name}"' - else: - title = self.title - - embed.set_author(name=title, icon_url=constants.Icons.questionmark) - embed.description = self._pages[page_number] - - # add page counter to footer if paginating - page_count = len(self._pages) - if page_count > 1: - embed.set_footer(text=f'Page {self._current_page+1} / {page_count}') - - return embed - - async def update_page(self, page_number: int = 0) -> None: - """Sends the intial message, or changes the existing one to the given page number.""" - self._current_page = page_number - embed_page = self.embed_page(page_number) - - if not self.message: - self.message = await self.destination.send(embed=embed_page) - else: - await self.message.edit(embed=embed_page) - - @classmethod - async def start(cls, ctx: Context, *command, **options) -> "HelpSession": - """ - Create and begin a help session based on the given command context. - - Available options kwargs: - * cleanup: Optional[bool] - Set to `True` to have the message deleted on session end. Defaults to `False`. - * only_can_run: Optional[bool] - Set to `True` to hide commands the user can't run. Defaults to `False`. - * show_hidden: Optional[bool] - Set to `True` to include hidden commands. Defaults to `False`. - * max_lines: Optional[int] - Sets the max number of lines the paginator will add to a single page. Defaults to 20. 
- """ - session = cls(ctx, *command, **options) - await session.prepare() - - return session - - async def stop(self) -> None: - """Stops the help session, removes event listeners and attempts to delete the help message.""" - self._bot.remove_listener(self.on_reaction_add) - self._bot.remove_listener(self.on_message_delete) - - # ignore if permission issue, or the message doesn't exist - with suppress(HTTPException, AttributeError): - if self._cleanup: - await self.message.delete() - else: - await self.message.clear_reactions() - - @property - def is_first_page(self) -> bool: - """Check if session is currently showing the first page.""" - return self._current_page == 0 - - @property - def is_last_page(self) -> bool: - """Check if the session is currently showing the last page.""" - return self._current_page == (len(self._pages)-1) - - async def do_first(self) -> None: - """Event that is called when the user requests the first page.""" - if not self.is_first_page: - await self.update_page(0) - - async def do_back(self) -> None: - """Event that is called when the user requests the previous page.""" - if not self.is_first_page: - await self.update_page(self._current_page-1) - - async def do_next(self) -> None: - """Event that is called when the user requests the next page.""" - if not self.is_last_page: - await self.update_page(self._current_page+1) - - async def do_end(self) -> None: - """Event that is called when the user requests the last page.""" - if not self.is_last_page: - await self.update_page(len(self._pages)-1) - - async def do_stop(self) -> None: - """Event that is called when the user requests to stop the help session.""" - await self.message.delete() - - -class Help(DiscordCog): - """Custom Embed Pagination Help feature.""" - - @commands.command('help') - @redirect_output(destination_channel=Channels.bot, bypass_roles=STAFF_ROLES) - async def new_help(self, ctx: Context, *commands) -> None: - """Shows Command Help.""" - try: - await HelpSession.start(ctx, *commands) - except HelpQueryNotFound as error: - embed = Embed() - embed.colour = Colour.red() - embed.title = str(error) - - if error.possible_matches: - matches = '\n'.join(error.possible_matches.keys()) - embed.description = f'**Did you mean:**\n`{matches}`' - - await ctx.send(embed=embed) - - -def unload(bot: Bot) -> None: - """ - Reinstates the original help command. - - This is run if the cog raises an exception on load, or if the extension is unloaded. - """ - bot.remove_command('help') - bot.add_command(bot._old_help) - - -def setup(bot: Bot) -> None: - """ - The setup for the help extension. - - This is called automatically on `bot.load_extension` being run. - - Stores the original help command instance on the `bot._old_help` attribute for later - reinstatement, before removing it from the command registry so the new help command can be - loaded successfully. - - If an exception is raised during the loading of the cog, `unload` will be called in order to - reinstate the original help command. - """ - bot._old_help = bot.get_command('help') - bot.remove_command('help') - - try: - bot.add_cog(Help()) - except Exception: - unload(bot) - raise - - -def teardown(bot: Bot) -> None: - """ - The teardown for the help extension. - - This is called automatically on `bot.unload_extension` being run. - - Calls `unload` in order to reinstate the original help command. 
- """ - unload(bot) diff --git a/bot/cogs/moderation/__init__.py b/bot/cogs/moderation/__init__.py deleted file mode 100644 index 5243cb92d..000000000 --- a/bot/cogs/moderation/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -from bot.bot import Bot -from .infractions import Infractions -from .management import ModManagement -from .modlog import ModLog -from .superstarify import Superstarify - - -def setup(bot: Bot) -> None: - """Load the Infractions, ModManagement, ModLog, and Superstarify cogs.""" - bot.add_cog(Infractions(bot)) - bot.add_cog(ModLog(bot)) - bot.add_cog(ModManagement(bot)) - bot.add_cog(Superstarify(bot)) diff --git a/bot/cogs/reminders.py b/bot/cogs/reminders.py deleted file mode 100644 index 45bf9a8f4..000000000 --- a/bot/cogs/reminders.py +++ /dev/null @@ -1,295 +0,0 @@ -import asyncio -import logging -import random -import textwrap -from datetime import datetime, timedelta -from operator import itemgetter -from typing import Optional - -from dateutil.relativedelta import relativedelta -from discord import Colour, Embed, Message -from discord.ext.commands import Cog, Context, group - -from bot.bot import Bot -from bot.constants import Channels, Icons, NEGATIVE_REPLIES, POSITIVE_REPLIES, STAFF_ROLES -from bot.converters import Duration -from bot.pagination import LinePaginator -from bot.utils.checks import without_role_check -from bot.utils.scheduling import Scheduler -from bot.utils.time import humanize_delta, wait_until - -log = logging.getLogger(__name__) - -WHITELISTED_CHANNELS = (Channels.bot,) -MAXIMUM_REMINDERS = 5 - - -class Reminders(Scheduler, Cog): - """Provide in-channel reminder functionality.""" - - def __init__(self, bot: Bot): - self.bot = bot - super().__init__() - - self.bot.loop.create_task(self.reschedule_reminders()) - - async def reschedule_reminders(self) -> None: - """Get all current reminders from the API and reschedule them.""" - await self.bot.wait_until_ready() - response = await self.bot.api_client.get( - 'bot/reminders', - params={'active': 'true'} - ) - - now = datetime.utcnow() - loop = asyncio.get_event_loop() - - for reminder in response: - remind_at = datetime.fromisoformat(reminder['expiration'][:-1]) - - # If the reminder is already overdue ... - if remind_at < now: - late = relativedelta(now, remind_at) - await self.send_reminder(reminder, late) - - else: - self.schedule_task(loop, reminder["id"], reminder) - - @staticmethod - async def _send_confirmation(ctx: Context, on_success: str) -> None: - """Send an embed confirming the reminder change was made successfully.""" - embed = Embed() - embed.colour = Colour.green() - embed.title = random.choice(POSITIVE_REPLIES) - embed.description = on_success - await ctx.send(embed=embed) - - async def _scheduled_task(self, reminder: dict) -> None: - """A coroutine which sends the reminder once the time is reached, and cancels the running task.""" - reminder_id = reminder["id"] - reminder_datetime = datetime.fromisoformat(reminder['expiration'][:-1]) - - # Send the reminder message once the desired duration has passed - await wait_until(reminder_datetime) - await self.send_reminder(reminder) - - log.debug(f"Deleting reminder {reminder_id} (the user has been reminded).") - await self._delete_reminder(reminder_id) - - # Now we can begone with it from our schedule list. 
- self.cancel_task(reminder_id) - - async def _delete_reminder(self, reminder_id: str) -> None: - """Delete a reminder from the database, given its ID, and cancel the running task.""" - await self.bot.api_client.delete('bot/reminders/' + str(reminder_id)) - - # Now we can remove it from the schedule list - self.cancel_task(reminder_id) - - async def _reschedule_reminder(self, reminder: dict) -> None: - """Reschedule a reminder object.""" - loop = asyncio.get_event_loop() - - self.cancel_task(reminder["id"]) - self.schedule_task(loop, reminder["id"], reminder) - - async def send_reminder(self, reminder: dict, late: relativedelta = None) -> None: - """Send the reminder.""" - channel = self.bot.get_channel(reminder["channel_id"]) - user = self.bot.get_user(reminder["author"]) - - embed = Embed() - embed.colour = Colour.blurple() - embed.set_author( - icon_url=Icons.remind_blurple, - name="It has arrived!" - ) - - embed.description = f"Here's your reminder: `{reminder['content']}`." - - if reminder.get("jump_url"): # keep backward compatibility - embed.description += f"\n[Jump back to when you created the reminder]({reminder['jump_url']})" - - if late: - embed.colour = Colour.red() - embed.set_author( - icon_url=Icons.remind_red, - name=f"Sorry it arrived {humanize_delta(late, max_units=2)} late!" - ) - - await channel.send( - content=user.mention, - embed=embed - ) - await self._delete_reminder(reminder["id"]) - - @group(name="remind", aliases=("reminder", "reminders"), invoke_without_command=True) - async def remind_group(self, ctx: Context, expiration: Duration, *, content: str) -> None: - """Commands for managing your reminders.""" - await ctx.invoke(self.new_reminder, expiration=expiration, content=content) - - @remind_group.command(name="new", aliases=("add", "create")) - async def new_reminder(self, ctx: Context, expiration: Duration, *, content: str) -> Optional[Message]: - """ - Set yourself a simple reminder. - - Expiration is parsed per: http://strftime.org/ - """ - embed = Embed() - - # If the user is not staff, we need to verify whether or not to make a reminder at all. - if without_role_check(ctx, *STAFF_ROLES): - - # If they don't have permission to set a reminder in this channel - if ctx.channel.id not in WHITELISTED_CHANNELS: - embed.colour = Colour.red() - embed.title = random.choice(NEGATIVE_REPLIES) - embed.description = "Sorry, you can't do that here!" - - return await ctx.send(embed=embed) - - # Get their current active reminders - active_reminders = await self.bot.api_client.get( - 'bot/reminders', - params={ - 'author__id': str(ctx.author.id) - } - ) - - # Let's limit this, so we don't get 10 000 - # reminders from kip or something like that :P - if len(active_reminders) > MAXIMUM_REMINDERS: - embed.colour = Colour.red() - embed.title = random.choice(NEGATIVE_REPLIES) - embed.description = "You have too many active reminders!" - - return await ctx.send(embed=embed) - - # Now we can attempt to actually set the reminder. - reminder = await self.bot.api_client.post( - 'bot/reminders', - json={ - 'author': ctx.author.id, - 'channel_id': ctx.message.channel.id, - 'jump_url': ctx.message.jump_url, - 'content': content, - 'expiration': expiration.isoformat() - } - ) - - now = datetime.utcnow() - timedelta(seconds=1) - - # Confirm to the user that it worked. - await self._send_confirmation( - ctx, - on_success=f"Your reminder will arrive in {humanize_delta(relativedelta(expiration, now))}!" 
- ) - - loop = asyncio.get_event_loop() - self.schedule_task(loop, reminder["id"], reminder) - - @remind_group.command(name="list") - async def list_reminders(self, ctx: Context) -> Optional[Message]: - """View a paginated embed of all reminders for your user.""" - # Get all the user's reminders from the database. - data = await self.bot.api_client.get( - 'bot/reminders', - params={'author__id': str(ctx.author.id)} - ) - - now = datetime.utcnow() - - # Make a list of tuples so it can be sorted by time. - reminders = sorted( - ( - (rem['content'], rem['expiration'], rem['id']) - for rem in data - ), - key=itemgetter(1) - ) - - lines = [] - - for content, remind_at, id_ in reminders: - # Parse and humanize the time, make it pretty :D - remind_datetime = datetime.fromisoformat(remind_at[:-1]) - time = humanize_delta(relativedelta(remind_datetime, now)) - - text = textwrap.dedent(f""" - **Reminder #{id_}:** *expires in {time}* (ID: {id_}) - {content} - """).strip() - - lines.append(text) - - embed = Embed() - embed.colour = Colour.blurple() - embed.title = f"Reminders for {ctx.author}" - - # Remind the user that they have no reminders :^) - if not lines: - embed.description = "No active reminders could be found." - return await ctx.send(embed=embed) - - # Construct the embed and paginate it. - embed.colour = Colour.blurple() - - await LinePaginator.paginate( - lines, - ctx, embed, - max_lines=3, - empty=True - ) - - @remind_group.group(name="edit", aliases=("change", "modify"), invoke_without_command=True) - async def edit_reminder_group(self, ctx: Context) -> None: - """Commands for modifying your current reminders.""" - await ctx.invoke(self.bot.get_command("help"), "reminders", "edit") - - @edit_reminder_group.command(name="duration", aliases=("time",)) - async def edit_reminder_duration(self, ctx: Context, id_: int, expiration: Duration) -> None: - """ - Edit one of your reminder's expiration. - - Expiration is parsed per: http://strftime.org/ - """ - # Send the request to update the reminder in the database - reminder = await self.bot.api_client.patch( - 'bot/reminders/' + str(id_), - json={'expiration': expiration.isoformat()} - ) - - # Send a confirmation message to the channel - await self._send_confirmation( - ctx, on_success="That reminder has been edited successfully!" - ) - - await self._reschedule_reminder(reminder) - - @edit_reminder_group.command(name="content", aliases=("reason",)) - async def edit_reminder_content(self, ctx: Context, id_: int, *, content: str) -> None: - """Edit one of your reminder's content.""" - # Send the request to update the reminder in the database - reminder = await self.bot.api_client.patch( - 'bot/reminders/' + str(id_), - json={'content': content} - ) - - # Send a confirmation message to the channel - await self._send_confirmation( - ctx, on_success="That reminder has been edited successfully!" - ) - await self._reschedule_reminder(reminder) - - @remind_group.command("delete", aliases=("remove",)) - async def delete_reminder(self, ctx: Context, id_: int) -> None: - """Delete one of your active reminders.""" - await self._delete_reminder(id_) - await self._send_confirmation( - ctx, on_success="That reminder has been deleted successfully!" 
- ) - - -def setup(bot: Bot) -> None: - """Load the Reminders cog.""" - bot.add_cog(Reminders(bot)) diff --git a/bot/cogs/snekbox.py b/bot/cogs/snekbox.py deleted file mode 100644 index da33e27b2..000000000 --- a/bot/cogs/snekbox.py +++ /dev/null @@ -1,232 +0,0 @@ -import datetime -import logging -import re -import textwrap -from signal import Signals -from typing import Optional, Tuple - -from discord.ext.commands import Cog, Context, command, guild_only - -from bot.bot import Bot -from bot.constants import Channels, Roles, URLs -from bot.decorators import in_channel -from bot.utils.messages import wait_for_deletion - -log = logging.getLogger(__name__) - -ESCAPE_REGEX = re.compile("[`\u202E\u200B]{3,}") -FORMATTED_CODE_REGEX = re.compile( - r"^\s*" # any leading whitespace from the beginning of the string - r"(?P<delim>(?P<block>```)|``?)" # code delimiter: 1-3 backticks; (?P=block) only matches if it's a block - r"(?(block)(?:(?P<lang>[a-z]+)\n)?)" # if we're in a block, match optional language (only letters plus newline) - r"(?:[ \t]*\n)*" # any blank (empty or tabs/spaces only) lines before the code - r"(?P<code>.*?)" # extract all code inside the markup - r"\s*" # any more whitespace before the end of the code markup - r"(?P=delim)" # match the exact same delimiter from the start again - r"\s*$", # any trailing whitespace until the end of the string - re.DOTALL | re.IGNORECASE # "." also matches newlines, case insensitive -) -RAW_CODE_REGEX = re.compile( - r"^(?:[ \t]*\n)*" # any blank (empty or tabs/spaces only) lines before the code - r"(?P<code>.*?)" # extract all the rest as code - r"\s*$", # any trailing whitespace until the end of the string - re.DOTALL # "." also matches newlines -) - -MAX_PASTE_LEN = 1000 -EVAL_ROLES = (Roles.helpers, Roles.moderator, Roles.admin, Roles.owner, Roles.rockstars, Roles.partners) - - -class Snekbox(Cog): - """Safe evaluation of Python code using Snekbox.""" - - def __init__(self, bot: Bot): - self.bot = bot - self.jobs = {} - - async def post_eval(self, code: str) -> dict: - """Send a POST request to the Snekbox API to evaluate code and return the results.""" - url = URLs.snekbox_eval_api - data = {"input": code} - async with self.bot.http_session.post(url, json=data, raise_for_status=True) as resp: - return await resp.json() - - async def upload_output(self, output: str) -> Optional[str]: - """Upload the eval output to a paste service and return a URL to it if successful.""" - log.trace("Uploading full output to paste service...") - - if len(output) > MAX_PASTE_LEN: - log.info("Full output is too long to upload") - return "too long to upload" - - url = URLs.paste_service.format(key="documents") - try: - async with self.bot.http_session.post(url, data=output, raise_for_status=True) as resp: - data = await resp.json() - - if "key" in data: - return URLs.paste_service.format(key=data["key"]) - except Exception: - # 400 (Bad Request) means there are too many characters - log.exception("Failed to upload full output to paste service!") - - @staticmethod - def prepare_input(code: str) -> str: - """Extract code from the Markdown, format it, and insert it into the code template.""" - match = FORMATTED_CODE_REGEX.fullmatch(code) - if match: - code, block, lang, delim = match.group("code", "block", "lang", "delim") - code = textwrap.dedent(code) - if block: - info = (f"'{lang}' highlighted" if lang else "plain") + " code block" - else: - info = f"{delim}-enclosed inline code" - log.trace(f"Extracted {info} for evaluation:\n{code}") - else: - code = 
textwrap.dedent(RAW_CODE_REGEX.fullmatch(code).group("code")) - log.trace( - f"Eval message contains unformatted or badly formatted code, " - f"stripping whitespace only:\n{code}" - ) - - return code - - @staticmethod - def get_results_message(results: dict) -> Tuple[str, str]: - """Return a user-friendly message and error corresponding to the process's return code.""" - stdout, returncode = results["stdout"], results["returncode"] - msg = f"Your eval job has completed with return code {returncode}" - error = "" - - if returncode is None: - msg = "Your eval job has failed" - error = stdout.strip() - elif returncode == 128 + Signals.SIGKILL: - msg = "Your eval job timed out or ran out of memory" - elif returncode == 255: - msg = "Your eval job has failed" - error = "A fatal NsJail error occurred" - else: - # Try to append signal's name if one exists - try: - name = Signals(returncode - 128).name - msg = f"{msg} ({name})" - except ValueError: - pass - - return msg, error - - @staticmethod - def get_status_emoji(results: dict) -> str: - """Return an emoji corresponding to the status code or lack of output in result.""" - if not results["stdout"].strip(): # No output - return ":warning:" - elif results["returncode"] == 0: # No error - return ":white_check_mark:" - else: # Exception - return ":x:" - - async def format_output(self, output: str) -> Tuple[str, Optional[str]]: - """ - Format the output and return a tuple of the formatted output and a URL to the full output. - - Prepend each line with a line number. Truncate if there are over 10 lines or 1000 characters - and upload the full output to a paste service. - """ - log.trace("Formatting output...") - - output = output.strip(" \n") - original_output = output # To be uploaded to a pasting service if needed - paste_link = None - - if "<@" in output: - output = output.replace("<@", "<@\u200B") # Zero-width space - - if "<!@" in output: - output = output.replace("<!@", "<!@\u200B") # Zero-width space - - if ESCAPE_REGEX.findall(output): - return "Code block escape attempt detected; will not output result", paste_link - - truncated = False - lines = output.count("\n") - - if lines > 0: - output = output.split("\n")[:10] # Only first 10 cause the rest is truncated anyway - output = (f"{i:03d} | {line}" for i, line in enumerate(output, 1)) - output = "\n".join(output) - - if lines > 10: - truncated = True - if len(output) >= 1000: - output = f"{output[:1000]}\n... (truncated - too long, too many lines)" - else: - output = f"{output}\n... (truncated - too many lines)" - elif len(output) >= 1000: - truncated = True - output = f"{output[:1000]}\n... (truncated - too long)" - - if truncated: - paste_link = await self.upload_output(original_output) - - output = output.strip() - if not output: - output = "[No output]" - - return output, paste_link - - @command(name="eval", aliases=("e",)) - @guild_only() - @in_channel(Channels.bot, hidden_channels=(Channels.esoteric,), bypass_roles=EVAL_ROLES) - async def eval_command(self, ctx: Context, *, code: str = None) -> None: - """ - Run Python code and get the results. - - This command supports multiple lines of code, including code wrapped inside a formatted code - block. We've done our best to make this safe, but do let us know if you manage to find an - issue with it! - """ - if ctx.author.id in self.jobs: - await ctx.send( - f"{ctx.author.mention} You've already got a job running - " - "please wait for it to finish!" 
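Most of the work in `prepare_input` is done by the `FORMATTED_CODE_REGEX` defined near the top of this file; run against a typical fenced message it yields the language tag and the bare code. A self-contained demonstration with an invented message:

import re
import textwrap

FORMATTED_CODE_REGEX = re.compile(
    r"^\s*"
    r"(?P<delim>(?P<block>```)|``?)"       # 1-3 backticks; (?P=block) is set only for a block
    r"(?(block)(?:(?P<lang>[a-z]+)\n)?)"   # optional language tag inside a block
    r"(?:[ \t]*\n)*"                       # blank lines before the code
    r"(?P<code>.*?)"                       # the code itself
    r"\s*"
    r"(?P=delim)"
    r"\s*$",
    re.DOTALL | re.IGNORECASE,
)

message = "```py\nprint('hi')\n```"         # invented eval invocation body
match = FORMATTED_CODE_REGEX.fullmatch(message)
print(match.group("lang"))                   # py
print(textwrap.dedent(match.group("code")))  # print('hi')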
- ) - return - - if not code: # None or empty string - await ctx.invoke(self.bot.get_command("help"), "eval") - return - - log.info(f"Received code from {ctx.author} for evaluation:\n{code}") - - self.jobs[ctx.author.id] = datetime.datetime.now() - code = self.prepare_input(code) - - try: - async with ctx.typing(): - results = await self.post_eval(code) - msg, error = self.get_results_message(results) - - if error: - output, paste_link = error, None - else: - output, paste_link = await self.format_output(results["stdout"]) - - icon = self.get_status_emoji(results) - msg = f"{ctx.author.mention} {icon} {msg}.\n\n```py\n{output}\n```" - if paste_link: - msg = f"{msg}\nFull output: {paste_link}" - - response = await ctx.send(msg) - self.bot.loop.create_task( - wait_for_deletion(response, user_ids=(ctx.author.id,), client=ctx.bot) - ) - - log.info(f"{ctx.author}'s job had a return code of {results['returncode']}") - finally: - del self.jobs[ctx.author.id] - - -def setup(bot: Bot) -> None: - """Load the Snekbox cog.""" - bot.add_cog(Snekbox(bot)) diff --git a/bot/cogs/sync/__init__.py b/bot/cogs/sync/__init__.py deleted file mode 100644 index fe7df4e9b..000000000 --- a/bot/cogs/sync/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -from bot.bot import Bot -from .cog import Sync - - -def setup(bot: Bot) -> None: - """Load the Sync cog.""" - bot.add_cog(Sync(bot)) diff --git a/bot/cogs/sync/syncers.py b/bot/cogs/sync/syncers.py deleted file mode 100644 index 14cf51383..000000000 --- a/bot/cogs/sync/syncers.py +++ /dev/null @@ -1,235 +0,0 @@ -from collections import namedtuple -from typing import Dict, Set, Tuple - -from discord import Guild - -from bot.bot import Bot - -# These objects are declared as namedtuples because tuples are hashable, -# something that we make use of when diffing site roles against guild roles. -Role = namedtuple('Role', ('id', 'name', 'colour', 'permissions', 'position')) -User = namedtuple('User', ('id', 'name', 'discriminator', 'avatar_hash', 'roles', 'in_guild')) - - -def get_roles_for_sync( - guild_roles: Set[Role], api_roles: Set[Role] -) -> Tuple[Set[Role], Set[Role], Set[Role]]: - """ - Determine which roles should be created or updated on the site. - - Arguments: - guild_roles (Set[Role]): - Roles that were found on the guild at startup. - - api_roles (Set[Role]): - Roles that were retrieved from the API at startup. - - Returns: - Tuple[Set[Role], Set[Role]. Set[Role]]: - A tuple with three elements. The first element represents - roles to be created on the site, meaning that they were - present on the cached guild but not on the API. The second - element represents roles to be updated, meaning they were - present on both the cached guild and the API but non-ID - fields have changed inbetween. The third represents roles - to be deleted on the site, meaning the roles are present on - the API but not in the cached guild. - """ - guild_role_ids = {role.id for role in guild_roles} - api_role_ids = {role.id for role in api_roles} - new_role_ids = guild_role_ids - api_role_ids - deleted_role_ids = api_role_ids - guild_role_ids - - # New roles are those which are on the cached guild but not on the - # API guild, going by the role ID. We need to send them in for creation. 
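Because `Role` is a hashable namedtuple, the diff is pure set arithmetic; a small worked example (all role values invented) shows which bucket each role lands in:

from collections import namedtuple

Role = namedtuple('Role', ('id', 'name', 'colour', 'permissions', 'position'))

# Invented data: role 1 was renamed on the guild, role 2 is new, role 3 only exists on the API.
guild_roles = {Role(1, 'Admins', 0xFF0000, 8, 2), Role(2, 'Helpers', 0x00FF00, 0, 1)}
api_roles = {Role(1, 'Admin', 0xFF0000, 8, 2), Role(3, 'Muted', 0x000000, 0, 0)}

guild_ids = {role.id for role in guild_roles}
api_ids = {role.id for role in api_roles}

roles_to_create = {role for role in guild_roles if role.id in guild_ids - api_ids}
roles_to_update = guild_roles - api_roles - roles_to_create
roles_to_delete = {role for role in api_roles if role.id in api_ids - guild_ids}

print([role.name for role in roles_to_create])  # ['Helpers']
print([role.name for role in roles_to_update])  # ['Admins']
print([role.name for role in roles_to_delete])  # ['Muted']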
- roles_to_create = {role for role in guild_roles if role.id in new_role_ids} - roles_to_update = guild_roles - api_roles - roles_to_create - roles_to_delete = {role for role in api_roles if role.id in deleted_role_ids} - return roles_to_create, roles_to_update, roles_to_delete - - -async def sync_roles(bot: Bot, guild: Guild) -> Tuple[int, int, int]: - """ - Synchronize roles found on the given `guild` with the ones on the API. - - Arguments: - bot (bot.bot.Bot): - The bot instance that we're running with. - - guild (discord.Guild): - The guild instance from the bot's cache - to synchronize roles with. - - Returns: - Tuple[int, int, int]: - A tuple with three integers representing how many roles were created - (element `0`) , how many roles were updated (element `1`), and how many - roles were deleted (element `2`) on the API. - """ - roles = await bot.api_client.get('bot/roles') - - # Pack API roles and guild roles into one common format, - # which is also hashable. We need hashability to be able - # to compare these easily later using sets. - api_roles = {Role(**role_dict) for role_dict in roles} - guild_roles = { - Role( - id=role.id, name=role.name, - colour=role.colour.value, permissions=role.permissions.value, - position=role.position, - ) - for role in guild.roles - } - roles_to_create, roles_to_update, roles_to_delete = get_roles_for_sync(guild_roles, api_roles) - - for role in roles_to_create: - await bot.api_client.post( - 'bot/roles', - json={ - 'id': role.id, - 'name': role.name, - 'colour': role.colour, - 'permissions': role.permissions, - 'position': role.position, - } - ) - - for role in roles_to_update: - await bot.api_client.put( - f'bot/roles/{role.id}', - json={ - 'id': role.id, - 'name': role.name, - 'colour': role.colour, - 'permissions': role.permissions, - 'position': role.position, - } - ) - - for role in roles_to_delete: - await bot.api_client.delete(f'bot/roles/{role.id}') - - return len(roles_to_create), len(roles_to_update), len(roles_to_delete) - - -def get_users_for_sync( - guild_users: Dict[int, User], api_users: Dict[int, User] -) -> Tuple[Set[User], Set[User]]: - """ - Determine which users should be created or updated on the website. - - Arguments: - guild_users (Dict[int, User]): - A mapping of user IDs to user data, populated from the - guild cached on the running bot instance. - - api_users (Dict[int, User]): - A mapping of user IDs to user data, populated from the API's - current inventory of all users. - - Returns: - Tuple[Set[User], Set[User]]: - Two user sets as a tuple. The first element represents users - to be created on the website, these are users that are present - in the cached guild data but not in the API at all, going by - their ID. The second element represents users to update. It is - populated by users which are present on both the API and the - guild, but where the attribute of a user on the API is not - equal to the attribute of the user on the guild. - """ - users_to_create = set() - users_to_update = set() - - for api_user in api_users.values(): - guild_user = guild_users.get(api_user.id) - if guild_user is not None: - if api_user != guild_user: - users_to_update.add(guild_user) - - elif api_user.in_guild: - # The user is known on the API but not the guild, and the - # API currently specifies that the user is a member of the guild. - # This means that the user has left since the last sync. - # Update the `in_guild` attribute of the user on the site - # to signify that the user left. 
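The user diff follows the same pattern, with the extra wrinkle the comment above describes: a user the API still marks as a member but who is absent from the guild is re-submitted with `in_guild=False` via `namedtuple._replace`. A worked example with invented users:

from collections import namedtuple

User = namedtuple('User', ('id', 'name', 'discriminator', 'avatar_hash', 'roles', 'in_guild'))

# Invented data: 10 is unchanged, 20 changed their name, 30 left the guild, 40 joined.
api_users = {
    10: User(10, 'alice', 1, None, (1,), True),
    20: User(20, 'bob', 2, None, (1,), True),
    30: User(30, 'carol', 3, None, (1,), True),
}
guild_users = {
    10: User(10, 'alice', 1, None, (1,), True),
    20: User(20, 'bobby', 2, None, (1,), True),
    40: User(40, 'dave', 4, None, (1,), True),
}

users_to_create, users_to_update = set(), set()
for api_user in api_users.values():
    guild_user = guild_users.get(api_user.id)
    if guild_user is not None:
        if api_user != guild_user:
            users_to_update.add(guild_user)
    elif api_user.in_guild:
        # Mark members who left since the last sync.
        users_to_update.add(api_user._replace(in_guild=False))

for user_id in guild_users.keys() - api_users.keys():
    users_to_create.add(guild_users[user_id])

print(sorted(u.name for u in users_to_create))                 # ['dave']
print(sorted((u.name, u.in_guild) for u in users_to_update))   # [('bobby', True), ('carol', False)]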
- new_api_user = api_user._replace(in_guild=False) - users_to_update.add(new_api_user) - - new_user_ids = set(guild_users.keys()) - set(api_users.keys()) - for user_id in new_user_ids: - # The user is known on the guild but not on the API. This means - # that the user has joined since the last sync. Create it. - new_user = guild_users[user_id] - users_to_create.add(new_user) - - return users_to_create, users_to_update - - -async def sync_users(bot: Bot, guild: Guild) -> Tuple[int, int, None]: - """ - Synchronize users found in the given `guild` with the ones in the API. - - Arguments: - bot (bot.bot.Bot): - The bot instance that we're running with. - - guild (discord.Guild): - The guild instance from the bot's cache - to synchronize roles with. - - Returns: - Tuple[int, int, None]: - A tuple with two integers, representing how many users were created - (element `0`) and how many users were updated (element `1`), and `None` - to indicate that a user sync never deletes entries from the API. - """ - current_users = await bot.api_client.get('bot/users') - - # Pack API users and guild users into one common format, - # which is also hashable. We need hashability to be able - # to compare these easily later using sets. - api_users = { - user_dict['id']: User( - roles=tuple(sorted(user_dict.pop('roles'))), - **user_dict - ) - for user_dict in current_users - } - guild_users = { - member.id: User( - id=member.id, name=member.name, - discriminator=int(member.discriminator), avatar_hash=member.avatar, - roles=tuple(sorted(role.id for role in member.roles)), in_guild=True - ) - for member in guild.members - } - - users_to_create, users_to_update = get_users_for_sync(guild_users, api_users) - - for user in users_to_create: - await bot.api_client.post( - 'bot/users', - json={ - 'avatar_hash': user.avatar_hash, - 'discriminator': user.discriminator, - 'id': user.id, - 'in_guild': user.in_guild, - 'name': user.name, - 'roles': list(user.roles) - } - ) - - for user in users_to_update: - await bot.api_client.put( - f'bot/users/{user.id}', - json={ - 'avatar_hash': user.avatar_hash, - 'discriminator': user.discriminator, - 'id': user.id, - 'in_guild': user.in_guild, - 'name': user.name, - 'roles': list(user.roles) - } - ) - - return len(users_to_create), len(users_to_update), None diff --git a/bot/cogs/tags.py b/bot/cogs/tags.py deleted file mode 100644 index 90747add4..000000000 --- a/bot/cogs/tags.py +++ /dev/null @@ -1,254 +0,0 @@ -import logging -import re -import time -from typing import Dict, List, Optional - -from discord import Colour, Embed -from discord.ext.commands import Cog, Context, group - -from bot.bot import Bot -from bot.constants import Channels, Cooldowns, MODERATION_ROLES, Roles -from bot.converters import TagContentConverter, TagNameConverter -from bot.decorators import with_role -from bot.pagination import LinePaginator - -log = logging.getLogger(__name__) - -TEST_CHANNELS = ( - Channels.devtest, - Channels.bot, - Channels.helpers -) - -REGEX_NON_ALPHABET = re.compile(r"[^a-z]", re.MULTILINE & re.IGNORECASE) - - -class Tags(Cog): - """Save new tags and fetch existing tags.""" - - def __init__(self, bot: Bot): - self.bot = bot - self.tag_cooldowns = {} - - self._cache = {} - self._last_fetch: float = 0.0 - - async def _get_tags(self, is_forced: bool = False) -> None: - """Get all tags.""" - # refresh only when there's a more than 5m gap from last call. 
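That five minute window is a plain time-based cache. Stripped of the cog plumbing it looks roughly like the sketch below, where the `fetch` callable stands in for the API client call:

import time

CACHE_TTL = 5 * 60  # seconds, the "5m gap" mentioned above

_cache: dict = {}
_last_fetch: float = 0.0

def get_tags(fetch, is_forced: bool = False) -> dict:
    """Return the cached tags, refreshing them via fetch() when stale (sketch)."""
    global _cache, _last_fetch
    now = time.time()
    if is_forced or not _last_fetch or now - _last_fetch > CACHE_TTL:
        _cache = {tag['title'].lower(): tag for tag in fetch()}
        _last_fetch = now
    return _cache

# Example use with a stubbed fetch:
print(get_tags(lambda: [{'title': 'Example', 'embed': {}}]))  # {'example': {'title': 'Example', 'embed': {}}}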
- time_now: float = time.time() - if is_forced or not self._last_fetch or time_now - self._last_fetch > 5 * 60: - tags = await self.bot.api_client.get('bot/tags') - self._cache = {tag['title'].lower(): tag for tag in tags} - self._last_fetch = time_now - - @staticmethod - def _fuzzy_search(search: str, target: str) -> int: - """A simple scoring algorithm based on how many letters are found / total, with order in mind.""" - current, index = 0, 0 - _search = REGEX_NON_ALPHABET.sub('', search.lower()) - _targets = iter(REGEX_NON_ALPHABET.split(target.lower())) - _target = next(_targets) - try: - while True: - while index < len(_target) and _search[current] == _target[index]: - current += 1 - index += 1 - index, _target = 0, next(_targets) - except (StopIteration, IndexError): - pass - return current / len(_search) * 100 - - def _get_suggestions(self, tag_name: str, thresholds: Optional[List[int]] = None) -> List[str]: - """Return a list of suggested tags.""" - scores: Dict[str, int] = { - tag_title: Tags._fuzzy_search(tag_name, tag['title']) - for tag_title, tag in self._cache.items() - } - - thresholds = thresholds or [100, 90, 80, 70, 60] - - for threshold in thresholds: - suggestions = [ - self._cache[tag_title] - for tag_title, matching_score in scores.items() - if matching_score >= threshold - ] - if suggestions: - return suggestions - - return [] - - async def _get_tag(self, tag_name: str) -> list: - """Get a specific tag.""" - await self._get_tags() - found = [self._cache.get(tag_name.lower(), None)] - if not found[0]: - return self._get_suggestions(tag_name) - return found - - @group(name='tags', aliases=('tag', 't'), invoke_without_command=True) - async def tags_group(self, ctx: Context, *, tag_name: TagNameConverter = None) -> None: - """Show all known tags, a single tag, or run a subcommand.""" - await ctx.invoke(self.get_command, tag_name=tag_name) - - def command_on_cooldown(self, ctx: Context, tag_name: str) -> bool: - """ - Check if the command is currently on cooldown, on a per-tag, per-channel basis. - - The cooldown duration is set in constants.py. - """ - now = time.time() - - is_on_cooldown = ( - tag_name - and tag_name in self.tag_cooldowns - and (now - self.tag_cooldowns[tag_name]["time"]) < Cooldowns.tags - and self.tag_cooldowns[tag_name]["channel"] == ctx.channel.id - ) - - return is_on_cooldown - - async def display_tag(self, ctx: Context, tag_name: str = None) -> bool: - """ - Show contents of the tag `tag_name` in `ctx` and return True if something is shown. - - If a tag is not found, display similar tag names as suggestions. If a tag is not specified, - display a paginated embed of all tags. - - Tags are on cooldowns on a per-tag, per-channel basis. If a tag is on cooldown, display - nothing and return False. - """ - if self.command_on_cooldown(ctx, tag_name): - time_left = Cooldowns.tags - (time.time() - self.tag_cooldowns[tag_name]["time"]) - log.warning(f"{ctx.author} tried to get the '{tag_name}' tag, but the tag is on cooldown. 
" - f"Cooldown ends in {time_left:.1f} seconds.") - return False - - await self._get_tags() - - if tag_name is not None: - founds = await self._get_tag(tag_name) - - if len(founds) == 1: - tag = founds[0] - if ctx.channel.id not in TEST_CHANNELS: - self.tag_cooldowns[tag_name] = { - "time": time.time(), - "channel": ctx.channel.id - } - await ctx.send(embed=Embed.from_dict(tag['embed'])) - return True - elif founds and len(tag_name) >= 3: - await ctx.send(embed=Embed( - title='Did you mean ...', - description='\n'.join(tag['title'] for tag in founds[:10]) - )) - return True - - else: - tags = self._cache.values() - if not tags: - await ctx.send(embed=Embed( - description="**There are no tags in the database!**", - colour=Colour.red() - )) - return True - else: - embed: Embed = Embed(title="**Current tags**") - await LinePaginator.paginate( - sorted(f"**»** {tag['title']}" for tag in tags), - ctx, - embed, - footer_text="To show a tag, type !tags <tagname>.", - empty=False, - max_lines=15 - ) - return True - - return False - - @tags_group.command(name='get', aliases=('show', 'g')) - async def get_command(self, ctx: Context, *, tag_name: TagNameConverter = None) -> None: - """Get a specified tag, or a list of all tags if no tag is specified.""" - await self.display_tag(ctx, tag_name) - - @tags_group.command(name='set', aliases=('add', 's')) - @with_role(*MODERATION_ROLES) - async def set_command( - self, - ctx: Context, - tag_name: TagNameConverter, - *, - tag_content: TagContentConverter, - ) -> None: - """Create a new tag.""" - body = { - 'title': tag_name.lower().strip(), - 'embed': { - 'title': tag_name, - 'description': tag_content - } - } - - await self.bot.api_client.post('bot/tags', json=body) - self._cache[tag_name.lower()] = await self.bot.api_client.get(f'bot/tags/{tag_name}') - - log.debug(f"{ctx.author} successfully added the following tag to our database: \n" - f"tag_name: {tag_name}\n" - f"tag_content: '{tag_content}'\n") - - await ctx.send(embed=Embed( - title="Tag successfully added", - description=f"**{tag_name}** added to tag database.", - colour=Colour.blurple() - )) - - @tags_group.command(name='edit', aliases=('e', )) - @with_role(*MODERATION_ROLES) - async def edit_command( - self, - ctx: Context, - tag_name: TagNameConverter, - *, - tag_content: TagContentConverter, - ) -> None: - """Edit an existing tag.""" - body = { - 'embed': { - 'title': tag_name, - 'description': tag_content - } - } - - await self.bot.api_client.patch(f'bot/tags/{tag_name}', json=body) - self._cache[tag_name.lower()] = await self.bot.api_client.get(f'bot/tags/{tag_name}') - - log.debug(f"{ctx.author} successfully edited the following tag in our database: \n" - f"tag_name: {tag_name}\n" - f"tag_content: '{tag_content}'\n") - - await ctx.send(embed=Embed( - title="Tag successfully edited", - description=f"**{tag_name}** edited in the database.", - colour=Colour.blurple() - )) - - @tags_group.command(name='delete', aliases=('remove', 'rm', 'd')) - @with_role(Roles.admin, Roles.owner) - async def delete_command(self, ctx: Context, *, tag_name: TagNameConverter) -> None: - """Remove a tag from the database.""" - await self.bot.api_client.delete(f'bot/tags/{tag_name}') - self._cache.pop(tag_name.lower(), None) - - log.debug(f"{ctx.author} successfully deleted the tag called '{tag_name}'") - await ctx.send(embed=Embed( - title=tag_name, - description=f"Tag successfully removed: {tag_name}.", - colour=Colour.blurple() - )) - - -def setup(bot: Bot) -> None: - """Load the Tags cog.""" - 
bot.add_cog(Tags(bot)) diff --git a/bot/cogs/token_remover.py b/bot/cogs/token_remover.py deleted file mode 100644 index 82c01ae96..000000000 --- a/bot/cogs/token_remover.py +++ /dev/null @@ -1,145 +0,0 @@ -import base64 -import binascii -import logging -import re -import struct -from datetime import datetime - -from discord import Colour, Message -from discord.ext.commands import Cog -from discord.utils import snowflake_time - -from bot.bot import Bot -from bot.cogs.moderation import ModLog -from bot.constants import Channels, Colours, Event, Icons - -log = logging.getLogger(__name__) - -DELETION_MESSAGE_TEMPLATE = ( - "Hey {mention}! I noticed you posted a seemingly valid Discord API " - "token in your message and have removed your message. " - "This means that your token has been **compromised**. " - "Please change your token **immediately** at: " - "<https://discordapp.com/developers/applications/me>\n\n" - "Feel free to re-post it with the token removed. " - "If you believe this was a mistake, please let us know!" -) -DISCORD_EPOCH_TIMESTAMP = datetime(2017, 1, 1) -TOKEN_EPOCH = 1_293_840_000 -TOKEN_RE = re.compile( - r"[^\s\.()\"']+" # Matches token part 1: The user ID string, encoded as base64 - r"\." # Matches a literal dot between the token parts - r"[^\s\.()\"']+" # Matches token part 2: The creation timestamp, as an integer - r"\." # Matches a literal dot between the token parts - r"[^\s\.()\"']+" # Matches token part 3: The HMAC, unused by us, but check that it isn't empty -) - - -class TokenRemover(Cog): - """Scans messages for potential discord.py bot tokens and removes them.""" - - def __init__(self, bot: Bot): - self.bot = bot - - @property - def mod_log(self) -> ModLog: - """Get currently loaded ModLog cog instance.""" - return self.bot.get_cog("ModLog") - - @Cog.listener() - async def on_message(self, msg: Message) -> None: - """ - Check each message for a string that matches Discord's token pattern. - - See: https://discordapp.com/developers/docs/reference#snowflakes - """ - if self.is_token_in_message(msg): - await self.take_action(msg) - - @Cog.listener() - async def on_message_edit(self, before: Message, after: Message) -> None: - """ - Check each edit for a string that matches Discord's token pattern. 
- - See: https://discordapp.com/developers/docs/reference#snowflakes - """ - if self.is_token_in_message(after): - await self.take_action(after) - - async def take_action(self, msg: Message) -> None: - """Remove the `msg` containing a token an send a mod_log message.""" - user_id, creation_timestamp, hmac = TOKEN_RE.search(msg.content).group(0).split('.') - self.mod_log.ignore(Event.message_delete, msg.id) - await msg.delete() - await msg.channel.send(DELETION_MESSAGE_TEMPLATE.format(mention=msg.author.mention)) - - message = ( - "Censored a seemingly valid token sent by " - f"{msg.author} (`{msg.author.id}`) in {msg.channel.mention}, token was " - f"`{user_id}.{creation_timestamp}.{'x' * len(hmac)}`" - ) - log.debug(message) - - # Send pretty mod log embed to mod-alerts - await self.mod_log.send_log_message( - icon_url=Icons.token_removed, - colour=Colour(Colours.soft_red), - title="Token removed!", - text=message, - thumbnail=msg.author.avatar_url_as(static_format="png"), - channel_id=Channels.mod_alerts, - ) - - @classmethod - def is_token_in_message(cls, msg: Message) -> bool: - """Check if `msg` contains a seemly valid token.""" - if msg.author.bot: - return False - - maybe_match = TOKEN_RE.search(msg.content) - if maybe_match is None: - return False - - try: - user_id, creation_timestamp, hmac = maybe_match.group(0).split('.') - except ValueError: - return False - - if cls.is_valid_user_id(user_id) and cls.is_valid_timestamp(creation_timestamp): - return True - - @staticmethod - def is_valid_user_id(b64_content: str) -> bool: - """ - Check potential token to see if it contains a valid Discord user ID. - - See: https://discordapp.com/developers/docs/reference#snowflakes - """ - b64_content += '=' * (-len(b64_content) % 4) - - try: - content: bytes = base64.b64decode(b64_content) - return content.decode('utf-8').isnumeric() - except (binascii.Error, UnicodeDecodeError): - return False - - @staticmethod - def is_valid_timestamp(b64_content: str) -> bool: - """ - Check potential token to see if it contains a valid timestamp. 
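As a side note, the user-ID check above can be reproduced in isolation: the first token segment is unpadded base64, so padding is restored before decoding and the decoded text must be purely numeric. The ID used below is made up.

import base64

# Re-create the padding + decode step from is_valid_user_id() with a fake ID.
segment = base64.b64encode(b"123456789012345678").decode().rstrip("=")
padded = segment + "=" * (-len(segment) % 4)    # restore any stripped '=' padding
decoded = base64.b64decode(padded).decode("utf-8")
print(decoded, decoded.isnumeric())             # 123456789012345678 True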
- - See: https://discordapp.com/developers/docs/reference#snowflakes - """ - b64_content += '=' * (-len(b64_content) % 4) - - try: - content = base64.urlsafe_b64decode(b64_content) - snowflake = struct.unpack('i', content)[0] - except (binascii.Error, struct.error): - return False - return snowflake_time(snowflake + TOKEN_EPOCH) < DISCORD_EPOCH_TIMESTAMP - - -def setup(bot: Bot) -> None: - """Load the TokenRemover cog.""" - bot.add_cog(TokenRemover(bot)) diff --git a/bot/cogs/utils.py b/bot/cogs/utils.py deleted file mode 100644 index da278011a..000000000 --- a/bot/cogs/utils.py +++ /dev/null @@ -1,179 +0,0 @@ -import logging -import re -import unicodedata -from asyncio import TimeoutError, sleep -from email.parser import HeaderParser -from io import StringIO -from typing import Tuple - -from dateutil import relativedelta -from discord import Colour, Embed, Message, Role -from discord.ext.commands import Cog, Context, command - -from bot.bot import Bot -from bot.constants import Channels, MODERATION_ROLES, Mention, STAFF_ROLES -from bot.decorators import in_channel, with_role -from bot.utils.time import humanize_delta - -log = logging.getLogger(__name__) - - -class Utils(Cog): - """A selection of utilities which don't have a clear category.""" - - def __init__(self, bot: Bot): - self.bot = bot - - self.base_pep_url = "http://www.python.org/dev/peps/pep-" - self.base_github_pep_url = "https://raw.githubusercontent.com/python/peps/master/pep-" - - @command(name='pep', aliases=('get_pep', 'p')) - async def pep_command(self, ctx: Context, pep_number: str) -> None: - """Fetches information about a PEP and sends it to the channel.""" - if pep_number.isdigit(): - pep_number = int(pep_number) - else: - await ctx.invoke(self.bot.get_command("help"), "pep") - return - - possible_extensions = ['.txt', '.rst'] - found_pep = False - for extension in possible_extensions: - # Attempt to fetch the PEP - pep_url = f"{self.base_github_pep_url}{pep_number:04}{extension}" - log.trace(f"Requesting PEP {pep_number} with {pep_url}") - response = await self.bot.http_session.get(pep_url) - - if response.status == 200: - log.trace("PEP found") - found_pep = True - - pep_content = await response.text() - - # Taken from https://github.com/python/peps/blob/master/pep0/pep.py#L179 - pep_header = HeaderParser().parse(StringIO(pep_content)) - - # Assemble the embed - pep_embed = Embed( - title=f"**PEP {pep_number} - {pep_header['Title']}**", - description=f"[Link]({self.base_pep_url}{pep_number:04})", - ) - - pep_embed.set_thumbnail(url="https://www.python.org/static/opengraph-icon-200x200.png") - - # Add the interesting information - fields_to_check = ("Status", "Python-Version", "Created", "Type") - for field in fields_to_check: - # Check for a PEP metadata field that is present but has an empty value - # embed field values can't contain an empty string - if pep_header.get(field, ""): - pep_embed.add_field(name=field, value=pep_header[field]) - - elif response.status != 404: - # any response except 200 and 404 is expected - found_pep = True # actually not, but it's easier to display this way - log.trace(f"The user requested PEP {pep_number}, but the response had an unexpected status code: " - f"{response.status}.\n{response.text}") - - error_message = "Unexpected HTTP error during PEP search. Please let us know." 
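To make the header-parsing step above concrete, here is a self-contained sketch using a tiny inline sample in place of the fetched PEP text (the real command reads the response body from GitHub):

from email.parser import HeaderParser
from io import StringIO

# RFC 822 style headers at the top of a PEP file, as parsed by the command above.
sample = (
    "PEP: 8\n"
    "Title: Style Guide for Python Code\n"
    "Status: Active\n"
    "Type: Process\n"
    "\n"
    "Body of the PEP follows here...\n"
)
pep_header = HeaderParser().parse(StringIO(sample))
print(pep_header["Title"], "-", pep_header["Status"])   # Style Guide for Python Code - Active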
- pep_embed = Embed(title="Unexpected error", description=error_message) - pep_embed.colour = Colour.red() - break - - if not found_pep: - log.trace("PEP was not found") - not_found = f"PEP {pep_number} does not exist." - pep_embed = Embed(title="PEP not found", description=not_found) - pep_embed.colour = Colour.red() - - await ctx.message.channel.send(embed=pep_embed) - - @command() - @in_channel(Channels.bot, bypass_roles=STAFF_ROLES) - async def charinfo(self, ctx: Context, *, characters: str) -> None: - """Shows you information on up to 25 unicode characters.""" - match = re.match(r"<(a?):(\w+):(\d+)>", characters) - if match: - embed = Embed( - title="Non-Character Detected", - description=( - "Only unicode characters can be processed, but a custom Discord emoji " - "was found. Please remove it and try again." - ) - ) - embed.colour = Colour.red() - await ctx.send(embed=embed) - return - - if len(characters) > 25: - embed = Embed(title=f"Too many characters ({len(characters)}/25)") - embed.colour = Colour.red() - await ctx.send(embed=embed) - return - - def get_info(char: str) -> Tuple[str, str]: - digit = f"{ord(char):x}" - if len(digit) <= 4: - u_code = f"\\u{digit:>04}" - else: - u_code = f"\\U{digit:>08}" - url = f"https://www.compart.com/en/unicode/U+{digit:>04}" - name = f"[{unicodedata.name(char, '')}]({url})" - info = f"`{u_code.ljust(10)}`: {name} - {char}" - return info, u_code - - charlist, rawlist = zip(*(get_info(c) for c in characters)) - - embed = Embed(description="\n".join(charlist)) - embed.set_author(name="Character Info") - - if len(characters) > 1: - embed.add_field(name='Raw', value=f"`{''.join(rawlist)}`", inline=False) - - await ctx.send(embed=embed) - - @command() - @with_role(*MODERATION_ROLES) - async def mention(self, ctx: Context, *, role: Role) -> None: - """Set a role to be mentionable for a limited time.""" - if role.mentionable: - await ctx.send(f"{role} is already mentionable!") - return - - await role.edit(reason=f"Role unlocked by {ctx.author}", mentionable=True) - - human_time = humanize_delta(relativedelta.relativedelta(seconds=Mention.message_timeout)) - await ctx.send( - f"{role} has been made mentionable. I will reset it in {human_time}, or when someone mentions this role." - ) - - def check(m: Message) -> bool: - """Checks that the message contains the role mention.""" - return role in m.role_mentions - - try: - msg = await self.bot.wait_for("message", check=check, timeout=Mention.message_timeout) - except TimeoutError: - await role.edit(mentionable=False, reason="Automatic role lock - timeout.") - await ctx.send(f"{ctx.author.mention}, you took too long. I have reset {role} to be unmentionable.") - return - - if any(r.id in MODERATION_ROLES for r in msg.author.roles): - await sleep(Mention.reset_delay) - await role.edit(mentionable=False, reason=f"Automatic role lock by {msg.author}") - await ctx.send( - f"{ctx.author.mention}, I have reset {role} to be unmentionable as " - f"{msg.author if msg.author != ctx.author else 'you'} sent a message mentioning it." - ) - return - - await role.edit(mentionable=False, reason=f"Automatic role lock - unauthorised use by {msg.author}") - await ctx.send( - f"{ctx.author.mention}, I have reset {role} to be unmentionable " - f"as I detected unauthorised use by {msg.author} (ID: {msg.author.id})." 
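For clarity, the escape formatting inside get_info() above can be exercised on its own; the two sample characters are arbitrary:

import unicodedata

def escape(char: str) -> str:
    """Mirror get_info()'s escape logic: \\u for 4-hex-digit code points, \\U otherwise."""
    digit = f"{ord(char):x}"
    return f"\\u{digit:>04}" if len(digit) <= 4 else f"\\U{digit:>08}"

print(escape("A"), unicodedata.name("A"))     # \u0041 LATIN CAPITAL LETTER A
print(escape("🐍"), unicodedata.name("🐍"))   # \U0001f40d SNAKE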
- ) - - -def setup(bot: Bot) -> None: - """Load the Utils cog.""" - bot.add_cog(Utils(bot)) diff --git a/bot/cogs/verification.py b/bot/cogs/verification.py deleted file mode 100644 index 988e0d49a..000000000 --- a/bot/cogs/verification.py +++ /dev/null @@ -1,235 +0,0 @@ -import logging -from datetime import datetime - -from discord import Colour, Message, NotFound, Object -from discord.ext import tasks -from discord.ext.commands import Cog, Context, command - -from bot.bot import Bot -from bot.cogs.moderation import ModLog -from bot.constants import ( - Bot as BotConfig, - Channels, Colours, Event, - Filter, Icons, MODERATION_ROLES, Roles -) -from bot.decorators import InChannelCheckFailure, in_channel, without_role -from bot.utils.checks import without_role_check - -log = logging.getLogger(__name__) - -WELCOME_MESSAGE = f""" -Hello! Welcome to the server, and thanks for verifying yourself! - -For your records, these are the documents you accepted: - -`1)` Our rules, here: <https://pythondiscord.com/pages/rules> -`2)` Our privacy policy, here: <https://pythondiscord.com/pages/privacy> - you can find information on how to have \ -your information removed here as well. - -Feel free to review them at any point! - -Additionally, if you'd like to receive notifications for the announcements we post in <#{Channels.announcements}> \ -from time to time, you can send `!subscribe` to <#{Channels.bot}> at any time to assign yourself the \ -**Announcements** role. We'll mention this role every time we make an announcement. - -If you'd like to unsubscribe from the announcement notifications, simply send `!unsubscribe` to <#{Channels.bot}>. -""" - -PERIODIC_PING = ( - f"@everyone To verify that you have read our rules, please type `{BotConfig.prefix}accept`." - f" If you encounter any problems during the verification process, ping the <@&{Roles.admin}> role in this channel." -) -BOT_MESSAGE_DELETE_DELAY = 10 - - -class Verification(Cog): - """User verification and role self-management.""" - - def __init__(self, bot: Bot): - self.bot = bot - self.periodic_ping.start() - - @property - def mod_log(self) -> ModLog: - """Get currently loaded ModLog cog instance.""" - return self.bot.get_cog("ModLog") - - @Cog.listener() - async def on_message(self, message: Message) -> None: - """Check new message event for messages to the checkpoint channel & process.""" - if message.channel.id != Channels.verification: - return # Only listen for #checkpoint messages - - if message.author.bot: - # They're a bot, delete their message after the delay. - # But not the periodic ping; we like that one. - if message.content != PERIODIC_PING: - await message.delete(delay=BOT_MESSAGE_DELETE_DELAY) - return - - # if a user mentions a role or guild member - # alert the mods in mod-alerts channel - if message.mentions or message.role_mentions: - log.debug( - f"{message.author} mentioned one or more users " - f"and/or roles in {message.channel.name}" - ) - - embed_text = ( - f"{message.author.mention} sent a message in " - f"{message.channel.mention} that contained user and/or role mentions." 
- f"\n\n**Original message:**\n>>> {message.content}" - ) - - # Send pretty mod log embed to mod-alerts - await self.mod_log.send_log_message( - icon_url=Icons.filtering, - colour=Colour(Colours.soft_red), - title=f"User/Role mentioned in {message.channel.name}", - text=embed_text, - thumbnail=message.author.avatar_url_as(static_format="png"), - channel_id=Channels.mod_alerts, - ping_everyone=Filter.ping_everyone, - ) - - ctx = await self.bot.get_context(message) # type: Context - - if ctx.command is not None and ctx.command.name == "accept": - return # They used the accept command - - for role in ctx.author.roles: - if role.id == Roles.verified: - log.warning(f"{ctx.author} posted '{ctx.message.content}' " - "in the verification channel, but is already verified.") - return # They're already verified - - log.debug(f"{ctx.author} posted '{ctx.message.content}' in the verification " - "channel. We are providing instructions how to verify.") - await ctx.send( - f"{ctx.author.mention} Please type `!accept` to verify that you accept our rules, " - f"and gain access to the rest of the server.", - delete_after=20 - ) - - log.trace(f"Deleting the message posted by {ctx.author}") - - try: - await ctx.message.delete() - except NotFound: - log.trace("No message found, it must have been deleted by another bot.") - - @command(name='accept', aliases=('verify', 'verified', 'accepted'), hidden=True) - @without_role(Roles.verified) - @in_channel(Channels.verification) - async def accept_command(self, ctx: Context, *_) -> None: # We don't actually care about the args - """Accept our rules and gain access to the rest of the server.""" - log.debug(f"{ctx.author} called !accept. Assigning the 'Developer' role.") - await ctx.author.add_roles(Object(Roles.verified), reason="Accepted the rules") - try: - await ctx.author.send(WELCOME_MESSAGE) - except Exception: - # Catch the exception, in case they have DMs off or something - log.exception(f"Unable to send welcome message to user {ctx.author}.") - - log.trace(f"Deleting the message posted by {ctx.author}.") - - try: - self.mod_log.ignore(Event.message_delete, ctx.message.id) - await ctx.message.delete() - except NotFound: - log.trace("No message found, it must have been deleted by another bot.") - - @command(name='subscribe') - @in_channel(Channels.bot) - async def subscribe_command(self, ctx: Context, *_) -> None: # We don't actually care about the args - """Subscribe to announcement notifications by assigning yourself the role.""" - has_role = False - - for role in ctx.author.roles: - if role.id == Roles.announcements: - has_role = True - break - - if has_role: - await ctx.send(f"{ctx.author.mention} You're already subscribed!") - return - - log.debug(f"{ctx.author} called !subscribe. 
Assigning the 'Announcements' role.") - await ctx.author.add_roles(Object(Roles.announcements), reason="Subscribed to announcements") - - log.trace(f"Deleting the message posted by {ctx.author}.") - - await ctx.send( - f"{ctx.author.mention} Subscribed to <#{Channels.announcements}> notifications.", - ) - - @command(name='unsubscribe') - @in_channel(Channels.bot) - async def unsubscribe_command(self, ctx: Context, *_) -> None: # We don't actually care about the args - """Unsubscribe from announcement notifications by removing the role from yourself.""" - has_role = False - - for role in ctx.author.roles: - if role.id == Roles.announcements: - has_role = True - break - - if not has_role: - await ctx.send(f"{ctx.author.mention} You're already unsubscribed!") - return - - log.debug(f"{ctx.author} called !unsubscribe. Removing the 'Announcements' role.") - await ctx.author.remove_roles(Object(Roles.announcements), reason="Unsubscribed from announcements") - - log.trace(f"Deleting the message posted by {ctx.author}.") - - await ctx.send( - f"{ctx.author.mention} Unsubscribed from <#{Channels.announcements}> notifications." - ) - - # This cannot be static (must have a __func__ attribute). - async def cog_command_error(self, ctx: Context, error: Exception) -> None: - """Check for & ignore any InChannelCheckFailure.""" - if isinstance(error, InChannelCheckFailure): - error.handled = True - - @staticmethod - def bot_check(ctx: Context) -> bool: - """Block any command within the verification channel that is not !accept.""" - if ctx.channel.id == Channels.verification and without_role_check(ctx, *MODERATION_ROLES): - return ctx.command.name == "accept" - else: - return True - - @tasks.loop(hours=12) - async def periodic_ping(self) -> None: - """Every week, mention @everyone to remind them to verify.""" - messages = self.bot.get_channel(Channels.verification).history(limit=10) - need_to_post = True # True if a new message needs to be sent. - - async for message in messages: - if message.author == self.bot.user and message.content == PERIODIC_PING: - delta = datetime.utcnow() - message.created_at # Time since last message. - if delta.days >= 7: # Message is older than a week. 
- await message.delete() - else: - need_to_post = False - - break - - if need_to_post: - await self.bot.get_channel(Channels.verification).send(PERIODIC_PING) - - @periodic_ping.before_loop - async def before_ping(self) -> None: - """Only start the loop when the bot is ready.""" - await self.bot.wait_until_ready() - - def cog_unload(self) -> None: - """Cancel the periodic ping task when the cog is unloaded.""" - self.periodic_ping.cancel() - - -def setup(bot: Bot) -> None: - """Load the Verification cog.""" - bot.add_cog(Verification(bot)) diff --git a/bot/cogs/watchchannels/__init__.py b/bot/cogs/watchchannels/__init__.py deleted file mode 100644 index 69d118df6..000000000 --- a/bot/cogs/watchchannels/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from bot.bot import Bot -from .bigbrother import BigBrother -from .talentpool import TalentPool - - -def setup(bot: Bot) -> None: - """Load the BigBrother and TalentPool cogs.""" - bot.add_cog(BigBrother(bot)) - bot.add_cog(TalentPool(bot)) diff --git a/bot/cogs/wolfram.py b/bot/cogs/wolfram.py deleted file mode 100644 index 5d6b4630b..000000000 --- a/bot/cogs/wolfram.py +++ /dev/null @@ -1,272 +0,0 @@ -import logging -from io import BytesIO -from typing import Callable, List, Optional, Tuple -from urllib import parse - -import discord -from dateutil.relativedelta import relativedelta -from discord import Embed -from discord.ext import commands -from discord.ext.commands import BucketType, Cog, Context, check, group - -from bot.bot import Bot -from bot.constants import Colours, STAFF_ROLES, Wolfram -from bot.pagination import ImagePaginator -from bot.utils.time import humanize_delta - -log = logging.getLogger(__name__) - -APPID = Wolfram.key -DEFAULT_OUTPUT_FORMAT = "JSON" -QUERY = "http://api.wolframalpha.com/v2/{request}?{data}" -WOLF_IMAGE = "https://www.symbols.com/gi.php?type=1&id=2886&i=1" - -MAX_PODS = 20 - -# Allows for 10 wolfram calls pr user pr day -usercd = commands.CooldownMapping.from_cooldown(Wolfram.user_limit_day, 60*60*24, BucketType.user) - -# Allows for max api requests / days in month per day for the entire guild (Temporary) -guildcd = commands.CooldownMapping.from_cooldown(Wolfram.guild_limit_day, 60*60*24, BucketType.guild) - - -async def send_embed( - ctx: Context, - message_txt: str, - colour: int = Colours.soft_red, - footer: str = None, - img_url: str = None, - f: discord.File = None -) -> None: - """Generate & send a response embed with Wolfram as the author.""" - embed = Embed(colour=colour) - embed.description = message_txt - embed.set_author(name="Wolfram Alpha", - icon_url=WOLF_IMAGE, - url="https://www.wolframalpha.com/") - if footer: - embed.set_footer(text=footer) - - if img_url: - embed.set_image(url=img_url) - - await ctx.send(embed=embed, file=f) - - -def custom_cooldown(*ignore: List[int]) -> Callable: - """ - Implement per-user and per-guild cooldowns for requests to the Wolfram API. 
- - A list of roles may be provided to ignore the per-user cooldown - """ - async def predicate(ctx: Context) -> bool: - user_bucket = usercd.get_bucket(ctx.message) - - if all(role.id not in ignore for role in ctx.author.roles): - user_rate = user_bucket.update_rate_limit() - - if user_rate: - # Can't use api; cause: member limit - delta = relativedelta(seconds=int(user_rate)) - cooldown = humanize_delta(delta) - message = ( - "You've used up your limit for Wolfram|Alpha requests.\n" - f"Cooldown: {cooldown}" - ) - await send_embed(ctx, message) - return False - - guild_bucket = guildcd.get_bucket(ctx.message) - guild_rate = guild_bucket.update_rate_limit() - - # Repr has a token attribute to read requests left - log.debug(guild_bucket) - - if guild_rate: - # Can't use api; cause: guild limit - message = ( - "The max limit of requests for the server has been reached for today.\n" - f"Cooldown: {int(guild_rate)}" - ) - await send_embed(ctx, message) - return False - - return True - return check(predicate) - - -async def get_pod_pages(ctx: Context, bot: Bot, query: str) -> Optional[List[Tuple]]: - """Get the Wolfram API pod pages for the provided query.""" - async with ctx.channel.typing(): - url_str = parse.urlencode({ - "input": query, - "appid": APPID, - "output": DEFAULT_OUTPUT_FORMAT, - "format": "image,plaintext" - }) - request_url = QUERY.format(request="query", data=url_str) - - async with bot.http_session.get(request_url) as response: - json = await response.json(content_type='text/plain') - - result = json["queryresult"] - - if result["error"]: - # API key not set up correctly - if result["error"]["msg"] == "Invalid appid": - message = "Wolfram API key is invalid or missing." - log.warning( - "API key seems to be missing, or invalid when " - f"processing a wolfram request: {url_str}, Response: {json}" - ) - await send_embed(ctx, message) - return - - message = "Something went wrong internally with your request, please notify staff!" - log.warning(f"Something went wrong getting a response from wolfram: {url_str}, Response: {json}") - await send_embed(ctx, message) - return - - if not result["success"]: - message = f"I couldn't find anything for {query}." - await send_embed(ctx, message) - return - - if not result["numpods"]: - message = "Could not find any results." - await send_embed(ctx, message) - return - - pods = result["pods"] - pages = [] - for pod in pods[:MAX_PODS]: - subs = pod.get("subpods") - - for sub in subs: - title = sub.get("title") or sub.get("plaintext") or sub.get("id", "") - img = sub["img"]["src"] - pages.append((title, img)) - return pages - - -class Wolfram(Cog): - """Commands for interacting with the Wolfram|Alpha API.""" - - def __init__(self, bot: Bot): - self.bot = bot - - @group(name="wolfram", aliases=("wolf", "wa"), invoke_without_command=True) - @custom_cooldown(*STAFF_ROLES) - async def wolfram_command(self, ctx: Context, *, query: str) -> None: - """Requests all answers on a single image, sends an image of all related pods.""" - url_str = parse.urlencode({ - "i": query, - "appid": APPID, - }) - query = QUERY.format(request="simple", data=url_str) - - # Give feedback that the bot is working. 
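As an aside, the query-URL assembly used by get_pod_pages() above is plain standard-library code and can be sketched on its own; the app ID below is a placeholder, not a real key:

from urllib import parse

QUERY = "http://api.wolframalpha.com/v2/{request}?{data}"

url_str = parse.urlencode({
    "input": "integrate x^2",
    "appid": "YOUR-APP-ID",              # placeholder, not a real key
    "output": "JSON",
    "format": "image,plaintext",
})
request_url = QUERY.format(request="query", data=url_str)
print(request_url)
# http://api.wolframalpha.com/v2/query?input=integrate+x%5E2&appid=YOUR-APP-ID&output=JSON&format=image%2Cplaintext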
- async with ctx.channel.typing(): - async with self.bot.http_session.get(query) as response: - status = response.status - image_bytes = await response.read() - - f = discord.File(BytesIO(image_bytes), filename="image.png") - image_url = "attachment://image.png" - - if status == 501: - message = "Failed to get response" - footer = "" - color = Colours.soft_red - elif status == 400: - message = "No input found" - footer = "" - color = Colours.soft_red - elif status == 403: - message = "Wolfram API key is invalid or missing." - footer = "" - color = Colours.soft_red - else: - message = "" - footer = "View original for a bigger picture." - color = Colours.soft_orange - - # Sends a "blank" embed if no request is received, unsure how to fix - await send_embed(ctx, message, color, footer=footer, img_url=image_url, f=f) - - @wolfram_command.command(name="page", aliases=("pa", "p")) - @custom_cooldown(*STAFF_ROLES) - async def wolfram_page_command(self, ctx: Context, *, query: str) -> None: - """ - Requests a drawn image of given query. - - Keywords worth noting are, "like curve", "curve", "graph", "pokemon", etc. - """ - pages = await get_pod_pages(ctx, self.bot, query) - - if not pages: - return - - embed = Embed() - embed.set_author(name="Wolfram Alpha", - icon_url=WOLF_IMAGE, - url="https://www.wolframalpha.com/") - embed.colour = Colours.soft_orange - - await ImagePaginator.paginate(pages, ctx, embed) - - @wolfram_command.command(name="cut", aliases=("c",)) - @custom_cooldown(*STAFF_ROLES) - async def wolfram_cut_command(self, ctx: Context, *, query: str) -> None: - """ - Requests a drawn image of given query. - - Keywords worth noting are, "like curve", "curve", "graph", "pokemon", etc. - """ - pages = await get_pod_pages(ctx, self.bot, query) - - if not pages: - return - - if len(pages) >= 2: - page = pages[1] - else: - page = pages[0] - - await send_embed(ctx, page[0], colour=Colours.soft_orange, img_url=page[1]) - - @wolfram_command.command(name="short", aliases=("sh", "s")) - @custom_cooldown(*STAFF_ROLES) - async def wolfram_short_command(self, ctx: Context, *, query: str) -> None: - """Requests an answer to a simple question.""" - url_str = parse.urlencode({ - "i": query, - "appid": APPID, - }) - query = QUERY.format(request="result", data=url_str) - - # Give feedback that the bot is working. - async with ctx.channel.typing(): - async with self.bot.http_session.get(query) as response: - status = response.status - response_text = await response.text() - - if status == 501: - message = "Failed to get response" - color = Colours.soft_red - elif status == 400: - message = "No input found" - color = Colours.soft_red - elif response_text == "Error 1: Invalid appid": - message = "Wolfram API key is invalid or missing." - color = Colours.soft_red - else: - message = response_text - color = Colours.soft_orange - - await send_embed(ctx, message, color) - - -def setup(bot: Bot) -> None: - """Load the Wolfram cog.""" - bot.add_cog(Wolfram(bot)) diff --git a/bot/command.py b/bot/command.py new file mode 100644 index 000000000..0fb900f7b --- /dev/null +++ b/bot/command.py @@ -0,0 +1,18 @@ +from discord.ext import commands + + +class Command(commands.Command): + """ + A `discord.ext.commands.Command` subclass which supports root aliases. + + A `root_aliases` keyword argument is added, which is a sequence of alias names that will act as + top-level commands rather than being aliases of the command's group. It's stored as an attribute + also named `root_aliases`. 
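A hypothetical usage sketch of the new Command subclass described above: the decorator passes root_aliases through to the constructor. The command name and alias are made up, and the code that actually registers root aliases on the bot lives elsewhere in the codebase and is not shown in this hunk.

from discord.ext import commands

class Command(commands.Command):
    """Subclass from the hunk above: stores `root_aliases` passed via kwargs."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.root_aliases = kwargs.get("root_aliases", [])
        if not isinstance(self.root_aliases, (list, tuple)):
            raise TypeError("Root aliases of a command must be a list or a tuple of strings.")

@commands.command(cls=Command, name="get", root_aliases=("tag",))
async def get_command(ctx: commands.Context) -> None:
    """Toy command whose `tag` alias could be exposed as a top-level command."""
    await ctx.send("hello")

print(get_command.root_aliases)   # ('tag',)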
+ """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.root_aliases = kwargs.get("root_aliases", []) + + if not isinstance(self.root_aliases, (list, tuple)): + raise TypeError("Root aliases of a command must be a list or a tuple of strings.") diff --git a/bot/constants.py b/bot/constants.py index a4c65a1f8..6bfda160b 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -15,7 +15,7 @@ import os from collections.abc import Mapping from enum import Enum from pathlib import Path -from typing import Dict, List +from typing import Dict, List, Optional import yaml @@ -186,6 +186,11 @@ class YAMLGetter(type): def __getitem__(cls, name): return cls.__getattr__(name) + def __iter__(cls): + """Return generator of key: value pairs of current constants class' config values.""" + for name in cls.__annotations__: + yield name, getattr(cls, name) + # Dataclasses class Bot(metaclass=YAMLGetter): @@ -193,7 +198,18 @@ class Bot(metaclass=YAMLGetter): prefix: str token: str - sentry_dsn: str + sentry_dsn: Optional[str] + + +class Redis(metaclass=YAMLGetter): + section = "bot" + subsection = "redis" + + host: str + port: int + password: Optional[str] + use_fakeredis: bool # If this is True, Bot will use fakeredis.aioredis + class Filter(metaclass=YAMLGetter): section = "filter" @@ -201,20 +217,18 @@ class Filter(metaclass=YAMLGetter): filter_zalgo: bool filter_invites: bool filter_domains: bool + filter_everyone_ping: bool + watch_regex: bool watch_rich_embeds: bool - watch_words: bool - watch_tokens: bool # Notifications are not expected for "watchlist" type filters notify_user_zalgo: bool notify_user_invites: bool notify_user_domains: bool + notify_user_everyone_ping: bool ping_everyone: bool - guild_invite_whitelist: List[int] - domain_blacklist: List[str] - word_watchlist: List[str] - token_watchlist: List[str] + offensive_msg_delete_days: int channel_whitelist: List[int] role_whitelist: List[int] @@ -234,13 +248,14 @@ class Colours(metaclass=YAMLGetter): soft_red: int soft_green: int soft_orange: int + bright_green: int class DuckPond(metaclass=YAMLGetter): section = "duck_pond" threshold: int - custom_emojis: List[int] + channel_blacklist: List[int] class Emojis(metaclass=YAMLGetter): @@ -256,6 +271,21 @@ class Emojis(metaclass=YAMLGetter): status_idle: str status_dnd: str + badge_staff: str + badge_partner: str + badge_hypesquad: str + badge_bug_hunter: str + badge_hypesquad_bravery: str + badge_hypesquad_brilliance: str + badge_hypesquad_balance: str + badge_early_supporter: str + badge_bug_hunter_level_2: str + badge_verified_bot_developer: str + + incident_actioned: str + incident_unactioned: str + incident_investigating: str + failmail: str trashcan: str @@ -263,20 +293,7 @@ class Emojis(metaclass=YAMLGetter): new: str pencil: str cross_mark: str - - ducky_yellow: int - ducky_blurple: int - ducky_regal: int - ducky_camo: int - ducky_ninja: int - ducky_devil: int - ducky_tube: int - ducky_hunt: int - ducky_wizard: int - ducky_party: int - ducky_angel: int - ducky_maul: int - ducky_santa: int + check_mark: str upvotes: str comments: str @@ -338,6 +355,8 @@ class Icons(metaclass=YAMLGetter): voice_state_green: str voice_state_red: str + green_checkmark: str + class CleanMessages(metaclass=YAMLGetter): section = "bot" @@ -346,53 +365,78 @@ class CleanMessages(metaclass=YAMLGetter): message_limit: int +class Stats(metaclass=YAMLGetter): + section = "bot" + subsection = "stats" + + presence_update_timeout: int + statsd_host: str + + class 
Categories(metaclass=YAMLGetter): section = "guild" subsection = "categories" - python_help: int + help_available: int + help_in_use: int + help_dormant: int + modmail: int + voice: int class Channels(metaclass=YAMLGetter): section = "guild" subsection = "channels" - admins: int + admin_announcements: int admin_spam: int + admins: int + admins_voice: int announcements: int attachment_log: int big_brother_logs: int - bot: int - checkpoint_test: int + bot_commands: int + change_log: int + code_help_chat_1: int + code_help_chat_2: int + code_help_voice_1: int + code_help_voice_2: int + cooldown: int defcon: int - devlog: int - devtest: int + dev_contrib: int + dev_core: int + dev_log: int + dm_log: int esoteric: int - help_0: int - help_1: int - help_2: int - help_3: int - help_4: int - help_5: int - help_6: int - help_7: int + general_voice: int helpers: int + incidents: int + incidents_archive: int + mailing_lists: int message_log: int meta: int + mod_alerts: int + mod_announcements: int + mod_log: int mod_spam: int mods: int - mod_alerts: int - modlog: int off_topic_0: int off_topic_1: int off_topic_2: int organisation: int - python: int + python_discussion: int + python_events: int + python_news: int reddit: int + staff_announcements: int + staff_voice: int + staff_voice_chat: int talent_pool: int - userlog: int - user_event_a: int + user_event_announcements: int + user_log: int verification: int + voice_chat: int + voice_gate: int voice_log: int @@ -400,44 +444,56 @@ class Webhooks(metaclass=YAMLGetter): section = "guild" subsection = "webhooks" - talent_pool: int big_brother: int - reddit: int + dev_log: int + dm_log: int duck_pond: int + incidents_archive: int + reddit: int + talent_pool: int class Roles(metaclass=YAMLGetter): section = "guild" subsection = "roles" - admin: int + admins: int announcements: int - champion: int - contributor: int - core_developer: int + contributors: int + core_developers: int + help_cooldown: int helpers: int - jammer: int - moderator: int + jammers: int + moderators: int muted: int - owner: int + owners: int partners: int - rockstars: int - team_leader: int + python_community: int + sprinters: int + team_leaders: int + unverified: int verified: int # This is the Developers role on PyDis, here named verified for readability reasons. 
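To illustrate the `__iter__` added to YAMLGetter further up in this file, here is a toy stand-in metaclass (not the real YAMLGetter, which reads its values from the YAML config) showing the (name, value) iteration that the config verifier relies on later:

class ToyGetter(type):
    """Minimal stand-in for YAMLGetter's new __iter__ behaviour."""
    def __iter__(cls):
        for name in cls.__annotations__:
            yield name, getattr(cls, name)

class ToyChannels(metaclass=ToyGetter):
    admins: int = 1
    announcements: int = 2

print(dict(ToyChannels))   # {'admins': 1, 'announcements': 2}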
+ voice_verified: int class Guild(metaclass=YAMLGetter): section = "guild" id: int - ignored: List[int] - staff_channels: List[int] + invite: str # Discord invite, gets embedded in chat + moderation_channels: List[int] + moderation_categories: List[int] + moderation_roles: List[int] + modlog_blacklist: List[int] + reminder_whitelist: List[int] + staff_roles: List[int] class Keys(metaclass=YAMLGetter): section = "keys" - site_api: str + site_api: Optional[str] + github: Optional[str] class URLs(metaclass=YAMLGetter): @@ -454,25 +510,13 @@ class URLs(metaclass=YAMLGetter): bot_avatar: str github_bot_repo: str - # Site endpoints + # Base site vars site: str site_api: str - site_superstarify_api: str - site_logs_api: str - site_logs_view: str - site_reminders_api: str - site_reminders_user_api: str site_schema: str - site_settings_api: str - site_tags_api: str - site_user_api: str - site_user_complete_api: str - site_infractions: str - site_infractions_user: str - site_infractions_type: str - site_infractions_by_id: str - site_infractions_user_type_current: str - site_infractions_user_type: str + + # Site endpoints + site_logs_view: str paste_service: str @@ -480,16 +524,8 @@ class Reddit(metaclass=YAMLGetter): section = "reddit" subreddits: list - client_id: str - secret: str - - -class Wolfram(metaclass=YAMLGetter): - section = "wolfram" - - user_limit_day: int - guild_limit_day: int - key: str + client_id: Optional[str] + secret: Optional[str] class AntiSpam(metaclass=YAMLGetter): @@ -502,12 +538,6 @@ class AntiSpam(metaclass=YAMLGetter): rules: Dict[str, Dict[str, int]] -class AntiMalware(metaclass=YAMLGetter): - section = "anti_malware" - - whitelist: list - - class BigBrother(metaclass=YAMLGetter): section = 'big_brother' @@ -515,6 +545,15 @@ class BigBrother(metaclass=YAMLGetter): header_message_limit: int +class CodeBlock(metaclass=YAMLGetter): + section = 'code_block' + + channel_whitelist: List[int] + cooldown_channels: List[int] + cooldown_seconds: int + minimum_lines: int + + class Free(metaclass=YAMLGetter): section = 'free' @@ -523,11 +562,21 @@ class Free(metaclass=YAMLGetter): cooldown_per: float -class Mention(metaclass=YAMLGetter): - section = 'mention' +class HelpChannels(metaclass=YAMLGetter): + section = 'help_channels' - message_timeout: int - reset_delay: int + enable: bool + claim_minutes: int + cmd_whitelist: List[int] + idle_minutes: int + deleted_idle_minutes: int + max_available: int + max_total_channels: int + name_prefix: str + notify: bool + notify_channel: int + notify_minutes: int + notify_roles: List[int] class RedirectOutput(metaclass=YAMLGetter): @@ -537,6 +586,34 @@ class RedirectOutput(metaclass=YAMLGetter): delete_delay: int +class PythonNews(metaclass=YAMLGetter): + section = 'python_news' + + mail_lists: List[str] + channel: int + webhook: int + + +class Verification(metaclass=YAMLGetter): + section = "verification" + + unverified_after: int + kicked_after: int + reminder_frequency: int + bot_message_delete_delay: int + kick_confirmation_threshold: float + + +class VoiceGate(metaclass=YAMLGetter): + section = "voice_gate" + + minimum_days_member: int + minimum_messages: int + bot_message_delete_delay: int + minimum_activity_blocks: int + voice_ping_delete_delay: int + + class Event(Enum): """ Event names. 
This does not include every event (for example, raw @@ -564,22 +641,24 @@ class Event(Enum): # Debug mode -DEBUG_MODE = True if 'local' in os.environ.get("SITE_URL", "local") else False +DEBUG_MODE = 'local' in os.environ.get("SITE_URL", "local") # Paths BOT_DIR = os.path.dirname(__file__) PROJECT_ROOT = os.path.abspath(os.path.join(BOT_DIR, os.pardir)) # Default role combinations -MODERATION_ROLES = Roles.moderator, Roles.admin, Roles.owner -STAFF_ROLES = Roles.helpers, Roles.moderator, Roles.admin, Roles.owner +MODERATION_ROLES = Guild.moderation_roles +STAFF_ROLES = Guild.staff_roles -# Roles combinations -STAFF_CHANNELS = Guild.staff_channels +# Channel combinations +MODERATION_CHANNELS = Guild.moderation_channels -# Default Channel combinations -MODERATION_CHANNELS = Channels.admins, Channels.admin_spam, Channels.mod_alerts, Channels.mods, Channels.mod_spam +# Category combinations +MODERATION_CATEGORIES = Guild.moderation_categories +# Git SHA for Sentry +GIT_SHA = os.environ.get("GIT_SHA", "development") # Bot replies NEGATIVE_REPLIES = [ diff --git a/bot/converters.py b/bot/converters.py index cca57a02d..d0a9731d6 100644 --- a/bot/converters.py +++ b/bot/converters.py @@ -2,6 +2,7 @@ import logging import re import typing as t from datetime import datetime +from functools import partial from ssl import CertificateError import dateutil.parser @@ -9,11 +10,18 @@ import dateutil.tz import discord from aiohttp import ClientConnectorError from dateutil.relativedelta import relativedelta -from discord.ext.commands import BadArgument, Context, Converter, UserConverter +from discord.ext.commands import BadArgument, Bot, Context, Converter, IDConverter, UserConverter +from discord.utils import DISCORD_EPOCH, snowflake_time +from bot.api import ResponseCodeError +from bot.constants import URLs +from bot.utils.regex import INVITE_RE log = logging.getLogger(__name__) +DISCORD_EPOCH_DT = datetime.utcfromtimestamp(DISCORD_EPOCH / 1000) +RE_USER_MENTION = re.compile(r"<@!?([0-9]+)>$") + def allowed_strings(*values, preserve_case: bool = False) -> t.Callable[[str], str]: """ @@ -34,6 +42,90 @@ def allowed_strings(*values, preserve_case: bool = False) -> t.Callable[[str], s return converter +class ValidDiscordServerInvite(Converter): + """ + A converter that validates whether a given string is a valid Discord server invite. + + Raises 'BadArgument' if: + - The string is not a valid Discord server invite. + - The string is valid, but is an invite for a group DM. + - The string is valid, but is expired. 
+ + Returns a (partial) guild object if: + - The string is a valid vanity + - The string is a full invite URI + - The string contains the invite code (the stuff after discord.gg/) + + See the Discord API docs for documentation on the guild object: + https://discord.com/developers/docs/resources/guild#guild-object + """ + + async def convert(self, ctx: Context, server_invite: str) -> dict: + """Check whether the string is a valid Discord server invite.""" + invite_code = INVITE_RE.search(server_invite) + if invite_code: + response = await ctx.bot.http_session.get( + f"{URLs.discord_invite_api}/{invite_code[1]}" + ) + if response.status != 404: + invite_data = await response.json() + return invite_data.get("guild") + + id_converter = IDConverter() + if id_converter._get_id_match(server_invite): + raise BadArgument("Guild IDs are not supported, only invites.") + + raise BadArgument("This does not appear to be a valid Discord server invite.") + + +class ValidFilterListType(Converter): + """ + A converter that checks whether the given string is a valid FilterList type. + + Raises `BadArgument` if the argument is not a valid FilterList type, and simply + passes through the given argument otherwise. + """ + + @staticmethod + async def get_valid_types(bot: Bot) -> list: + """ + Try to get a list of valid filter list types. + + Raise a BadArgument if the API can't respond. + """ + try: + valid_types = await bot.api_client.get('bot/filter-lists/get-types') + except ResponseCodeError: + raise BadArgument("Cannot validate list_type: Unable to fetch valid types from API.") + + return [enum for enum, classname in valid_types] + + async def convert(self, ctx: Context, list_type: str) -> str: + """Checks whether the given string is a valid FilterList type.""" + valid_types = await self.get_valid_types(ctx.bot) + list_type = list_type.upper() + + if list_type not in valid_types: + + # Maybe the user is using the plural form of this type, + # e.g. "guild_invites" instead of "guild_invite". + # + # This code will support the simple plural form (a single 's' at the end), + # which works for all current list types, but if a list type is added in the future + # which has an irregular plural form (like 'ies'), this code will need to be + # refactored to support this. + if list_type.endswith("S") and list_type[:-1] in valid_types: + list_type = list_type[:-1] + + else: + valid_types_list = '\n'.join([f"• {type_.lower()}" for type_ in valid_types]) + raise BadArgument( + f"You have provided an invalid list type!\n\n" + f"Please provide one of the following: \n{valid_types_list}" + ) + return list_type + + class ValidPythonIdentifier(Converter): """ A converter that checks whether the given string is a valid Python identifier. @@ -85,17 +177,42 @@ class ValidURL(Converter): return url -class InfractionSearchQuery(Converter): - """A converter that checks if the argument is a Discord user, and if not, falls back to a string.""" +class Snowflake(IDConverter): + """ + Converts to an int if the argument is a valid Discord snowflake. + + A snowflake is valid if: + + * It consists of 15-21 digits (0-9) + * Its parsed datetime is after the Discord epoch + * Its parsed datetime is less than 1 day after the current time + """ + + async def convert(self, ctx: Context, arg: str) -> int: + """ + Ensure `arg` matches the ID pattern and its timestamp is in range. + + Return `arg` as an int if it's a valid snowflake. 
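The plural-handling rule in ValidFilterListType.convert above can be shown in isolation; the list-type names below are illustrative examples rather than the set actually returned by the API:

valid_types = ["GUILD_INVITE", "DOMAIN_NAME", "FILE_FORMAT"]   # illustrative only

def normalise(list_type: str) -> str:
    """Apply the converter's upper-casing and simple-plural fallback."""
    list_type = list_type.upper()
    if list_type not in valid_types:
        if list_type.endswith("S") and list_type[:-1] in valid_types:
            list_type = list_type[:-1]
        else:
            raise ValueError(f"Invalid list type: {list_type}")
    return list_type

print(normalise("guild_invites"), normalise("domain_name"))   # GUILD_INVITE DOMAIN_NAME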
+ """ + error = f"Invalid snowflake {arg!r}" + + if not self._get_id_match(arg): + raise BadArgument(error) + + snowflake = int(arg) - @staticmethod - async def convert(ctx: Context, arg: str) -> t.Union[discord.Member, str]: - """Check if the argument is a Discord user, and if not, falls back to a string.""" try: - maybe_snowflake = arg.strip("<@!>") - return await ctx.bot.fetch_user(maybe_snowflake) - except (discord.NotFound, discord.HTTPException): - return arg + time = snowflake_time(snowflake) + except (OverflowError, OSError) as e: + # Not sure if this can ever even happen, but let's be safe. + raise BadArgument(f"{error}: {e}") + + if time < DISCORD_EPOCH_DT: + raise BadArgument(f"{error}: timestamp is before the Discord epoch.") + elif (datetime.utcnow() - time).days < -1: + raise BadArgument(f"{error}: timestamp is too far into the future.") + + return snowflake class Subreddit(Converter): @@ -141,40 +258,24 @@ class TagNameConverter(Converter): @staticmethod async def convert(ctx: Context, tag_name: str) -> str: """Lowercase & strip whitespace from proposed tag_name & ensure it's valid.""" - def is_number(value: str) -> bool: - """Check to see if the input string is numeric.""" - try: - float(value) - except ValueError: - return False - return True - tag_name = tag_name.lower().strip() # The tag name has at least one invalid character. if ascii(tag_name)[1:-1] != tag_name: - log.warning(f"{ctx.author} tried to put an invalid character in a tag name. " - "Rejecting the request.") raise BadArgument("Don't be ridiculous, you can't use that character!") # The tag name is either empty, or consists of nothing but whitespace. elif not tag_name: - log.warning(f"{ctx.author} tried to create a tag with a name consisting only of whitespace. " - "Rejecting the request.") raise BadArgument("Tag names should not be empty, or filled with whitespace.") - # The tag name is a number of some kind, we don't allow that. - elif is_number(tag_name): - log.warning(f"{ctx.author} tried to create a tag with a digit as its name. " - "Rejecting the request.") - raise BadArgument("Tag names can't be numbers.") - # The tag name is longer than 127 characters. elif len(tag_name) > 127: - log.warning(f"{ctx.author} tried to request a tag name with over 127 characters. " - "Rejecting the request.") raise BadArgument("Are you insane? That's way too long!") + # The tag name is ascii but does not contain any letters. + elif not any(character.isalpha() for character in tag_name): + raise BadArgument("Tag names must contain at least one letter.") + return tag_name @@ -192,15 +293,13 @@ class TagContentConverter(Converter): # The tag contents should not be empty, or filled with whitespace. if not tag_content: - log.warning(f"{ctx.author} tried to create a tag containing only whitespace. " - "Rejecting the request.") raise BadArgument("Tag contents should not be empty, or filled with whitespace.") return tag_content -class Duration(Converter): - """Convert duration strings into UTC datetime.datetime objects.""" +class DurationDelta(Converter): + """Convert duration strings into dateutil.relativedelta.relativedelta objects.""" duration_parser = re.compile( r"((?P<years>\d+?) ?(years|year|Y|y) ?)?" @@ -212,9 +311,9 @@ class Duration(Converter): r"((?P<seconds>\d+?) ?(seconds|second|S|s))?" ) - async def convert(self, ctx: Context, duration: str) -> datetime: + async def convert(self, ctx: Context, duration: str) -> relativedelta: """ - Converts a `duration` string to a datetime object that's `duration` in the future. 
+ Converts a `duration` string to a relativedelta object. The converter supports the following symbols for each unit of time: - years: `Y`, `y`, `year`, `years` @@ -233,9 +332,52 @@ class Duration(Converter): duration_dict = {unit: int(amount) for unit, amount in match.groupdict(default=0).items()} delta = relativedelta(**duration_dict) + + return delta + + +class Duration(DurationDelta): + """Convert duration strings into UTC datetime.datetime objects.""" + + async def convert(self, ctx: Context, duration: str) -> datetime: + """ + Converts a `duration` string to a datetime object that's `duration` in the future. + + The converter supports the same symbols for each unit of time as its parent class. + """ + delta = await super().convert(ctx, duration) now = datetime.utcnow() - return now + delta + try: + return now + delta + except ValueError: + raise BadArgument(f"`{duration}` results in a datetime outside the supported range.") + + +class OffTopicName(Converter): + """A converter that ensures an added off-topic name is valid.""" + + async def convert(self, ctx: Context, argument: str) -> str: + """Attempt to replace any invalid characters with their approximate Unicode equivalent.""" + allowed_characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ!?'`-" + + # Chain multiple words to a single one + argument = "-".join(argument.split()) + + if not (2 <= len(argument) <= 96): + raise BadArgument("Channel name must be between 2 and 96 chars long") + + elif not all(c.isalnum() or c in allowed_characters for c in argument): + raise BadArgument( + "Channel name must only consist of " + "alphanumeric characters, minus signs or apostrophes." + ) + + # Replace invalid characters with unicode alternatives. + table = str.maketrans( + allowed_characters, '𝖠𝖡𝖢𝖣𝖤𝖥𝖦𝖧𝖨𝖩𝖪𝖫𝖬𝖭𝖮𝖯𝖰𝖱𝖲𝖳𝖴𝖵𝖶𝖷𝖸𝖹ǃ?’’-' + ) + return argument.translate(table) class ISODateTime(Converter): @@ -280,6 +422,34 @@ class ISODateTime(Converter): return dt +class HushDurationConverter(Converter): + """Convert passed duration to `int` minutes or `None`.""" + + MINUTES_RE = re.compile(r"(\d+)(?:M|m|$)") + + async def convert(self, ctx: Context, argument: str) -> t.Optional[int]: + """ + Convert `argument` to a duration that's max 15 minutes or None. + + If `"forever"` is passed, None is returned; otherwise an int of the extracted time. + Accepted formats are: + * <duration>, + * <duration>m, + * <duration>M, + * forever. + """ + if argument == "forever": + return None + match = self.MINUTES_RE.match(argument) + if not match: + raise BadArgument(f"{argument} is not a valid minutes duration.") + + duration = int(match.group(1)) + if duration > 15: + raise BadArgument("Duration must be at most 15 minutes.") + return duration + + def proxy_user(user_id: str) -> discord.Object: """ Create a proxy user object from the given id. @@ -303,6 +473,24 @@ def proxy_user(user_id: str) -> discord.Object: return user +class UserMentionOrID(UserConverter): + """ + Converts to a `discord.User`, but only if a mention or userID is provided. + + Unlike the default `UserConverter`, it doesn't allow conversion from a name or name#descrim. + This is useful in cases where that lookup strategy would lead to ambiguity. 
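To make the duration grammar concrete, here is the same parser exercised directly on an arbitrary sample string; this mirrors DurationDelta followed by Duration's final `now + delta` step:

import re
from datetime import datetime
from dateutil.relativedelta import relativedelta

duration_parser = re.compile(
    r"((?P<years>\d+?) ?(years|year|Y|y) ?)?"
    r"((?P<months>\d+?) ?(months|month|m) ?)?"
    r"((?P<weeks>\d+?) ?(weeks|week|W|w) ?)?"
    r"((?P<days>\d+?) ?(days|day|D|d) ?)?"
    r"((?P<hours>\d+?) ?(hours|hour|H|h) ?)?"
    r"((?P<minutes>\d+?) ?(minutes|minute|M) ?)?"
    r"((?P<seconds>\d+?) ?(seconds|second|S|s))?"
)

match = duration_parser.fullmatch("2h 30M")
duration_dict = {unit: int(amount) for unit, amount in match.groupdict(default=0).items()}
delta = relativedelta(**duration_dict)            # relativedelta(hours=+2, minutes=+30)
print(datetime.utcnow() + delta)                  # two and a half hours from now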
+ """ + + async def convert(self, ctx: Context, argument: str) -> discord.User: + """Convert the `arg` to a `discord.User`.""" + match = self._get_id_match(argument) or RE_USER_MENTION.match(argument) + + if match is not None: + return await super().convert(ctx, argument) + else: + raise BadArgument(f"`{argument}` is not a User mention or a User ID.") + + class FetchedUser(UserConverter): """ Converts to a `discord.User` or, if it fails, a `discord.Object`. @@ -341,12 +529,55 @@ class FetchedUser(UserConverter): except discord.HTTPException as e: # If the Discord error isn't `Unknown user`, return a proxy instead if e.code != 10013: - log.warning(f"Failed to fetch user, returning a proxy instead: status {e.status}") + log.info(f"Failed to fetch user, returning a proxy instead: status {e.status}") return proxy_user(arg) log.debug(f"Failed to fetch user {arg}: user does not exist.") raise BadArgument(f"User `{arg}` does not exist") +def _snowflake_from_regex(pattern: t.Pattern, arg: str) -> int: + """ + Extract the snowflake from `arg` using a regex `pattern` and return it as an int. + + The snowflake is expected to be within the first capture group in `pattern`. + """ + match = pattern.match(arg) + if not match: + raise BadArgument(f"Mention {str!r} is invalid.") + + return int(match.group(1)) + + +class Infraction(Converter): + """ + Attempts to convert a given infraction ID into an infraction. + + Alternatively, `l`, `last`, or `recent` can be passed in order to + obtain the most recent infraction by the actor. + """ + + async def convert(self, ctx: Context, arg: str) -> t.Optional[dict]: + """Attempts to convert `arg` into an infraction `dict`.""" + if arg in ("l", "last", "recent"): + params = { + "actor__id": ctx.author.id, + "ordering": "-inserted_at" + } + + infractions = await ctx.bot.api_client.get("bot/infractions", params=params) + + if not infractions: + raise BadArgument( + "Couldn't find most recent infraction; you have never given an infraction." 
+ ) + else: + return infractions[0] + + else: + return await ctx.bot.api_client.get(f"bot/infractions/{arg}") + + Expiry = t.Union[Duration, ISODateTime] FetchedMember = t.Union[discord.Member, FetchedUser] +UserMention = partial(_snowflake_from_regex, RE_USER_MENTION) diff --git a/bot/decorators.py b/bot/decorators.py index 2d18eaa6a..063c8f878 100644 --- a/bot/decorators.py +++ b/bot/decorators.py @@ -1,116 +1,68 @@ +import asyncio import logging -import random -from asyncio import Lock, sleep +import typing as t from contextlib import suppress from functools import wraps -from typing import Callable, Container, Union -from weakref import WeakValueDictionary -from discord import Colour, Embed, Member -from discord.errors import NotFound +from discord import Member, NotFound from discord.ext import commands -from discord.ext.commands import CheckFailure, Cog, Context +from discord.ext.commands import Cog, Context -from bot.constants import ERROR_REPLIES, RedirectOutput -from bot.utils.checks import with_role_check, without_role_check +from bot.constants import Channels, RedirectOutput +from bot.utils import function +from bot.utils.checks import in_whitelist_check log = logging.getLogger(__name__) -class InChannelCheckFailure(CheckFailure): - """Raised when a check fails for a message being sent in a whitelisted channel.""" - - def __init__(self, *channels: int): - self.channels = channels - channels_str = ', '.join(f"<#{c_id}>" for c_id in channels) +def in_whitelist( + *, + channels: t.Container[int] = (), + categories: t.Container[int] = (), + roles: t.Container[int] = (), + redirect: t.Optional[int] = Channels.bot_commands, + fail_silently: bool = False, +) -> t.Callable: + """ + Check if a command was issued in a whitelisted context. - super().__init__(f"Sorry, but you may only use this command within {channels_str}.") + The whitelists that can be provided are: + - `channels`: a container with channel ids for whitelisted channels + - `categories`: a container with category ids for whitelisted categories + - `roles`: a container with role ids for whitelisted roles -def in_channel( - *channels: int, - hidden_channels: Container[int] = None, - bypass_roles: Container[int] = None -) -> Callable: + If the command was invoked in a context that was not whitelisted, the member is either + redirected to the `redirect` channel that was passed (default: #bot-commands) or simply + told that they're not allowed to use this particular command (if `None` was passed). """ - Checks that the message is in a whitelisted channel or optionally has a bypass role. - - Hidden channels are channels which will not be displayed in the InChannelCheckFailure error - message. - """ - hidden_channels = hidden_channels or [] - bypass_roles = bypass_roles or [] - def predicate(ctx: Context) -> bool: - """In-channel checker predicate.""" - if ctx.channel.id in channels or ctx.channel.id in hidden_channels: - log.debug(f"{ctx.author} tried to call the '{ctx.command.name}' command. " - f"The command was used in a whitelisted channel.") - return True - - if bypass_roles: - if any(r.id in bypass_roles for r in ctx.author.roles): - log.debug(f"{ctx.author} tried to call the '{ctx.command.name}' command. " - f"The command was not used in a whitelisted channel, " - f"but the author had a role to bypass the in_channel check.") - return True - - log.debug(f"{ctx.author} tried to call the '{ctx.command.name}' command. 
" - f"The in_channel check failed.") - - raise InChannelCheckFailure(*channels) - - return commands.check(predicate) - - -def with_role(*role_ids: int) -> Callable: - """Returns True if the user has any one of the roles in role_ids.""" - async def predicate(ctx: Context) -> bool: - """With role checker predicate.""" - return with_role_check(ctx, *role_ids) - return commands.check(predicate) + """Check if command was issued in a whitelisted context.""" + return in_whitelist_check(ctx, channels, categories, roles, redirect, fail_silently) - -def without_role(*role_ids: int) -> Callable: - """Returns True if the user does not have any of the roles in role_ids.""" - async def predicate(ctx: Context) -> bool: - return without_role_check(ctx, *role_ids) return commands.check(predicate) -def locked() -> Callable: +def has_no_roles(*roles: t.Union[str, int]) -> t.Callable: """ - Allows the user to only run one instance of the decorated command at a time. - - Subsequent calls to the command from the same author are ignored until the command has completed invocation. + Returns True if the user does not have any of the roles specified. - This decorator must go before (below) the `command` decorator. + `roles` are the names or IDs of the disallowed roles. """ - def wrap(func: Callable) -> Callable: - func.__locks = WeakValueDictionary() - - @wraps(func) - async def inner(self: Cog, ctx: Context, *args, **kwargs) -> None: - lock = func.__locks.setdefault(ctx.author.id, Lock()) - if lock.locked(): - embed = Embed() - embed.colour = Colour.red() - - log.debug(f"User tried to invoke a locked command.") - embed.description = ( - "You're already using this command. Please wait until it is done before you use it again." - ) - embed.title = random.choice(ERROR_REPLIES) - await ctx.send(embed=embed) - return + async def predicate(ctx: Context) -> bool: + try: + await commands.has_any_role(*roles).predicate(ctx) + except commands.MissingAnyRole: + return True + else: + # This error is never shown to users, so don't bother trying to make it too pretty. + roles_ = ", ".join(f"'{item}'" for item in roles) + raise commands.CheckFailure(f"You have at least one of the disallowed roles: {roles_}") - async with func.__locks.setdefault(ctx.author.id, Lock()): - await func(self, ctx, *args, **kwargs) - return inner - return wrap + return commands.check(predicate) -def redirect_output(destination_channel: int, bypass_roles: Container[int] = None) -> Callable: +def redirect_output(destination_channel: int, bypass_roles: t.Container[int] = None) -> t.Callable: """ Changes the channel in the context of the command to redirect the output to a certain channel. @@ -118,7 +70,7 @@ def redirect_output(destination_channel: int, bypass_roles: Container[int] = Non This decorator must go before (below) the `command` decorator. 
""" - def wrap(func: Callable) -> Callable: + def wrap(func: t.Callable) -> t.Callable: @wraps(func) async def inner(self: Cog, ctx: Context, *args, **kwargs) -> None: if ctx.channel.id == destination_channel: @@ -137,15 +89,14 @@ def redirect_output(destination_channel: int, bypass_roles: Container[int] = Non log.trace(f"Redirecting output of {ctx.author}'s command '{ctx.command.name}' to {redirect_channel.name}") ctx.channel = redirect_channel await ctx.channel.send(f"Here's the output of your command, {ctx.author.mention}") - await func(self, ctx, *args, **kwargs) + asyncio.create_task(func(self, ctx, *args, **kwargs)) message = await old_channel.send( f"Hey, {ctx.author.mention}, you can find the output of your command here: " f"{redirect_channel.mention}" ) - if RedirectOutput.delete_invocation: - await sleep(RedirectOutput.delete_delay) + await asyncio.sleep(RedirectOutput.delete_delay) with suppress(NotFound): await message.delete() @@ -154,42 +105,40 @@ def redirect_output(destination_channel: int, bypass_roles: Container[int] = Non with suppress(NotFound): await ctx.message.delete() log.trace("Redirect output: Deleted invocation message") + return inner return wrap -def respect_role_hierarchy(target_arg: Union[int, str] = 0) -> Callable: +def respect_role_hierarchy(member_arg: function.Argument) -> t.Callable: """ Ensure the highest role of the invoking member is greater than that of the target member. If the condition fails, a warning is sent to the invoking context. A target which is not an instance of discord.Member will always pass. - A value of 0 (i.e. position 0) for `target_arg` corresponds to the argument which comes after - `ctx`. If the target argument is a kwarg, its name can instead be given. + `member_arg` is the keyword name or position index of the parameter of the decorated command + whose value is the target member. This decorator must go before (below) the `command` decorator. """ - def wrap(func: Callable) -> Callable: + def decorator(func: t.Callable) -> t.Callable: @wraps(func) - async def inner(self: Cog, ctx: Context, *args, **kwargs) -> None: - try: - target = kwargs[target_arg] - except KeyError: - try: - target = args[target_arg] - except IndexError: - raise ValueError(f"Could not find target argument at position {target_arg}") - except TypeError: - raise ValueError(f"Could not find target kwarg with key {target_arg!r}") + async def wrapper(*args, **kwargs) -> None: + log.trace(f"{func.__name__}: respect role hierarchy decorator called") + + bound_args = function.get_bound_args(func, args, kwargs) + target = function.get_arg_value(member_arg, bound_args) if not isinstance(target, Member): log.trace("The target is not a discord.Member; skipping role hierarchy check.") - await func(self, ctx, *args, **kwargs) + await func(*args, **kwargs) return + ctx = function.get_arg_value(1, bound_args) cmd = ctx.command.name actor = ctx.author + if target.top_role >= actor.top_role: log.info( f"{actor} ({actor.id}) attempted to {cmd} " @@ -200,6 +149,7 @@ def respect_role_hierarchy(target_arg: Union[int, str] = 0) -> Callable: "someone with an equal or higher top role." 
) else: - await func(self, ctx, *args, **kwargs) - return inner - return wrap + log.trace(f"{func.__name__}: {target.top_role=} < {actor.top_role=}; calling func") + await func(*args, **kwargs) + return wrapper + return decorator diff --git a/bot/errors.py b/bot/errors.py new file mode 100644 index 000000000..65d715203 --- /dev/null +++ b/bot/errors.py @@ -0,0 +1,20 @@ +from typing import Hashable + + +class LockedResourceError(RuntimeError): + """ + Exception raised when an operation is attempted on a locked resource. + + Attributes: + `type` -- name of the locked resource's type + `id` -- ID of the locked resource + """ + + def __init__(self, resource_type: str, resource_id: Hashable): + self.type = resource_type + self.id = resource_id + + super().__init__( + f"Cannot operate on {self.type.lower()} `{self.id}`; " + "it is currently locked and in use by another operation." + ) diff --git a/bot/cogs/__init__.py b/bot/exts/__init__.py index e69de29bb..e69de29bb 100644 --- a/bot/cogs/__init__.py +++ b/bot/exts/__init__.py diff --git a/tests/bot/cogs/__init__.py b/bot/exts/backend/__init__.py index e69de29bb..e69de29bb 100644 --- a/tests/bot/cogs/__init__.py +++ b/bot/exts/backend/__init__.py diff --git a/bot/exts/backend/config_verifier.py b/bot/exts/backend/config_verifier.py new file mode 100644 index 000000000..d72c6c22e --- /dev/null +++ b/bot/exts/backend/config_verifier.py @@ -0,0 +1,40 @@ +import logging + +from discord.ext.commands import Cog + +from bot import constants +from bot.bot import Bot + + +log = logging.getLogger(__name__) + + +class ConfigVerifier(Cog): + """Verify config on startup.""" + + def __init__(self, bot: Bot): + self.bot = bot + self.channel_verify_task = self.bot.loop.create_task(self.verify_channels()) + + async def verify_channels(self) -> None: + """ + Verify channels. + + If any channels in the config aren't present in the server, log them in a warning.
+ """ + await self.bot.wait_until_guild_available() + server = self.bot.get_guild(constants.Guild.id) + + server_channel_ids = {channel.id for channel in server.channels} + invalid_channels = [ + channel_name for channel_name, channel_id in constants.Channels + if channel_id not in server_channel_ids + ] + + if invalid_channels: + log.warning(f"Configured channels do not exist in server: {', '.join(invalid_channels)}.") + + +def setup(bot: Bot) -> None: + """Load the ConfigVerifier cog.""" + bot.add_cog(ConfigVerifier(bot)) diff --git a/bot/exts/backend/error_handler.py b/bot/exts/backend/error_handler.py new file mode 100644 index 000000000..f3d8537bd --- /dev/null +++ b/bot/exts/backend/error_handler.py @@ -0,0 +1,327 @@ +import contextlib +import difflib +import logging +import typing as t + +from discord import Embed +from discord.ext.commands import Cog, Context, errors +from sentry_sdk import push_scope + +from bot.api import ResponseCodeError +from bot.bot import Bot +from bot.constants import Channels, Colours, Icons +from bot.converters import TagNameConverter +from bot.errors import LockedResourceError +from bot.utils.checks import InWhitelistCheckFailure + +log = logging.getLogger(__name__) + + +class ErrorHandler(Cog): + """Handles errors emitted from commands.""" + + def __init__(self, bot: Bot): + self.bot = bot + + def _get_error_embed(self, title: str, body: str) -> Embed: + """Return an embed that contains the exception.""" + return Embed( + title=title, + colour=Colours.soft_red, + description=body + ) + + @Cog.listener() + async def on_command_error(self, ctx: Context, e: errors.CommandError) -> None: + """ + Provide generic command error handling. + + Error handling is deferred to any local error handler, if present. This is done by + checking for the presence of a `handled` attribute on the error. + + Error handling emits a single error message in the invoking context `ctx` and a log message, + prioritised as follows: + + 1. If the name fails to match a command: + * If it matches shh+ or unshh+, the channel is silenced or unsilenced respectively. + Otherwise if it matches a tag, the tag is invoked + * If CommandNotFound is raised when invoking the tag (determined by the presence of the + `invoked_from_error_handler` attribute), this error is treated as being unexpected + and therefore sends an error message + * Commands in the verification channel are ignored + 2. UserInputError: see `handle_user_input_error` + 3. CheckFailure: see `handle_check_failure` + 4. CommandOnCooldown: send an error message in the invoking context + 5. ResponseCodeError: see `handle_api_error` + 6. Otherwise, if not a DisabledCommand, handling is deferred to `handle_unexpected_error` + """ + command = ctx.command + + if hasattr(e, "handled"): + log.trace(f"Command {command} had its error already handled locally; ignoring.") + return + + if isinstance(e, errors.CommandNotFound) and not hasattr(ctx, "invoked_from_error_handler"): + if await self.try_silence(ctx): + return + if ctx.channel.id != Channels.verification: + # Try to look for a tag with the command's name + await self.try_get_tag(ctx) + return # Exit early to avoid logging. 
+ elif isinstance(e, errors.UserInputError): + await self.handle_user_input_error(ctx, e) + elif isinstance(e, errors.CheckFailure): + await self.handle_check_failure(ctx, e) + elif isinstance(e, errors.CommandOnCooldown): + await ctx.send(e) + elif isinstance(e, errors.CommandInvokeError): + if isinstance(e.original, ResponseCodeError): + await self.handle_api_error(ctx, e.original) + elif isinstance(e.original, LockedResourceError): + await ctx.send(f"{e.original} Please wait for it to finish and try again later.") + else: + await self.handle_unexpected_error(ctx, e.original) + return # Exit early to avoid logging. + elif not isinstance(e, errors.DisabledCommand): + # ConversionError, MaxConcurrencyReached, ExtensionError + await self.handle_unexpected_error(ctx, e) + return # Exit early to avoid logging. + + log.debug( + f"Command {command} invoked by {ctx.message.author} with error " + f"{e.__class__.__name__}: {e}" + ) + + @staticmethod + def get_help_command(ctx: Context) -> t.Coroutine: + """Return a prepared `help` command invocation coroutine.""" + if ctx.command: + return ctx.send_help(ctx.command) + + return ctx.send_help() + + async def try_silence(self, ctx: Context) -> bool: + """ + Attempt to invoke the silence or unsilence command if the invoked name matches a pattern. + + The silence command's checks are respected. Depending on the invoked name: + * `shh+` silences the channel for twice the number of h's, up to a maximum of 15. + * `unshh+` unsilences the channel. + Return True if a command was successfully invoked, False otherwise. + """ + command = ctx.invoked_with.lower() + silence_command = self.bot.get_command("silence") + ctx.invoked_from_error_handler = True + try: + if not await silence_command.can_run(ctx): + log.debug("Cancelling attempt to invoke silence/unsilence due to failed checks.") + return False + except errors.CommandError: + log.debug("Cancelling attempt to invoke silence/unsilence due to failed checks.") + return False + if command.startswith("shh"): + await ctx.invoke(silence_command, duration=min(command.count("h")*2, 15)) + return True + elif command.startswith("unshh"): + await ctx.invoke(self.bot.get_command("unsilence")) + return True + return False + + async def try_get_tag(self, ctx: Context) -> None: + """ + Attempt to display a tag by interpreting the command name as a tag name. + + The invocation of the `tags get` command respects its checks. Any CommandErrors raised will be handled + by `on_command_error`, but the `invoked_from_error_handler` attribute will be added to + the context to prevent infinite recursion in the case of a CommandNotFound exception. + """ + tags_get_command = self.bot.get_command("tags get") + ctx.invoked_from_error_handler = True + + log_msg = "Cancelling attempt to fall back to a tag due to failed checks." + try: + if not await tags_get_command.can_run(ctx): + log.debug(log_msg) + return + except errors.CommandError as tag_error: + log.debug(log_msg) + await self.on_command_error(ctx, tag_error) + return + + try: + tag_name = await TagNameConverter.convert(ctx, ctx.invoked_with) + except errors.BadArgument: + log.debug( + f"{ctx.author} tried to use an invalid command " + f"and the fallback tag failed validation in TagNameConverter."
+ ) + else: + with contextlib.suppress(ResponseCodeError): + await ctx.invoke(tags_get_command, tag_name=tag_name) + + tags_cog = self.bot.get_cog("Tags") + command_name = ctx.invoked_with + sent = await tags_cog.display_tag(ctx, command_name) + + if not sent: + await self.send_command_suggestion(ctx, command_name) + + # Return to not raise the exception + return + + async def send_command_suggestion(self, ctx: Context, command_name: str) -> None: + """Sends user similar commands if any can be found.""" + # No similar tag found, or tag on cooldown - + # searching for a similar command + raw_commands = [] + for cmd in self.bot.walk_commands(): + if not cmd.hidden: + raw_commands += (cmd.name, *cmd.aliases) + similar_command_data = difflib.get_close_matches(command_name, raw_commands, 1) + similar_command_name = similar_command_data[0] + similar_command = self.bot.get_command(similar_command_name) + + log_msg = "Cancelling attempt to suggest a command due to failed checks." + try: + if not await similar_command.can_run(ctx): + log.debug(log_msg) + return + except errors.CommandError as cmd_error: + log.debug(log_msg) + await self.on_command_error(ctx, cmd_error) + return + + misspelled_content = ctx.message.content + e = Embed() + e.set_author(name="Did you mean:", icon_url=Icons.questionmark) + e.description = f"{misspelled_content.replace(command_name, similar_command_name, 1)}" + await ctx.send(embed=e, delete_after=10.0) + + async def handle_user_input_error(self, ctx: Context, e: errors.UserInputError) -> None: + """ + Send an error message in `ctx` for UserInputError, sometimes invoking the help command too. + + * MissingRequiredArgument: send an error message with arg name and the help command + * TooManyArguments: send an error message and the help command + * BadArgument: send an error message and the help command + * BadUnionArgument: send an error message including the error produced by the last converter + * ArgumentParsingError: send an error message + * Other: send an error message and the help command + """ + prepared_help_command = self.get_help_command(ctx) + + if isinstance(e, errors.MissingRequiredArgument): + embed = self._get_error_embed("Missing required argument", e.param.name) + await ctx.send(embed=embed) + await prepared_help_command + self.bot.stats.incr("errors.missing_required_argument") + elif isinstance(e, errors.TooManyArguments): + embed = self._get_error_embed("Too many arguments", str(e)) + await ctx.send(embed=embed) + await prepared_help_command + self.bot.stats.incr("errors.too_many_arguments") + elif isinstance(e, errors.BadArgument): + embed = self._get_error_embed("Bad argument", str(e)) + await ctx.send(embed=embed) + await prepared_help_command + self.bot.stats.incr("errors.bad_argument") + elif isinstance(e, errors.BadUnionArgument): + embed = self._get_error_embed("Bad argument", f"{e}\n{e.errors[-1]}") + await ctx.send(embed=embed) + self.bot.stats.incr("errors.bad_union_argument") + elif isinstance(e, errors.ArgumentParsingError): + embed = self._get_error_embed("Argument parsing error", str(e)) + await ctx.send(embed=embed) + self.bot.stats.incr("errors.argument_parsing_error") + else: + embed = self._get_error_embed( + "Input error", + "Something about your input seems off. Check the arguments and try again." 
+ ) + await ctx.send(embed=embed) + await prepared_help_command + self.bot.stats.incr("errors.other_user_input_error") + + @staticmethod + async def handle_check_failure(ctx: Context, e: errors.CheckFailure) -> None: + """ + Send an error message in `ctx` for certain types of CheckFailure. + + The following types are handled: + + * BotMissingPermissions + * BotMissingRole + * BotMissingAnyRole + * NoPrivateMessage + * InWhitelistCheckFailure + """ + bot_missing_errors = ( + errors.BotMissingPermissions, + errors.BotMissingRole, + errors.BotMissingAnyRole + ) + + if isinstance(e, bot_missing_errors): + ctx.bot.stats.incr("errors.bot_permission_error") + await ctx.send( + "Sorry, it looks like I don't have the permissions or roles I need to do that." + ) + elif isinstance(e, (InWhitelistCheckFailure, errors.NoPrivateMessage)): + ctx.bot.stats.incr("errors.wrong_channel_or_dm_error") + await ctx.send(e) + + @staticmethod + async def handle_api_error(ctx: Context, e: ResponseCodeError) -> None: + """Send an error message in `ctx` for ResponseCodeError and log it.""" + if e.status == 404: + await ctx.send("There does not seem to be anything matching your query.") + log.debug(f"API responded with 404 for command {ctx.command}") + ctx.bot.stats.incr("errors.api_error_404") + elif e.status == 400: + content = await e.response.json() + log.debug(f"API responded with 400 for command {ctx.command}: %r.", content) + await ctx.send("According to the API, your request is malformed.") + ctx.bot.stats.incr("errors.api_error_400") + elif 500 <= e.status < 600: + await ctx.send("Sorry, there seems to be an internal issue with the API.") + log.warning(f"API responded with {e.status} for command {ctx.command}") + ctx.bot.stats.incr("errors.api_internal_server_error") + else: + await ctx.send(f"Got an unexpected status code from the API (`{e.status}`).") + log.warning(f"Unexpected API response for command {ctx.command}: {e.status}") + ctx.bot.stats.incr(f"errors.api_error_{e.status}") + + @staticmethod + async def handle_unexpected_error(ctx: Context, e: errors.CommandError) -> None: + """Send a generic error message in `ctx` and log the exception as an error with exc_info.""" + await ctx.send( + f"Sorry, an unexpected error occurred. 
Please let us know!\n\n" + f"```{e.__class__.__name__}: {e}```" + ) + + ctx.bot.stats.incr("errors.unexpected") + + with push_scope() as scope: + scope.user = { + "id": ctx.author.id, + "username": str(ctx.author) + } + + scope.set_tag("command", ctx.command.qualified_name) + scope.set_tag("message_id", ctx.message.id) + scope.set_tag("channel_id", ctx.channel.id) + + scope.set_extra("full_message", ctx.message.content) + + if ctx.guild is not None: + scope.set_extra( + "jump_to", + f"https://discordapp.com/channels/{ctx.guild.id}/{ctx.channel.id}/{ctx.message.id}" + ) + + log.error(f"Error executing command invoked by {ctx.message.author}: {ctx.message.content}", exc_info=e) + + +def setup(bot: Bot) -> None: + """Load the ErrorHandler cog.""" + bot.add_cog(ErrorHandler(bot)) diff --git a/bot/cogs/logging.py b/bot/exts/backend/logging.py index d1b7dcab3..94fa2b139 100644 --- a/bot/cogs/logging.py +++ b/bot/exts/backend/logging.py @@ -20,7 +20,7 @@ class Logging(Cog): async def startup_greeting(self) -> None: """Announce our presence to the configured devlog channel.""" - await self.bot.wait_until_ready() + await self.bot.wait_until_guild_available() log.info("Bot connected!") embed = Embed(description="Connected!") @@ -34,7 +34,7 @@ class Logging(Cog): ) if not DEBUG_MODE: - await self.bot.get_channel(Channels.devlog).send(embed=embed) + await self.bot.get_channel(Channels.dev_log).send(embed=embed) def setup(bot: Bot) -> None: diff --git a/bot/exts/backend/sync/__init__.py b/bot/exts/backend/sync/__init__.py new file mode 100644 index 000000000..829098f79 --- /dev/null +++ b/bot/exts/backend/sync/__init__.py @@ -0,0 +1,8 @@ +from bot.bot import Bot + + +def setup(bot: Bot) -> None: + """Load the Sync cog.""" + # Defer import to reduce side effects from importing the sync package. + from bot.exts.backend.sync._cog import Sync + bot.add_cog(Sync(bot)) diff --git a/bot/cogs/sync/cog.py b/bot/exts/backend/sync/_cog.py index 4e6ed156b..48d2b6f02 100644 --- a/bot/cogs/sync/cog.py +++ b/bot/exts/backend/sync/_cog.py @@ -1,14 +1,14 @@ import logging -from typing import Callable, Dict, Iterable, Union +from typing import Any, Dict -from discord import Guild, Member, Role, User +from discord import Member, Role, User from discord.ext import commands from discord.ext.commands import Cog, Context from bot import constants from bot.api import ResponseCodeError from bot.bot import Bot -from bot.cogs.sync import syncers +from bot.exts.backend.sync import _syncers log = logging.getLogger(__name__) @@ -16,53 +16,37 @@ log = logging.getLogger(__name__) class Sync(Cog): """Captures relevant events and sends them to the site.""" - # The server to synchronize events on. - # Note that setting this wrongly will result in things getting deleted - # that possibly shouldn't be. - SYNC_SERVER_ID = constants.Guild.id - - # An iterable of callables that are called when the bot is ready. 
- ON_READY_SYNCERS: Iterable[Callable[[Bot, Guild], None]] = ( - syncers.sync_roles, - syncers.sync_users - ) - def __init__(self, bot: Bot) -> None: self.bot = bot - self.bot.loop.create_task(self.sync_guild()) async def sync_guild(self) -> None: """Syncs the roles/users of the guild with the database.""" - await self.bot.wait_until_ready() - guild = self.bot.get_guild(self.SYNC_SERVER_ID) - if guild is not None: - for syncer in self.ON_READY_SYNCERS: - syncer_name = syncer.__name__[5:] # drop off `sync_` - log.info("Starting `%s` syncer.", syncer_name) - total_created, total_updated, total_deleted = await syncer(self.bot, guild) - if total_deleted is None: - log.info( - f"`{syncer_name}` syncer finished, created `{total_created}`, updated `{total_updated}`." - ) - else: - log.info( - f"`{syncer_name}` syncer finished, created `{total_created}`, updated `{total_updated}`, " - f"deleted `{total_deleted}`." - ) - - async def patch_user(self, user_id: int, updated_information: Dict[str, Union[str, int]]) -> None: + await self.bot.wait_until_guild_available() + + guild = self.bot.get_guild(constants.Guild.id) + if guild is None: + return + + for syncer in (_syncers.RoleSyncer, _syncers.UserSyncer): + await syncer.sync(guild) + + async def patch_user(self, user_id: int, json: Dict[str, Any], ignore_404: bool = False) -> None: """Send a PATCH request to partially update a user in the database.""" try: - await self.bot.api_client.patch("bot/users/" + str(user_id), json=updated_information) + await self.bot.api_client.patch(f"bot/users/{user_id}", json=json) except ResponseCodeError as e: if e.response.status != 404: raise - log.warning("Unable to update user, got 404. Assuming race condition from join event.") + if not ignore_404: + log.warning("Unable to update user, got 404. Assuming race condition from join event.") @Cog.listener() async def on_guild_role_create(self, role: Role) -> None: """Adds newly create role to the database table over the API.""" + if role.guild.id != constants.Guild.id: + return + await self.bot.api_client.post( 'bot/roles', json={ @@ -77,17 +61,25 @@ class Sync(Cog): @Cog.listener() async def on_guild_role_delete(self, role: Role) -> None: """Deletes role from the database when it's deleted from the guild.""" + if role.guild.id != constants.Guild.id: + return + await self.bot.api_client.delete(f'bot/roles/{role.id}') @Cog.listener() async def on_guild_role_update(self, before: Role, after: Role) -> None: """Syncs role with the database if any of the stored attributes were updated.""" - if ( - before.name != after.name - or before.colour != after.colour - or before.permissions != after.permissions - or before.position != after.position - ): + if after.guild.id != constants.Guild.id: + return + + was_updated = ( + before.name != after.name + or before.colour != after.colour + or before.permissions != after.permissions + or before.position != after.position + ) + + if was_updated: await self.bot.api_client.put( f'bot/roles/{after.id}', json={ @@ -108,8 +100,10 @@ class Sync(Cog): previously left), it will update the user's information. If the user is not yet known by the database, the user is added. 
""" + if member.guild.id != constants.Guild.id: + return + packed = { - 'avatar_hash': member.avatar, 'discriminator': int(member.discriminator), 'id': member.id, 'in_guild': True, @@ -137,36 +131,33 @@ class Sync(Cog): @Cog.listener() async def on_member_remove(self, member: Member) -> None: - """Updates the user information when a member leaves the guild.""" - await self.bot.api_client.put( - f'bot/users/{member.id}', - json={ - 'avatar_hash': member.avatar, - 'discriminator': int(member.discriminator), - 'id': member.id, - 'in_guild': False, - 'name': member.name, - 'roles': sorted(role.id for role in member.roles) - } - ) + """Set the in_guild field to False when a member leaves the guild.""" + if member.guild.id != constants.Guild.id: + return + + await self.patch_user(member.id, json={"in_guild": False}) @Cog.listener() async def on_member_update(self, before: Member, after: Member) -> None: """Update the roles of the member in the database if a change is detected.""" + if after.guild.id != constants.Guild.id: + return + if before.roles != after.roles: updated_information = {"roles": sorted(role.id for role in after.roles)} - await self.patch_user(after.id, updated_information=updated_information) + await self.patch_user(after.id, json=updated_information) @Cog.listener() async def on_user_update(self, before: User, after: User) -> None: """Update the user information in the database if a relevant change is detected.""" - if any(getattr(before, attr) != getattr(after, attr) for attr in ("name", "discriminator", "avatar")): + attrs = ("name", "discriminator") + if any(getattr(before, attr) != getattr(after, attr) for attr in attrs): updated_information = { "name": after.name, "discriminator": int(after.discriminator), - "avatar_hash": after.avatar, } - await self.patch_user(after.id, updated_information=updated_information) + # A 404 likely means the user is in another guild. + await self.patch_user(after.id, json=updated_information, ignore_404=True) @commands.group(name='sync') @commands.has_permissions(administrator=True) @@ -176,25 +167,11 @@ class Sync(Cog): @sync_group.command(name='roles') @commands.has_permissions(administrator=True) async def sync_roles_command(self, ctx: Context) -> None: - """Manually synchronize the guild's roles with the roles on the site.""" - initial_response = await ctx.send("📊 Synchronizing roles.") - total_created, total_updated, total_deleted = await syncers.sync_roles(self.bot, ctx.guild) - await initial_response.edit( - content=( - f"👌 Role synchronization complete, created **{total_created}** " - f", updated **{total_created}** roles, and deleted **{total_deleted}** roles." - ) - ) + """Manually synchronise the guild's roles with the roles on the site.""" + await _syncers.RoleSyncer.sync(ctx.guild, ctx) @sync_group.command(name='users') @commands.has_permissions(administrator=True) async def sync_users_command(self, ctx: Context) -> None: - """Manually synchronize the guild's users with the users on the site.""" - initial_response = await ctx.send("📊 Synchronizing users.") - total_created, total_updated, total_deleted = await syncers.sync_users(self.bot, ctx.guild) - await initial_response.edit( - content=( - f"👌 User synchronization complete, created **{total_created}** " - f"and updated **{total_created}** users." 
- ) - ) + """Manually synchronise the guild's users with the users on the site.""" + await _syncers.UserSyncer.sync(ctx.guild, ctx) diff --git a/bot/exts/backend/sync/_syncers.py b/bot/exts/backend/sync/_syncers.py new file mode 100644 index 000000000..2eb9f9971 --- /dev/null +++ b/bot/exts/backend/sync/_syncers.py @@ -0,0 +1,216 @@ +import abc +import logging +import typing as t +from collections import namedtuple + +from discord import Guild +from discord.ext.commands import Context + +import bot +from bot.api import ResponseCodeError + +log = logging.getLogger(__name__) + +# These objects are declared as namedtuples because tuples are hashable, +# something that we make use of when diffing site roles against guild roles. +_Role = namedtuple('Role', ('id', 'name', 'colour', 'permissions', 'position')) +_Diff = namedtuple('Diff', ('created', 'updated', 'deleted')) + + +# Implementation of static abstract methods are not enforced if the subclass is never instantiated. +# However, methods are kept abstract to at least symbolise that they should be abstract. +class Syncer(abc.ABC): + """Base class for synchronising the database with objects in the Discord cache.""" + + @staticmethod + @property + @abc.abstractmethod + def name() -> str: + """The name of the syncer; used in output messages and logging.""" + raise NotImplementedError # pragma: no cover + + @staticmethod + @abc.abstractmethod + async def _get_diff(guild: Guild) -> _Diff: + """Return the difference between the cache of `guild` and the database.""" + raise NotImplementedError # pragma: no cover + + @staticmethod + @abc.abstractmethod + async def _sync(diff: _Diff) -> None: + """Perform the API calls for synchronisation.""" + raise NotImplementedError # pragma: no cover + + @classmethod + async def sync(cls, guild: Guild, ctx: t.Optional[Context] = None) -> None: + """ + Synchronise the database with the cache of `guild`. + + If `ctx` is given, send a message with the results. + """ + log.info(f"Starting {cls.name} syncer.") + + if ctx: + message = await ctx.send(f"📊 Synchronising {cls.name}s.") + else: + message = None + diff = await cls._get_diff(guild) + + try: + await cls._sync(diff) + except ResponseCodeError as e: + log.exception(f"{cls.name} syncer failed!") + + # Don't show response text because it's probably some really long HTML. + results = f"status {e.status}\n```{e.response_json or 'See log output for details'}```" + content = f":x: Synchronisation of {cls.name}s failed: {results}" + else: + diff_dict = diff._asdict() + results = (f"{name} `{len(val)}`" for name, val in diff_dict.items() if val is not None) + results = ", ".join(results) + + log.info(f"{cls.name} syncer finished: {results}.") + content = f":ok_hand: Synchronisation of {cls.name}s complete: {results}" + + if message: + await message.edit(content=content) + + +class RoleSyncer(Syncer): + """Synchronise the database with roles in the cache.""" + + name = "role" + + @staticmethod + async def _get_diff(guild: Guild) -> _Diff: + """Return the difference of roles between the cache of `guild` and the database.""" + log.trace("Getting the diff for roles.") + roles = await bot.instance.api_client.get('bot/roles') + + # Pack DB roles and guild roles into one common, hashable format. + # They're hashable so that they're easily comparable with sets later. 
+ db_roles = {_Role(**role_dict) for role_dict in roles} + guild_roles = { + _Role( + id=role.id, + name=role.name, + colour=role.colour.value, + permissions=role.permissions.value, + position=role.position, + ) + for role in guild.roles + } + + guild_role_ids = {role.id for role in guild_roles} + api_role_ids = {role.id for role in db_roles} + new_role_ids = guild_role_ids - api_role_ids + deleted_role_ids = api_role_ids - guild_role_ids + + # New roles are those which are on the cached guild but not on the + # DB guild, going by the role ID. We need to send them in for creation. + roles_to_create = {role for role in guild_roles if role.id in new_role_ids} + roles_to_update = guild_roles - db_roles - roles_to_create + roles_to_delete = {role for role in db_roles if role.id in deleted_role_ids} + + return _Diff(roles_to_create, roles_to_update, roles_to_delete) + + @staticmethod + async def _sync(diff: _Diff) -> None: + """Synchronise the database with the role cache of `guild`.""" + log.trace("Syncing created roles...") + for role in diff.created: + await bot.instance.api_client.post('bot/roles', json=role._asdict()) + + log.trace("Syncing updated roles...") + for role in diff.updated: + await bot.instance.api_client.put(f'bot/roles/{role.id}', json=role._asdict()) + + log.trace("Syncing deleted roles...") + for role in diff.deleted: + await bot.instance.api_client.delete(f'bot/roles/{role.id}') + + +class UserSyncer(Syncer): + """Synchronise the database with users in the cache.""" + + name = "user" + + @staticmethod + async def _get_diff(guild: Guild) -> _Diff: + """Return the difference of users between the cache of `guild` and the database.""" + log.trace("Getting the diff for users.") + + users_to_create = [] + users_to_update = [] + seen_guild_users = set() + + async for db_user in UserSyncer._get_users(): + # Store user fields which are to be updated. + updated_fields = {} + + def maybe_update(db_field: str, guild_value: t.Union[str, int]) -> None: + # Equalize DB user and guild user attributes. + if db_user[db_field] != guild_value: + updated_fields[db_field] = guild_value + + if guild_user := guild.get_member(db_user["id"]): + seen_guild_users.add(guild_user.id) + + maybe_update("name", guild_user.name) + maybe_update("discriminator", int(guild_user.discriminator)) + maybe_update("in_guild", True) + + guild_roles = [role.id for role in guild_user.roles] + if set(db_user["roles"]) != set(guild_roles): + updated_fields["roles"] = guild_roles + + elif db_user["in_guild"]: + # The user is known in the DB but not the guild, and the + # DB currently specifies that the user is a member of the guild. + # This means that the user has left since the last sync. + # Update the `in_guild` attribute of the user on the site + # to signify that the user left. + updated_fields["in_guild"] = False + + if updated_fields: + updated_fields["id"] = db_user["id"] + users_to_update.append(updated_fields) + + for member in guild.members: + if member.id not in seen_guild_users: + # The user is known on the guild but not on the API. This means + # that the user has joined since the last sync. Create it. 
+ new_user = { + "id": member.id, + "name": member.name, + "discriminator": int(member.discriminator), + "roles": [role.id for role in member.roles], + "in_guild": True + } + users_to_create.append(new_user) + + return _Diff(users_to_create, users_to_update, None) + + @staticmethod + async def _get_users() -> t.AsyncIterable: + """GET users from database.""" + query_params = { + "page": 1 + } + while query_params["page"]: + res = await bot.instance.api_client.get("bot/users", params=query_params) + for user in res["results"]: + yield user + + query_params["page"] = res["next_page_no"] + + @staticmethod + async def _sync(diff: _Diff) -> None: + """Synchronise the database with the user cache of `guild`.""" + log.trace("Syncing created users...") + if diff.created: + await bot.instance.api_client.post("bot/users", json=diff.created) + + log.trace("Syncing updated users...") + if diff.updated: + await bot.instance.api_client.patch("bot/users/bulk_patch", json=diff.updated) diff --git a/tests/bot/cogs/sync/__init__.py b/bot/exts/filters/__init__.py index e69de29bb..e69de29bb 100644 --- a/tests/bot/cogs/sync/__init__.py +++ b/bot/exts/filters/__init__.py diff --git a/bot/exts/filters/antimalware.py b/bot/exts/filters/antimalware.py new file mode 100644 index 000000000..26f00e91f --- /dev/null +++ b/bot/exts/filters/antimalware.py @@ -0,0 +1,102 @@ +import logging +import typing as t +from os.path import splitext + +from discord import Embed, Message, NotFound +from discord.ext.commands import Cog + +from bot.bot import Bot +from bot.constants import Channels, Filter, URLs + +log = logging.getLogger(__name__) + +PY_EMBED_DESCRIPTION = ( + "It looks like you tried to attach a Python file - " + f"please use a code-pasting service such as {URLs.site_schema}{URLs.site_paste}" +) + +TXT_EMBED_DESCRIPTION = ( + "**Uh-oh!** It looks like your message got zapped by our spam filter. " + "We currently don't allow `.txt` attachments, so here are some tips to help you travel safely: \n\n" + "• If you attempted to send a message longer than 2000 characters, try shortening your message " + "to fit within the character limit or use a pasting service (see below) \n\n" + "• If you tried to show someone your code, you can use codeblocks \n(run `!code-blocks` in " + "{cmd_channel_mention} for more information) or use a pasting service like: " + f"\n\n{URLs.site_schema}{URLs.site_paste}" +) + +DISALLOWED_EMBED_DESCRIPTION = ( + "It looks like you tried to attach file type(s) that we do not allow ({blocked_extensions_str}). " + "We currently allow the following file types: **{joined_whitelist}**.\n\n" + "Feel free to ask in {meta_channel_mention} if you think this is a mistake." 
+) + + +class AntiMalware(Cog): + """Delete messages which contain attachments with non-whitelisted file extensions.""" + + def __init__(self, bot: Bot): + self.bot = bot + + def _get_whitelisted_file_formats(self) -> list: + """Get the file formats currently on the whitelist.""" + return self.bot.filter_list_cache['FILE_FORMAT.True'].keys() + + def _get_disallowed_extensions(self, message: Message) -> t.Iterable[str]: + """Get an iterable containing all the disallowed extensions of attachments.""" + file_extensions = {splitext(attachment.filename.lower())[1] for attachment in message.attachments} + extensions_blocked = file_extensions - set(self._get_whitelisted_file_formats()) + return extensions_blocked + + @Cog.listener() + async def on_message(self, message: Message) -> None: + """Identify messages with prohibited attachments.""" + # Return if the message has no attachments; also don't moderate DMs + if not message.attachments or not message.guild: + return + + # Ignore webhook and bot messages + if message.webhook_id or message.author.bot: + return + + # Return if the user is staff + # Since we only care that roles exist to iterate over, check for the attr rather than a User/Member instance + if hasattr(message.author, "roles") and any(role.id in Filter.role_whitelist for role in message.author.roles): + return + + embed = Embed() + extensions_blocked = self._get_disallowed_extensions(message) + blocked_extensions_str = ', '.join(extensions_blocked) + if ".py" in extensions_blocked: + # Short-circuit on *.py files to provide a pastebin link + embed.description = PY_EMBED_DESCRIPTION + elif ".txt" in extensions_blocked: + # Work around Discord AutoConversion of messages longer than 2000 chars to .txt + cmd_channel = self.bot.get_channel(Channels.bot_commands) + embed.description = TXT_EMBED_DESCRIPTION.format(cmd_channel_mention=cmd_channel.mention) + elif extensions_blocked: + meta_channel = self.bot.get_channel(Channels.meta) + embed.description = DISALLOWED_EMBED_DESCRIPTION.format( + joined_whitelist=', '.join(self._get_whitelisted_file_formats()), + blocked_extensions_str=blocked_extensions_str, + meta_channel_mention=meta_channel.mention, + ) + + if embed.description: + log.info( + f"User '{message.author}' ({message.author.id}) uploaded blacklisted file(s): {blocked_extensions_str}", + extra={"attachment_list": [attachment.filename for attachment in message.attachments]} + ) + + await message.channel.send(f"Hey {message.author.mention}!", embed=embed) + + # Delete the offending message: + try: + await message.delete() + except NotFound: + log.info(f"Tried to delete message `{message.id}`, but message could not be found.") + + +def setup(bot: Bot) -> None: + """Load the AntiMalware cog.""" + bot.add_cog(AntiMalware(bot)) diff --git a/bot/cogs/antispam.py b/bot/exts/filters/antispam.py index f67ef6f05..af8528a68 100644 --- a/bot/cogs/antispam.py +++ b/bot/exts/filters/antispam.py @@ -11,15 +11,14 @@ from discord.ext.commands import Cog from bot import rules from bot.bot import Bot -from bot.cogs.moderation import ModLog from bot.constants import ( AntiSpam as AntiSpamConfig, Channels, Colours, DEBUG_MODE, Event, Filter, Guild as GuildConfig, Icons, - STAFF_ROLES, ) from bot.converters import Duration -from bot.utils.messages import send_attachments +from bot.exts.moderation.modlog import ModLog +from bot.utils.messages import format_user, send_attachments log = logging.getLogger(__name__) @@ -27,14 +26,15 @@ log = logging.getLogger(__name__) RULE_FUNCTION_MAPPING = {
'attachments': rules.apply_attachments, 'burst': rules.apply_burst, - 'burst_shared': rules.apply_burst_shared, + # burst shared is temporarily disabled due to a bug + # 'burst_shared': rules.apply_burst_shared, 'chars': rules.apply_chars, 'discord_emojis': rules.apply_discord_emojis, 'duplicates': rules.apply_duplicates, 'links': rules.apply_links, 'mentions': rules.apply_mentions, 'newlines': rules.apply_newlines, - 'role_mentions': rules.apply_role_mentions + 'role_mentions': rules.apply_role_mentions, } @@ -67,7 +67,7 @@ class DeletionContext: async def upload_messages(self, actor_id: int, modlog: ModLog) -> None: """Method that takes care of uploading the queue and posting modlog alert.""" - triggered_by_users = ", ".join(f"{m} (`{m.id}`)" for m in self.members.values()) + triggered_by_users = ", ".join(format_user(m) for m in self.members.values()) mod_alert_message = ( f"**Triggered by:** {triggered_by_users}\n" @@ -94,7 +94,7 @@ class DeletionContext: await modlog.send_log_message( icon_url=Icons.filtering, colour=Colour(Colours.soft_red), - title=f"Spam detected!", + title="Spam detected!", text=mod_alert_message, thumbnail=last_message.author.avatar_url_as(static_format="png"), channel_id=Channels.mod_alerts, @@ -123,14 +123,14 @@ class AntiSpam(Cog): async def alert_on_validation_error(self) -> None: """Unloads the cog and alerts admins if configuration validation failed.""" - await self.bot.wait_until_ready() + await self.bot.wait_until_guild_available() if self.validation_errors: body = "**The following errors were encountered:**\n" body += "\n".join(f"- {error}" for error in self.validation_errors.values()) body += "\n\n**The cog has been unloaded.**" await self.mod_log.send_log_message( - title=f"Error: AntiSpam configuration validation failed!", + title="Error: AntiSpam configuration validation failed!", text=body, ping_everyone=True, icon_url=Icons.token_removed, @@ -148,7 +148,7 @@ class AntiSpam(Cog): or message.guild.id != GuildConfig.id or message.author.bot or (message.channel.id in Filter.channel_whitelist and not DEBUG_MODE) - or (any(role.id in STAFF_ROLES for role in message.author.roles) and not DEBUG_MODE) + or (any(role.id in Filter.role_whitelist for role in message.author.roles) and not DEBUG_MODE) ): return @@ -182,6 +182,7 @@ class AntiSpam(Cog): # which contains the reason for why the message violated the rule and # an iterable of all members that violated the rule. if result is not None: + self.bot.stats.incr(f"mod_alerts.{rule_name}") reason, members, relevant_messages = result full_reason = f"`{rule_name}` rule: {reason}" @@ -218,7 +219,6 @@ class AntiSpam(Cog): # Get context and make sure the bot becomes the actor of infraction by patching the `author` attributes context = await self.bot.get_context(msg) context.author = self.bot.user - context.message.author = self.bot.user # Since we're going to invoke the tempmute command directly, we need to manually call the converter. 
dt_remove_role_after = await self.expiration_date_converter.convert(context, f"{remove_role_after}S") diff --git a/bot/exts/filters/filter_lists.py b/bot/exts/filters/filter_lists.py new file mode 100644 index 000000000..232c1e48b --- /dev/null +++ b/bot/exts/filters/filter_lists.py @@ -0,0 +1,272 @@ +import logging +from typing import Optional + +from discord import Colour, Embed +from discord.ext.commands import BadArgument, Cog, Context, IDConverter, group, has_any_role + +from bot import constants +from bot.api import ResponseCodeError +from bot.bot import Bot +from bot.converters import ValidDiscordServerInvite, ValidFilterListType +from bot.pagination import LinePaginator + +log = logging.getLogger(__name__) + + +class FilterLists(Cog): + """Commands for blacklisting and whitelisting things.""" + + methods_with_filterlist_types = [ + "allow_add", + "allow_delete", + "allow_get", + "deny_add", + "deny_delete", + "deny_get", + ] + + def __init__(self, bot: Bot) -> None: + self.bot = bot + self.bot.loop.create_task(self._amend_docstrings()) + + async def _amend_docstrings(self) -> None: + """Add the valid FilterList types to the docstrings, so they'll appear in !help invocations.""" + await self.bot.wait_until_guild_available() + + # Add valid filterlist types to the docstrings + valid_types = await ValidFilterListType.get_valid_types(self.bot) + valid_types = [f"`{type_.lower()}`" for type_ in valid_types] + + for method_name in self.methods_with_filterlist_types: + command = getattr(self, method_name) + command.help = ( + f"{command.help}\n\nValid **list_type** values are {', '.join(valid_types)}." + ) + + async def _add_data( + self, + ctx: Context, + allowed: bool, + list_type: ValidFilterListType, + content: str, + comment: Optional[str] = None, + ) -> None: + """Add an item to a filterlist.""" + allow_type = "whitelist" if allowed else "blacklist" + + # If this is a server invite, we gotta validate it. + if list_type == "GUILD_INVITE": + guild_data = await self._validate_guild_invite(ctx, content) + content = guild_data.get("id") + + # Unless the user has specified another comment, let's + # use the server name as the comment so that the list + # of guild IDs will be more easily readable when we + # display it. + if not comment: + comment = guild_data.get("name") + + # If it's a file format, let's make sure it has a leading dot. + elif list_type == "FILE_FORMAT" and not content.startswith("."): + content = f".{content}" + + # Try to add the item to the database + log.trace(f"Trying to add the {content} item to the {list_type} {allow_type}") + payload = { + "allowed": allowed, + "type": list_type, + "content": content, + "comment": comment, + } + + try: + item = await self.bot.api_client.post( + "bot/filter-lists", + json=payload + ) + except ResponseCodeError as e: + if e.status == 400: + await ctx.message.add_reaction("❌") + log.debug( + f"{ctx.author} tried to add data to a {allow_type}, but the API returned 400, " + "probably because the request violated the UniqueConstraint." + ) + raise BadArgument( + f"Unable to add the item to the {allow_type}. " + "The item probably already exists. Keep in mind that a " + "blacklist and a whitelist for the same item cannot co-exist, " + "and we do not permit any duplicates." 
+ ) + raise + + # Insert the item into the cache + self.bot.insert_item_into_filter_list_cache(item) + await ctx.message.add_reaction("✅") + + async def _delete_data(self, ctx: Context, allowed: bool, list_type: ValidFilterListType, content: str) -> None: + """Remove an item from a filterlist.""" + allow_type = "whitelist" if allowed else "blacklist" + + # If this is a server invite, we need to convert it. + if list_type == "GUILD_INVITE" and not IDConverter()._get_id_match(content): + guild_data = await self._validate_guild_invite(ctx, content) + content = guild_data.get("id") + + # If it's a file format, let's make sure it has a leading dot. + elif list_type == "FILE_FORMAT" and not content.startswith("."): + content = f".{content}" + + # Find the content and delete it. + log.trace(f"Trying to delete the {content} item from the {list_type} {allow_type}") + item = self.bot.filter_list_cache[f"{list_type}.{allowed}"].get(content) + + if item is not None: + try: + await self.bot.api_client.delete( + f"bot/filter-lists/{item['id']}" + ) + del self.bot.filter_list_cache[f"{list_type}.{allowed}"][content] + await ctx.message.add_reaction("✅") + except ResponseCodeError as e: + log.debug( + f"{ctx.author} tried to delete an item with the id {item['id']}, but " + f"the API raised an unexpected error: {e}" + ) + await ctx.message.add_reaction("❌") + else: + await ctx.message.add_reaction("❌") + + async def _list_all_data(self, ctx: Context, allowed: bool, list_type: ValidFilterListType) -> None: + """Paginate and display all items in a filterlist.""" + allow_type = "whitelist" if allowed else "blacklist" + result = self.bot.filter_list_cache[f"{list_type}.{allowed}"] + + # Build a list of lines we want to show in the paginator + lines = [] + for content, metadata in result.items(): + line = f"• `{content}`" + + if comment := metadata.get("comment"): + line += f" - {comment}" + + lines.append(line) + lines = sorted(lines) + + # Build the embed + list_type_plural = list_type.lower().replace("_", " ").title() + "s" + embed = Embed( + title=f"{allow_type.title()}ed {list_type_plural} ({len(result)} total)", + colour=Colour.blue() + ) + log.trace(f"Trying to list {len(result)} items from the {list_type.lower()} {allow_type}") + + if result: + await LinePaginator.paginate(lines, ctx, embed, max_lines=15, empty=False) + else: + embed.description = "Hmmm, seems like there's nothing here yet." + await ctx.send(embed=embed) + await ctx.message.add_reaction("❌") + + async def _sync_data(self, ctx: Context) -> None: + """Syncs the filterlists with the API.""" + try: + log.trace("Attempting to sync FilterList cache with data from the API.") + await self.bot.cache_filter_list_data() + await ctx.message.add_reaction("✅") + except ResponseCodeError as e: + log.debug( + f"{ctx.author} tried to sync FilterList cache data but " + f"the API raised an unexpected error: {e}" + ) + await ctx.message.add_reaction("❌") + + @staticmethod + async def _validate_guild_invite(ctx: Context, invite: str) -> dict: + """ + Validates a guild invite, and returns the guild info as a dict. + + Will raise a BadArgument if the guild invite is invalid. + """ + log.trace(f"Attempting to validate whether or not {invite} is a guild invite.") + validator = ValidDiscordServerInvite() + guild_data = await validator.convert(ctx, invite) + + # If we make it this far without raising a BadArgument, the invite is + # valid. Let's return a dict of guild information. + log.trace(f"{invite} validated as server invite. 
Converting to ID.") + return guild_data + + @group(aliases=("allowlist", "allow", "al", "wl")) + async def whitelist(self, ctx: Context) -> None: + """Group for whitelisting commands.""" + if not ctx.invoked_subcommand: + await ctx.send_help(ctx.command) + + @group(aliases=("denylist", "deny", "bl", "dl")) + async def blacklist(self, ctx: Context) -> None: + """Group for blacklisting commands.""" + if not ctx.invoked_subcommand: + await ctx.send_help(ctx.command) + + @whitelist.command(name="add", aliases=("a", "set")) + async def allow_add( + self, + ctx: Context, + list_type: ValidFilterListType, + content: str, + *, + comment: Optional[str] = None, + ) -> None: + """Add an item to the specified allowlist.""" + await self._add_data(ctx, True, list_type, content, comment) + + @blacklist.command(name="add", aliases=("a", "set")) + async def deny_add( + self, + ctx: Context, + list_type: ValidFilterListType, + content: str, + *, + comment: Optional[str] = None, + ) -> None: + """Add an item to the specified denylist.""" + await self._add_data(ctx, False, list_type, content, comment) + + @whitelist.command(name="remove", aliases=("delete", "rm",)) + async def allow_delete(self, ctx: Context, list_type: ValidFilterListType, content: str) -> None: + """Remove an item from the specified allowlist.""" + await self._delete_data(ctx, True, list_type, content) + + @blacklist.command(name="remove", aliases=("delete", "rm",)) + async def deny_delete(self, ctx: Context, list_type: ValidFilterListType, content: str) -> None: + """Remove an item from the specified denylist.""" + await self._delete_data(ctx, False, list_type, content) + + @whitelist.command(name="get", aliases=("list", "ls", "fetch", "show")) + async def allow_get(self, ctx: Context, list_type: ValidFilterListType) -> None: + """Get the contents of a specified allowlist.""" + await self._list_all_data(ctx, True, list_type) + + @blacklist.command(name="get", aliases=("list", "ls", "fetch", "show")) + async def deny_get(self, ctx: Context, list_type: ValidFilterListType) -> None: + """Get the contents of a specified denylist.""" + await self._list_all_data(ctx, False, list_type) + + @whitelist.command(name="sync", aliases=("s",)) + async def allow_sync(self, ctx: Context) -> None: + """Syncs both allowlists and denylists with the API.""" + await self._sync_data(ctx) + + @blacklist.command(name="sync", aliases=("s",)) + async def deny_sync(self, ctx: Context) -> None: + """Syncs both allowlists and denylists with the API.""" + await self._sync_data(ctx) + + async def cog_check(self, ctx: Context) -> bool: + """Only allow moderators to invoke the commands in this cog.""" + return await has_any_role(*constants.MODERATION_ROLES).predicate(ctx) + + +def setup(bot: Bot) -> None: + """Load the FilterLists cog.""" + bot.add_cog(FilterLists(bot)) diff --git a/bot/exts/filters/filtering.py b/bot/exts/filters/filtering.py new file mode 100644 index 000000000..208fc9e1f --- /dev/null +++ b/bot/exts/filters/filtering.py @@ -0,0 +1,605 @@ +import asyncio +import logging +import re +from datetime import datetime, timedelta +from typing import Any, Dict, List, Mapping, NamedTuple, Optional, Union + +import dateutil +import discord.errors +from async_rediscache import RedisCache +from dateutil.relativedelta import relativedelta +from discord import Colour, HTTPException, Member, Message, NotFound, TextChannel +from discord.ext.commands import Cog +from discord.utils import escape_markdown + +from bot.api import ResponseCodeError +from bot.bot import Bot 
+from bot.constants import ( + Channels, Colours, Filter, + Guild, Icons, URLs +) +from bot.exts.moderation.modlog import ModLog +from bot.utils.messages import format_user +from bot.utils.regex import INVITE_RE +from bot.utils.scheduling import Scheduler + +log = logging.getLogger(__name__) + +# Regular expressions +CODE_BLOCK_RE = re.compile( + r"(?P<delim>``?)[^`]+?(?P=delim)(?!`+)" # Inline codeblock + r"|```(.+?)```", # Multiline codeblock + re.DOTALL | re.MULTILINE +) +EVERYONE_PING_RE = re.compile(rf"@everyone|<@&{Guild.id}>|@here") +SPOILER_RE = re.compile(r"(\|\|.+?\|\|)", re.DOTALL) +URL_RE = re.compile(r"(https?://[^\s]+)", flags=re.IGNORECASE) +ZALGO_RE = re.compile(r"[\u0300-\u036F\u0489]") + +# Other constants. +DAYS_BETWEEN_ALERTS = 3 +OFFENSIVE_MSG_DELETE_TIME = timedelta(days=Filter.offensive_msg_delete_days) + +FilterMatch = Union[re.Match, dict, bool, List[discord.Embed]] + + +class Stats(NamedTuple): + """Additional stats on a triggered filter to append to a mod log.""" + + message_content: str + additional_embeds: Optional[List[discord.Embed]] + additional_embeds_msg: Optional[str] + + +class Filtering(Cog): + """Filtering out invites, blacklisting domains, and warning us of certain regular expressions.""" + + # Redis cache mapping a user ID to the last timestamp a bad nickname alert was sent + name_alerts = RedisCache() + + def __init__(self, bot: Bot): + self.bot = bot + self.scheduler = Scheduler(self.__class__.__name__) + self.name_lock = asyncio.Lock() + + staff_mistake_str = "If you believe this was a mistake, please let staff know!" + self.filters = { + "filter_zalgo": { + "enabled": Filter.filter_zalgo, + "function": self._has_zalgo, + "type": "filter", + "content_only": True, + "user_notification": Filter.notify_user_zalgo, + "notification_msg": ( + "Your post has been removed for abusing Unicode character rendering (aka Zalgo text). " + f"{staff_mistake_str}" + ), + "schedule_deletion": False + }, + "filter_invites": { + "enabled": Filter.filter_invites, + "function": self._has_invites, + "type": "filter", + "content_only": True, + "user_notification": Filter.notify_user_invites, + "notification_msg": ( + f"Per Rule 6, your invite link has been removed. {staff_mistake_str}\n\n" + r"Our server rules can be found here: <https://pythondiscord.com/pages/rules>" + ), + "schedule_deletion": False + }, + "filter_domains": { + "enabled": Filter.filter_domains, + "function": self._has_urls, + "type": "filter", + "content_only": True, + "user_notification": Filter.notify_user_domains, + "notification_msg": ( + f"Your URL has been removed because it matched a blacklisted domain. {staff_mistake_str}" + ), + "schedule_deletion": False + }, + "filter_everyone_ping": { + "enabled": Filter.filter_everyone_ping, + "function": self._has_everyone_ping, + "type": "filter", + "content_only": True, + "user_notification": Filter.notify_user_everyone_ping, + "notification_msg": ( + "Please don't try to ping `@everyone` or `@here`. " + f"Your message has been removed. 
{staff_mistake_str}" + ), + "schedule_deletion": False, + "ping_everyone": False + }, + "watch_regex": { + "enabled": Filter.watch_regex, + "function": self._has_watch_regex_match, + "type": "watchlist", + "content_only": True, + "schedule_deletion": True + }, + "watch_rich_embeds": { + "enabled": Filter.watch_rich_embeds, + "function": self._has_rich_embed, + "type": "watchlist", + "content_only": False, + "schedule_deletion": False + } + } + + self.bot.loop.create_task(self.reschedule_offensive_msg_deletion()) + + def cog_unload(self) -> None: + """Cancel scheduled tasks.""" + self.scheduler.cancel_all() + + def _get_filterlist_items(self, list_type: str, *, allowed: bool) -> list: + """Fetch items from the filter_list_cache.""" + return self.bot.filter_list_cache[f"{list_type.upper()}.{allowed}"].keys() + + @staticmethod + def _expand_spoilers(text: str) -> str: + """Return a string containing all interpretations of a spoilered message.""" + split_text = SPOILER_RE.split(text) + return ''.join( + split_text[0::2] + split_text[1::2] + split_text + ) + + @property + def mod_log(self) -> ModLog: + """Get currently loaded ModLog cog instance.""" + return self.bot.get_cog("ModLog") + + @Cog.listener() + async def on_message(self, msg: Message) -> None: + """Invoke message filter for new messages.""" + await self._filter_message(msg) + + # Ignore webhook messages. + if msg.webhook_id is None: + await self.check_bad_words_in_name(msg.author) + + @Cog.listener() + async def on_message_edit(self, before: Message, after: Message) -> None: + """ + Invoke message filter for message edits. + + If there have been multiple edits, calculate the time delta from the previous edit. + """ + if not before.edited_at: + delta = relativedelta(after.edited_at, before.created_at).microseconds + else: + delta = relativedelta(after.edited_at, before.edited_at).microseconds + await self._filter_message(after, delta) + + def get_name_matches(self, name: str) -> List[re.Match]: + """Check bad words from passed string (name). 
Return list of matches.""" + matches = [] + watchlist_patterns = self._get_filterlist_items('filter_token', allowed=False) + for pattern in watchlist_patterns: + if match := re.search(pattern, name, flags=re.IGNORECASE): + matches.append(match) + return matches + + async def check_send_alert(self, member: Member) -> bool: + """When there is less than 3 days after last alert, return `False`, otherwise `True`.""" + if last_alert := await self.name_alerts.get(member.id): + last_alert = datetime.utcfromtimestamp(last_alert) + if datetime.utcnow() - timedelta(days=DAYS_BETWEEN_ALERTS) < last_alert: + log.trace(f"Last alert was too recent for {member}'s nickname.") + return False + + return True + + async def check_bad_words_in_name(self, member: Member) -> None: + """Send a mod alert every 3 days if a username still matches a watchlist pattern.""" + # Use lock to avoid race conditions + async with self.name_lock: + # Check whether the users display name contains any words in our blacklist + matches = self.get_name_matches(member.display_name) + + if not matches or not await self.check_send_alert(member): + return + + log.info(f"Sending bad nickname alert for '{member.display_name}' ({member.id}).") + + log_string = ( + f"**User:** {format_user(member)}\n" + f"**Display Name:** {escape_markdown(member.display_name)}\n" + f"**Bad Matches:** {', '.join(match.group() for match in matches)}" + ) + + await self.mod_log.send_log_message( + icon_url=Icons.token_removed, + colour=Colours.soft_red, + title="Username filtering alert", + text=log_string, + channel_id=Channels.mod_alerts, + thumbnail=member.avatar_url + ) + + # Update time when alert sent + await self.name_alerts.set(member.id, datetime.utcnow().timestamp()) + + async def filter_eval(self, result: str, msg: Message) -> bool: + """ + Filter the result of an !eval to see if it violates any of our rules, and then respond accordingly. + + Also requires the original message, to check whether to filter and for mod logs. + Returns whether a filter was triggered or not. + """ + filter_triggered = False + # Should we filter this message? + if self._check_filter(msg): + for filter_name, _filter in self.filters.items(): + # Is this specific filter enabled in the config? + # We also do not need to worry about filters that take the full message, + # since all we have is an arbitrary string. + if _filter["enabled"] and _filter["content_only"]: + match = await _filter["function"](result) + + if match: + # If this is a filter (not a watchlist), we set the variable so we know + # that it has been triggered + if _filter["type"] == "filter": + filter_triggered = True + + stats = self._add_stats(filter_name, match, result) + await self._send_log(filter_name, _filter, msg, stats, is_eval=True) + + break # We don't want multiple filters to trigger + + return filter_triggered + + async def _filter_message(self, msg: Message, delta: Optional[int] = None) -> None: + """Filter the input message to see if it violates any of our rules, and then respond accordingly.""" + # Should we filter this message? + if self._check_filter(msg): + for filter_name, _filter in self.filters.items(): + # Is this specific filter enabled in the config? + if _filter["enabled"]: + # Double trigger check for the embeds filter + if filter_name == "watch_rich_embeds": + # If the edit delta is less than 0.001 seconds, then we're probably dealing + # with a double filter trigger. 
+ if delta is not None and delta < 100: + continue + + # Does the filter only need the message content or the full message? + if _filter["content_only"]: + match = await _filter["function"](msg.content) + else: + match = await _filter["function"](msg) + + if match: + is_private = msg.channel.type is discord.ChannelType.private + + # If this is a filter (not a watchlist) and not in a DM, delete the message. + if _filter["type"] == "filter" and not is_private: + try: + # Embeds (can?) trigger both the `on_message` and `on_message_edit` + # event handlers, triggering filtering twice for the same message. + # + # If `on_message`-triggered filtering already deleted the message + # then `on_message_edit`-triggered filtering will raise exception + # since the message no longer exists. + # + # In addition, to avoid sending two notifications to the user, the + # logs, and mod_alert, we return if the message no longer exists. + await msg.delete() + except discord.errors.NotFound: + return + + # Notify the user if the filter specifies + if _filter["user_notification"]: + await self.notify_member(msg.author, _filter["notification_msg"], msg.channel) + + # If the message is classed as offensive, we store it in the site db and + # it will be deleted it after one week. + if _filter["schedule_deletion"] and not is_private: + delete_date = (msg.created_at + OFFENSIVE_MSG_DELETE_TIME).isoformat() + data = { + 'id': msg.id, + 'channel_id': msg.channel.id, + 'delete_date': delete_date + } + + try: + await self.bot.api_client.post('bot/offensive-messages', json=data) + except ResponseCodeError as e: + if e.status == 400 and "already exists" in e.response_json.get("id", [""])[0]: + log.debug(f"Offensive message {msg.id} already exists.") + else: + log.error(f"Offensive message {msg.id} failed to post: {e}") + else: + self.schedule_msg_delete(data) + log.trace(f"Offensive message {msg.id} will be deleted on {delete_date}") + + stats = self._add_stats(filter_name, match, msg.content) + await self._send_log(filter_name, _filter, msg, stats) + + break # We don't want multiple filters to trigger + + async def _send_log( + self, + filter_name: str, + _filter: Dict[str, Any], + msg: discord.Message, + stats: Stats, + *, + is_eval: bool = False, + ) -> None: + """Send a mod log for a triggered filter.""" + if msg.channel.type is discord.ChannelType.private: + channel_str = "via DM" + ping_everyone = False + else: + channel_str = f"in {msg.channel.mention}" + # Allow specific filters to override ping_everyone + ping_everyone = Filter.ping_everyone and _filter.get("ping_everyone", True) + + eval_msg = "using !eval " if is_eval else "" + message = ( + f"The {filter_name} {_filter['type']} was triggered by {format_user(msg.author)} " + f"{channel_str} {eval_msg}with [the following message]({msg.jump_url}):\n\n" + f"{stats.message_content}" + ) + + log.debug(message) + + # Send pretty mod log embed to mod-alerts + await self.mod_log.send_log_message( + icon_url=Icons.filtering, + colour=Colour(Colours.soft_red), + title=f"{_filter['type'].title()} triggered!", + text=message, + thumbnail=msg.author.avatar_url_as(static_format="png"), + channel_id=Channels.mod_alerts, + ping_everyone=ping_everyone, + additional_embeds=stats.additional_embeds, + additional_embeds_msg=stats.additional_embeds_msg + ) + + def _add_stats(self, name: str, match: FilterMatch, content: str) -> Stats: + """Adds relevant statistical information to the relevant filter and increments the bot's stats.""" + # Word and match stats for watch_regex + if name 
== "watch_regex": + surroundings = match.string[max(match.start() - 10, 0): match.end() + 10] + message_content = ( + f"**Match:** '{match[0]}'\n" + f"**Location:** '...{escape_markdown(surroundings)}...'\n" + f"\n**Original Message:**\n{escape_markdown(content)}" + ) + else: # Use original content + message_content = content + + additional_embeds = None + additional_embeds_msg = None + + self.bot.stats.incr(f"filters.{name}") + + # The function returns True for invalid invites. + # They have no data so additional embeds can't be created for them. + if name == "filter_invites" and match is not True: + additional_embeds = [] + for _, data in match.items(): + embed = discord.Embed(description=( + f"**Members:**\n{data['members']}\n" + f"**Active:**\n{data['active']}" + )) + embed.set_author(name=data["name"]) + embed.set_thumbnail(url=data["icon"]) + embed.set_footer(text=f"Guild ID: {data['id']}") + additional_embeds.append(embed) + additional_embeds_msg = "For the following guild(s):" + + elif name == "watch_rich_embeds": + additional_embeds = match + additional_embeds_msg = "With the following embed(s):" + + return Stats(message_content, additional_embeds, additional_embeds_msg) + + @staticmethod + def _check_filter(msg: Message) -> bool: + """Check whitelists to see if we should filter this message.""" + role_whitelisted = False + + if type(msg.author) is Member: # Only Member has roles, not User. + for role in msg.author.roles: + if role.id in Filter.role_whitelist: + role_whitelisted = True + + return ( + msg.channel.id not in Filter.channel_whitelist # Channel not in whitelist + and not role_whitelisted # Role not in whitelist + and not msg.author.bot # Author not a bot + ) + + async def _has_watch_regex_match(self, text: str) -> Union[bool, re.Match]: + """ + Return True if `text` matches any regex from `word_watchlist` or `token_watchlist` configs. + + `word_watchlist`'s patterns are placed between word boundaries while `token_watchlist` is + matched as-is. Spoilers are expanded, if any, and URLs are ignored. + """ + if SPOILER_RE.search(text): + text = self._expand_spoilers(text) + + # Make sure it's not a URL + if URL_RE.search(text): + return False + + watchlist_patterns = self._get_filterlist_items('filter_token', allowed=False) + for pattern in watchlist_patterns: + match = re.search(pattern, text, flags=re.IGNORECASE) + if match: + return match + + async def _has_urls(self, text: str) -> bool: + """Returns True if the text contains one of the blacklisted URLs from the config file.""" + if not URL_RE.search(text): + return False + + text = text.lower() + domain_blacklist = self._get_filterlist_items("domain_name", allowed=False) + + for url in domain_blacklist: + if url.lower() in text: + return True + + return False + + @staticmethod + async def _has_zalgo(text: str) -> bool: + """ + Returns True if the text contains zalgo characters. + + Zalgo range is \u0300 – \u036F and \u0489. + """ + return bool(ZALGO_RE.search(text)) + + async def _has_invites(self, text: str) -> Union[dict, bool]: + """ + Checks if there's any invites in the text content that aren't in the guild whitelist. + + If any are detected, a dictionary of invite data is returned, with a key per invite. + If none are detected, False is returned. + + Attempts to catch some of common ways to try to cheat the system. 
+ """ + # Remove backslashes to prevent escape character aroundfuckery like + # discord\.gg/gdudes-pony-farm + text = text.replace("\\", "") + + invites = INVITE_RE.findall(text) + invite_data = dict() + for invite in invites: + if invite in invite_data: + continue + + response = await self.bot.http_session.get( + f"{URLs.discord_invite_api}/{invite}", params={"with_counts": "true"} + ) + response = await response.json() + guild = response.get("guild") + if guild is None: + # Lack of a "guild" key in the JSON response indicates either an group DM invite, an + # expired invite, or an invalid invite. The API does not currently differentiate + # between invalid and expired invites + return True + + guild_id = guild.get("id") + guild_invite_whitelist = self._get_filterlist_items("guild_invite", allowed=True) + guild_invite_blacklist = self._get_filterlist_items("guild_invite", allowed=False) + + # Is this invite allowed? + guild_partnered_or_verified = ( + 'PARTNERED' in guild.get("features", []) + or 'VERIFIED' in guild.get("features", []) + ) + invite_not_allowed = ( + guild_id in guild_invite_blacklist # Blacklisted guilds are never permitted. + or guild_id not in guild_invite_whitelist # Whitelisted guilds are always permitted. + and not guild_partnered_or_verified # Otherwise guilds have to be Verified or Partnered. + ) + + if invite_not_allowed: + guild_icon_hash = guild["icon"] + guild_icon = ( + "https://cdn.discordapp.com/icons/" + f"{guild_id}/{guild_icon_hash}.png?size=512" + ) + + invite_data[invite] = { + "name": guild["name"], + "id": guild['id'], + "icon": guild_icon, + "members": response["approximate_member_count"], + "active": response["approximate_presence_count"] + } + + return invite_data if invite_data else False + + @staticmethod + async def _has_rich_embed(msg: Message) -> Union[bool, List[discord.Embed]]: + """Determines if `msg` contains any rich embeds not auto-generated from a URL.""" + if msg.embeds: + for embed in msg.embeds: + if embed.type == "rich": + urls = URL_RE.findall(msg.content) + if not embed.url or embed.url not in urls: + # If `embed.url` does not exist or if `embed.url` is not part of the content + # of the message, it's unlikely to be an auto-generated embed by Discord. + return msg.embeds + else: + log.trace( + "Found a rich embed sent by a regular user account, " + "but it was likely just an automatic URL embed." + ) + return False + return False + + @staticmethod + async def _has_everyone_ping(text: str) -> bool: + """Determines if `msg` contains an @everyone or @here ping outside of a codeblock.""" + # First pass to avoid running re.sub on every message + if not EVERYONE_PING_RE.search(text): + return False + + content_without_codeblocks = CODE_BLOCK_RE.sub("", text) + return bool(EVERYONE_PING_RE.search(content_without_codeblocks)) + + async def notify_member(self, filtered_member: Member, reason: str, channel: TextChannel) -> None: + """ + Notify filtered_member about a moderation action with the reason str. 
+ + First attempts to DM the user, fall back to in-channel notification if user has DMs disabled + """ + try: + await filtered_member.send(reason) + except discord.errors.Forbidden: + await channel.send(f"{filtered_member.mention} {reason}") + + def schedule_msg_delete(self, msg: dict) -> None: + """Delete an offensive message once its deletion date is reached.""" + delete_at = dateutil.parser.isoparse(msg['delete_date']).replace(tzinfo=None) + self.scheduler.schedule_at(delete_at, msg['id'], self.delete_offensive_msg(msg)) + + async def reschedule_offensive_msg_deletion(self) -> None: + """Get all the pending message deletion from the API and reschedule them.""" + await self.bot.wait_until_ready() + response = await self.bot.api_client.get('bot/offensive-messages',) + + now = datetime.utcnow() + + for msg in response: + delete_at = dateutil.parser.isoparse(msg['delete_date']).replace(tzinfo=None) + + if delete_at < now: + await self.delete_offensive_msg(msg) + else: + self.schedule_msg_delete(msg) + + async def delete_offensive_msg(self, msg: Mapping[str, str]) -> None: + """Delete an offensive message, and then delete it from the db.""" + try: + channel = self.bot.get_channel(msg['channel_id']) + if channel: + msg_obj = await channel.fetch_message(msg['id']) + await msg_obj.delete() + except NotFound: + log.info( + f"Tried to delete message {msg['id']}, but the message can't be found " + f"(it has been probably already deleted)." + ) + except HTTPException as e: + log.warning(f"Failed to delete message {msg['id']}: status {e.status}") + + await self.bot.api_client.delete(f'bot/offensive-messages/{msg["id"]}') + log.info(f"Deleted the offensive message with id {msg['id']}.") + + +def setup(bot: Bot) -> None: + """Load the Filtering cog.""" + bot.add_cog(Filtering(bot)) diff --git a/bot/cogs/security.py b/bot/exts/filters/security.py index c680c5e27..c680c5e27 100644 --- a/bot/cogs/security.py +++ b/bot/exts/filters/security.py diff --git a/bot/exts/filters/token_remover.py b/bot/exts/filters/token_remover.py new file mode 100644 index 000000000..bd6a1f97a --- /dev/null +++ b/bot/exts/filters/token_remover.py @@ -0,0 +1,233 @@ +import base64 +import binascii +import logging +import re +import typing as t + +from discord import Colour, Message, NotFound +from discord.ext.commands import Cog + +from bot import utils +from bot.bot import Bot +from bot.constants import Channels, Colours, Event, Icons +from bot.exts.moderation.modlog import ModLog +from bot.utils.messages import format_user + +log = logging.getLogger(__name__) + +LOG_MESSAGE = ( + "Censored a seemingly valid token sent by {author} in {channel}, " + "token was `{user_id}.{timestamp}.{hmac}`" +) +UNKNOWN_USER_LOG_MESSAGE = "Decoded user ID: `{user_id}` (Not present in server)." +KNOWN_USER_LOG_MESSAGE = ( + "Decoded user ID: `{user_id}` **(Present in server)**.\n" + "This matches `{user_name}` and means this is likely a valid **{kind}** token." +) +DELETION_MESSAGE_TEMPLATE = ( + "Hey {mention}! I noticed you posted a seemingly valid Discord API " + "token in your message and have removed your message. " + "This means that your token has been **compromised**. " + "Please change your token **immediately** at: " + "<https://discordapp.com/developers/applications/me>\n\n" + "Feel free to re-post it with the token removed. " + "If you believe this was a mistake, please let us know!" +) +DISCORD_EPOCH = 1_420_070_400 +TOKEN_EPOCH = 1_293_840_000 + +# Three parts delimited by dots: user ID, creation timestamp, HMAC. 
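Editor's note: the offensive-message round trip above is: serialise `delete_date` with `isoformat()` when posting to the `bot/offensive-messages` endpoint, then parse it back with `dateutil.parser.isoparse` when deciding whether to delete immediately or reschedule. A data-only sketch of both directions; `OFFENSIVE_MSG_DELETE_TIME` is configured elsewhere in the file, and the one-week value below is an assumption taken from the "deleted after one week" comment.

from datetime import datetime, timedelta

import dateutil.parser

OFFENSIVE_MSG_DELETE_TIME = timedelta(days=7)   # assumed; "deleted after one week"

def build_payload(message_id: int, channel_id: int, created_at: datetime) -> dict:
    # Shape of the JSON body posted to bot/offensive-messages.
    return {
        "id": message_id,
        "channel_id": channel_id,
        "delete_date": (created_at + OFFENSIVE_MSG_DELETE_TIME).isoformat(),
    }

def is_overdue(msg: dict, now: datetime) -> bool:
    # Same comparison reschedule_offensive_msg_deletion makes to decide between
    # deleting immediately and re-scheduling.
    delete_at = dateutil.parser.isoparse(msg["delete_date"]).replace(tzinfo=None)
    return delete_at < now

payload = build_payload(1234, 5678, datetime.utcnow())
print(is_overdue(payload, datetime.utcnow()))   # False: the deletion is a week away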
+# The HMAC isn't parsed further, but it's in the regex to ensure it at least exists in the string. +# Each part only matches base64 URL-safe characters. +# Padding has never been observed, but the padding character '=' is matched just in case. +TOKEN_RE = re.compile(r"([\w\-=]+)\.([\w\-=]+)\.([\w\-=]+)", re.ASCII) + + +class Token(t.NamedTuple): + """A Discord Bot token.""" + + user_id: str + timestamp: str + hmac: str + + +class TokenRemover(Cog): + """Scans messages for potential discord.py bot tokens and removes them.""" + + def __init__(self, bot: Bot): + self.bot = bot + + @property + def mod_log(self) -> ModLog: + """Get currently loaded ModLog cog instance.""" + return self.bot.get_cog("ModLog") + + @Cog.listener() + async def on_message(self, msg: Message) -> None: + """ + Check each message for a string that matches Discord's token pattern. + + See: https://discordapp.com/developers/docs/reference#snowflakes + """ + # Ignore DMs; can't delete messages in there anyway. + if not msg.guild or msg.author.bot: + return + + found_token = self.find_token_in_message(msg) + if found_token: + await self.take_action(msg, found_token) + + @Cog.listener() + async def on_message_edit(self, before: Message, after: Message) -> None: + """ + Check each edit for a string that matches Discord's token pattern. + + See: https://discordapp.com/developers/docs/reference#snowflakes + """ + await self.on_message(after) + + async def take_action(self, msg: Message, found_token: Token) -> None: + """Remove the `msg` containing the `found_token` and send a mod log message.""" + self.mod_log.ignore(Event.message_delete, msg.id) + + try: + await msg.delete() + except NotFound: + log.debug(f"Failed to remove token in message {msg.id}: message already deleted.") + return + + await msg.channel.send(DELETION_MESSAGE_TEMPLATE.format(mention=msg.author.mention)) + + log_message = self.format_log_message(msg, found_token) + userid_message, mention_everyone = self.format_userid_log_message(msg, found_token) + log.debug(log_message) + + # Send pretty mod log embed to mod-alerts + await self.mod_log.send_log_message( + icon_url=Icons.token_removed, + colour=Colour(Colours.soft_red), + title="Token removed!", + text=log_message + "\n" + userid_message, + thumbnail=msg.author.avatar_url_as(static_format="png"), + channel_id=Channels.mod_alerts, + ping_everyone=mention_everyone, + ) + + self.bot.stats.incr("tokens.removed_tokens") + + @classmethod + def format_userid_log_message(cls, msg: Message, token: Token) -> t.Tuple[str, bool]: + """ + Format the portion of the log message that includes details about the detected user ID. + + If the user is resolved to a member, the format includes the user ID, name, and the + kind of user detected. + + If we resolve to a member and it is not a bot, we also return True to ping everyone. 
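Editor's note: paired with the `Token` named tuple, `TOKEN_RE` splits a token-shaped substring into the three parts the later validators inspect. A quick illustration on an obviously fake, structurally token-like string (not a real token):

import re
import typing as t

TOKEN_RE = re.compile(r"([\w\-=]+)\.([\w\-=]+)\.([\w\-=]+)", re.ASCII)

class Token(t.NamedTuple):
    user_id: str
    timestamp: str
    hmac: str

# Just three dot-separated base64-ish chunks for demonstration.
content = "check this out: aaaaaaaaaaaaaaaaaaaaaaaa.bbbbbb.cccccccccccccccccccccccccccc"

for match in TOKEN_RE.finditer(content):
    token = Token(*match.groups())
    print(token.user_id, token.timestamp, token.hmac, sep=" | ")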
+ + Returns a tuple of (log_message, mention_everyone) + """ + user_id = cls.extract_user_id(token.user_id) + user = msg.guild.get_member(user_id) + + if user: + return KNOWN_USER_LOG_MESSAGE.format( + user_id=user_id, + user_name=str(user), + kind="BOT" if user.bot else "USER", + ), not user.bot + else: + return UNKNOWN_USER_LOG_MESSAGE.format(user_id=user_id), False + + @staticmethod + def format_log_message(msg: Message, token: Token) -> str: + """Return the generic portion of the log message to send for `token` being censored in `msg`.""" + return LOG_MESSAGE.format( + author=format_user(msg.author), + channel=msg.channel.mention, + user_id=token.user_id, + timestamp=token.timestamp, + hmac='x' * len(token.hmac), + ) + + @classmethod + def find_token_in_message(cls, msg: Message) -> t.Optional[Token]: + """Return a seemingly valid token found in `msg` or `None` if no token is found.""" + # Use finditer rather than search to guard against method calls prematurely returning the + # token check (e.g. `message.channel.send` also matches our token pattern) + for match in TOKEN_RE.finditer(msg.content): + token = Token(*match.groups()) + if ( + (cls.extract_user_id(token.user_id) is not None) + and cls.is_valid_timestamp(token.timestamp) + and cls.is_maybe_valid_hmac(token.hmac) + ): + # Short-circuit on first match + return token + + # No matching substring + return + + @staticmethod + def extract_user_id(b64_content: str) -> t.Optional[int]: + """Return a user ID integer from part of a potential token, or None if it couldn't be decoded.""" + b64_content = utils.pad_base64(b64_content) + + try: + decoded_bytes = base64.urlsafe_b64decode(b64_content) + string = decoded_bytes.decode('utf-8') + if not (string.isascii() and string.isdigit()): + # This case triggers if there are fancy unicode digits in the base64 encoding, + # that means it's not a valid user id. + return None + return int(string) + except (binascii.Error, ValueError): + return None + + @staticmethod + def is_valid_timestamp(b64_content: str) -> bool: + """ + Return True if `b64_content` decodes to a valid timestamp. + + If the timestamp is greater than the Discord epoch, it's probably valid. + See: https://i.imgur.com/7WdehGn.png + """ + b64_content = utils.pad_base64(b64_content) + + try: + decoded_bytes = base64.urlsafe_b64decode(b64_content) + timestamp = int.from_bytes(decoded_bytes, byteorder="big") + except (binascii.Error, ValueError) as e: + log.debug(f"Failed to decode token timestamp '{b64_content}': {e}") + return False + + # Seems like newer tokens don't need the epoch added, but add anyway since an upper bound + # is not checked. + if timestamp + TOKEN_EPOCH >= DISCORD_EPOCH: + return True + else: + log.debug(f"Invalid token timestamp '{b64_content}': smaller than Discord epoch") + return False + + @staticmethod + def is_maybe_valid_hmac(b64_content: str) -> bool: + """ + Determine if a given HMAC portion of a token is potentially valid. + + If the HMAC has 3 or less characters, it's probably a dummy value like "xxxxxxxxxx", + and thus the token can probably be skipped. 
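Editor's note: `extract_user_id` and `is_valid_timestamp` both re-pad a base64url segment and decode it; the first segment decodes to the user ID as a decimal string, the second to a big-endian integer that is checked against the Discord epoch. A synthetic round trip under stated assumptions: the padding helper below is a stand-in for `bot.utils.pad_base64`, and both encoded values are made up for illustration.

import base64

def pad_base64(data: str) -> str:
    # Stand-in for bot.utils.pad_base64: pad with '=' to a multiple of four.
    return data + "=" * (-len(data) % 4)

DISCORD_EPOCH = 1_420_070_400
TOKEN_EPOCH = 1_293_840_000

# Encode a made-up user ID and timestamp the way the segments are decoded above.
user_id = 123456789012345678
seconds_past_token_epoch = 200_000_000

id_part = base64.urlsafe_b64encode(str(user_id).encode()).decode().rstrip("=")
ts_part = base64.urlsafe_b64encode(seconds_past_token_epoch.to_bytes(4, "big")).decode().rstrip("=")

# Decode, mirroring extract_user_id and is_valid_timestamp.
decoded_id = int(base64.urlsafe_b64decode(pad_base64(id_part)).decode("utf-8"))
decoded_ts = int.from_bytes(base64.urlsafe_b64decode(pad_base64(ts_part)), byteorder="big")

assert decoded_id == user_id
assert decoded_ts + TOKEN_EPOCH >= DISCORD_EPOCH   # passes the timestamp sanity check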
+ """ + unique = len(set(b64_content.lower())) + if unique <= 3: + log.debug( + f"Considering the HMAC {b64_content} a dummy because it has {unique}" + " case-insensitively unique characters" + ) + return False + else: + return True + + +def setup(bot: Bot) -> None: + """Load the TokenRemover cog.""" + bot.add_cog(TokenRemover(bot)) diff --git a/bot/exts/filters/webhook_remover.py b/bot/exts/filters/webhook_remover.py new file mode 100644 index 000000000..08fe94055 --- /dev/null +++ b/bot/exts/filters/webhook_remover.py @@ -0,0 +1,85 @@ +import logging +import re + +from discord import Colour, Message, NotFound +from discord.ext.commands import Cog + +from bot.bot import Bot +from bot.constants import Channels, Colours, Event, Icons +from bot.exts.moderation.modlog import ModLog +from bot.utils.messages import format_user + +WEBHOOK_URL_RE = re.compile(r"((?:https?://)?discord(?:app)?\.com/api/webhooks/\d+/)\S+/?", re.IGNORECASE) + +ALERT_MESSAGE_TEMPLATE = ( + "{user}, looks like you posted a Discord webhook URL. Therefore, your " + "message has been removed. Your webhook may have been **compromised** so " + "please re-create the webhook **immediately**. If you believe this was " + "mistake, please let us know." +) + +log = logging.getLogger(__name__) + + +class WebhookRemover(Cog): + """Scan messages to detect Discord webhooks links.""" + + def __init__(self, bot: Bot): + self.bot = bot + + @property + def mod_log(self) -> ModLog: + """Get current instance of `ModLog`.""" + return self.bot.get_cog("ModLog") + + async def delete_and_respond(self, msg: Message, redacted_url: str) -> None: + """Delete `msg` and send a warning that it contained the Discord webhook `redacted_url`.""" + # Don't log this, due internal delete, not by user. Will make different entry. + self.mod_log.ignore(Event.message_delete, msg.id) + + try: + await msg.delete() + except NotFound: + log.debug(f"Failed to remove webhook in message {msg.id}: message already deleted.") + return + + await msg.channel.send(ALERT_MESSAGE_TEMPLATE.format(user=msg.author.mention)) + + message = ( + f"{format_user(msg.author)} posted a Discord webhook URL to {msg.channel.mention}. " + f"Webhook URL was `{redacted_url}`" + ) + log.debug(message) + + # Send entry to moderation alerts. + await self.mod_log.send_log_message( + icon_url=Icons.token_removed, + colour=Colour(Colours.soft_red), + title="Discord webhook URL removed!", + text=message, + thumbnail=msg.author.avatar_url_as(static_format="png"), + channel_id=Channels.mod_alerts + ) + + self.bot.stats.incr("tokens.removed_webhooks") + + @Cog.listener() + async def on_message(self, msg: Message) -> None: + """Check if a Discord webhook URL is in `message`.""" + # Ignore DMs; can't delete messages in there anyway. 
+ if not msg.guild or msg.author.bot: + return + + matches = WEBHOOK_URL_RE.search(msg.content) + if matches: + await self.delete_and_respond(msg, matches[1] + "xxx") + + @Cog.listener() + async def on_message_edit(self, before: Message, after: Message) -> None: + """Check if a Discord webhook URL is in the edited message `after`.""" + await self.on_message(after) + + +def setup(bot: Bot) -> None: + """Load `WebhookRemover` cog.""" + bot.add_cog(WebhookRemover(bot)) diff --git a/tests/bot/patches/__init__.py b/bot/exts/fun/__init__.py index e69de29bb..e69de29bb 100644 --- a/tests/bot/patches/__init__.py +++ b/bot/exts/fun/__init__.py diff --git a/bot/cogs/duck_pond.py b/bot/exts/fun/duck_pond.py index 345d2856c..48aa2749c 100644 --- a/bot/cogs/duck_pond.py +++ b/bot/exts/fun/duck_pond.py @@ -1,13 +1,16 @@ +import asyncio import logging -from typing import Optional, Union +from typing import Union import discord from discord import Color, Embed, Member, Message, RawReactionActionEvent, User, errors -from discord.ext.commands import Cog +from discord.ext.commands import Cog, Context, command from bot import constants from bot.bot import Bot +from bot.utils.checks import has_any_role from bot.utils.messages import send_attachments +from bot.utils.webhooks import send_webhook log = logging.getLogger(__name__) @@ -18,11 +21,14 @@ class DuckPond(Cog): def __init__(self, bot: Bot): self.bot = bot self.webhook_id = constants.Webhooks.duck_pond + self.webhook = None + self.ducked_messages = [] self.bot.loop.create_task(self.fetch_webhook()) + self.relay_lock = None async def fetch_webhook(self) -> None: """Fetches the webhook object, so we can post to it.""" - await self.bot.wait_until_ready() + await self.bot.wait_until_guild_available() try: self.webhook = await self.bot.fetch_webhook(self.webhook_id) @@ -47,23 +53,13 @@ class DuckPond(Cog): return True return False - async def send_webhook( - self, - content: Optional[str] = None, - username: Optional[str] = None, - avatar_url: Optional[str] = None, - embed: Optional[Embed] = None, - ) -> None: - """Send a webhook to the duck_pond channel.""" - try: - await self.webhook.send( - content=content, - username=username, - avatar_url=avatar_url, - embed=embed - ) - except discord.HTTPException: - log.exception("Failed to send a message to the Duck Pool webhook") + @staticmethod + def _is_duck_emoji(emoji: Union[str, discord.PartialEmoji, discord.Emoji]) -> bool: + """Check if the emoji is a valid duck emoji.""" + if isinstance(emoji, str): + return emoji == "🦆" + else: + return hasattr(emoji, "name") and emoji.name.startswith("ducky_") async def count_ducks(self, message: Message) -> int: """ @@ -71,33 +67,24 @@ class DuckPond(Cog): Only counts ducks added by staff members. """ - duck_count = 0 - duck_reactors = [] + duck_reactors = set() + # iterate over all reactions for reaction in message.reactions: - async for user in reaction.users(): - - # Is the user a staff member and not already counted as reactor? - if not self.is_staff(user) or user.id in duck_reactors: - continue - - # Is the emoji a duck? 
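Editor's note: group 1 of `WEBHOOK_URL_RE` deliberately stops after the webhook ID, so the `matches[1] + "xxx"` call above reproduces the URL with the secret token redacted. With a dummy URL:

import re

WEBHOOK_URL_RE = re.compile(
    r"((?:https?://)?discord(?:app)?\.com/api/webhooks/\d+/)\S+/?", re.IGNORECASE
)

# Dummy webhook URL; the trailing part stands in for the secret token.
content = "oops: https://discord.com/api/webhooks/1234567890/abcDEF_ghiJKL-mnoPQR"

match = WEBHOOK_URL_RE.search(content)
if match:
    print(match[1] + "xxx")
# https://discord.com/api/webhooks/1234567890/xxx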
- if hasattr(reaction.emoji, "id"): - if reaction.emoji.id in constants.DuckPond.custom_emojis: - duck_count += 1 - duck_reactors.append(user.id) - elif isinstance(reaction.emoji, str): - if reaction.emoji == "🦆": - duck_count += 1 - duck_reactors.append(user.id) - return duck_count + # check if the current reaction is a duck + if not self._is_duck_emoji(reaction.emoji): + continue + + # update the set of reactors with all staff reactors + duck_reactors |= {user.id async for user in reaction.users() if self.is_staff(user)} + + return len(duck_reactors) async def relay_message(self, message: Message) -> None: """Relays the message's content and attachments to the duck pond channel.""" - clean_content = message.clean_content - - if clean_content: - await self.send_webhook( + if message.clean_content: + await send_webhook( + webhook=self.webhook, content=message.clean_content, username=message.author.display_name, avatar_url=message.author.avatar_url @@ -111,26 +98,44 @@ class DuckPond(Cog): description=":x: **This message contained an attachment, but it could not be retrieved**", color=Color.red() ) - await self.send_webhook( + await send_webhook( + webhook=self.webhook, embed=e, username=message.author.display_name, avatar_url=message.author.avatar_url ) except discord.HTTPException: - log.exception(f"Failed to send an attachment to the webhook") + log.exception("Failed to send an attachment to the webhook") - await message.add_reaction("✅") + async def locked_relay(self, message: discord.Message) -> bool: + """Relay a message after obtaining the relay lock.""" + if self.relay_lock is None: + # Lazily load the lock to ensure it's created within the + # appropriate event loop. + self.relay_lock = asyncio.Lock() - @staticmethod - def _payload_has_duckpond_emoji(payload: RawReactionActionEvent) -> bool: + async with self.relay_lock: + # check if the message has a checkmark after acquiring the lock + if await self.has_green_checkmark(message): + return False + + # relay the message + await self.relay_message(message) + + # add a green checkmark to indicate that the message was relayed + await message.add_reaction("✅") + return True + + def _payload_has_duckpond_emoji(self, emoji: discord.PartialEmoji) -> bool: """Test if the RawReactionActionEvent payload contains a duckpond emoji.""" - if payload.emoji.is_custom_emoji(): - if payload.emoji.id in constants.DuckPond.custom_emojis: - return True - elif payload.emoji.name == "🦆": - return True + if emoji.is_unicode_emoji(): + # For unicode PartialEmojis, the `name` attribute is just the string + # representation of the emoji. This is what the helper method + # expects, as unicode emojis show up as just a `str` instance when + # inspecting the reactions attached to a message. + emoji = emoji.name - return False + return self._is_duck_emoji(emoji) @Cog.listener() async def on_raw_reaction_add(self, payload: RawReactionActionEvent) -> None: @@ -141,33 +146,51 @@ class DuckPond(Cog): amount of ducks specified in the config under duck_pond/threshold, it will send the message off to the duck pond. """ + # Ignore other guilds and DMs. + if payload.guild_id != constants.Guild.id: + return + + # Was this reaction issued in a blacklisted channel? + if payload.channel_id in constants.DuckPond.channel_blacklist: + return + # Is the emoji in the reaction a duck? 
- if not self._payload_has_duckpond_emoji(payload): + if not self._payload_has_duckpond_emoji(payload.emoji): return channel = discord.utils.get(self.bot.get_all_channels(), id=payload.channel_id) + if channel is None: + return + message = await channel.fetch_message(payload.message_id) member = discord.utils.get(message.guild.members, id=payload.user_id) - # Is the member a human and a staff member? - if not self.is_staff(member) or member.bot: + # Was the message sent by a human staff member? + if not self.is_staff(message.author) or message.author.bot: return - # Does the message already have a green checkmark? - if await self.has_green_checkmark(message): + # Is the reactor a human staff member? + if not self.is_staff(member) or member.bot: return # Time to count our ducks! duck_count = await self.count_ducks(message) # If we've got more than the required amount of ducks, send the message to the duck_pond. - if duck_count >= constants.DuckPond.threshold: - await self.relay_message(message) + if duck_count >= constants.DuckPond.threshold and message.id not in self.ducked_messages: + self.ducked_messages.append(message.id) + await self.locked_relay(message) @Cog.listener() async def on_raw_reaction_remove(self, payload: RawReactionActionEvent) -> None: """Ensure that people don't remove the green checkmark from duck ponded messages.""" + # Ignore other guilds and DMs. + if payload.guild_id != constants.Guild.id: + return + channel = discord.utils.get(self.bot.get_all_channels(), id=payload.channel_id) + if channel is None: + return # Prevent the green checkmark from being removed if payload.emoji.name == "✅": @@ -176,6 +199,15 @@ class DuckPond(Cog): if duck_count >= constants.DuckPond.threshold: await message.add_reaction("✅") + @command(name="duckify", aliases=("duckpond", "pondify")) + @has_any_role(constants.Roles.admins) + async def duckify(self, ctx: Context, message: discord.Message) -> None: + """Relay a message to the duckpond, no ducks required!""" + if await self.locked_relay(message): + await ctx.message.add_reaction("🦆") + else: + await ctx.message.add_reaction("❌") + def setup(bot: Bot) -> None: """Load the DuckPond cog.""" diff --git a/bot/cogs/off_topic_names.py b/bot/exts/fun/off_topic_names.py index bf777ea5a..7fc93b88c 100644 --- a/bot/cogs/off_topic_names.py +++ b/bot/exts/fun/off_topic_names.py @@ -1,49 +1,21 @@ -import asyncio import difflib import logging from datetime import datetime, timedelta from discord import Colour, Embed -from discord.ext.commands import BadArgument, Cog, Context, Converter, group +from discord.ext.commands import Cog, Context, group, has_any_role +from discord.utils import sleep_until from bot.api import ResponseCodeError from bot.bot import Bot from bot.constants import Channels, MODERATION_ROLES -from bot.decorators import with_role +from bot.converters import OffTopicName from bot.pagination import LinePaginator - CHANNELS = (Channels.off_topic_0, Channels.off_topic_1, Channels.off_topic_2) log = logging.getLogger(__name__) -class OffTopicName(Converter): - """A converter that ensures an added off-topic name is valid.""" - - @staticmethod - async def convert(ctx: Context, argument: str) -> str: - """Attempt to replace any invalid characters with their approximate Unicode equivalent.""" - allowed_characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ!?'`-" - - # Chain multiple words to a single one - argument = "-".join(argument.split()) - - if not (2 <= len(argument) <= 96): - raise BadArgument("Channel name must be between 2 and 96 chars long") - - 
elif not all(c.isalnum() or c in allowed_characters for c in argument): - raise BadArgument( - "Channel name must only consist of " - "alphanumeric characters, minus signs or apostrophes." - ) - - # Replace invalid characters with unicode alternatives. - table = str.maketrans( - allowed_characters, '𝖠𝖡𝖢𝖣𝖤𝖥𝖦𝖧𝖨𝖩𝖪𝖫𝖬𝖭𝖮𝖯𝖰𝖱𝖲𝖳𝖴𝖵𝖶𝖷𝖸𝖹ǃ?’’-' - ) - return argument.translate(table) - - async def update_names(bot: Bot) -> None: """Background updater task that performs the daily channel name update.""" while True: @@ -51,8 +23,7 @@ async def update_names(bot: Bot) -> None: # we go past midnight in the `seconds_to_sleep` set below. today_at_midnight = datetime.utcnow().replace(microsecond=0, second=0, minute=0, hour=0) next_midnight = today_at_midnight + timedelta(days=1) - seconds_to_sleep = (next_midnight - datetime.utcnow()).seconds + 1 - await asyncio.sleep(seconds_to_sleep) + await sleep_until(next_midnight) try: channel_0_name, channel_1_name, channel_2_name = await bot.api_client.get( @@ -88,19 +59,19 @@ class OffTopicNames(Cog): async def init_offtopic_updater(self) -> None: """Start off-topic channel updating event loop if it hasn't already started.""" - await self.bot.wait_until_ready() + await self.bot.wait_until_guild_available() if self.updater_task is None: coro = update_names(self.bot) self.updater_task = self.bot.loop.create_task(coro) @group(name='otname', aliases=('otnames', 'otn'), invoke_without_command=True) - @with_role(*MODERATION_ROLES) + @has_any_role(*MODERATION_ROLES) async def otname_group(self, ctx: Context) -> None: """Add or list items from the off-topic channel name rotation.""" - await ctx.invoke(self.bot.get_command("help"), "otname") + await ctx.send_help(ctx.command) @otname_group.command(name='add', aliases=('a',)) - @with_role(*MODERATION_ROLES) + @has_any_role(*MODERATION_ROLES) async def add_command(self, ctx: Context, *, name: OffTopicName) -> None: """ Adds a new off-topic name to the rotation. @@ -123,7 +94,7 @@ class OffTopicNames(Cog): await self._add_name(ctx, name) @otname_group.command(name='forceadd', aliases=('fa',)) - @with_role(*MODERATION_ROLES) + @has_any_role(*MODERATION_ROLES) async def force_add_command(self, ctx: Context, *, name: OffTopicName) -> None: """Forcefully adds a new off-topic name to the rotation.""" await self._add_name(ctx, name) @@ -136,7 +107,7 @@ class OffTopicNames(Cog): await ctx.send(f":ok_hand: Added `{name}` to the names list.") @otname_group.command(name='delete', aliases=('remove', 'rm', 'del', 'd')) - @with_role(*MODERATION_ROLES) + @has_any_role(*MODERATION_ROLES) async def delete_command(self, ctx: Context, *, name: OffTopicName) -> None: """Removes a off-topic name from the rotation.""" await self.bot.api_client.delete(f'bot/off-topic-channel-names/{name}') @@ -145,7 +116,7 @@ class OffTopicNames(Cog): await ctx.send(f":ok_hand: Removed `{name}` from the names list.") @otname_group.command(name='list', aliases=('l',)) - @with_role(*MODERATION_ROLES) + @has_any_role(*MODERATION_ROLES) async def list_command(self, ctx: Context) -> None: """ Lists all currently known off-topic channel names in a paginator. 
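Editor's note: the updater now computes the next UTC midnight and hands it to `discord.utils.sleep_until` instead of hand-rolling a seconds delay. The datetime arithmetic on its own:

from datetime import datetime, timedelta

def next_utc_midnight(now: datetime) -> datetime:
    # Same calculation update_names feeds to discord.utils.sleep_until.
    today_at_midnight = now.replace(microsecond=0, second=0, minute=0, hour=0)
    return today_at_midnight + timedelta(days=1)

print(next_utc_midnight(datetime(2021, 1, 2, 20, 23, 17)))   # 2021-01-03 00:00:00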
@@ -165,7 +136,7 @@ class OffTopicNames(Cog): await ctx.send(embed=embed) @otname_group.command(name='search', aliases=('s',)) - @with_role(*MODERATION_ROLES) + @has_any_role(*MODERATION_ROLES) async def search_command(self, ctx: Context, *, query: OffTopicName) -> None: """Search for an off-topic name.""" result = await self.bot.api_client.get('bot/off-topic-channel-names') diff --git a/bot/exts/help_channels/__init__.py b/bot/exts/help_channels/__init__.py new file mode 100644 index 000000000..781f40449 --- /dev/null +++ b/bot/exts/help_channels/__init__.py @@ -0,0 +1,41 @@ +import logging + +from bot import constants +from bot.bot import Bot +from bot.exts.help_channels._channel import MAX_CHANNELS_PER_CATEGORY + +log = logging.getLogger(__name__) + + +def validate_config() -> None: + """Raise a ValueError if the cog's config is invalid.""" + log.trace("Validating config.") + total = constants.HelpChannels.max_total_channels + available = constants.HelpChannels.max_available + + if total == 0 or available == 0: + raise ValueError("max_total_channels and max_available and must be greater than 0.") + + if total < available: + raise ValueError( + f"max_total_channels ({total}) must be greater than or equal to max_available " + f"({available})." + ) + + if total > MAX_CHANNELS_PER_CATEGORY: + raise ValueError( + f"max_total_channels ({total}) must be less than or equal to " + f"{MAX_CHANNELS_PER_CATEGORY} due to Discord's limit on channels per category." + ) + + +def setup(bot: Bot) -> None: + """Load the HelpChannels cog.""" + # Defer import to reduce side effects from importing the help_channels package. + from bot.exts.help_channels._cog import HelpChannels + try: + validate_config() + except ValueError as e: + log.error(f"HelpChannels cog will not be loaded due to misconfiguration: {e}") + else: + bot.add_cog(HelpChannels(bot)) diff --git a/bot/exts/help_channels/_caches.py b/bot/exts/help_channels/_caches.py new file mode 100644 index 000000000..4cea385b7 --- /dev/null +++ b/bot/exts/help_channels/_caches.py @@ -0,0 +1,19 @@ +from async_rediscache import RedisCache + +# This dictionary maps a help channel to the time it was claimed +# RedisCache[discord.TextChannel.id, UtcPosixTimestamp] +claim_times = RedisCache(namespace="HelpChannels.claim_times") + +# This cache tracks which channels are claimed by which members. +# RedisCache[discord.TextChannel.id, t.Union[discord.User.id, discord.Member.id]] +claimants = RedisCache(namespace="HelpChannels.help_channel_claimants") + +# This cache maps a help channel to original question message in same channel. +# RedisCache[discord.TextChannel.id, discord.Message.id] +question_messages = RedisCache(namespace="HelpChannels.question_messages") + +# This cache maps a help channel to whether it has had any +# activity other than the original claimant. True being no other +# activity and False being other activity. 
+# RedisCache[discord.TextChannel.id, bool] +unanswered = RedisCache(namespace="HelpChannels.unanswered") diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py new file mode 100644 index 000000000..e717d7af8 --- /dev/null +++ b/bot/exts/help_channels/_channel.py @@ -0,0 +1,57 @@ +import logging +import typing as t +from datetime import datetime, timedelta + +import discord + +from bot import constants +from bot.exts.help_channels import _caches, _message + +log = logging.getLogger(__name__) + +MAX_CHANNELS_PER_CATEGORY = 50 +EXCLUDED_CHANNELS = (constants.Channels.cooldown,) + + +def get_category_channels(category: discord.CategoryChannel) -> t.Iterable[discord.TextChannel]: + """Yield the text channels of the `category` in an unsorted manner.""" + log.trace(f"Getting text channels in the category '{category}' ({category.id}).") + + # This is faster than using category.channels because the latter sorts them. + for channel in category.guild.channels: + if channel.category_id == category.id and not is_excluded_channel(channel): + yield channel + + +async def get_idle_time(channel: discord.TextChannel) -> t.Optional[int]: + """ + Return the time elapsed, in seconds, since the last message sent in the `channel`. + + Return None if the channel has no messages. + """ + log.trace(f"Getting the idle time for #{channel} ({channel.id}).") + + msg = await _message.get_last_message(channel) + if not msg: + log.debug(f"No idle time available; #{channel} ({channel.id}) has no messages.") + return None + + idle_time = (datetime.utcnow() - msg.created_at).seconds + + log.trace(f"#{channel} ({channel.id}) has been idle for {idle_time} seconds.") + return idle_time + + +async def get_in_use_time(channel_id: int) -> t.Optional[timedelta]: + """Return the duration `channel_id` has been in use. Return None if it's not in use.""" + log.trace(f"Calculating in use time for channel {channel_id}.") + + claimed_timestamp = await _caches.claim_times.get(channel_id) + if claimed_timestamp: + claimed = datetime.utcfromtimestamp(claimed_timestamp) + return datetime.utcnow() - claimed + + +def is_excluded_channel(channel: discord.abc.GuildChannel) -> bool: + """Check if a channel should be excluded from the help channel system.""" + return not isinstance(channel, discord.TextChannel) or channel.id in EXCLUDED_CHANNELS diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py new file mode 100644 index 000000000..983c5d183 --- /dev/null +++ b/bot/exts/help_channels/_cog.py @@ -0,0 +1,520 @@ +import asyncio +import logging +import random +import typing as t +from datetime import datetime, timezone + +import discord +import discord.abc +from discord.ext import commands + +from bot import constants +from bot.bot import Bot +from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name +from bot.utils import channel as channel_utils +from bot.utils.scheduling import Scheduler + +log = logging.getLogger(__name__) + +HELP_CHANNEL_TOPIC = """ +This is a Python help channel. You can claim your own help channel in the Python Help: Available category. +""" + + +class HelpChannels(commands.Cog): + """ + Manage the help channel system of the guild. 
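Editor's note: `get_in_use_time` turns a stored claim timestamp back into a `timedelta`. The same logic with a plain dict standing in for the `claim_times` RedisCache:

import typing as t
from datetime import datetime, timedelta, timezone

# Plain dict standing in for _caches.claim_times (channel id -> POSIX timestamp).
claim_times = {1234: datetime.now(timezone.utc).timestamp()}

def get_in_use_time(channel_id: int) -> t.Optional[timedelta]:
    claimed_timestamp = claim_times.get(channel_id)
    if claimed_timestamp:
        claimed = datetime.utcfromtimestamp(claimed_timestamp)
        return datetime.utcnow() - claimed
    return None

print(get_in_use_time(1234))   # a timedelta close to 0:00:00
print(get_in_use_time(9999))   # None: that channel isn't in use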
+ + The system is based on a 3-category system: + + Available Category + + * Contains channels which are ready to be occupied by someone who needs help + * Will always contain `constants.HelpChannels.max_available` channels; refilled automatically + from the pool of dormant channels + * Prioritise using the channels which have been dormant for the longest amount of time + * If there are no more dormant channels, the bot will automatically create a new one + * If there are no dormant channels to move, helpers will be notified (see `notify()`) + * When a channel becomes available, the dormant embed will be edited to show `AVAILABLE_MSG` + * User can only claim a channel at an interval `constants.HelpChannels.claim_minutes` + * To keep track of cooldowns, user which claimed a channel will have a temporary role + + In Use Category + + * Contains all channels which are occupied by someone needing help + * Channel moves to dormant category after `constants.HelpChannels.idle_minutes` of being idle + * Command can prematurely mark a channel as dormant + * Channel claimant is allowed to use the command + * Allowed roles for the command are configurable with `constants.HelpChannels.cmd_whitelist` + * When a channel becomes dormant, an embed with `DORMANT_MSG` will be sent + + Dormant Category + + * Contains channels which aren't in use + * Channels are used to refill the Available category + + Help channels are named after the chemical elements in `bot/resources/elements.json`. + """ + + def __init__(self, bot: Bot): + self.bot = bot + self.scheduler = Scheduler(self.__class__.__name__) + + # Categories + self.available_category: discord.CategoryChannel = None + self.in_use_category: discord.CategoryChannel = None + self.dormant_category: discord.CategoryChannel = None + + # Queues + self.channel_queue: asyncio.Queue[discord.TextChannel] = None + self.name_queue: t.Deque[str] = None + + self.last_notification: t.Optional[datetime] = None + + # Asyncio stuff + self.queue_tasks: t.List[asyncio.Task] = [] + self.on_message_lock = asyncio.Lock() + self.init_task = self.bot.loop.create_task(self.init_cog()) + + def cog_unload(self) -> None: + """Cancel the init task and scheduled tasks when the cog unloads.""" + log.trace("Cog unload: cancelling the init_cog task") + self.init_task.cancel() + + log.trace("Cog unload: cancelling the channel queue tasks") + for task in self.queue_tasks: + task.cancel() + + self.scheduler.cancel_all() + + def create_channel_queue(self) -> asyncio.Queue: + """ + Return a queue of dormant channels to use for getting the next available channel. + + The channels are added to the queue in a random order. + """ + log.trace("Creating the channel queue.") + + channels = list(_channel.get_category_channels(self.dormant_category)) + random.shuffle(channels) + + log.trace("Populating the channel queue with channels.") + queue = asyncio.Queue() + for channel in channels: + queue.put_nowait(channel) + + return queue + + async def create_dormant(self) -> t.Optional[discord.TextChannel]: + """ + Create and return a new channel in the Dormant category. + + The new channel will sync its permission overwrites with the category. + + Return None if no more channel names are available. 
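Editor's note: `create_channel_queue` below pre-loads an `asyncio.Queue` with the dormant channels in random order; because the queue is unbounded, `put_nowait` never blocks or raises. A reduced version, using element names in place of channel objects (the docstring above notes channels are named after chemical elements):

import asyncio
import random

def create_channel_queue(dormant_channels: list) -> asyncio.Queue:
    # Shuffle first, then pre-load the queue, as the cog does.
    channels = list(dormant_channels)
    random.shuffle(channels)

    queue = asyncio.Queue()
    for channel in channels:
        queue.put_nowait(channel)
    return queue

async def main() -> None:
    queue = create_channel_queue(["helium", "lithium", "beryllium"])
    print(await queue.get())   # one of the three names, in shuffled order

asyncio.run(main())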
+ """ + log.trace("Getting a name for a new dormant channel.") + + try: + name = self.name_queue.popleft() + except IndexError: + log.debug("No more names available for new dormant channels.") + return None + + log.debug(f"Creating a new dormant channel named {name}.") + return await self.dormant_category.create_text_channel(name, topic=HELP_CHANNEL_TOPIC) + + async def dormant_check(self, ctx: commands.Context) -> bool: + """Return True if the user is the help channel claimant or passes the role check.""" + if await _caches.claimants.get(ctx.channel.id) == ctx.author.id: + log.trace(f"{ctx.author} is the help channel claimant, passing the check for dormant.") + self.bot.stats.incr("help.dormant_invoke.claimant") + return True + + log.trace(f"{ctx.author} is not the help channel claimant, checking roles.") + has_role = await commands.has_any_role(*constants.HelpChannels.cmd_whitelist).predicate(ctx) + + if has_role: + self.bot.stats.incr("help.dormant_invoke.staff") + + return has_role + + @commands.command(name="close", aliases=["dormant", "solved"], enabled=False) + async def close_command(self, ctx: commands.Context) -> None: + """ + Make the current in-use help channel dormant. + + Make the channel dormant if the user passes the `dormant_check`, + delete the message that invoked this. + """ + log.trace("close command invoked; checking if the channel is in-use.") + + if ctx.channel.category != self.in_use_category: + log.debug(f"{ctx.author} invoked command 'dormant' outside an in-use help channel") + return + + if await self.dormant_check(ctx): + await self.move_to_dormant(ctx.channel, "command") + self.scheduler.cancel(ctx.channel.id) + + async def get_available_candidate(self) -> discord.TextChannel: + """ + Return a dormant channel to turn into an available channel. + + If no channel is available, wait indefinitely until one becomes available. + """ + log.trace("Getting an available channel candidate.") + + try: + channel = self.channel_queue.get_nowait() + except asyncio.QueueEmpty: + log.info("No candidate channels in the queue; creating a new channel.") + channel = await self.create_dormant() + + if not channel: + log.info("Couldn't create a candidate channel; waiting to get one from the queue.") + notify_channel = self.bot.get_channel(constants.HelpChannels.notify_channel) + last_notification = await _message.notify(notify_channel, self.last_notification) + if last_notification: + self.last_notification = last_notification + self.bot.stats.incr("help.out_of_channel_alerts") + + channel = await self.wait_for_dormant_channel() + + return channel + + async def init_available(self) -> None: + """Initialise the Available category with channels.""" + log.trace("Initialising the Available category with channels.") + + channels = list(_channel.get_category_channels(self.available_category)) + missing = constants.HelpChannels.max_available - len(channels) + + # If we've got less than `max_available` channel available, we should add some. + if missing > 0: + log.trace(f"Moving {missing} missing channels to the Available category.") + for _ in range(missing): + await self.move_to_available() + + # If for some reason we have more than `max_available` channels available, + # we should move the superfluous ones over to dormant. 
+ elif missing < 0: + log.trace(f"Moving {abs(missing)} superfluous available channels over to the Dormant category.") + for channel in channels[:abs(missing)]: + await self.move_to_dormant(channel, "auto") + + async def init_categories(self) -> None: + """Get the help category objects. Remove the cog if retrieval fails.""" + log.trace("Getting the CategoryChannel objects for the help categories.") + + try: + self.available_category = await channel_utils.try_get_channel( + constants.Categories.help_available + ) + self.in_use_category = await channel_utils.try_get_channel( + constants.Categories.help_in_use + ) + self.dormant_category = await channel_utils.try_get_channel( + constants.Categories.help_dormant + ) + except discord.HTTPException: + log.exception("Failed to get a category; cog will be removed") + self.bot.remove_cog(self.qualified_name) + + async def init_cog(self) -> None: + """Initialise the help channel system.""" + log.trace("Waiting for the guild to be available before initialisation.") + await self.bot.wait_until_guild_available() + + log.trace("Initialising the cog.") + await self.init_categories() + await _cooldown.check_cooldowns(self.scheduler) + + self.channel_queue = self.create_channel_queue() + self.name_queue = _name.create_name_queue( + self.available_category, + self.in_use_category, + self.dormant_category, + ) + + log.trace("Moving or rescheduling in-use channels.") + for channel in _channel.get_category_channels(self.in_use_category): + await self.move_idle_channel(channel, has_task=False) + + # Prevent the command from being used until ready. + # The ready event wasn't used because channels could change categories between the time + # the command is invoked and the cog is ready (e.g. if move_idle_channel wasn't called yet). + # This may confuse users. So would potentially long delays for the cog to become ready. + self.close_command.enabled = True + + await self.init_available() + self.report_stats() + + log.info("Cog is ready!") + + def report_stats(self) -> None: + """Report the channel count stats.""" + total_in_use = sum(1 for _ in _channel.get_category_channels(self.in_use_category)) + total_available = sum(1 for _ in _channel.get_category_channels(self.available_category)) + total_dormant = sum(1 for _ in _channel.get_category_channels(self.dormant_category)) + + self.bot.stats.gauge("help.total.in_use", total_in_use) + self.bot.stats.gauge("help.total.available", total_available) + self.bot.stats.gauge("help.total.dormant", total_dormant) + + async def move_idle_channel(self, channel: discord.TextChannel, has_task: bool = True) -> None: + """ + Make the `channel` dormant if idle or schedule the move if still active. + + If `has_task` is True and rescheduling is required, the extant task to make the channel + dormant will first be cancelled. + """ + log.trace(f"Handling in-use channel #{channel} ({channel.id}).") + + if not await _message.is_empty(channel): + idle_seconds = constants.HelpChannels.idle_minutes * 60 + else: + idle_seconds = constants.HelpChannels.deleted_idle_minutes * 60 + + time_elapsed = await _channel.get_idle_time(channel) + + if time_elapsed is None or time_elapsed >= idle_seconds: + log.info( + f"#{channel} ({channel.id}) is idle longer than {idle_seconds} seconds " + f"and will be made dormant." + ) + + await self.move_to_dormant(channel, "auto") + else: + # Cancel the existing task, if any. 
+ if has_task: + self.scheduler.cancel(channel.id) + + delay = idle_seconds - time_elapsed + log.info( + f"#{channel} ({channel.id}) is still active; " + f"scheduling it to be moved after {delay} seconds." + ) + + self.scheduler.schedule_later(delay, channel.id, self.move_idle_channel(channel)) + + async def move_to_bottom_position(self, channel: discord.TextChannel, category_id: int, **options) -> None: + """ + Move the `channel` to the bottom position of `category` and edit channel attributes. + + To ensure "stable sorting", we use the `bulk_channel_update` endpoint and provide the current + positions of the other channels in the category as-is. This should make sure that the channel + really ends up at the bottom of the category. + + If `options` are provided, the channel will be edited after the move is completed. This is the + same order of operations that `discord.TextChannel.edit` uses. For information on available + options, see the documentation on `discord.TextChannel.edit`. While possible, position-related + options should be avoided, as it may interfere with the category move we perform. + """ + # Get a fresh copy of the category from the bot to avoid the cache mismatch issue we had. + category = await channel_utils.try_get_channel(category_id) + + payload = [{"id": c.id, "position": c.position} for c in category.channels] + + # Calculate the bottom position based on the current highest position in the category. If the + # category is currently empty, we simply use the current position of the channel to avoid making + # unnecessary changes to positions in the guild. + bottom_position = payload[-1]["position"] + 1 if payload else channel.position + + payload.append( + { + "id": channel.id, + "position": bottom_position, + "parent_id": category.id, + "lock_permissions": True, + } + ) + + # We use d.py's method to ensure our request is processed by d.py's rate limit manager + await self.bot.http.bulk_channel_update(category.guild.id, payload) + + # Now that the channel is moved, we can edit the other attributes + if options: + await channel.edit(**options) + + async def move_to_available(self) -> None: + """Make a channel available.""" + log.trace("Making a channel available.") + + channel = await self.get_available_candidate() + log.info(f"Making #{channel} ({channel.id}) available.") + + await _message.send_available_message(channel) + + log.trace(f"Moving #{channel} ({channel.id}) to the Available category.") + + await self.move_to_bottom_position( + channel=channel, + category_id=constants.Categories.help_available, + ) + + self.report_stats() + + async def move_to_dormant(self, channel: discord.TextChannel, caller: str) -> None: + """ + Make the `channel` dormant. + + A caller argument is provided for metrics. 
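Editor's note: `move_to_bottom_position` above never reshuffles existing channels; it echoes their current positions back to the bulk-update endpoint and appends the moved channel one slot below the current bottom. A data-only sketch of the payload construction, assuming the input list is already sorted by position (as discord.py's `category.channels` is):

import typing as t

def build_bulk_move_payload(
    category_channels: t.List[dict], moved_id: int, moved_position: int, category_id: int
) -> t.List[dict]:
    # Echo the existing positions unchanged ("stable sorting").
    payload = [{"id": c["id"], "position": c["position"]} for c in category_channels]

    # Bottom of the category, or the channel's own position if the category is empty.
    bottom_position = payload[-1]["position"] + 1 if payload else moved_position

    payload.append({
        "id": moved_id,
        "position": bottom_position,
        "parent_id": category_id,
        "lock_permissions": True,   # sync permission overwrites with the target category
    })
    return payload

print(build_bulk_move_payload(
    [{"id": 10, "position": 3}, {"id": 11, "position": 4}],
    moved_id=99, moved_position=7, category_id=500,
))
# The moved channel lands at position 5, below the existing channels.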
+ """ + log.info(f"Moving #{channel} ({channel.id}) to the Dormant category.") + + await self.move_to_bottom_position( + channel=channel, + category_id=constants.Categories.help_dormant, + ) + + await self.unclaim_channel(channel) + + self.bot.stats.incr(f"help.dormant_calls.{caller}") + + in_use_time = await _channel.get_in_use_time(channel.id) + if in_use_time: + self.bot.stats.timing("help.in_use_time", in_use_time) + + unanswered = await _caches.unanswered.get(channel.id) + if unanswered: + self.bot.stats.incr("help.sessions.unanswered") + elif unanswered is not None: + self.bot.stats.incr("help.sessions.answered") + + log.trace(f"Position of #{channel} ({channel.id}) is actually {channel.position}.") + log.trace(f"Sending dormant message for #{channel} ({channel.id}).") + embed = discord.Embed(description=_message.DORMANT_MSG) + await channel.send(embed=embed) + + await _message.unpin(channel) + + log.trace(f"Pushing #{channel} ({channel.id}) into the channel queue.") + self.channel_queue.put_nowait(channel) + self.report_stats() + + async def unclaim_channel(self, channel: discord.TextChannel) -> None: + """ + Mark the channel as unclaimed and remove the cooldown role from the claimant if needed. + + The role is only removed if they have no claimed channels left once the current one is unclaimed. + This method also handles canceling the automatic removal of the cooldown role. + """ + claimant_id = await _caches.claimants.pop(channel.id) + + # Ignore missing task when cooldown has passed but the channel still isn't dormant. + if claimant_id in self.scheduler: + self.scheduler.cancel(claimant_id) + + claimant = self.bot.get_guild(constants.Guild.id).get_member(claimant_id) + if claimant is None: + log.info(f"{claimant_id} left the guild during their help session; the cooldown role won't be removed") + return + + # Remove the cooldown role if the claimant has no other channels left + if not any(claimant.id == user_id for _, user_id in await _caches.claimants.items()): + await _cooldown.remove_cooldown_role(claimant) + + async def move_to_in_use(self, channel: discord.TextChannel) -> None: + """Make a channel in-use and schedule it to be made dormant.""" + log.info(f"Moving #{channel} ({channel.id}) to the In Use category.") + + await self.move_to_bottom_position( + channel=channel, + category_id=constants.Categories.help_in_use, + ) + + timeout = constants.HelpChannels.idle_minutes * 60 + + log.trace(f"Scheduling #{channel} ({channel.id}) to become dormant in {timeout} sec.") + self.scheduler.schedule_later(timeout, channel.id, self.move_idle_channel(channel)) + self.report_stats() + + @commands.Cog.listener() + async def on_message(self, message: discord.Message) -> None: + """Move an available channel to the In Use category and replace it with a dormant one.""" + if message.author.bot: + return # Ignore messages sent by bots. + + channel = message.channel + + await _message.check_for_answer(message) + + is_available = channel_utils.is_in_category(channel, constants.Categories.help_available) + if not is_available or _channel.is_excluded_channel(channel): + return # Ignore messages outside the Available category or in excluded channels. 
+ + log.trace("Waiting for the cog to be ready before processing messages.") + await self.init_task + + log.trace("Acquiring lock to prevent a channel from being processed twice...") + async with self.on_message_lock: + log.trace(f"on_message lock acquired for {message.id}.") + + if not channel_utils.is_in_category(channel, constants.Categories.help_available): + log.debug( + f"Message {message.id} will not make #{channel} ({channel.id}) in-use " + f"because another message in the channel already triggered that." + ) + return + + log.info(f"Channel #{channel} was claimed by `{message.author.id}`.") + await self.move_to_in_use(channel) + await _cooldown.revoke_send_permissions(message.author, self.scheduler) + + await _message.pin(message) + + # Add user with channel for dormant check. + await _caches.claimants.set(channel.id, message.author.id) + + self.bot.stats.incr("help.claimed") + + # Must use a timezone-aware datetime to ensure a correct POSIX timestamp. + timestamp = datetime.now(timezone.utc).timestamp() + await _caches.claim_times.set(channel.id, timestamp) + + await _caches.unanswered.set(channel.id, True) + + log.trace(f"Releasing on_message lock for {message.id}.") + + # Move a dormant channel to the Available category to fill in the gap. + # This is done last and outside the lock because it may wait indefinitely for a channel to + # be put in the queue. + await self.move_to_available() + + @commands.Cog.listener() + async def on_message_delete(self, msg: discord.Message) -> None: + """ + Reschedule an in-use channel to become dormant sooner if the channel is empty. + + The new time for the dormant task is configured with `HelpChannels.deleted_idle_minutes`. + """ + if not channel_utils.is_in_category(msg.channel, constants.Categories.help_in_use): + return + + if not await _message.is_empty(msg.channel): + return + + log.trace("Waiting for the cog to be ready before processing deleted messages.") + await self.init_task + + log.info(f"Claimant of #{msg.channel} ({msg.author}) deleted message, channel is empty now. Rescheduling task.") + + # Cancel existing dormant task before scheduling new. 
+ self.scheduler.cancel(msg.channel.id) + + delay = constants.HelpChannels.deleted_idle_minutes * 60 + self.scheduler.schedule_later(delay, msg.channel.id, self.move_idle_channel(msg.channel)) + + async def wait_for_dormant_channel(self) -> discord.TextChannel: + """Wait for a dormant channel to become available in the queue and return it.""" + log.trace("Waiting for a dormant channel.") + + task = asyncio.create_task(self.channel_queue.get()) + self.queue_tasks.append(task) + channel = await task + + log.trace(f"Channel #{channel} ({channel.id}) finally retrieved from the queue.") + self.queue_tasks.remove(task) + + return channel diff --git a/bot/exts/help_channels/_cooldown.py b/bot/exts/help_channels/_cooldown.py new file mode 100644 index 000000000..c5c39297f --- /dev/null +++ b/bot/exts/help_channels/_cooldown.py @@ -0,0 +1,95 @@ +import logging +from typing import Callable, Coroutine + +import discord + +import bot +from bot import constants +from bot.exts.help_channels import _caches, _channel +from bot.utils.scheduling import Scheduler + +log = logging.getLogger(__name__) +CoroutineFunc = Callable[..., Coroutine] + + +async def add_cooldown_role(member: discord.Member) -> None: + """Add the help cooldown role to `member`.""" + log.trace(f"Adding cooldown role for {member} ({member.id}).") + await _change_cooldown_role(member, member.add_roles) + + +async def check_cooldowns(scheduler: Scheduler) -> None: + """Remove expired cooldowns and re-schedule active ones.""" + log.trace("Checking all cooldowns to remove or re-schedule them.") + guild = bot.instance.get_guild(constants.Guild.id) + cooldown = constants.HelpChannels.claim_minutes * 60 + + for channel_id, member_id in await _caches.claimants.items(): + member = guild.get_member(member_id) + if not member: + continue # Member probably left the guild. + + in_use_time = await _channel.get_in_use_time(channel_id) + + if not in_use_time or in_use_time.seconds > cooldown: + # Remove the role if no claim time could be retrieved or if the cooldown expired. + # Since the channel is in the claimants cache, it is definitely strange for a time + # to not exist. However, it isn't a reason to keep the user stuck with a cooldown. + await remove_cooldown_role(member) + else: + # The member is still on a cooldown; re-schedule it for the remaining time. + delay = cooldown - in_use_time.seconds + scheduler.schedule_later(delay, member.id, remove_cooldown_role(member)) + + +async def remove_cooldown_role(member: discord.Member) -> None: + """Remove the help cooldown role from `member`.""" + log.trace(f"Removing cooldown role for {member} ({member.id}).") + await _change_cooldown_role(member, member.remove_roles) + + +async def revoke_send_permissions(member: discord.Member, scheduler: Scheduler) -> None: + """ + Disallow `member` to send messages in the Available category for a certain time. + + The time until permissions are reinstated can be configured with + `HelpChannels.claim_minutes`. + """ + log.trace( + f"Revoking {member}'s ({member.id}) send message permissions in the Available category." + ) + + await add_cooldown_role(member) + + # Cancel the existing task, if any. + # Would mean the user somehow bypassed the lack of permissions (e.g. user is guild owner). 
+ if member.id in scheduler: + scheduler.cancel(member.id) + + delay = constants.HelpChannels.claim_minutes * 60 + scheduler.schedule_later(delay, member.id, remove_cooldown_role(member)) + + +async def _change_cooldown_role(member: discord.Member, coro_func: CoroutineFunc) -> None: + """ + Change `member`'s cooldown role via awaiting `coro_func` and handle errors. + + `coro_func` is intended to be `discord.Member.add_roles` or `discord.Member.remove_roles`. + """ + guild = bot.instance.get_guild(constants.Guild.id) + role = guild.get_role(constants.Roles.help_cooldown) + if role is None: + log.warning(f"Help cooldown role ({constants.Roles.help_cooldown}) could not be found!") + return + + try: + await coro_func(role) + except discord.NotFound: + log.debug(f"Failed to change role for {member} ({member.id}): member not found") + except discord.Forbidden: + log.debug( + f"Forbidden to change role for {member} ({member.id}); " + f"possibly due to role hierarchy" + ) + except discord.HTTPException as e: + log.error(f"Failed to change role for {member} ({member.id}): {e.status} {e.code}") diff --git a/bot/exts/help_channels/_message.py b/bot/exts/help_channels/_message.py new file mode 100644 index 000000000..2bbd4bdd6 --- /dev/null +++ b/bot/exts/help_channels/_message.py @@ -0,0 +1,217 @@ +import logging +import typing as t +from datetime import datetime + +import discord + +import bot +from bot import constants +from bot.exts.help_channels import _caches +from bot.utils.channel import is_in_category + +log = logging.getLogger(__name__) + +ASKING_GUIDE_URL = "https://pythondiscord.com/pages/asking-good-questions/" + +AVAILABLE_MSG = f""" +**Send your question here to claim the channel** +This channel will be dedicated to answering your question only. Others will try to answer and help you solve the issue. + +**Keep in mind:** +• It's always ok to just ask your question. You don't need permission. +• Explain what you expect to happen and what actually happens. +• Include a code sample and error message, if you got any. + +For more tips, check out our guide on **[asking good questions]({ASKING_GUIDE_URL})**. +""" + +AVAILABLE_TITLE = "Available help channel" + +AVAILABLE_FOOTER = f"Closes after {constants.HelpChannels.idle_minutes} minutes of inactivity or when you send !close." + +DORMANT_MSG = f""" +This help channel has been marked as **dormant**, and has been moved into the **Help: Dormant** \ +category at the bottom of the channel list. It is no longer possible to send messages in this \ +channel until it becomes available again. + +If your question wasn't answered yet, you can claim a new help channel from the \ +**Help: Available** category by simply asking your question again. Consider rephrasing the \ +question to maximize your chance of getting a good answer. If you're not sure how, have a look \ +through our guide for **[asking a good question]({ASKING_GUIDE_URL})**. +""" + + +async def check_for_answer(message: discord.Message) -> None: + """Checks for whether new content in a help channel comes from non-claimants.""" + channel = message.channel + + # Confirm the channel is an in use help channel + if is_in_category(channel, constants.Categories.help_in_use): + log.trace(f"Checking if #{channel} ({channel.id}) has been answered.") + + # Check if there is an entry in unanswered + if await _caches.unanswered.contains(channel.id): + claimant_id = await _caches.claimants.get(channel.id) + if not claimant_id: + # The mapping for this channel doesn't exist, we can't do anything. 
+ return + + # Check the message did not come from the claimant + if claimant_id != message.author.id: + # Mark the channel as answered + await _caches.unanswered.set(channel.id, False) + + +async def get_last_message(channel: discord.TextChannel) -> t.Optional[discord.Message]: + """Return the last message sent in the channel or None if no messages exist.""" + log.trace(f"Getting the last message in #{channel} ({channel.id}).") + + try: + return await channel.history(limit=1).next() # noqa: B305 + except discord.NoMoreItems: + log.debug(f"No last message available; #{channel} ({channel.id}) has no messages.") + return None + + +async def is_empty(channel: discord.TextChannel) -> bool: + """Return True if there's an AVAILABLE_MSG and the messages leading up are bot messages.""" + log.trace(f"Checking if #{channel} ({channel.id}) is empty.") + + # A limit of 100 results in a single API call. + # If AVAILABLE_MSG isn't found within 100 messages, then assume the channel is not empty. + # Not gonna do an extensive search for it cause it's too expensive. + async for msg in channel.history(limit=100): + if not msg.author.bot: + log.trace(f"#{channel} ({channel.id}) has a non-bot message.") + return False + + if _match_bot_embed(msg, AVAILABLE_MSG): + log.trace(f"#{channel} ({channel.id}) has the available message embed.") + return True + + return False + + +async def notify(channel: discord.TextChannel, last_notification: t.Optional[datetime]) -> t.Optional[datetime]: + """ + Send a message in `channel` notifying about a lack of available help channels. + + If a notification was sent, return the `datetime` at which the message was sent. Otherwise, + return None. + + Configuration: + + * `HelpChannels.notify` - toggle notifications + * `HelpChannels.notify_minutes` - minimum interval between notifications + * `HelpChannels.notify_roles` - roles mentioned in notifications + """ + if not constants.HelpChannels.notify: + return + + log.trace("Notifying about lack of channels.") + + if last_notification: + elapsed = (datetime.utcnow() - last_notification).seconds + minimum_interval = constants.HelpChannels.notify_minutes * 60 + should_send = elapsed >= minimum_interval + else: + should_send = True + + if not should_send: + log.trace("Notification not sent because it's too recent since the previous one.") + return + + try: + log.trace("Sending notification message.") + + mentions = " ".join(f"<@&{role}>" for role in constants.HelpChannels.notify_roles) + allowed_roles = [discord.Object(id_) for id_ in constants.HelpChannels.notify_roles] + + message = await channel.send( + f"{mentions} A new available help channel is needed but there " + f"are no more dormant ones. Consider freeing up some in-use channels manually by " + f"using the `{constants.Bot.prefix}dormant` command within the channels.", + allowed_mentions=discord.AllowedMentions(everyone=False, roles=allowed_roles) + ) + + return message.created_at + except Exception: + # Handle it here cause this feature isn't critical for the functionality of the system. 
+ log.exception("Failed to send notification about lack of dormant channels!") + + +async def pin(message: discord.Message) -> None: + """Pin an initial question `message` and store it in a cache.""" + if await _pin_wrapper(message.id, message.channel, pin=True): + await _caches.question_messages.set(message.channel.id, message.id) + + +async def send_available_message(channel: discord.TextChannel) -> None: + """Send the available message by editing a dormant message or sending a new message.""" + channel_info = f"#{channel} ({channel.id})" + log.trace(f"Sending available message in {channel_info}.") + + embed = discord.Embed( + color=constants.Colours.bright_green, + description=AVAILABLE_MSG, + ) + embed.set_author(name=AVAILABLE_TITLE, icon_url=constants.Icons.green_checkmark) + embed.set_footer(text=AVAILABLE_FOOTER) + + msg = await get_last_message(channel) + if _match_bot_embed(msg, DORMANT_MSG): + log.trace(f"Found dormant message {msg.id} in {channel_info}; editing it.") + await msg.edit(embed=embed) + else: + log.trace(f"Dormant message not found in {channel_info}; sending a new message.") + await channel.send(embed=embed) + + +async def unpin(channel: discord.TextChannel) -> None: + """Unpin the initial question message sent in `channel`.""" + msg_id = await _caches.question_messages.pop(channel.id) + if msg_id is None: + log.debug(f"#{channel} ({channel.id}) doesn't have a message pinned.") + else: + await _pin_wrapper(msg_id, channel, pin=False) + + +def _match_bot_embed(message: t.Optional[discord.Message], description: str) -> bool: + """Return `True` if the bot's `message`'s embed description matches `description`.""" + if not message or not message.embeds: + return False + + bot_msg_desc = message.embeds[0].description + if bot_msg_desc is discord.Embed.Empty: + log.trace("Last message was a bot embed but it was empty.") + return False + return message.author == bot.instance.user and bot_msg_desc.strip() == description.strip() + + +async def _pin_wrapper(msg_id: int, channel: discord.TextChannel, *, pin: bool) -> bool: + """ + Pin message `msg_id` in `channel` if `pin` is True or unpin if it's False. + + Return True if successful and False otherwise. + """ + channel_str = f"#{channel} ({channel.id})" + if pin: + func = bot.instance.http.pin_message + verb = "pin" + else: + func = bot.instance.http.unpin_message + verb = "unpin" + + try: + await func(channel.id, msg_id) + except discord.HTTPException as e: + if e.code == 10008: + log.debug(f"Message {msg_id} in {channel_str} doesn't exist; can't {verb}.") + else: + log.exception( + f"Error {verb}ning message {msg_id} in {channel_str}: {e.status} ({e.code})" + ) + return False + else: + log.trace(f"{verb.capitalize()}ned message {msg_id} in {channel_str}.") + return True diff --git a/bot/exts/help_channels/_name.py b/bot/exts/help_channels/_name.py new file mode 100644 index 000000000..728234b1e --- /dev/null +++ b/bot/exts/help_channels/_name.py @@ -0,0 +1,69 @@ +import json +import logging +import typing as t +from collections import deque +from pathlib import Path + +import discord + +from bot import constants +from bot.exts.help_channels._channel import MAX_CHANNELS_PER_CATEGORY, get_category_channels + +log = logging.getLogger(__name__) + + +def create_name_queue(*categories: discord.CategoryChannel) -> deque: + """ + Return a queue of element names to use for creating new channels. + + Skip names that are already in use by channels in `categories`. 
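As a rough usage sketch of the queue this helper builds (illustrative only: the element list, the used names, and the prefix below are invented stand-ins; the real names come from bot/resources/elements.json and the existing channels):

    from collections import deque

    prefix = "help-"
    all_names = ["hydrogen", "helium", "lithium", "beryllium"]  # stand-in for the JSON element list
    used_names = {"help-helium"}                                # stand-in for names of existing channels

    # Prefixed names that are not already taken go into the queue, in element order.
    name_queue = deque(prefix + name for name in all_names if prefix + name not in used_names)

    # Each time a dormant channel is recycled, the next free name comes off the front.
    new_channel_name = name_queue.popleft()  # "help-hydrogen"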
+ """ + log.trace("Creating the chemical element name queue.") + + used_names = _get_used_names(*categories) + + log.trace("Determining the available names.") + available_names = (name for name in _get_names() if name not in used_names) + + log.trace("Populating the name queue with names.") + return deque(available_names) + + +def _get_names() -> t.List[str]: + """ + Return a truncated list of prefixed element names. + + The amount of names is configured with `HelpChannels.max_total_channels`. + The prefix is configured with `HelpChannels.name_prefix`. + """ + count = constants.HelpChannels.max_total_channels + prefix = constants.HelpChannels.name_prefix + + log.trace(f"Getting the first {count} element names from JSON.") + + with Path("bot/resources/elements.json").open(encoding="utf-8") as elements_file: + all_names = json.load(elements_file) + + if prefix: + return [prefix + name for name in all_names[:count]] + else: + return all_names[:count] + + +def _get_used_names(*categories: discord.CategoryChannel) -> t.Set[str]: + """Return names which are already being used by channels in `categories`.""" + log.trace("Getting channel names which are already being used.") + + names = set() + for cat in categories: + for channel in get_category_channels(cat): + names.add(channel.name) + + if len(names) > MAX_CHANNELS_PER_CATEGORY: + log.warning( + f"Too many help channels ({len(names)}) already exist! " + f"Discord only supports {MAX_CHANNELS_PER_CATEGORY} in a category." + ) + + log.trace(f"Got {len(names)} used names: {names}") + return names diff --git a/bot/exts/info/__init__.py b/bot/exts/info/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/bot/exts/info/__init__.py diff --git a/bot/exts/info/codeblock/__init__.py b/bot/exts/info/codeblock/__init__.py new file mode 100644 index 000000000..5c55bc5e3 --- /dev/null +++ b/bot/exts/info/codeblock/__init__.py @@ -0,0 +1,8 @@ +from bot.bot import Bot + + +def setup(bot: Bot) -> None: + """Load the CodeBlockCog cog.""" + # Defer import to reduce side effects from importing the codeblock package. + from bot.exts.info.codeblock._cog import CodeBlockCog + bot.add_cog(CodeBlockCog(bot)) diff --git a/bot/exts/info/codeblock/_cog.py b/bot/exts/info/codeblock/_cog.py new file mode 100644 index 000000000..9094d9d15 --- /dev/null +++ b/bot/exts/info/codeblock/_cog.py @@ -0,0 +1,186 @@ +import logging +import time +from typing import Optional + +import discord +from discord import Message, RawMessageUpdateEvent +from discord.ext.commands import Cog + +from bot import constants +from bot.bot import Bot +from bot.exts.filters.token_remover import TokenRemover +from bot.exts.filters.webhook_remover import WEBHOOK_URL_RE +from bot.exts.info.codeblock._instructions import get_instructions +from bot.utils import has_lines +from bot.utils.channel import is_help_channel +from bot.utils.messages import wait_for_deletion + +log = logging.getLogger(__name__) + + +class CodeBlockCog(Cog, name="Code Block"): + """ + Detect improperly formatted Markdown code blocks and suggest proper formatting. + + There are four basic ways in which a code block is considered improperly formatted: + + 1. The code is not within a code block at all + * Ignored if the code is not valid Python or Python REPL code + 2. Incorrect characters are used for backticks + 3. A language for syntax highlighting is not specified + * Ignored if the code is not valid Python or Python REPL code + 4. 
A syntax highlighting language is incorrectly specified + * Ignored if the language specified doesn't look like it was meant for Python + * This can go wrong in two ways: + 1. Spaces before the language + 2. No newline immediately following the language + + Messages or code blocks must meet a minimum line count to be detected. Detecting multiple code + blocks is supported. However, if at least one code block is correct, then instructions will not + be sent even if others are incorrect. When multiple incorrect code blocks are found, only the + first one is used as the basis for the instructions sent. + + When an issue is detected, an embed is sent containing specific instructions on fixing what + is wrong. If the user edits their message to fix the code block, the instructions will be + removed. If they fail to fix the code block with an edit, the instructions will be updated to + show what is still incorrect after the user's edit. The embed can be manually deleted with a + reaction. Otherwise, it will automatically be removed after 5 minutes. + + The cog only detects messages in whitelisted channels. Channels may also have a cooldown on the + instructions being sent. Note all help channels are also whitelisted with cooldowns enabled. + + For configurable parameters, see the `code_block` section in config-default.py. + """ + + def __init__(self, bot: Bot): + self.bot = bot + + # Stores allowed channels plus epoch times since the last instructional messages sent. + self.channel_cooldowns = dict.fromkeys(constants.CodeBlock.cooldown_channels, 0.0) + + # Maps users' messages to the messages the bot sent with instructions. + self.codeblock_message_ids = {} + + @staticmethod + def create_embed(instructions: str) -> discord.Embed: + """Return an embed which displays code block formatting `instructions`.""" + return discord.Embed(description=instructions) + + async def get_sent_instructions(self, payload: RawMessageUpdateEvent) -> Optional[Message]: + """ + Return the bot's sent instructions message associated with a user's message `payload`. + + Return None if the message cannot be found. In this case, it's likely the message was + deleted either manually via a reaction or automatically by a timer. + """ + log.trace(f"Retrieving instructions message for ID {payload.message_id}") + channel = self.bot.get_channel(payload.channel_id) + + try: + return await channel.fetch_message(self.codeblock_message_ids[payload.message_id]) + except discord.NotFound: + log.debug("Could not find instructions message; it was probably deleted.") + return None + + def is_on_cooldown(self, channel: discord.TextChannel) -> bool: + """ + Return True if an embed was sent too recently for `channel`. + + The cooldown is configured by `constants.CodeBlock.cooldown_seconds`. + Note: only channels in the `channel_cooldowns` have cooldowns enabled. 
+ """ + log.trace(f"Checking if #{channel} is on cooldown.") + cooldown = constants.CodeBlock.cooldown_seconds + return (time.time() - self.channel_cooldowns.get(channel.id, 0)) < cooldown + + def is_valid_channel(self, channel: discord.TextChannel) -> bool: + """Return True if `channel` is a help channel, may be on a cooldown, or is whitelisted.""" + log.trace(f"Checking if #{channel} qualifies for code block detection.") + return ( + is_help_channel(channel) + or channel.id in self.channel_cooldowns + or channel.id in constants.CodeBlock.channel_whitelist + ) + + async def send_instructions(self, message: discord.Message, instructions: str) -> None: + """ + Send an embed with `instructions` on fixing an incorrect code block in a `message`. + + The embed will be deleted automatically after 5 minutes. + """ + log.info(f"Sending code block formatting instructions for message {message.id}.") + + embed = self.create_embed(instructions) + bot_message = await message.channel.send(f"Hey {message.author.mention}!", embed=embed) + self.codeblock_message_ids[message.id] = bot_message.id + + self.bot.loop.create_task(wait_for_deletion(bot_message, (message.author.id,))) + + # Increase amount of codeblock correction in stats + self.bot.stats.incr("codeblock_corrections") + + def should_parse(self, message: discord.Message) -> bool: + """ + Return True if `message` should be parsed. + + A qualifying message: + + 1. Is not authored by a bot + 2. Is in a valid channel + 3. Has more than 3 lines + 4. Has no bot or webhook token + """ + return ( + not message.author.bot + and self.is_valid_channel(message.channel) + and has_lines(message.content, constants.CodeBlock.minimum_lines) + and not TokenRemover.find_token_in_message(message) + and not WEBHOOK_URL_RE.search(message.content) + ) + + @Cog.listener() + async def on_message(self, msg: Message) -> None: + """Detect incorrect Markdown code blocks in `msg` and send instructions to fix them.""" + if not self.should_parse(msg): + log.trace(f"Skipping code block detection of {msg.id}: message doesn't qualify.") + return + + # When debugging, ignore cooldowns. + if self.is_on_cooldown(msg.channel) and not constants.DEBUG_MODE: + log.trace(f"Skipping code block detection of {msg.id}: #{msg.channel} is on cooldown.") + return + + instructions = get_instructions(msg.content) + if instructions: + await self.send_instructions(msg, instructions) + + if msg.channel.id not in constants.CodeBlock.channel_whitelist: + log.debug(f"Adding #{msg.channel} to the channel cooldowns.") + self.channel_cooldowns[msg.channel.id] = time.time() + + @Cog.listener() + async def on_raw_message_edit(self, payload: RawMessageUpdateEvent) -> None: + """Delete the instructional message if an edited message had its code blocks fixed.""" + if payload.message_id not in self.codeblock_message_ids: + log.trace(f"Ignoring message edit {payload.message_id}: message isn't being tracked.") + return + + if payload.data.get("content") is None or payload.data.get("channel_id") is None: + log.trace(f"Ignoring message edit {payload.message_id}: missing content or channel ID.") + return + + # Parse the message to see if the code blocks have been fixed. + content = payload.data.get("content") + instructions = get_instructions(content) + + bot_message = await self.get_sent_instructions(payload) + if not bot_message: + return + + if not instructions: + log.info("User's incorrect code block has been fixed. 
Removing instructions message.") + await bot_message.delete() + del self.codeblock_message_ids[payload.message_id] + else: + log.info("Message edited but still has invalid code blocks; editing the instructions.") + await bot_message.edit(embed=self.create_embed(instructions)) diff --git a/bot/exts/info/codeblock/_instructions.py b/bot/exts/info/codeblock/_instructions.py new file mode 100644 index 000000000..dadb5e1ef --- /dev/null +++ b/bot/exts/info/codeblock/_instructions.py @@ -0,0 +1,184 @@ +"""This module generates and formats instructional messages about fixing Markdown code blocks.""" + +import logging +from typing import Optional + +from bot.exts.info.codeblock import _parsing + +log = logging.getLogger(__name__) + +_EXAMPLE_PY = "{lang}\nprint('Hello, world!')" # Make sure to escape any Markdown symbols here. +_EXAMPLE_CODE_BLOCKS = ( + "\\`\\`\\`{content}\n\\`\\`\\`\n\n" + "**This will result in the following:**\n" + "```{content}```" +) + + +def _get_example(language: str) -> str: + """Return an example of a correct code block using `language` for syntax highlighting.""" + # Determine the example code to put in the code block based on the language specifier. + if language.lower() in _parsing.PY_LANG_CODES: + log.trace(f"Code block has a Python language specifier `{language}`.") + content = _EXAMPLE_PY.format(lang=language) + elif language: + log.trace(f"Code block has a foreign language specifier `{language}`.") + # It's not feasible to determine what would be a valid example for other languages. + content = f"{language}\n..." + else: + log.trace("Code block has no language specifier.") + content = "\nHello, world!" + + return _EXAMPLE_CODE_BLOCKS.format(content=content) + + +def _get_bad_ticks_message(code_block: _parsing.CodeBlock) -> Optional[str]: + """Return instructions on using the correct ticks for `code_block`.""" + log.trace("Creating instructions for incorrect code block ticks.") + + valid_ticks = f"\\{_parsing.BACKTICK}" * 3 + instructions = ( + "It looks like you are trying to paste code into this channel.\n\n" + "You seem to be using the wrong symbols to indicate where the code block should start. " + f"The correct symbols would be {valid_ticks}, not `{code_block.tick * 3}`." + ) + + log.trace("Check if the bad ticks code block also has issues with the language specifier.") + addition_msg = _get_bad_lang_message(code_block.content) + if not addition_msg and not code_block.language: + addition_msg = _get_no_lang_message(code_block.content) + + # Combine the back ticks message with the language specifier message. The latter will + # already have an example code block. + if addition_msg: + log.trace("Language specifier issue found; appending additional instructions.") + + # The first line has double newlines which are not desirable when appending the msg. + addition_msg = addition_msg.replace("\n\n", " ", 1) + + # Make the first character of the addition lower case. 
+ instructions += "\n\nFurthermore, " + addition_msg[0].lower() + addition_msg[1:] + else: + log.trace("No issues with the language specifier found.") + example_blocks = _get_example(code_block.language) + instructions += f"\n\n**Here is an example of how it should look:**\n{example_blocks}" + + return instructions + + +def _get_no_ticks_message(content: str) -> Optional[str]: + """If `content` is Python/REPL code, return instructions on using code blocks.""" + log.trace("Creating instructions for a missing code block.") + + if _parsing.is_python_code(content): + example_blocks = _get_example("py") + return ( + "It looks like you're trying to paste code into this channel.\n\n" + "Discord has support for Markdown, which allows you to post code with full " + "syntax highlighting. Please use these whenever you paste code, as this " + "helps improve the legibility and makes it easier for us to help you.\n\n" + f"**To do this, use the following method:**\n{example_blocks}" + ) + else: + log.trace("Aborting missing code block instructions: content is not Python code.") + + +def _get_bad_lang_message(content: str) -> Optional[str]: + """ + Return instructions on fixing the Python language specifier for a code block. + + If `code_block` does not have a Python language specifier, return None. + If there's nothing wrong with the language specifier, return None. + """ + log.trace("Creating instructions for a poorly specified language.") + + info = _parsing.parse_bad_language(content) + if not info: + log.trace("Aborting bad language instructions: language specified isn't Python.") + return + + lines = [] + language = info.language + + if info.has_leading_spaces: + log.trace("Language specifier was preceded by a space.") + lines.append(f"Make sure there are no spaces between the back ticks and `{language}`.") + + if not info.has_terminal_newline: + log.trace("Language specifier was not followed by a newline.") + lines.append( + f"Make sure you put your code on a new line following `{language}`. " + f"There must not be any spaces after `{language}`." + ) + + if lines: + lines = " ".join(lines) + example_blocks = _get_example(language) + + # Note that _get_bad_ticks_message expects the first line to have two newlines. + return ( + f"It looks like you incorrectly specified a language for your code block.\n\n{lines}" + f"\n\n**Here is an example of how it should look:**\n{example_blocks}" + ) + else: + log.trace("Nothing wrong with the language specifier; no instructions to return.") + + +def _get_no_lang_message(content: str) -> Optional[str]: + """ + Return instructions on specifying a language for a code block. + + If `content` is not valid Python or Python REPL code, return None. + """ + log.trace("Creating instructions for a missing language.") + + if _parsing.is_python_code(content): + example_blocks = _get_example("py") + + # Note that _get_bad_ticks_message expects the first line to have two newlines. + return ( + "It looks like you pasted Python code without syntax highlighting.\n\n" + "Please use syntax highlighting to improve the legibility of your code and make " + "it easier for us to help you.\n\n" + f"**To do this, use the following method:**\n{example_blocks}" + ) + else: + log.trace("Aborting missing language instructions: content is not Python code.") + + +def get_instructions(content: str) -> Optional[str]: + """ + Parse `content` and return code block formatting instructions if something is wrong. + + Return None if `content` lacks code block formatting issues. 
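An illustrative sketch of how the entry point below ties these helpers together (the message contents are invented, and the exact line-count threshold comes from constants.CodeBlock, which is not shown in this hunk):

    # Quotation marks instead of backticks: should produce the "wrong ticks"
    # instructions built by _get_bad_ticks_message above.
    wrong_ticks = "'''\nfor i in range(4):\n    print(i)\n    print(i * 2)\n    print(i * 3)\n'''"

    # Bare Python code with no ticks at all: should produce the "missing code
    # block" instructions built by _get_no_ticks_message.
    no_block = "for i in range(4):\n    print(i)\n    print(i * 2)\n    print(i * 3)\n"

    for content in (wrong_ticks, no_block):
        instructions = get_instructions(content)
        if instructions:
            ...  # CodeBlockCog sends these to the user in an embed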
+ """ + log.trace("Getting formatting instructions.") + + blocks = _parsing.find_code_blocks(content) + if blocks is None: + log.trace("At least one valid code block found; no instructions to return.") + return + + if not blocks: + log.trace("No code blocks were found in message.") + instructions = _get_no_ticks_message(content) + else: + log.trace("Searching results for a code block with invalid ticks.") + block = next((block for block in blocks if block.tick != _parsing.BACKTICK), None) + + if block: + log.trace("A code block exists but has invalid ticks.") + instructions = _get_bad_ticks_message(block) + else: + log.trace("A code block exists but is missing a language.") + block = blocks[0] + + # Check for a bad language first to avoid parsing content into an AST. + instructions = _get_bad_lang_message(block.content) + if not instructions: + instructions = _get_no_lang_message(block.content) + + if instructions: + instructions += "\nYou can **edit your original message** to correct your code block." + + return instructions diff --git a/bot/exts/info/codeblock/_parsing.py b/bot/exts/info/codeblock/_parsing.py new file mode 100644 index 000000000..e35fbca22 --- /dev/null +++ b/bot/exts/info/codeblock/_parsing.py @@ -0,0 +1,228 @@ +"""This module provides functions for parsing Markdown code blocks.""" + +import ast +import logging +import re +import textwrap +from typing import NamedTuple, Optional, Sequence + +from bot import constants +from bot.utils import has_lines + +log = logging.getLogger(__name__) + +BACKTICK = "`" +PY_LANG_CODES = ("python-repl", "python", "pycon", "py") # Order is important; "py" is last cause it's a subset. +_TICKS = { + BACKTICK, + "'", + '"', + "\u00b4", # ACUTE ACCENT + "\u2018", # LEFT SINGLE QUOTATION MARK + "\u2019", # RIGHT SINGLE QUOTATION MARK + "\u2032", # PRIME + "\u201c", # LEFT DOUBLE QUOTATION MARK + "\u201d", # RIGHT DOUBLE QUOTATION MARK + "\u2033", # DOUBLE PRIME + "\u3003", # VERTICAL KANA REPEAT MARK UPPER HALF +} + +_RE_PYTHON_REPL = re.compile(r"^(>>>|\.\.\.)( |$)") +_RE_IPYTHON_REPL = re.compile(r"^((In|Out) \[\d+\]: |\s*\.{3,}: ?)") + +_RE_CODE_BLOCK = re.compile( + fr""" + (?P<ticks> + (?P<tick>[{''.join(_TICKS)}]) # Put all ticks into a character class within a group. + \2{{2}} # Match previous group 2 more times to ensure the same char. + ) + (?P<lang>[A-Za-z0-9\+\-\.]+\n)? # Optionally match a language specifier followed by a newline. + (?P<code>.+?) # Match the actual code within the block. + \1 # Match the same 3 ticks used at the start of the block. + """, + re.DOTALL | re.VERBOSE +) + +_RE_LANGUAGE = re.compile( + fr""" + ^(?P<spaces>\s+)? # Optionally match leading spaces from the beginning. + (?P<lang>{'|'.join(PY_LANG_CODES)}) # Match a Python language. + (?P<newline>\n)? # Optionally match a newline following the language. + """, + re.IGNORECASE | re.VERBOSE +) + + +class CodeBlock(NamedTuple): + """Represents a Markdown code block.""" + + content: str + language: str + tick: str + + +class BadLanguage(NamedTuple): + """Parsed information about a poorly formatted language specifier.""" + + language: str + has_leading_spaces: bool + has_terminal_newline: bool + + +def find_code_blocks(message: str) -> Optional[Sequence[CodeBlock]]: + """ + Find and return all Markdown code blocks in the `message`. + + Code blocks with 3 or fewer lines are excluded. + + If the `message` contains at least one code block with valid ticks and a specified language, + return None. 
This is based on the assumption that if the user managed to get one code block + right, they already know how to fix the rest themselves. + """ + log.trace("Finding all code blocks in a message.") + + code_blocks = [] + for match in _RE_CODE_BLOCK.finditer(message): + # Used to ensure non-matched groups have an empty string as the default value. + groups = match.groupdict("") + language = groups["lang"].strip() # Strip the newline cause it's included in the group. + + if groups["tick"] == BACKTICK and language: + log.trace("Message has a valid code block with a language; returning None.") + return None + elif has_lines(groups["code"], constants.CodeBlock.minimum_lines): + code_block = CodeBlock(groups["code"], language, groups["tick"]) + code_blocks.append(code_block) + else: + log.trace("Skipped a code block shorter than 4 lines.") + + return code_blocks + + +def _is_python_code(content: str) -> bool: + """Return True if `content` is valid Python consisting of more than just expressions.""" + log.trace("Checking if content is Python code.") + try: + # Attempt to parse the message into an AST node. + # Invalid Python code will raise a SyntaxError. + tree = ast.parse(content) + except SyntaxError: + log.trace("Code is not valid Python.") + return False + + # Multiple lines of single words could be interpreted as expressions. + # This check is to avoid all nodes being parsed as expressions. + # (e.g. words over multiple lines) + if not all(isinstance(node, ast.Expr) for node in tree.body): + log.trace("Code is valid python.") + return True + else: + log.trace("Code consists only of expressions.") + return False + + +def _is_repl_code(content: str, threshold: int = 3) -> bool: + """Return True if `content` has at least `threshold` number of (I)Python REPL-like lines.""" + log.trace(f"Checking if content is (I)Python REPL code using a threshold of {threshold}.") + + repl_lines = 0 + patterns = (_RE_PYTHON_REPL, _RE_IPYTHON_REPL) + + for line in content.splitlines(): + # Check the line against all patterns. + for pattern in patterns: + if pattern.match(line): + repl_lines += 1 + + # Once a pattern is matched, only use that pattern for the remaining lines. + patterns = (pattern,) + break + + if repl_lines == threshold: + log.trace("Content is (I)Python REPL code.") + return True + + log.trace("Content is not (I)Python REPL code.") + return False + + +def is_python_code(content: str) -> bool: + """Return True if `content` is valid Python code or (I)Python REPL output.""" + dedented = textwrap.dedent(content) + + # Parse AST twice in case _fix_indentation ends up breaking code due to its inaccuracies. + return ( + _is_python_code(dedented) + or _is_repl_code(dedented) + or _is_python_code(_fix_indentation(content)) + ) + + +def parse_bad_language(content: str) -> Optional[BadLanguage]: + """ + Return information about a poorly formatted Python language in code block `content`. + + If the language is not Python, return None. 
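A small illustration of the intended result (the input below is invented; the expectations follow from _RE_LANGUAGE above):

    # A space before "py" and the code left on the same line as the specifier.
    content = " py print('hello')"

    info = parse_bad_language(content)
    # Expected, per the regex above:
    #   info.language == "py"
    #   info.has_leading_spaces is True
    #   info.has_terminal_newline is False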
+ """ + log.trace("Parsing bad language.") + + match = _RE_LANGUAGE.match(content) + if not match: + return None + + return BadLanguage( + language=match["lang"], + has_leading_spaces=match["spaces"] is not None, + has_terminal_newline=match["newline"] is not None, + ) + + +def _get_leading_spaces(content: str) -> int: + """Return the number of spaces at the start of the first line in `content`.""" + leading_spaces = 0 + for char in content: + if char == " ": + leading_spaces += 1 + else: + return leading_spaces + + +def _fix_indentation(content: str) -> str: + """ + Attempt to fix badly indented code in `content`. + + In most cases, this works like textwrap.dedent. However, if the first line ends with a colon, + all subsequent lines are re-indented to only be one level deep relative to the first line. + The intent is to fix cases where the leading spaces of the first line of code were accidentally + not copied, which makes the first line appear not indented. + + This is fairly naïve and inaccurate. Therefore, it may break some code that was otherwise valid. + It's meant to catch really common cases, so that's acceptable. Its flaws are: + + - It assumes that if the first line ends with a colon, it is the start of an indented block + - It uses 4 spaces as the indentation, regardless of what the rest of the code uses + """ + lines = content.splitlines(keepends=True) + + # Dedent the first line + first_indent = _get_leading_spaces(content) + first_line = lines[0][first_indent:] + + # Can't assume there'll be multiple lines cause line counts of edited messages aren't checked. + if len(lines) == 1: + return first_line + + second_indent = _get_leading_spaces(lines[1]) + + # If the first line ends with a colon, all successive lines need to be indented one + # additional level (assumes an indent width of 4). + if first_line.rstrip().endswith(":"): + second_indent -= 4 + + # All lines must be dedented at least by the same amount as the first line. + first_indent = max(first_indent, second_indent) + + # Dedent the rest of the lines and join them together with the first line. + content = first_line + "".join(line[first_indent:] for line in lines[1:]) + + return content diff --git a/bot/cogs/doc.py b/bot/exts/info/doc.py index 6e7c00b6a..9b5bd6504 100644 --- a/bot/cogs/doc.py +++ b/bot/exts/info/doc.py @@ -3,10 +3,9 @@ import functools import logging import re import textwrap -from collections import OrderedDict from contextlib import suppress from types import SimpleNamespace -from typing import Any, Callable, Optional, Tuple +from typing import Optional, Tuple import discord from bs4 import BeautifulSoup @@ -21,8 +20,9 @@ from urllib3.exceptions import ProtocolError from bot.bot import Bot from bot.constants import MODERATION_ROLES, RedirectOutput from bot.converters import ValidPythonIdentifier, ValidURL -from bot.decorators import with_role from bot.pagination import LinePaginator +from bot.utils.cache import AsyncCache +from bot.utils.messages import wait_for_deletion log = logging.getLogger(__name__) @@ -65,34 +65,7 @@ WHITESPACE_AFTER_NEWLINES_RE = re.compile(r"(?<=\n\n)(\s+)") FAILED_REQUEST_RETRY_AMOUNT = 3 NOT_FOUND_DELETE_DELAY = RedirectOutput.delete_delay - -def async_cache(max_size: int = 128, arg_offset: int = 0) -> Callable: - """ - LRU cache implementation for coroutines. - - Once the cache exceeds the maximum size, keys are deleted in FIFO order. - - An offset may be optionally provided to be applied to the coroutine's arguments when creating the cache key. 
- """ - # Assign the cache to the function itself so we can clear it from outside. - async_cache.cache = OrderedDict() - - def decorator(function: Callable) -> Callable: - """Define the async_cache decorator.""" - @functools.wraps(function) - async def wrapper(*args) -> Any: - """Decorator wrapper for the caching logic.""" - key = ':'.join(args[arg_offset:]) - - value = async_cache.cache.get(key) - if value is None: - if len(async_cache.cache) > max_size: - async_cache.cache.popitem(last=False) - - async_cache.cache[key] = await function(*args) - return async_cache.cache[key] - return wrapper - return decorator +symbol_cache = AsyncCache() class DocMarkdownConverter(MarkdownConverter): @@ -157,7 +130,7 @@ class Doc(commands.Cog): async def init_refresh_inventory(self) -> None: """Refresh documentation inventory on cog initialization.""" - await self.bot.wait_until_ready() + await self.bot.wait_until_guild_available() await self.refresh_inventory() async def update_single( @@ -215,7 +188,7 @@ class Doc(commands.Cog): self.base_urls.clear() self.inventories.clear() self.renamed_symbols.clear() - async_cache.cache = OrderedDict() + symbol_cache.clear() # Run all coroutines concurrently - since each of them performs a HTTP # request, this speeds up fetching the inventory data heavily. @@ -280,7 +253,7 @@ class Doc(commands.Cog): return signatures, description.replace('¶', '') - @async_cache(arg_offset=1) + @symbol_cache(arg_offset=1) async def get_symbol_embed(self, symbol: str) -> Optional[discord.Embed]: """ Attempt to scrape and fetch the data for the given `symbol`, and build an embed from its contents. @@ -345,7 +318,7 @@ class Doc(commands.Cog): @commands.group(name='docs', aliases=('doc', 'd'), invoke_without_command=True) async def docs_group(self, ctx: commands.Context, symbol: commands.clean_content = None) -> None: """Lookup documentation for Python symbols.""" - await ctx.invoke(self.get_command, symbol) + await self.get_command(ctx, symbol) @docs_group.command(name='get', aliases=('g',)) async def get_command(self, ctx: commands.Context, symbol: commands.clean_content = None) -> None: @@ -391,10 +364,11 @@ class Doc(commands.Cog): await error_message.delete(delay=NOT_FOUND_DELETE_DELAY) await ctx.message.delete(delay=NOT_FOUND_DELETE_DELAY) else: - await ctx.send(embed=doc_embed) + msg = await ctx.send(embed=doc_embed) + await wait_for_deletion(msg, (ctx.author.id,)) @docs_group.command(name='set', aliases=('s',)) - @with_role(*MODERATION_ROLES) + @commands.has_any_role(*MODERATION_ROLES) async def set_command( self, ctx: commands.Context, package_name: ValidPythonIdentifier, base_url: ValidURL, inventory_url: InventoryURL @@ -431,7 +405,7 @@ class Doc(commands.Cog): await ctx.send(f"Added package `{package_name}` to database and refreshed inventory.") @docs_group.command(name='delete', aliases=('remove', 'rm', 'd')) - @with_role(*MODERATION_ROLES) + @commands.has_any_role(*MODERATION_ROLES) async def delete_command(self, ctx: commands.Context, package_name: ValidPythonIdentifier) -> None: """ Removes the specified package from the database. 
@@ -448,7 +422,7 @@ class Doc(commands.Cog): await ctx.send(f"Successfully deleted `{package_name}` and refreshed inventory.") @docs_group.command(name="refresh", aliases=("rfsh", "r")) - @with_role(*MODERATION_ROLES) + @commands.has_any_role(*MODERATION_ROLES) async def refresh_command(self, ctx: commands.Context) -> None: """Refresh inventories and send differences to channel.""" old_inventories = set(self.base_urls) diff --git a/bot/exts/info/help.py b/bot/exts/info/help.py new file mode 100644 index 000000000..461ff82fd --- /dev/null +++ b/bot/exts/info/help.py @@ -0,0 +1,355 @@ +import itertools +import logging +from collections import namedtuple +from contextlib import suppress +from typing import List, Union + +from discord import Colour, Embed +from discord.ext.commands import Bot, Cog, Command, Context, Group, HelpCommand +from fuzzywuzzy import fuzz, process +from fuzzywuzzy.utils import full_process + +from bot import constants +from bot.constants import Channels, STAFF_ROLES +from bot.decorators import redirect_output +from bot.pagination import LinePaginator +from bot.utils.messages import wait_for_deletion + +log = logging.getLogger(__name__) + +COMMANDS_PER_PAGE = 8 +PREFIX = constants.Bot.prefix + +Category = namedtuple("Category", ["name", "description", "cogs"]) + + +class HelpQueryNotFound(ValueError): + """ + Raised when a HelpSession Query doesn't match a command or cog. + + Contains the custom attribute of ``possible_matches``. + + Instances of this object contain a dictionary of any command(s) that were close to matching the + query, where keys are the possible matched command names and values are the likeness match scores. + """ + + def __init__(self, arg: str, possible_matches: dict = None): + super().__init__(arg) + self.possible_matches = possible_matches + + +class CustomHelpCommand(HelpCommand): + """ + An interactive instance for the bot help command. + + Cogs can be grouped into custom categories. All cogs with the same category will be displayed + under a single category name in the help output. Custom categories are defined inside the cogs + as a class attribute named `category`. A description can also be specified with the attribute + `category_description`. If a description is not found in at least one cog, the default will be + the regular description (class docstring) of the first cog found in the category. + """ + + def __init__(self): + super().__init__(command_attrs={"help": "Shows help for bot commands"}) + + @redirect_output(destination_channel=Channels.bot_commands, bypass_roles=STAFF_ROLES) + async def command_callback(self, ctx: Context, *, command: str = None) -> None: + """Attempts to match the provided query with a valid command or cog.""" + # the only reason we need to tamper with this is because d.py does not support "categories", + # so we need to deal with them ourselves. 
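The custom category handling described in the class docstring above hinges on cogs opting in through class attributes; a minimal sketch of such a cog (the cog name, category, and command below are invented for illustration):

    from discord.ext import commands

    class PythonResources(commands.Cog):
        """Hypothetical cog that groups itself under a custom help category."""

        category = "Python"
        category_description = "Commands for finding Python learning resources."

        @commands.command(name="zen")
        async def zen(self, ctx: commands.Context) -> None:
            """Show a line from the Zen of Python."""
            await ctx.send("Beautiful is better than ugly.")

With an attribute like that present, a query such as `<prefix>help Python` would resolve through the category branch handled in command_callback rather than through a single cog.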
+ + bot = ctx.bot + + if command is None: + # quick and easy, send bot help if command is none + mapping = self.get_bot_mapping() + await self.send_bot_help(mapping) + return + + cog_matches = [] + description = None + for cog in bot.cogs.values(): + if hasattr(cog, "category") and cog.category == command: + cog_matches.append(cog) + if hasattr(cog, "category_description"): + description = cog.category_description + + if cog_matches: + category = Category(name=command, description=description, cogs=cog_matches) + await self.send_category_help(category) + return + + # it's either a cog, group, command or subcommand; let the parent class deal with it + await super().command_callback(ctx, command=command) + + async def get_all_help_choices(self) -> set: + """ + Get all the possible options for getting help in the bot. + + This will only display commands the author has permission to run. + + These include: + - Category names + - Cog names + - Group command names (and aliases) + - Command names (and aliases) + - Subcommand names (with parent group and aliases for subcommand, but not including aliases for group) + + Options and choices are case sensitive. + """ + # first get all commands including subcommands and full command name aliases + choices = set() + for command in await self.filter_commands(self.context.bot.walk_commands()): + # the the command or group name + choices.add(str(command)) + + if isinstance(command, Command): + # all aliases if it's just a command + choices.update(command.aliases) + else: + # otherwise we need to add the parent name in + choices.update(f"{command.full_parent_name} {alias}" for alias in command.aliases) + + # all cog names + choices.update(self.context.bot.cogs) + + # all category names + choices.update(cog.category for cog in self.context.bot.cogs.values() if hasattr(cog, "category")) + return choices + + async def command_not_found(self, string: str) -> "HelpQueryNotFound": + """ + Handles when a query does not match a valid command, group, cog or category. + + Will return an instance of the `HelpQueryNotFound` exception with the error message and possible matches. + """ + choices = await self.get_all_help_choices() + + # Run fuzzywuzzy's processor beforehand, and avoid matching if processed string is empty + # This avoids fuzzywuzzy from raising a warning on inputs with only non-alphanumeric characters + if (processed := full_process(string)): + result = process.extractBests(processed, choices, scorer=fuzz.ratio, score_cutoff=60, processor=None) + else: + result = [] + + return HelpQueryNotFound(f'Query "{string}" not found.', dict(result)) + + async def subcommand_not_found(self, command: Command, string: str) -> "HelpQueryNotFound": + """ + Redirects the error to `command_not_found`. + + `command_not_found` deals with searching and getting best choices for both commands and subcommands. + """ + return await self.command_not_found(f"{command.qualified_name} {string}") + + async def send_error_message(self, error: HelpQueryNotFound) -> None: + """Send the error message to the channel.""" + embed = Embed(colour=Colour.red(), title=str(error)) + + if getattr(error, "possible_matches", None): + matches = "\n".join(f"`{match}`" for match in error.possible_matches) + embed.description = f"**Did you mean:**\n{matches}" + + await self.context.send(embed=embed) + + async def command_formatting(self, command: Command) -> Embed: + """ + Takes a command and turns it into an embed. 
+ + It will add an author, command signature + help, aliases and a note if the user can't run the command. + """ + embed = Embed() + embed.set_author(name="Command Help", icon_url=constants.Icons.questionmark) + + parent = command.full_parent_name + + name = str(command) if not parent else f"{parent} {command.name}" + command_details = f"**```{PREFIX}{name} {command.signature}```**\n" + + # show command aliases + aliases = [f"`{alias}`" if not parent else f"`{parent} {alias}`" for alias in command.aliases] + aliases += [f"`{alias}`" for alias in getattr(command, "root_aliases", ())] + aliases = ", ".join(sorted(aliases)) + if aliases: + command_details += f"**Can also use:** {aliases}\n\n" + + # check if the user is allowed to run this command + if not await command.can_run(self.context): + command_details += "***You cannot run this command.***\n\n" + + command_details += f"*{command.help or 'No details provided.'}*\n" + embed.description = command_details + + return embed + + async def send_command_help(self, command: Command) -> None: + """Send help for a single command.""" + embed = await self.command_formatting(command) + message = await self.context.send(embed=embed) + await wait_for_deletion(message, (self.context.author.id,)) + + @staticmethod + def get_commands_brief_details(commands_: List[Command], return_as_list: bool = False) -> Union[List[str], str]: + """ + Formats the prefix, command name and signature, and short doc for an iterable of commands. + + return_as_list is helpful for passing these command details into the paginator as a list of command details. + """ + details = [] + for command in commands_: + signature = f" {command.signature}" if command.signature else "" + details.append( + f"\n**`{PREFIX}{command.qualified_name}{signature}`**\n*{command.short_doc or 'No details provided'}*" + ) + if return_as_list: + return details + else: + return "".join(details) + + async def send_group_help(self, group: Group) -> None: + """Sends help for a group command.""" + subcommands = group.commands + + if len(subcommands) == 0: + # no subcommands, just treat it like a regular command + await self.send_command_help(group) + return + + # remove commands that the user can't run and are hidden, and sort by name + commands_ = await self.filter_commands(subcommands, sort=True) + + embed = await self.command_formatting(group) + + command_details = self.get_commands_brief_details(commands_) + if command_details: + embed.description += f"\n**Subcommands:**\n{command_details}" + + message = await self.context.send(embed=embed) + await wait_for_deletion(message, (self.context.author.id,)) + + async def send_cog_help(self, cog: Cog) -> None: + """Send help for a cog.""" + # sort commands by name, and remove any the user can't run or are hidden. + commands_ = await self.filter_commands(cog.get_commands(), sort=True) + + embed = Embed() + embed.set_author(name="Command Help", icon_url=constants.Icons.questionmark) + embed.description = f"**{cog.qualified_name}**\n*{cog.description}*" + + command_details = self.get_commands_brief_details(commands_) + if command_details: + embed.description += f"\n\n**Commands:**\n{command_details}" + + message = await self.context.send(embed=embed) + await wait_for_deletion(message, (self.context.author.id,)) + + @staticmethod + def _category_key(command: Command) -> str: + """ + Returns a cog name of a given command for use as a key for `sorted` and `groupby`. + + A zero width space is used as a prefix for results with no cogs to force them last in ordering. 
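Since `send_bot_help` below feeds this key into `itertools.groupby`, the commands must be sorted by the same key first, because groupby only merges adjacent items. A self-contained sketch of the pattern with invented command names:

    from itertools import groupby

    # Stand-ins for (command name, category key) pairs produced by _category_key.
    pairs = [
        ("docs", "**Information**"),
        ("help", "**\u200bNo Category:**"),
        ("tag", "**Information**"),
    ]

    # Sort by the key first; the zero width space makes "No Category" sort last.
    pairs.sort(key=lambda pair: pair[1])

    for category, group in groupby(pairs, key=lambda pair: pair[1]):
        print(category, [name for name, _ in group])
    # Prints the Information commands as one group first, the No Category group last.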
+ """ + if command.cog: + with suppress(AttributeError): + if command.cog.category: + return f"**{command.cog.category}**" + return f"**{command.cog_name}**" + else: + return "**\u200bNo Category:**" + + async def send_category_help(self, category: Category) -> None: + """ + Sends help for a bot category. + + This sends a brief help for all commands in all cogs registered to the category. + """ + embed = Embed() + embed.set_author(name="Command Help", icon_url=constants.Icons.questionmark) + + all_commands = [] + for cog in category.cogs: + all_commands.extend(cog.get_commands()) + + filtered_commands = await self.filter_commands(all_commands, sort=True) + + command_detail_lines = self.get_commands_brief_details(filtered_commands, return_as_list=True) + description = f"**{category.name}**\n*{category.description}*" + + if command_detail_lines: + description += "\n\n**Commands:**" + + await LinePaginator.paginate( + command_detail_lines, + self.context, + embed, + prefix=description, + max_lines=COMMANDS_PER_PAGE, + max_size=2000, + ) + + async def send_bot_help(self, mapping: dict) -> None: + """Sends help for all bot commands and cogs.""" + bot = self.context.bot + + embed = Embed() + embed.set_author(name="Command Help", icon_url=constants.Icons.questionmark) + + filter_commands = await self.filter_commands(bot.commands, sort=True, key=self._category_key) + + cog_or_category_pages = [] + + for cog_or_category, _commands in itertools.groupby(filter_commands, key=self._category_key): + sorted_commands = sorted(_commands, key=lambda c: c.name) + + if len(sorted_commands) == 0: + continue + + command_detail_lines = self.get_commands_brief_details(sorted_commands, return_as_list=True) + + # Split cogs or categories which have too many commands to fit in one page. + # The length of commands is included for later use when aggregating into pages for the paginator. + for index in range(0, len(sorted_commands), COMMANDS_PER_PAGE): + truncated_lines = command_detail_lines[index:index + COMMANDS_PER_PAGE] + joined_lines = "".join(truncated_lines) + cog_or_category_pages.append((f"**{cog_or_category}**{joined_lines}", len(truncated_lines))) + + pages = [] + counter = 0 + page = "" + for page_details, length in cog_or_category_pages: + counter += length + if counter > COMMANDS_PER_PAGE: + # force a new page on paginator even if it falls short of the max pages + # since we still want to group categories/cogs. + counter = length + pages.append(page) + page = f"{page_details}\n\n" + else: + page += f"{page_details}\n\n" + + if page: + # add any remaining command help that didn't get added in the last iteration above. 
+ pages.append(page) + + await LinePaginator.paginate(pages, self.context, embed=embed, max_lines=1, max_size=2000) + + +class Help(Cog): + """Custom Embed Pagination Help feature.""" + + def __init__(self, bot: Bot) -> None: + self.bot = bot + self.old_help_command = bot.help_command + bot.help_command = CustomHelpCommand() + bot.help_command.cog = self + + def cog_unload(self) -> None: + """Reset the help command when the cog is unloaded.""" + self.bot.help_command = self.old_help_command + + +def setup(bot: Bot) -> None: + """Load the Help cog.""" + bot.add_cog(Help(bot)) + log.info("Cog loaded: Help") diff --git a/bot/cogs/information.py b/bot/exts/info/information.py index 125d7ce24..b2138b03f 100644 --- a/bot/cogs/information.py +++ b/bot/exts/info/information.py @@ -2,24 +2,32 @@ import colorsys import logging import pprint import textwrap -import typing -from collections import defaultdict -from typing import Any, Mapping, Optional +from collections import Counter, defaultdict +from string import Template +from typing import Any, Mapping, Optional, Tuple, Union -import discord -from discord import CategoryChannel, Colour, Embed, Member, Role, TextChannel, VoiceChannel, utils -from discord.ext import commands -from discord.ext.commands import BucketType, Cog, Context, command, group -from discord.utils import escape_markdown +from discord import ChannelType, Colour, Embed, Guild, Message, Role, Status, utils +from discord.abc import GuildChannel +from discord.ext.commands import BucketType, Cog, Context, Paginator, command, group, has_any_role from bot import constants +from bot.api import ResponseCodeError from bot.bot import Bot -from bot.decorators import InChannelCheckFailure, in_channel, with_role -from bot.utils.checks import cooldown_with_role_bypass, with_role_check +from bot.converters import FetchedMember +from bot.decorators import in_whitelist +from bot.pagination import LinePaginator +from bot.utils.channel import is_mod_channel +from bot.utils.checks import cooldown_with_role_bypass, has_no_roles_check, in_whitelist_check from bot.utils.time import time_since log = logging.getLogger(__name__) +STATUS_EMOTES = { + Status.offline: constants.Emojis.status_offline, + Status.dnd: constants.Emojis.status_dnd, + Status.idle: constants.Emojis.status_idle +} + class Information(Cog): """A cog with commands for generating embeds with server info, such as server stats and user info.""" @@ -27,39 +35,79 @@ class Information(Cog): def __init__(self, bot: Bot): self.bot = bot - @with_role(*constants.MODERATION_ROLES) + @staticmethod + def role_can_read(channel: GuildChannel, role: Role) -> bool: + """Return True if `role` can read messages in `channel`.""" + overwrites = channel.overwrites_for(role) + return overwrites.read_messages is True + + def get_staff_channel_count(self, guild: Guild) -> int: + """ + Get the number of channels that are staff-only. + + We need to know two things about a channel: + - Does the @everyone role have explicit read deny permissions? + - Do staff roles have explicit read allow permissions? + + If the answer to both of these questions is yes, it's a staff channel. 
+ """ + channel_ids = set() + for channel in guild.channels: + if channel.type is ChannelType.category: + continue + + everyone_can_read = self.role_can_read(channel, guild.default_role) + + for role in constants.STAFF_ROLES: + role_can_read = self.role_can_read(channel, guild.get_role(role)) + if role_can_read and not everyone_can_read: + channel_ids.add(channel.id) + break + + return len(channel_ids) + + @staticmethod + def get_channel_type_counts(guild: Guild) -> str: + """Return the total amounts of the various types of channels in `guild`.""" + channel_counter = Counter(c.type for c in guild.channels) + channel_type_list = [] + for channel, count in channel_counter.items(): + channel_type = str(channel).title() + channel_type_list.append(f"{channel_type} channels: {count}") + + channel_type_list = sorted(channel_type_list) + return "\n".join(channel_type_list) + + @has_any_role(*constants.STAFF_ROLES) @command(name="roles") async def roles_info(self, ctx: Context) -> None: """Returns a list of all roles and their corresponding IDs.""" # Sort the roles alphabetically and remove the @everyone role - roles = sorted(ctx.guild.roles, key=lambda role: role.name) - roles = [role for role in roles if role.name != "@everyone"] + roles = sorted(ctx.guild.roles[1:], key=lambda role: role.name) - # Build a string - role_string = "" + # Build a list + role_list = [] for role in roles: - role_string += f"`{role.id}` - {role.mention}\n" + role_list.append(f"`{role.id}` - {role.mention}") # Build an embed embed = Embed( - title="Role information", - colour=Colour.blurple(), - description=role_string + title=f"Role information (Total {len(roles)} role{'s' * (len(role_list) > 1)})", + colour=Colour.blurple() ) - embed.set_footer(text=f"Total roles: {len(roles)}") - - await ctx.send(embed=embed) + await LinePaginator.paginate(role_list, ctx, embed, empty=False) - @with_role(*constants.MODERATION_ROLES) + @has_any_role(*constants.STAFF_ROLES) @command(name="role") - async def role_info(self, ctx: Context, *roles: typing.Union[Role, str]) -> None: + async def role_info(self, ctx: Context, *roles: Union[Role, str]) -> None: """ Return information on a role or list of roles. To specify multiple roles just add to the arguments, delimit roles with spaces in them using quotation marks. 
""" parsed_roles = [] + failed_roles = [] for role_name in roles: if isinstance(role_name, Role): @@ -70,29 +118,26 @@ class Information(Cog): role = utils.find(lambda r: r.name.lower() == role_name.lower(), ctx.guild.roles) if not role: - await ctx.send(f":x: Could not convert `{role_name}` to a role") + failed_roles.append(role_name) continue parsed_roles.append(role) + if failed_roles: + await ctx.send(f":x: Could not retrieve the following roles: {', '.join(failed_roles)}") + for role in parsed_roles: + h, s, v = colorsys.rgb_to_hsv(*role.colour.to_rgb()) + embed = Embed( title=f"{role.name} info", colour=role.colour, ) - embed.add_field(name="ID", value=role.id, inline=True) - embed.add_field(name="Colour (RGB)", value=f"#{role.colour.value:0>6x}", inline=True) - - h, s, v = colorsys.rgb_to_hsv(*role.colour.to_rgb()) - embed.add_field(name="Colour (HSV)", value=f"{h:.2f} {s:.2f} {v}", inline=True) - embed.add_field(name="Member count", value=len(role.members), inline=True) - embed.add_field(name="Position", value=role.position) - embed.add_field(name="Permission code", value=role.permissions.value, inline=True) await ctx.send(embed=embed) @@ -104,155 +149,151 @@ class Information(Cog): features = ", ".join(ctx.guild.features) region = ctx.guild.region - # How many of each type of channel? roles = len(ctx.guild.roles) - channels = ctx.guild.channels - text_channels = 0 - category_channels = 0 - voice_channels = 0 - for channel in channels: - if type(channel) == TextChannel: - text_channels += 1 - elif type(channel) == CategoryChannel: - category_channels += 1 - elif type(channel) == VoiceChannel: - voice_channels += 1 - - # How many of each user status? member_count = ctx.guild.member_count - members = ctx.guild.members - online = 0 - dnd = 0 - idle = 0 - offline = 0 - for member in members: - if str(member.status) == "online": - online += 1 - elif str(member.status) == "offline": - offline += 1 - elif str(member.status) == "idle": - idle += 1 - elif str(member.status) == "dnd": - dnd += 1 + channel_counts = self.get_channel_type_counts(ctx.guild) - embed = Embed( - colour=Colour.blurple(), - description=textwrap.dedent(f""" + # How many of each user status? + py_invite = await self.bot.fetch_invite(constants.Guild.invite) + online_presences = py_invite.approximate_presence_count + offline_presences = py_invite.approximate_member_count - online_presences + embed = Embed(colour=Colour.blurple()) + + # How many staff members and staff channels do we have? + staff_member_count = len(ctx.guild.get_role(constants.Roles.helpers).members) + staff_channel_count = self.get_staff_channel_count(ctx.guild) + + # Because channel_counts lacks leading whitespace, it breaks the dedent if it's inserted directly by the + # f-string. While this is correctly formatted by Discord, it makes unit testing difficult. To keep the + # formatting without joining a tuple of strings we can use a Template string to insert the already-formatted + # channel_counts after the dedent is made. 
+ embed.description = Template( + textwrap.dedent(f""" **Server information** Created: {created} Voice region: {region} Features: {features} - **Counts** + **Channel counts** + $channel_counts + Staff channels: {staff_channel_count} + + **Member counts** Members: {member_count:,} + Staff members: {staff_member_count} Roles: {roles} - Text: {text_channels} - Voice: {voice_channels} - Channel categories: {category_channels} - - **Members** - {constants.Emojis.status_online} {online} - {constants.Emojis.status_idle} {idle} - {constants.Emojis.status_dnd} {dnd} - {constants.Emojis.status_offline} {offline} - """) - ) + **Member statuses** + {constants.Emojis.status_online} {online_presences:,} + {constants.Emojis.status_offline} {offline_presences:,} + """) + ).substitute({"channel_counts": channel_counts}) embed.set_thumbnail(url=ctx.guild.icon_url) await ctx.send(embed=embed) @command(name="user", aliases=["user_info", "member", "member_info"]) - async def user_info(self, ctx: Context, user: Member = None) -> None: + async def user_info(self, ctx: Context, user: FetchedMember = None) -> None: """Returns info about a user.""" if user is None: user = ctx.author # Do a role check if this is being executed on someone other than the caller - if user != ctx.author and not with_role_check(ctx, *constants.MODERATION_ROLES): + elif user != ctx.author and await has_no_roles_check(ctx, *constants.MODERATION_ROLES): await ctx.send("You may not use this command on users other than yourself.") return - # Non-staff may only do this in #bot-commands - if not with_role_check(ctx, *constants.STAFF_ROLES): - if not ctx.channel.id == constants.Channels.bot: - raise InChannelCheckFailure(constants.Channels.bot) - - embed = await self.create_user_embed(ctx, user) - - await ctx.send(embed=embed) + # Will redirect to #bot-commands if it fails. 
+ if in_whitelist_check(ctx, roles=constants.STAFF_ROLES): + embed = await self.create_user_embed(ctx, user) + await ctx.send(embed=embed) - async def create_user_embed(self, ctx: Context, user: Member) -> Embed: + async def create_user_embed(self, ctx: Context, user: FetchedMember) -> Embed: """Creates an embed containing information on the `user`.""" - created = time_since(user.created_at, max_units=3) + on_server = bool(ctx.guild.get_member(user.id)) - # Custom status - custom_status = '' - for activity in user.activities: - # Check activity.state for None value if user has a custom status set - # This guards against a custom status with an emoji but no text, which will cause - # escape_markdown to raise an exception - # This can be reworked after a move to d.py 1.3.0+, which adds a CustomActivity class - if activity.name == 'Custom Status' and activity.state: - state = escape_markdown(activity.state) - custom_status = f'Status: {state}\n' + created = time_since(user.created_at, max_units=3) name = str(user) - if user.nick: + if on_server and user.nick: name = f"{user.nick} ({name})" - joined = time_since(user.joined_at, precision="days") - roles = ", ".join(role.mention for role in user.roles if role.name != "@everyone") + badges = [] - description = [ - textwrap.dedent(f""" - **User Information** - Created: {created} - Profile: {user.mention} - ID: {user.id} - {custom_status} - **Member Information** - Joined: {joined} - Roles: {roles or None} - """).strip() + for badge, is_set in user.public_flags: + if is_set and (emoji := getattr(constants.Emojis, f"badge_{badge}", None)): + badges.append(emoji) + + activity = await self.user_messages(user) + + if on_server: + joined = time_since(user.joined_at, max_units=3) + roles = ", ".join(role.mention for role in user.roles[1:]) + membership = {"Joined": joined, "Verified": not user.pending, "Roles": roles or None} + if not is_mod_channel(ctx.channel): + membership.pop("Verified") + + membership = textwrap.dedent("\n".join([f"{key}: {value}" for key, value in membership.items()])) + else: + roles = None + membership = "The user is not a member of the server" + + fields = [ + ( + "User information", + textwrap.dedent(f""" + Created: {created} + Profile: {user.mention} + ID: {user.id} + """).strip() + ), + ( + "Member information", + membership + ), ] # Show more verbose output in moderation channels for infractions and nominations - if ctx.channel.id in constants.MODERATION_CHANNELS: - description.append(await self.expanded_user_infraction_counts(user)) - description.append(await self.user_nomination_counts(user)) + if is_mod_channel(ctx.channel): + fields.append(activity) + + fields.append(await self.expanded_user_infraction_counts(user)) + fields.append(await self.user_nomination_counts(user)) else: - description.append(await self.basic_user_infraction_counts(user)) + fields.append(await self.basic_user_infraction_counts(user)) # Let's build the embed now embed = Embed( title=name, - description="\n\n".join(description) + description=" ".join(badges) ) - embed.set_thumbnail(url=user.avatar_url_as(format="png")) + for field_name, field_content in fields: + embed.add_field(name=field_name, value=field_content, inline=False) + + embed.set_thumbnail(url=user.avatar_url_as(static_format="png")) embed.colour = user.top_role.colour if roles else Colour.blurple() return embed - async def basic_user_infraction_counts(self, member: Member) -> str: + async def basic_user_infraction_counts(self, user: FetchedMember) -> Tuple[str, str]: """Gets the total 
and active infraction counts for the given `member`.""" infractions = await self.bot.api_client.get( 'bot/infractions', params={ 'hidden': 'False', - 'user__id': str(member.id) + 'user__id': str(user.id) } ) total_infractions = len(infractions) active_infractions = sum(infraction['active'] for infraction in infractions) - infraction_output = f"**Infractions**\nTotal: {total_infractions}\nActive: {active_infractions}" + infraction_output = f"Total: {total_infractions}\nActive: {active_infractions}" - return infraction_output + return "Infractions", infraction_output - async def expanded_user_infraction_counts(self, member: Member) -> str: + async def expanded_user_infraction_counts(self, user: FetchedMember) -> Tuple[str, str]: """ Gets expanded infraction counts for the given `member`. @@ -262,13 +303,13 @@ class Information(Cog): infractions = await self.bot.api_client.get( 'bot/infractions', params={ - 'user__id': str(member.id) + 'user__id': str(user.id) } ) - infraction_output = ["**Infractions**"] + infraction_output = [] if not infractions: - infraction_output.append("This user has never received an infraction.") + infraction_output.append("No infractions") else: # Count infractions split by `type` and `active` status for this user infraction_types = set() @@ -291,32 +332,56 @@ class Information(Cog): infraction_output.append(line) - return "\n".join(infraction_output) + return "Infractions", "\n".join(infraction_output) - async def user_nomination_counts(self, member: Member) -> str: + async def user_nomination_counts(self, user: FetchedMember) -> Tuple[str, str]: """Gets the active and historical nomination counts for the given `member`.""" nominations = await self.bot.api_client.get( 'bot/nominations', params={ - 'user__id': str(member.id) + 'user__id': str(user.id) } ) - output = ["**Nominations**"] + output = [] if not nominations: - output.append("This user has never been nominated.") + output.append("No nominations") else: count = len(nominations) is_currently_nominated = any(nomination["active"] for nomination in nominations) nomination_noun = "nomination" if count == 1 else "nominations" if is_currently_nominated: - output.append(f"This user is **currently** nominated ({count} {nomination_noun} in total).") + output.append(f"This user is **currently** nominated\n({count} {nomination_noun} in total)") else: output.append(f"This user has {count} historical {nomination_noun}, but is currently not nominated.") - return "\n".join(output) + return "Nominations", "\n".join(output) + + async def user_messages(self, user: FetchedMember) -> Tuple[Union[bool, str], Tuple[str, str]]: + """ + Gets the amount of messages for `member`. + + Fetches information from the metricity database that's hosted by the site. + If the database returns a code besides a 404, then many parts of the bot are broken including this one. 
+ """ + activity_output = [] + + try: + user_activity = await self.bot.api_client.get(f"bot/users/{user.id}/metricity_data") + except ResponseCodeError as e: + if e.status == 404: + activity_output = "No activity" + else: + activity_output.append(user_activity["total_messages"] or "No messages") + activity_output.append(user_activity["activity_blocks"] or "No activity") + + activity_output = "\n".join( + f"{name}: {metric}" for name, metric in zip(["Messages", "Activity blocks"], activity_output) + ) + + return ("Activity", activity_output) def format_fields(self, mapping: Mapping[str, Any], field_width: Optional[int] = None) -> str: """Format a mapping to be readable to a human.""" @@ -354,15 +419,15 @@ class Information(Cog): return out.rstrip() @cooldown_with_role_bypass(2, 60 * 3, BucketType.member, bypass_roles=constants.STAFF_ROLES) - @group(invoke_without_command=True) - @in_channel(constants.Channels.bot, bypass_roles=constants.STAFF_ROLES) - async def raw(self, ctx: Context, *, message: discord.Message, json: bool = False) -> None: + @group(invoke_without_command=True, enabled=False) + @in_whitelist(channels=(constants.Channels.bot_commands,), roles=constants.STAFF_ROLES) + async def raw(self, ctx: Context, *, message: Message, json: bool = False) -> None: """Shows information about the raw API response.""" # I *guess* it could be deleted right as the command is invoked but I felt like it wasn't worth handling # doing this extra request is also much easier than trying to convert everything back into a dictionary again raw_data = await ctx.bot.http.get_message(message.channel.id, message.id) - paginator = commands.Paginator() + paginator = Paginator() def add_content(title: str, content: str) -> None: paginator.add_line(f'== {title} ==\n') @@ -389,8 +454,8 @@ class Information(Cog): for page in paginator.pages: await ctx.send(page) - @raw.command() - async def json(self, ctx: Context, message: discord.Message) -> None: + @raw.command(enabled=False) + async def json(self, ctx: Context, message: Message) -> None: """Shows information about the raw API response in a copy-pasteable Python format.""" await ctx.invoke(self.raw, message=message, json=True) diff --git a/bot/exts/info/pep.py b/bot/exts/info/pep.py new file mode 100644 index 000000000..8ac96bbdb --- /dev/null +++ b/bot/exts/info/pep.py @@ -0,0 +1,164 @@ +import logging +from datetime import datetime, timedelta +from email.parser import HeaderParser +from io import StringIO +from typing import Dict, Optional, Tuple + +from discord import Colour, Embed +from discord.ext.commands import Cog, Context, command + +from bot.bot import Bot +from bot.constants import Keys +from bot.utils.cache import AsyncCache + +log = logging.getLogger(__name__) + +ICON_URL = "https://www.python.org/static/opengraph-icon-200x200.png" +BASE_PEP_URL = "http://www.python.org/dev/peps/pep-" +PEPS_LISTING_API_URL = "https://api.github.com/repos/python/peps/contents?ref=master" + +pep_cache = AsyncCache() + +GITHUB_API_HEADERS = {} +if Keys.github: + GITHUB_API_HEADERS["Authorization"] = f"token {Keys.github}" + + +class PythonEnhancementProposals(Cog): + """Cog for displaying information about PEPs.""" + + def __init__(self, bot: Bot): + self.bot = bot + self.peps: Dict[int, str] = {} + # To avoid situations where we don't have last datetime, set this to now. 
+ self.last_refreshed_peps: datetime = datetime.now() + self.bot.loop.create_task(self.refresh_peps_urls()) + + async def refresh_peps_urls(self) -> None: + """Refresh PEP URLs listing in every 3 hours.""" + # Wait until HTTP client is available + await self.bot.wait_until_ready() + log.trace("Started refreshing PEP URLs.") + self.last_refreshed_peps = datetime.now() + + async with self.bot.http_session.get( + PEPS_LISTING_API_URL, + headers=GITHUB_API_HEADERS + ) as resp: + if resp.status != 200: + log.warning(f"Fetching PEP URLs from GitHub API failed with code {resp.status}") + return + + listing = await resp.json() + + log.trace("Got PEP URLs listing from GitHub API") + + for file in listing: + name = file["name"] + if name.startswith("pep-") and name.endswith((".rst", ".txt")): + pep_number = name.replace("pep-", "").split(".")[0] + self.peps[int(pep_number)] = file["download_url"] + + log.info("Successfully refreshed PEP URLs listing.") + + @staticmethod + def get_pep_zero_embed() -> Embed: + """Get information embed about PEP 0.""" + pep_embed = Embed( + title="**PEP 0 - Index of Python Enhancement Proposals (PEPs)**", + url="https://www.python.org/dev/peps/" + ) + pep_embed.set_thumbnail(url=ICON_URL) + pep_embed.add_field(name="Status", value="Active") + pep_embed.add_field(name="Created", value="13-Jul-2000") + pep_embed.add_field(name="Type", value="Informational") + + return pep_embed + + async def validate_pep_number(self, pep_nr: int) -> Optional[Embed]: + """Validate is PEP number valid. When it isn't, return error embed, otherwise None.""" + if ( + pep_nr not in self.peps + and (self.last_refreshed_peps + timedelta(minutes=30)) <= datetime.now() + and len(str(pep_nr)) < 5 + ): + await self.refresh_peps_urls() + + if pep_nr not in self.peps: + log.trace(f"PEP {pep_nr} was not found") + return Embed( + title="PEP not found", + description=f"PEP {pep_nr} does not exist.", + colour=Colour.red() + ) + + return None + + def generate_pep_embed(self, pep_header: Dict, pep_nr: int) -> Embed: + """Generate PEP embed based on PEP headers data.""" + # Assemble the embed + pep_embed = Embed( + title=f"**PEP {pep_nr} - {pep_header['Title']}**", + description=f"[Link]({BASE_PEP_URL}{pep_nr:04})", + ) + + pep_embed.set_thumbnail(url=ICON_URL) + + # Add the interesting information + fields_to_check = ("Status", "Python-Version", "Created", "Type") + for field in fields_to_check: + # Check for a PEP metadata field that is present but has an empty value + # embed field values can't contain an empty string + if pep_header.get(field, ""): + pep_embed.add_field(name=field, value=pep_header[field]) + + return pep_embed + + @pep_cache(arg_offset=1) + async def get_pep_embed(self, pep_nr: int) -> Tuple[Embed, bool]: + """Fetch, generate and return PEP embed. Second item of return tuple show does getting success.""" + response = await self.bot.http_session.get(self.peps[pep_nr]) + + if response.status == 200: + log.trace(f"PEP {pep_nr} found") + pep_content = await response.text() + + # Taken from https://github.com/python/peps/blob/master/pep0/pep.py#L179 + pep_header = HeaderParser().parse(StringIO(pep_content)) + return self.generate_pep_embed(pep_header, pep_nr), True + else: + log.trace( + f"The user requested PEP {pep_nr}, but the response had an unexpected status code: {response.status}." + ) + return Embed( + title="Unexpected error", + description="Unexpected HTTP error during PEP search. 
Please let us know.", + colour=Colour.red() + ), False + + @command(name='pep', aliases=('get_pep', 'p')) + async def pep_command(self, ctx: Context, pep_number: int) -> None: + """Fetches information about a PEP and sends it to the channel.""" + # Trigger typing in chat to show users that bot is responding + await ctx.trigger_typing() + + # Handle PEP 0 directly because it's not in .rst or .txt so it can't be accessed like other PEPs. + if pep_number == 0: + pep_embed = self.get_pep_zero_embed() + success = True + else: + success = False + if not (pep_embed := await self.validate_pep_number(pep_number)): + pep_embed, success = await self.get_pep_embed(pep_number) + + await ctx.send(embed=pep_embed) + if success: + log.trace(f"PEP {pep_number} getting and sending finished successfully. Increasing stat.") + self.bot.stats.incr(f"pep_fetches.{pep_number}") + else: + log.trace(f"Getting PEP {pep_number} failed. Error embed sent.") + + +def setup(bot: Bot) -> None: + """Load the PEP cog.""" + bot.add_cog(PythonEnhancementProposals(bot)) diff --git a/bot/exts/info/python_news.py b/bot/exts/info/python_news.py new file mode 100644 index 000000000..0ab5738a4 --- /dev/null +++ b/bot/exts/info/python_news.py @@ -0,0 +1,232 @@ +import logging +import typing as t +from datetime import date, datetime + +import discord +import feedparser +from bs4 import BeautifulSoup +from discord.ext.commands import Cog +from discord.ext.tasks import loop + +from bot import constants +from bot.bot import Bot +from bot.utils.webhooks import send_webhook + +PEPS_RSS_URL = "https://www.python.org/dev/peps/peps.rss/" + +RECENT_THREADS_TEMPLATE = "https://mail.python.org/archives/list/{name}@python.org/recent-threads" +THREAD_TEMPLATE_URL = "https://mail.python.org/archives/api/list/{name}@python.org/thread/{id}/" +MAILMAN_PROFILE_URL = "https://mail.python.org/archives/users/{id}/" +THREAD_URL = "https://mail.python.org/archives/list/{list}@python.org/thread/{id}/" + +AVATAR_URL = "https://www.python.org/static/opengraph-icon-200x200.png" + +log = logging.getLogger(__name__) + + +class PythonNews(Cog): + """Post new PEPs and Python News to `#python-news`.""" + + def __init__(self, bot: Bot): + self.bot = bot + self.webhook_names = {} + self.webhook: t.Optional[discord.Webhook] = None + + self.bot.loop.create_task(self.get_webhook_names()) + self.bot.loop.create_task(self.get_webhook_and_channel()) + + async def start_tasks(self) -> None: + """Start the tasks for fetching new PEPs and mailing list messages.""" + self.fetch_new_media.start() + + @loop(minutes=20) + async def fetch_new_media(self) -> None: + """Fetch new mailing list messages and then new PEPs.""" + await self.post_maillist_news() + await self.post_pep_news() + + async def sync_maillists(self) -> None: + """Sync currently in-use maillists with API.""" + # Wait until guild is available to avoid running before everything is ready + await self.bot.wait_until_guild_available() + + response = await self.bot.api_client.get("bot/bot-settings/news") + for mail in constants.PythonNews.mail_lists: + if mail not in response["data"]: + response["data"][mail] = [] + + # Because we are handling PEPs differently, we don't include it to mail lists + if "pep" not in response["data"]: + response["data"]["pep"] = [] + + await self.bot.api_client.put("bot/bot-settings/news", json=response) + + async def get_webhook_names(self) -> None: + """Get webhook author names from maillist API.""" + await self.bot.wait_until_guild_available() + + async with 
self.bot.http_session.get("https://mail.python.org/archives/api/lists") as resp: + lists = await resp.json() + + for mail in lists: + if mail["name"].split("@")[0] in constants.PythonNews.mail_lists: + self.webhook_names[mail["name"].split("@")[0]] = mail["display_name"] + + async def post_pep_news(self) -> None: + """Fetch new PEPs and when they don't have announcement in #python-news, create it.""" + # Wait until everything is ready and http_session available + await self.bot.wait_until_guild_available() + await self.sync_maillists() + + async with self.bot.http_session.get(PEPS_RSS_URL) as resp: + data = feedparser.parse(await resp.text("utf-8")) + + news_listing = await self.bot.api_client.get("bot/bot-settings/news") + payload = news_listing.copy() + pep_numbers = news_listing["data"]["pep"] + + # Reverse entries to send oldest first + data["entries"].reverse() + for new in data["entries"]: + try: + new_datetime = datetime.strptime(new["published"], "%a, %d %b %Y %X %Z") + except ValueError: + log.warning(f"Wrong datetime format passed in PEP new: {new['published']}") + continue + pep_nr = new["title"].split(":")[0].split()[1] + if ( + pep_nr in pep_numbers + or new_datetime.date() < date.today() + ): + continue + + # Build an embed and send a webhook + embed = discord.Embed( + title=new["title"], + description=new["summary"], + timestamp=new_datetime, + url=new["link"], + colour=constants.Colours.soft_green + ) + embed.set_footer(text=data["feed"]["title"], icon_url=AVATAR_URL) + msg = await send_webhook( + webhook=self.webhook, + username=data["feed"]["title"], + embed=embed, + avatar_url=AVATAR_URL, + wait=True, + ) + payload["data"]["pep"].append(pep_nr) + + # Increase overall PEP new stat + self.bot.stats.incr("python_news.posted.pep") + + if msg.channel.is_news(): + log.trace("Publishing PEP annnouncement because it was in a news channel") + await msg.publish() + + # Apply new sent news to DB to avoid duplicate sending + await self.bot.api_client.put("bot/bot-settings/news", json=payload) + + async def post_maillist_news(self) -> None: + """Send new maillist threads to #python-news that is listed in configuration.""" + await self.bot.wait_until_guild_available() + await self.sync_maillists() + existing_news = await self.bot.api_client.get("bot/bot-settings/news") + payload = existing_news.copy() + + for maillist in constants.PythonNews.mail_lists: + async with self.bot.http_session.get(RECENT_THREADS_TEMPLATE.format(name=maillist)) as resp: + recents = BeautifulSoup(await resp.text(), features="lxml") + + # When a <p> element is present in the response then the mailing list + # has not had any activity during the current month, so therefore it + # can be ignored. 
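A tiny, self-contained illustration (the sample HTML is invented, and "html.parser" is used to keep the sketch dependency-free) of the .p presence check performed just below: BeautifulSoup attribute access returns the first matching tag, or None when the tag is absent, so its truthiness tells a quiet month apart from one with threads.

from bs4 import BeautifulSoup

quiet_month = "<html><body><div><p>No activity this month.</p></div></body></html>"
busy_month = '<html><body><div><a href="/thread/abc123/">A thread</a></div></body></html>'

bool(BeautifulSoup(quiet_month, "html.parser").p)   # True: skip this mailing list
bool(BeautifulSoup(busy_month, "html.parser").p)    # False: walk its <a> links instead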
+ if recents.p: + continue + + for thread in recents.html.body.div.find_all("a", href=True): + # We want only these threads that have identifiers + if "latest" in thread["href"]: + continue + + thread_information, email_information = await self.get_thread_and_first_mail( + maillist, thread["href"].split("/")[-2] + ) + + try: + new_date = datetime.strptime(email_information["date"], "%Y-%m-%dT%X%z") + except ValueError: + log.warning(f"Invalid datetime from Thread email: {email_information['date']}") + continue + + if ( + thread_information["thread_id"] in existing_news["data"][maillist] + or 'Re: ' in thread_information["subject"] + or new_date.date() < date.today() + ): + continue + + content = email_information["content"] + link = THREAD_URL.format(id=thread["href"].split("/")[-2], list=maillist) + + # Build an embed and send a message to the webhook + embed = discord.Embed( + title=thread_information["subject"], + description=content[:500] + f"... [continue reading]({link})" if len(content) > 500 else content, + timestamp=new_date, + url=link, + colour=constants.Colours.soft_green + ) + embed.set_author( + name=f"{email_information['sender_name']} ({email_information['sender']['address']})", + url=MAILMAN_PROFILE_URL.format(id=email_information["sender"]["mailman_id"]), + ) + embed.set_footer( + text=f"Posted to {self.webhook_names[maillist]}", + icon_url=AVATAR_URL, + ) + msg = await send_webhook( + webhook=self.webhook, + username=self.webhook_names[maillist], + embed=embed, + avatar_url=AVATAR_URL, + wait=True, + ) + payload["data"][maillist].append(thread_information["thread_id"]) + + # Increase this specific maillist counter in stats + self.bot.stats.incr(f"python_news.posted.{maillist.replace('-', '_')}") + + if msg.channel.is_news(): + log.trace("Publishing mailing list message because it was in a news channel") + await msg.publish() + + await self.bot.api_client.put("bot/bot-settings/news", json=payload) + + async def get_thread_and_first_mail(self, maillist: str, thread_identifier: str) -> t.Tuple[t.Any, t.Any]: + """Get mail thread and first mail from mail.python.org based on `maillist` and `thread_identifier`.""" + async with self.bot.http_session.get( + THREAD_TEMPLATE_URL.format(name=maillist, id=thread_identifier) + ) as resp: + thread_information = await resp.json() + + async with self.bot.http_session.get(thread_information["starting_email"]) as resp: + email_information = await resp.json() + return thread_information, email_information + + async def get_webhook_and_channel(self) -> None: + """Storage #python-news channel Webhook and `TextChannel` to `News.webhook` and `channel`.""" + await self.bot.wait_until_guild_available() + self.webhook = await self.bot.fetch_webhook(constants.PythonNews.webhook) + + await self.start_tasks() + + def cog_unload(self) -> None: + """Stop news posting tasks on cog unload.""" + self.fetch_new_media.cancel() + + +def setup(bot: Bot) -> None: + """Add `News` cog.""" + bot.add_cog(PythonNews(bot)) diff --git a/bot/cogs/reddit.py b/bot/exts/info/reddit.py index aa487f18e..6790be762 100644 --- a/bot/cogs/reddit.py +++ b/bot/exts/info/reddit.py @@ -8,14 +8,15 @@ from typing import List from aiohttp import BasicAuth, ClientError from discord import Colour, Embed, TextChannel -from discord.ext.commands import Cog, Context, group +from discord.ext.commands import Cog, Context, group, has_any_role from discord.ext.tasks import loop +from discord.utils import escape_markdown, sleep_until from bot.bot import Bot from bot.constants import Channels, 
ERROR_REPLIES, Emojis, Reddit as RedditConfig, STAFF_ROLES, Webhooks from bot.converters import Subreddit -from bot.decorators import with_role from bot.pagination import LinePaginator +from bot.utils.messages import sub_clyde log = logging.getLogger(__name__) @@ -43,12 +44,12 @@ class Reddit(Cog): def cog_unload(self) -> None: """Stop the loop task and revoke the access token when the cog is unloaded.""" self.auto_poster_loop.cancel() - if self.access_token.expires_at < datetime.utcnow(): - self.revoke_access_token() + if self.access_token and self.access_token.expires_at > datetime.utcnow(): + self.bot.closing_tasks.append(asyncio.create_task(self.revoke_access_token())) async def init_reddit_ready(self) -> None: """Sets the reddit webhook when the cog is loaded.""" - await self.bot.wait_until_ready() + await self.bot.wait_until_guild_available() if not self.webhook: self.webhook = await self.bot.fetch_webhook(Webhooks.reddit) @@ -83,7 +84,7 @@ class Reddit(Cog): expires_at=datetime.utcnow() + timedelta(seconds=expiration) ) - log.debug(f"New token acquired; expires on {self.access_token.expires_at}") + log.debug(f"New token acquired; expires on UTC {self.access_token.expires_at}") return else: log.debug( @@ -139,7 +140,10 @@ class Reddit(Cog): # Got appropriate response - process and return. content = await response.json() posts = content["data"]["children"] - return posts[:amount] + + filtered_posts = [post for post in posts if not post["data"]["over_18"]] + + return filtered_posts[:amount] await asyncio.sleep(3) @@ -162,12 +166,11 @@ class Reddit(Cog): amount=amount, params={"t": time} ) - if not posts: embed.title = random.choice(ERROR_REPLIES) embed.colour = Colour.red() embed.description = ( - "Sorry! We couldn't find any posts from that subreddit. " + "Sorry! We couldn't find any SFW posts from that subreddit. " "If this problem persists, please let us know." ) @@ -186,6 +189,8 @@ class Reddit(Cog): author = data["author"] title = textwrap.shorten(data["title"], width=64, placeholder="...") + # Normal brackets interfere with Markdown. 
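A short illustration (the post title and URL are invented) of why the brackets are swapped for look-alike characters before the title is wrapped in a Markdown link below: a literal "]" inside the title would terminate the link label early.

from discord.utils import escape_markdown

title = "[Help] my decorator eats *args"
link = "https://reddit.com/r/Python/comments/abc123"

unsafe = f"[{title}]({link})"
# Discord typically ends the link label at the first "]", so most of the title
# is rendered as stray text next to a broken link.

safe = escape_markdown(title).replace("[", "⦋").replace("]", "⦌")
fixed = f"[{safe}]({link})"
# The whole title now renders inside a single clickable link.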
+ title = escape_markdown(title).replace("[", "⦋").replace("]", "⦌") link = self.URL + data["permalink"] embed.description += ( @@ -200,15 +205,15 @@ class Reddit(Cog): @loop() async def auto_poster_loop(self) -> None: """Post the top 5 posts daily, and the top 5 posts weekly.""" - # once we upgrade to d.py 1.3 this can be removed and the loop can use the `time=datetime.time.min` parameter + # once d.py get support for `time` parameter in loop decorator, + # this can be removed and the loop can use the `time=datetime.time.min` parameter now = datetime.utcnow() tomorrow = now + timedelta(days=1) midnight_tomorrow = tomorrow.replace(hour=0, minute=0, second=0) - seconds_until = (midnight_tomorrow - now).total_seconds() - await asyncio.sleep(seconds_until) + await sleep_until(midnight_tomorrow) - await self.bot.wait_until_ready() + await self.bot.wait_until_guild_available() if not self.webhook: await self.bot.fetch_webhook(Webhooks.reddit) @@ -218,15 +223,19 @@ class Reddit(Cog): for subreddit in RedditConfig.subreddits: top_posts = await self.get_top_posts(subreddit=subreddit, time="day") - await self.webhook.send(username=f"{subreddit} Top Daily Posts", embed=top_posts) + username = sub_clyde(f"{subreddit} Top Daily Posts") + message = await self.webhook.send(username=username, embed=top_posts, wait=True) + + if message.channel.is_news(): + await message.publish() async def top_weekly_posts(self) -> None: """Post a summary of the top posts.""" for subreddit in RedditConfig.subreddits: # Send and pin the new weekly posts. top_posts = await self.get_top_posts(subreddit=subreddit, time="week") - - message = await self.webhook.send(wait=True, username=f"{subreddit} Top Weekly Posts", embed=top_posts) + username = sub_clyde(f"{subreddit} Top Weekly Posts") + message = await self.webhook.send(wait=True, username=username, embed=top_posts) if subreddit.lower() == "r/python": if not self.channel: @@ -242,10 +251,13 @@ class Reddit(Cog): await message.pin() + if message.channel.is_news(): + await message.publish() + @group(name="reddit", invoke_without_command=True) async def reddit_group(self, ctx: Context) -> None: """View the top posts from various subreddits.""" - await ctx.invoke(self.bot.get_command("help"), "reddit") + await ctx.send_help(ctx.command) @reddit_group.command(name="top") async def top_command(self, ctx: Context, subreddit: Subreddit = "r/Python") -> None: @@ -271,7 +283,7 @@ class Reddit(Cog): await ctx.send(content=f"Here are this week's top {subreddit} posts!", embed=embed) - @with_role(*STAFF_ROLES) + @has_any_role(*STAFF_ROLES) @reddit_group.command(name="subreddits", aliases=("subs",)) async def subreddits_command(self, ctx: Context) -> None: """Send a paginated embed of all the subreddits we're relaying.""" @@ -290,4 +302,7 @@ class Reddit(Cog): def setup(bot: Bot) -> None: """Load the Reddit cog.""" + if not RedditConfig.secret or not RedditConfig.client_id: + log.error("Credentials not provided, cog not loaded.") + return bot.add_cog(Reddit(bot)) diff --git a/bot/cogs/site.py b/bot/exts/info/site.py index 853e29568..fb5b99086 100644 --- a/bot/cogs/site.py +++ b/bot/exts/info/site.py @@ -1,7 +1,7 @@ import logging from discord import Colour, Embed -from discord.ext.commands import Cog, Context, group +from discord.ext.commands import Cog, Context, Greedy, group from bot.bot import Bot from bot.constants import URLs @@ -21,9 +21,9 @@ class Site(Cog): @group(name="site", aliases=("s",), invoke_without_command=True) async def site_group(self, ctx: Context) -> None: 
"""Commands for getting info about our website.""" - await ctx.invoke(self.bot.get_command("help"), "site") + await ctx.send_help(ctx.command) - @site_group.command(name="home", aliases=("about",)) + @site_group.command(name="home", aliases=("about",), root_aliases=("home",)) async def site_main(self, ctx: Context) -> None: """Info about the website itself.""" url = f"{URLs.site_schema}{URLs.site}/" @@ -33,14 +33,14 @@ class Site(Cog): embed.colour = Colour.blurple() embed.description = ( f"[Our official website]({url}) is an open-source community project " - "created with Python and Flask. It contains information about the server " + "created with Python and Django. It contains information about the server " "itself, lets you sign up for upcoming events, has its own wiki, contains " "a list of valuable learning resources, and much more." ) await ctx.send(embed=embed) - @site_group.command(name="resources") + @site_group.command(name="resources", root_aliases=("resources", "resource")) async def site_resources(self, ctx: Context) -> None: """Info about the site's Resources page.""" learning_url = f"{PAGES_URL}/resources" @@ -56,7 +56,7 @@ class Site(Cog): await ctx.send(embed=embed) - @site_group.command(name="tools") + @site_group.command(name="tools", root_aliases=("tools",)) async def site_tools(self, ctx: Context) -> None: """Info about the site's Tools page.""" tools_url = f"{PAGES_URL}/resources/tools" @@ -87,7 +87,7 @@ class Site(Cog): await ctx.send(embed=embed) - @site_group.command(name="faq") + @site_group.command(name="faq", root_aliases=("faq",)) async def site_faq(self, ctx: Context) -> None: """Info about the site's FAQ page.""" url = f"{PAGES_URL}/frequently-asked-questions" @@ -104,11 +104,10 @@ class Site(Cog): await ctx.send(embed=embed) - @site_group.command(aliases=['r', 'rule'], name='rules') - async def site_rules(self, ctx: Context, *rules: int) -> None: + @site_group.command(name="rules", aliases=("r", "rule"), root_aliases=("rules", "rule")) + async def site_rules(self, ctx: Context, rules: Greedy[int]) -> None: """Provides a link to all rules or, if specified, displays specific rule(s).""" - rules_embed = Embed(title='Rules', color=Colour.blurple()) - rules_embed.url = f"{PAGES_URL}/rules" + rules_embed = Embed(title='Rules', color=Colour.blurple(), url=f'{PAGES_URL}/rules') if not rules: # Rules were not submitted. Return the default description. 
@@ -122,17 +121,18 @@ class Site(Cog): return full_rules = await self.bot.api_client.get('rules', params={'link_format': 'md'}) - invalid_indices = tuple( - pick - for pick in rules - if pick < 1 or pick > len(full_rules) - ) - if invalid_indices: - indices = ', '.join(map(str, invalid_indices)) - await ctx.send(f":x: Invalid rule indices: {indices}") + # Remove duplicates and sort the rule indices + rules = sorted(set(rules)) + invalid = ', '.join(str(index) for index in rules if index < 1 or index > len(full_rules)) + + if invalid: + await ctx.send(f":x: Invalid rule indices: {invalid}") return + for rule in rules: + self.bot.stats.incr(f"rule_uses.{rule}") + final_rules = tuple(f"**{pick}.** {full_rules[pick - 1]}" for pick in rules) await LinePaginator.paginate(final_rules, ctx, rules_embed, max_lines=3) diff --git a/bot/exts/info/source.py b/bot/exts/info/source.py new file mode 100644 index 000000000..7b41352d4 --- /dev/null +++ b/bot/exts/info/source.py @@ -0,0 +1,131 @@ +import inspect +from pathlib import Path +from typing import Optional, Tuple, Union + +from discord import Embed, utils +from discord.ext import commands + +from bot.bot import Bot +from bot.constants import URLs + +SourceType = Union[commands.HelpCommand, commands.Command, commands.Cog, str, commands.ExtensionNotLoaded] + + +class SourceConverter(commands.Converter): + """Convert an argument into a help command, tag, command, or cog.""" + + async def convert(self, ctx: commands.Context, argument: str) -> SourceType: + """Convert argument into source object.""" + if argument.lower().startswith("help"): + return ctx.bot.help_command + + cog = ctx.bot.get_cog(argument) + if cog: + return cog + + cmd = ctx.bot.get_command(argument) + if cmd: + return cmd + + tags_cog = ctx.bot.get_cog("Tags") + show_tag = True + + if not tags_cog: + show_tag = False + elif argument.lower() in tags_cog._cache: + return argument.lower() + + escaped_arg = utils.escape_markdown(argument) + + raise commands.BadArgument( + f"Unable to convert '{escaped_arg}' to valid command{', tag,' if show_tag else ''} or Cog." + ) + + +class BotSource(commands.Cog): + """Displays information about the bot's source code.""" + + def __init__(self, bot: Bot): + self.bot = bot + + @commands.command(name="source", aliases=("src",)) + async def source_command(self, ctx: commands.Context, *, source_item: SourceConverter = None) -> None: + """Display information and a GitHub link to the source code of a command, tag, or cog.""" + if not source_item: + embed = Embed(title="Bot's GitHub Repository") + embed.add_field(name="Repository", value=f"[Go to GitHub]({URLs.github_bot_repo})") + embed.set_thumbnail(url="https://avatars1.githubusercontent.com/u/9919") + await ctx.send(embed=embed) + return + + embed = await self.build_embed(source_item) + await ctx.send(embed=embed) + + def get_source_link(self, source_item: SourceType) -> Tuple[str, str, Optional[int]]: + """ + Build GitHub link of source item, return this link, file location and first line number. + + Raise BadArgument if `source_item` is a dynamically-created object (e.g. via internal eval). 
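Condensed to its essence (the helper name, repository URL, and branch are illustrative rather than taken from the patch), the link-building approach described here amounts to:

import inspect
from pathlib import Path

def github_permalink(obj, repo="https://github.com/python-discord/bot", branch="master"):
    """Illustrative only: build a GitHub #L-range link for an ordinary function or class."""
    filename = inspect.getsourcefile(obj)        # raises TypeError for built-ins and dynamic objects
    lines, first = inspect.getsourcelines(obj)   # raises OSError when the source is unavailable
    # Assumes the object lives under the working directory, as project code does.
    location = Path(filename).relative_to(Path.cwd()).as_posix()
    return f"{repo}/blob/{branch}/{location}#L{first}-L{first + len(lines) - 1}"

Tags are handled separately because there is no live object to inspect; the cog reads the file path stored in the tag cache instead.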
+ """ + if isinstance(source_item, commands.Command): + src = source_item.callback.__code__ + filename = src.co_filename + elif isinstance(source_item, str): + tags_cog = self.bot.get_cog("Tags") + filename = tags_cog._cache[source_item]["location"] + else: + src = type(source_item) + try: + filename = inspect.getsourcefile(src) + except TypeError: + raise commands.BadArgument("Cannot get source for a dynamically-created object.") + + if not isinstance(source_item, str): + try: + lines, first_line_no = inspect.getsourcelines(src) + except OSError: + raise commands.BadArgument("Cannot get source for a dynamically-created object.") + + lines_extension = f"#L{first_line_no}-L{first_line_no+len(lines)-1}" + else: + first_line_no = None + lines_extension = "" + + # Handle tag file location differently than others to avoid errors in some cases + if not first_line_no: + file_location = Path(filename).relative_to("/bot/") + else: + file_location = Path(filename).relative_to(Path.cwd()).as_posix() + + url = f"{URLs.github_bot_repo}/blob/master/{file_location}{lines_extension}" + + return url, file_location, first_line_no or None + + async def build_embed(self, source_object: SourceType) -> Optional[Embed]: + """Build embed based on source object.""" + url, location, first_line = self.get_source_link(source_object) + + if isinstance(source_object, commands.HelpCommand): + title = "Help Command" + description = source_object.__doc__.splitlines()[1] + elif isinstance(source_object, commands.Command): + description = source_object.short_doc + title = f"Command: {source_object.qualified_name}" + elif isinstance(source_object, str): + title = f"Tag: {source_object}" + description = "" + else: + title = f"Cog: {source_object.qualified_name}" + description = source_object.description.splitlines()[0] + + embed = Embed(title=title, description=description) + embed.add_field(name="Source Code", value=f"[Go to GitHub]({url})") + line_text = f":{first_line}" if first_line else "" + embed.set_footer(text=f"{location}{line_text}") + + return embed + + +def setup(bot: Bot) -> None: + """Load the BotSource cog.""" + bot.add_cog(BotSource(bot)) diff --git a/bot/exts/info/stats.py b/bot/exts/info/stats.py new file mode 100644 index 000000000..4d8bb645e --- /dev/null +++ b/bot/exts/info/stats.py @@ -0,0 +1,95 @@ +import string + +from discord import Member, Message +from discord.ext.commands import Cog, Context +from discord.ext.tasks import loop + +from bot.bot import Bot +from bot.constants import Categories, Channels, Guild +from bot.utils.channel import is_in_category + +CHANNEL_NAME_OVERRIDES = { + Channels.off_topic_0: "off_topic_0", + Channels.off_topic_1: "off_topic_1", + Channels.off_topic_2: "off_topic_2", + Channels.staff_lounge: "staff_lounge" +} + +ALLOWED_CHARS = string.ascii_letters + string.digits + "_" + + +class Stats(Cog): + """A cog which provides a way to hook onto Discord events and forward to stats.""" + + def __init__(self, bot: Bot): + self.bot = bot + self.last_presence_update = None + self.update_guild_boost.start() + + @Cog.listener() + async def on_message(self, message: Message) -> None: + """Report message events in the server to statsd.""" + if message.guild is None: + return + + if message.guild.id != Guild.id: + return + + if is_in_category(message.channel, Categories.modmail): + if message.channel.id != Channels.incidents: + # Do not report modmail channels to stats, there are too many + # of them for interesting statistics to be drawn out of this. 
+ return + + reformatted_name = message.channel.name.replace('-', '_') + + if CHANNEL_NAME_OVERRIDES.get(message.channel.id): + reformatted_name = CHANNEL_NAME_OVERRIDES.get(message.channel.id) + + reformatted_name = "".join(char for char in reformatted_name if char in ALLOWED_CHARS) + + stat_name = f"channels.{reformatted_name}" + self.bot.stats.incr(stat_name) + + # Increment the total message count + self.bot.stats.incr("messages") + + @Cog.listener() + async def on_command_completion(self, ctx: Context) -> None: + """Report completed commands to statsd.""" + command_name = ctx.command.qualified_name.replace(" ", "_") + + self.bot.stats.incr(f"commands.{command_name}") + + @Cog.listener() + async def on_member_join(self, member: Member) -> None: + """Update member count stat on member join.""" + if member.guild.id != Guild.id: + return + + self.bot.stats.gauge("guild.total_members", len(member.guild.members)) + + @Cog.listener() + async def on_member_leave(self, member: Member) -> None: + """Update member count stat on member leave.""" + if member.guild.id != Guild.id: + return + + self.bot.stats.gauge("guild.total_members", len(member.guild.members)) + + @loop(hours=1) + async def update_guild_boost(self) -> None: + """Post the server boost level and tier every hour.""" + await self.bot.wait_until_guild_available() + g = self.bot.get_guild(Guild.id) + self.bot.stats.gauge("boost.amount", g.premium_subscription_count) + self.bot.stats.gauge("boost.tier", g.premium_tier) + + def cog_unload(self) -> None: + """Stop the boost statistic task on unload of the Cog.""" + self.update_guild_boost.stop() + + +def setup(bot: Bot) -> None: + """Load the stats cog.""" + bot.add_cog(Stats(bot)) diff --git a/bot/exts/info/tags.py b/bot/exts/info/tags.py new file mode 100644 index 000000000..e3b9d6d5b --- /dev/null +++ b/bot/exts/info/tags.py @@ -0,0 +1,291 @@ +import logging +import re +import time +from pathlib import Path +from typing import Callable, Dict, Iterable, List, Optional + +from discord import Colour, Embed, Member +from discord.ext.commands import Cog, Context, group + +from bot import constants +from bot.bot import Bot +from bot.converters import TagNameConverter +from bot.pagination import LinePaginator +from bot.utils.messages import wait_for_deletion + +log = logging.getLogger(__name__) + +TEST_CHANNELS = ( + constants.Channels.bot_commands, + constants.Channels.helpers +) + +REGEX_NON_ALPHABET = re.compile(r"[^a-z]", re.MULTILINE & re.IGNORECASE) +FOOTER_TEXT = f"To show a tag, type {constants.Bot.prefix}tags <tagname>." + + +class Tags(Cog): + """Save new tags and fetch existing tags.""" + + def __init__(self, bot: Bot): + self.bot = bot + self.tag_cooldowns = {} + self._cache = self.get_tags() + + @staticmethod + def get_tags() -> dict: + """Get all tags.""" + cache = {} + + base_path = Path("bot", "resources", "tags") + for file in base_path.glob("**/*"): + if file.is_file(): + tag_title = file.stem + tag = { + "title": tag_title, + "embed": { + "description": file.read_text(encoding="utf8"), + }, + "restricted_to": "developers", + "location": f"/bot/{file}" + } + + # Convert to a list to allow negative indexing. + parents = list(file.relative_to(base_path).parents) + if len(parents) > 1: + # -1 would be '.' hence -2 is used as the index. 
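A concrete illustration (the file name is invented) of the parents indexing this comment describes: for a tag nested one level deep, the last element is Path('.'), so parents[-2] is the group directory the tag is restricted to.

from pathlib import Path

base = Path("bot", "resources", "tags")
file = base / "moderation" / "how-to-report.md"   # hypothetical tag file

parents = list(file.relative_to(base).parents)
# parents == [Path('moderation'), Path('.')]
parents[-2].name   # -> "moderation", stored as the tag's "restricted_to" group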
+ tag["restricted_to"] = parents[-2].name + + cache[tag_title] = tag + + return cache + + @staticmethod + def check_accessibility(user: Member, tag: dict) -> bool: + """Check if user can access a tag.""" + return tag["restricted_to"].lower() in [role.name.lower() for role in user.roles] + + @staticmethod + def _fuzzy_search(search: str, target: str) -> float: + """A simple scoring algorithm based on how many letters are found / total, with order in mind.""" + current, index = 0, 0 + _search = REGEX_NON_ALPHABET.sub('', search.lower()) + _targets = iter(REGEX_NON_ALPHABET.split(target.lower())) + _target = next(_targets) + try: + while True: + while index < len(_target) and _search[current] == _target[index]: + current += 1 + index += 1 + index, _target = 0, next(_targets) + except (StopIteration, IndexError): + pass + return current / len(_search) * 100 + + def _get_suggestions(self, tag_name: str, thresholds: Optional[List[int]] = None) -> List[str]: + """Return a list of suggested tags.""" + scores: Dict[str, int] = { + tag_title: Tags._fuzzy_search(tag_name, tag['title']) + for tag_title, tag in self._cache.items() + } + + thresholds = thresholds or [100, 90, 80, 70, 60] + + for threshold in thresholds: + suggestions = [ + self._cache[tag_title] + for tag_title, matching_score in scores.items() + if matching_score >= threshold + ] + if suggestions: + return suggestions + + return [] + + def _get_tag(self, tag_name: str) -> list: + """Get a specific tag.""" + found = [self._cache.get(tag_name.lower(), None)] + if not found[0]: + return self._get_suggestions(tag_name) + return found + + def _get_tags_via_content(self, check: Callable[[Iterable], bool], keywords: str, user: Member) -> list: + """ + Search for tags via contents. + + `predicate` will be the built-in any, all, or a custom callable. Must return a bool. + """ + keywords_processed: List[str] = [] + for keyword in keywords.split(','): + keyword_sanitized = keyword.strip().casefold() + if not keyword_sanitized: + # this happens when there are leading / trailing / consecutive comma. + continue + keywords_processed.append(keyword_sanitized) + + if not keywords_processed: + # after sanitizing, we can end up with an empty list, for example when keywords is ',' + # in that case, we simply want to search for such keywords directly instead. 
+ keywords_processed = [keywords] + + matching_tags = [] + for tag in self._cache.values(): + matches = (query in tag['embed']['description'].casefold() for query in keywords_processed) + if self.check_accessibility(user, tag) and check(matches): + matching_tags.append(tag) + + return matching_tags + + async def _send_matching_tags(self, ctx: Context, keywords: str, matching_tags: list) -> None: + """Send the result of matching tags to user.""" + if not matching_tags: + pass + elif len(matching_tags) == 1: + await ctx.send(embed=Embed().from_dict(matching_tags[0]['embed'])) + else: + is_plural = keywords.strip().count(' ') > 0 or keywords.strip().count(',') > 0 + embed = Embed( + title=f"Here are the tags containing the given keyword{'s' * is_plural}:", + description='\n'.join(tag['title'] for tag in matching_tags[:10]) + ) + await LinePaginator.paginate( + sorted(f"**»** {tag['title']}" for tag in matching_tags), + ctx, + embed, + footer_text=FOOTER_TEXT, + empty=False, + max_lines=15 + ) + + @group(name='tags', aliases=('tag', 't'), invoke_without_command=True) + async def tags_group(self, ctx: Context, *, tag_name: TagNameConverter = None) -> None: + """Show all known tags, a single tag, or run a subcommand.""" + await self.get_command(ctx, tag_name=tag_name) + + @tags_group.group(name='search', invoke_without_command=True) + async def search_tag_content(self, ctx: Context, *, keywords: str) -> None: + """ + Search inside tags' contents for tags. Allow searching for multiple keywords separated by comma. + + Only search for tags that has ALL the keywords. + """ + matching_tags = self._get_tags_via_content(all, keywords, ctx.author) + await self._send_matching_tags(ctx, keywords, matching_tags) + + @search_tag_content.command(name='any') + async def search_tag_content_any_keyword(self, ctx: Context, *, keywords: Optional[str] = 'any') -> None: + """ + Search inside tags' contents for tags. Allow searching for multiple keywords separated by comma. + + Search for tags that has ANY of the keywords. + """ + matching_tags = self._get_tags_via_content(any, keywords or 'any', ctx.author) + await self._send_matching_tags(ctx, keywords, matching_tags) + + async def display_tag(self, ctx: Context, tag_name: str = None) -> bool: + """ + If a tag is not found, display similar tag names as suggestions. + + If a tag is not specified, display a paginated embed of all tags. + + Tags are on cooldowns on a per-tag, per-channel basis. If a tag is on cooldown, display + nothing and return False. + """ + def _command_on_cooldown(tag_name: str) -> bool: + """ + Check if the command is currently on cooldown, on a per-tag, per-channel basis. + + The cooldown duration is set in constants.py. + """ + now = time.time() + + cooldown_conditions = ( + tag_name + and tag_name in self.tag_cooldowns + and (now - self.tag_cooldowns[tag_name]["time"]) < constants.Cooldowns.tags + and self.tag_cooldowns[tag_name]["channel"] == ctx.channel.id + ) + + if cooldown_conditions: + return True + return False + + if _command_on_cooldown(tag_name): + time_elapsed = time.time() - self.tag_cooldowns[tag_name]["time"] + time_left = constants.Cooldowns.tags - time_elapsed + log.info( + f"{ctx.author} tried to get the '{tag_name}' tag, but the tag is on cooldown. " + f"Cooldown ends in {time_left:.1f} seconds." 
+ ) + return False + + if tag_name is not None: + temp_founds = self._get_tag(tag_name) + + founds = [] + + for found_tag in temp_founds: + if self.check_accessibility(ctx.author, found_tag): + founds.append(found_tag) + + if len(founds) == 1: + tag = founds[0] + if ctx.channel.id not in TEST_CHANNELS: + self.tag_cooldowns[tag_name] = { + "time": time.time(), + "channel": ctx.channel.id + } + + self.bot.stats.incr(f"tags.usages.{tag['title'].replace('-', '_')}") + + await wait_for_deletion( + await ctx.send(embed=Embed.from_dict(tag['embed'])), + [ctx.author.id], + ) + return True + elif founds and len(tag_name) >= 3: + await wait_for_deletion( + await ctx.send( + embed=Embed( + title='Did you mean ...', + description='\n'.join(tag['title'] for tag in founds[:10]) + ) + ), + [ctx.author.id], + ) + return True + + else: + tags = self._cache.values() + if not tags: + await ctx.send(embed=Embed( + description="**There are no tags in the database!**", + colour=Colour.red() + )) + return True + else: + embed: Embed = Embed(title="**Current tags**") + await LinePaginator.paginate( + sorted( + f"**»** {tag['title']}" for tag in tags + if self.check_accessibility(ctx.author, tag) + ), + ctx, + embed, + footer_text=FOOTER_TEXT, + empty=False, + max_lines=15 + ) + return True + + return False + + @tags_group.command(name='get', aliases=('show', 'g')) + async def get_command(self, ctx: Context, *, tag_name: TagNameConverter = None) -> None: + """Get a specified tag, or a list of all tags if no tag is specified.""" + await self.display_tag(ctx, tag_name) + + +def setup(bot: Bot) -> None: + """Load the Tags cog.""" + bot.add_cog(Tags(bot)) diff --git a/bot/exts/moderation/__init__.py b/bot/exts/moderation/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/bot/exts/moderation/__init__.py diff --git a/bot/cogs/defcon.py b/bot/exts/moderation/defcon.py index a0d8fedd5..caa6fb917 100644 --- a/bot/cogs/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -6,12 +6,12 @@ from datetime import datetime, timedelta from enum import Enum from discord import Colour, Embed, Member -from discord.ext.commands import Cog, Context, group +from discord.ext.commands import Cog, Context, group, has_any_role from bot.bot import Bot -from bot.cogs.moderation import ModLog -from bot.constants import Channels, Colours, Emojis, Event, Icons, Roles -from bot.decorators import with_role +from bot.constants import Channels, Colours, Emojis, Event, Icons, MODERATION_ROLES, Roles +from bot.exts.moderation.modlog import ModLog +from bot.utils.messages import format_user log = logging.getLogger(__name__) @@ -59,7 +59,7 @@ class Defcon(Cog): async def sync_settings(self) -> None: """On cog load, try to synchronize DEFCON settings to the API.""" - await self.bot.wait_until_ready() + await self.bot.wait_until_guild_available() self.channel = await self.bot.fetch_channel(Channels.defcon) try: @@ -68,8 +68,8 @@ class Defcon(Cog): except Exception: # Yikes! log.exception("Unable to get DEFCON settings!") - await self.bot.get_channel(Channels.devlog).send( - f"<@&{Roles.admin}> **WARNING**: Unable to get DEFCON settings!" + await self.bot.get_channel(Channels.dev_log).send( + f"<@&{Roles.admins}> **WARNING**: Unable to get DEFCON settings!" 
) else: @@ -81,7 +81,7 @@ class Defcon(Cog): else: self.enabled = False self.days = timedelta(days=0) - log.info(f"DEFCON disabled") + log.info("DEFCON disabled") await self.update_channel_topic() @@ -104,9 +104,10 @@ class Defcon(Cog): log.exception(f"Unable to send rejection message to user: {member}") await member.kick(reason="DEFCON active, user is too new") + self.bot.stats.incr("defcon.leaves") message = ( - f"{member} (`{member.id}`) was denied entry because their account is too new." + f"{format_user(member)} was denied entry because their account is too new." ) if not message_sent: @@ -118,13 +119,26 @@ class Defcon(Cog): ) @group(name='defcon', aliases=('dc',), invoke_without_command=True) - @with_role(Roles.admin, Roles.owner) + @has_any_role(*MODERATION_ROLES) async def defcon_group(self, ctx: Context) -> None: """Check the DEFCON status or run a subcommand.""" - await ctx.invoke(self.bot.get_command("help"), "defcon") + await ctx.send_help(ctx.command) async def _defcon_action(self, ctx: Context, days: int, action: Action) -> None: """Providing a structured way to do an defcon action.""" + try: + response = await self.bot.api_client.get('bot/bot-settings/defcon') + data = response['data'] + + if "enable_date" in data and action is Action.DISABLED: + enabled = datetime.fromisoformat(data["enable_date"]) + + delta = datetime.now() - enabled + + self.bot.stats.timing("defcon.enabled", delta) + except Exception: + pass + error = None try: await self.bot.api_client.put( @@ -135,6 +149,7 @@ class Defcon(Cog): # TODO: retrieve old days count 'days': days, 'enabled': action is not Action.DISABLED, + 'enable_date': datetime.now().isoformat() } } ) @@ -145,8 +160,10 @@ class Defcon(Cog): await ctx.send(self.build_defcon_msg(action, error)) await self.send_defcon_log(action, ctx.author, error) - @defcon_group.command(name='enable', aliases=('on', 'e')) - @with_role(Roles.admin, Roles.owner) + self.bot.stats.gauge("defcon.threshold", days) + + @defcon_group.command(name='enable', aliases=('on', 'e'), root_aliases=("defon",)) + @has_any_role(*MODERATION_ROLES) async def enable_command(self, ctx: Context) -> None: """ Enable DEFCON mode. Useful in a pinch, but be sure you know what you're doing! @@ -158,8 +175,8 @@ class Defcon(Cog): await self._defcon_action(ctx, days=0, action=Action.ENABLED) await self.update_channel_topic() - @defcon_group.command(name='disable', aliases=('off', 'd')) - @with_role(Roles.admin, Roles.owner) + @defcon_group.command(name='disable', aliases=('off', 'd'), root_aliases=("defoff",)) + @has_any_role(*MODERATION_ROLES) async def disable_command(self, ctx: Context) -> None: """Disable DEFCON mode. 
Useful in a pinch, but be sure you know what you're doing!""" self.enabled = False @@ -167,7 +184,7 @@ class Defcon(Cog): await self.update_channel_topic() @defcon_group.command(name='status', aliases=('s',)) - @with_role(Roles.admin, Roles.owner) + @has_any_role(*MODERATION_ROLES) async def status_command(self, ctx: Context) -> None: """Check the current status of DEFCON mode.""" embed = Embed( @@ -179,7 +196,7 @@ class Defcon(Cog): await ctx.send(embed=embed) @defcon_group.command(name='days') - @with_role(Roles.admin, Roles.owner) + @has_any_role(*MODERATION_ROLES) async def days_command(self, ctx: Context, days: int) -> None: """Set how old an account must be to join the server, in days, with DEFCON mode enabled.""" self.days = timedelta(days=days) diff --git a/bot/exts/moderation/dm_relay.py b/bot/exts/moderation/dm_relay.py new file mode 100644 index 000000000..4d5142b55 --- /dev/null +++ b/bot/exts/moderation/dm_relay.py @@ -0,0 +1,128 @@ +import logging +from typing import Optional + +import discord +from async_rediscache import RedisCache +from discord import Color +from discord.ext import commands +from discord.ext.commands import Cog + +from bot import constants +from bot.bot import Bot +from bot.converters import UserMentionOrID +from bot.utils.checks import in_whitelist_check +from bot.utils.messages import send_attachments +from bot.utils.webhooks import send_webhook + +log = logging.getLogger(__name__) + + +class DMRelay(Cog): + """Relay direct messages to and from the bot.""" + + # RedisCache[str, t.Union[discord.User.id, discord.Member.id]] + dm_cache = RedisCache() + + def __init__(self, bot: Bot): + self.bot = bot + self.webhook_id = constants.Webhooks.dm_log + self.webhook = None + self.bot.loop.create_task(self.fetch_webhook()) + + @commands.command(aliases=("reply",)) + async def send_dm(self, ctx: commands.Context, member: Optional[UserMentionOrID], *, message: str) -> None: + """ + Allows you to send a DM to a user from the bot. + + If `member` is not provided, it will send to the last user who DM'd the bot. + + This feature should be used extremely sparingly. Use ModMail if you need to have a serious + conversation with a user. This is just for responding to extraordinary DMs, having a little + fun with users, and telling people they are DMing the wrong bot. + + NOTE: This feature will be removed if it is overused. 
+ """ + if not member: + user_id = await self.dm_cache.get("last_user") + member = ctx.guild.get_member(user_id) if user_id else None + + # If we still don't have a Member at this point, give up + if not member: + log.debug("This bot has never gotten a DM, or the RedisCache has been cleared.") + await ctx.message.add_reaction("❌") + return + + try: + await member.send(message) + except discord.errors.Forbidden: + log.debug("User has disabled DMs.") + await ctx.message.add_reaction("❌") + else: + await ctx.message.add_reaction("✅") + self.bot.stats.incr("dm_relay.dm_sent") + + async def fetch_webhook(self) -> None: + """Fetches the webhook object, so we can post to it.""" + await self.bot.wait_until_guild_available() + + try: + self.webhook = await self.bot.fetch_webhook(self.webhook_id) + except discord.HTTPException: + log.exception(f"Failed to fetch webhook with id `{self.webhook_id}`") + + @Cog.listener() + async def on_message(self, message: discord.Message) -> None: + """Relays the message's content and attachments to the dm_log channel.""" + # Only relay DMs from humans + if message.author.bot or message.guild or self.webhook is None: + return + + if message.clean_content: + await send_webhook( + webhook=self.webhook, + content=message.clean_content, + username=f"{message.author.display_name} ({message.author.id})", + avatar_url=message.author.avatar_url + ) + await self.dm_cache.set("last_user", message.author.id) + self.bot.stats.incr("dm_relay.dm_received") + + # Handle any attachments + if message.attachments: + try: + await send_attachments( + message, + self.webhook, + username=f"{message.author.display_name} ({message.author.id})" + ) + except (discord.errors.Forbidden, discord.errors.NotFound): + e = discord.Embed( + description=":x: **This message contained an attachment, but it could not be retrieved**", + color=Color.red() + ) + await send_webhook( + webhook=self.webhook, + embed=e, + username=f"{message.author.display_name} ({message.author.id})", + avatar_url=message.author.avatar_url + ) + except discord.HTTPException: + log.exception("Failed to send an attachment to the webhook") + + async def cog_check(self, ctx: commands.Context) -> bool: + """Only allow moderators to invoke the commands in this cog.""" + checks = [ + await commands.has_any_role(*constants.MODERATION_ROLES).predicate(ctx), + in_whitelist_check( + ctx, + channels=[constants.Channels.dm_log], + redirect=None, + fail_silently=True, + ) + ] + return all(checks) + + +def setup(bot: Bot) -> None: + """Load the DMRelay cog.""" + bot.add_cog(DMRelay(bot)) diff --git a/bot/exts/moderation/incidents.py b/bot/exts/moderation/incidents.py new file mode 100644 index 000000000..0e479d33f --- /dev/null +++ b/bot/exts/moderation/incidents.py @@ -0,0 +1,412 @@ +import asyncio +import logging +import typing as t +from datetime import datetime +from enum import Enum + +import discord +from discord.ext.commands import Cog + +from bot.bot import Bot +from bot.constants import Channels, Colours, Emojis, Guild, Webhooks +from bot.utils.messages import sub_clyde + +log = logging.getLogger(__name__) + +# Amount of messages for `crawl_task` to process at most on start-up - limited to 50 +# as in practice, there should never be this many messages, and if there are, +# something has likely gone very wrong +CRAWL_LIMIT = 50 + +# Seconds for `crawl_task` to sleep after adding reactions to a message +CRAWL_SLEEP = 2 + + +class Signal(Enum): + """ + Recognized incident status signals. + + This binds emoji to actions. 
The bot will only react to emoji linked here. + All other signals are seen as invalid. + """ + + ACTIONED = Emojis.incident_actioned + NOT_ACTIONED = Emojis.incident_unactioned + INVESTIGATING = Emojis.incident_investigating + + +# Reactions from non-mod roles will be removed +ALLOWED_ROLES: t.Set[int] = set(Guild.moderation_roles) + +# Message must have all of these emoji to pass the `has_signals` check +ALL_SIGNALS: t.Set[str] = {signal.value for signal in Signal} + +# An embed coupled with an optional file to be dispatched +# If the file is not None, the embed attempts to show it in its body +FileEmbed = t.Tuple[discord.Embed, t.Optional[discord.File]] + + +async def download_file(attachment: discord.Attachment) -> t.Optional[discord.File]: + """ + Download & return `attachment` file. + + If the download fails, the reason is logged and None will be returned. + 404 and 403 errors are only logged at debug level. + """ + log.debug(f"Attempting to download attachment: {attachment.filename}") + try: + return await attachment.to_file() + except (discord.NotFound, discord.Forbidden) as exc: + log.debug(f"Failed to download attachment: {exc}") + except Exception: + log.exception("Failed to download attachment") + + +async def make_embed(incident: discord.Message, outcome: Signal, actioned_by: discord.Member) -> FileEmbed: + """ + Create an embed representation of `incident` for the #incidents-archive channel. + + The name & discriminator of `actioned_by` and `outcome` will be presented in the + embed footer. Additionally, the embed is coloured based on `outcome`. + + The author of `incident` is not shown in the embed. It is assumed that this piece + of information will be relayed in other ways, e.g. webhook username. + + As mentions in embeds do not ping, we do not need to use `incident.clean_content`. + + If `incident` contains attachments, the first attachment will be downloaded and + returned alongside the embed. The embed attempts to display the attachment. + Should the download fail, we fallback on linking the `proxy_url`, which should + remain functional for some time after the original message is deleted. 
+ """ + log.trace(f"Creating embed for {incident.id=}") + + if outcome is Signal.ACTIONED: + colour = Colours.soft_green + footer = f"Actioned by {actioned_by}" + else: + colour = Colours.soft_red + footer = f"Rejected by {actioned_by}" + + embed = discord.Embed( + description=incident.content, + timestamp=datetime.utcnow(), + colour=colour, + ) + embed.set_footer(text=footer, icon_url=actioned_by.avatar_url) + + if incident.attachments: + attachment = incident.attachments[0] # User-sent messages can only contain one attachment + file = await download_file(attachment) + + if file is not None: + embed.set_image(url=f"attachment://{attachment.filename}") # Embed displays the attached file + else: + embed.set_author(name="[Failed to relay attachment]", url=attachment.proxy_url) # Embed links the file + else: + file = None + + return embed, file + + +def is_incident(message: discord.Message) -> bool: + """True if `message` qualifies as an incident, False otherwise.""" + conditions = ( + message.channel.id == Channels.incidents, # Message sent in #incidents + not message.author.bot, # Not by a bot + not message.content.startswith("#"), # Doesn't start with a hash + not message.pinned, # And isn't header + ) + return all(conditions) + + +def own_reactions(message: discord.Message) -> t.Set[str]: + """Get the set of reactions placed on `message` by the bot itself.""" + return {str(reaction.emoji) for reaction in message.reactions if reaction.me} + + +def has_signals(message: discord.Message) -> bool: + """True if `message` already has all `Signal` reactions, False otherwise.""" + return ALL_SIGNALS.issubset(own_reactions(message)) + + +async def add_signals(incident: discord.Message) -> None: + """ + Add `Signal` member emoji to `incident` as reactions. + + If the emoji has already been placed on `incident` by the bot, it will be skipped. + """ + existing_reacts = own_reactions(incident) + + for signal_emoji in Signal: + if signal_emoji.value in existing_reacts: # This would not raise, but it is a superfluous API call + log.trace(f"Skipping emoji as it's already been placed: {signal_emoji}") + else: + log.trace(f"Adding reaction: {signal_emoji}") + await incident.add_reaction(signal_emoji.value) + + +class Incidents(Cog): + """ + Automation for the #incidents channel. + + This cog does not provide a command API, it only reacts to the following events. + + On start-up: + * Crawl #incidents and add missing `Signal` emoji where appropriate + * This is to retro-actively add the available options for messages which + were sent while the bot wasn't listening + * Pinned messages and message starting with # do not qualify as incidents + * See: `crawl_incidents` + + On message: + * Add `Signal` member emoji if message qualifies as an incident + * Ignore messages starting with # + * Use this if verbal communication is necessary + * Each such message must be deleted manually once appropriate + * See: `on_message` + + On reaction: + * Remove reaction if not permitted + * User does not have any of the roles in `ALLOWED_ROLES` + * Used emoji is not a `Signal` member + * If `Signal.ACTIONED` or `Signal.NOT_ACTIONED` were chosen, attempt to + relay the incident message to #incidents-archive + * If relay successful, delete original message + * See: `on_raw_reaction_add` + + Please refer to function docstrings for implementation details. 
+ """ + + def __init__(self, bot: Bot) -> None: + """Prepare `event_lock` and schedule `crawl_task` on start-up.""" + self.bot = bot + + self.event_lock = asyncio.Lock() + self.crawl_task = self.bot.loop.create_task(self.crawl_incidents()) + + async def crawl_incidents(self) -> None: + """ + Crawl #incidents and add missing emoji where necessary. + + This is to catch-up should an incident be reported while the bot wasn't listening. + After adding each reaction, we take a short break to avoid drowning in ratelimits. + + Once this task is scheduled, listeners that change messages should await it. + The crawl assumes that the channel history doesn't change as we go over it. + + Behaviour is configured by: `CRAWL_LIMIT`, `CRAWL_SLEEP`. + """ + await self.bot.wait_until_guild_available() + incidents: discord.TextChannel = self.bot.get_channel(Channels.incidents) + + log.debug(f"Crawling messages in #incidents: {CRAWL_LIMIT=}, {CRAWL_SLEEP=}") + async for message in incidents.history(limit=CRAWL_LIMIT): + + if not is_incident(message): + log.trace(f"Skipping message {message.id}: not an incident") + continue + + if has_signals(message): + log.trace(f"Skipping message {message.id}: already has all signals") + continue + + await add_signals(message) + await asyncio.sleep(CRAWL_SLEEP) + + log.debug("Crawl task finished!") + + async def archive(self, incident: discord.Message, outcome: Signal, actioned_by: discord.Member) -> bool: + """ + Relay an embed representation of `incident` to the #incidents-archive channel. + + The following pieces of information are relayed: + * Incident message content (as embed description) + * Incident attachment (if image, shown in archive embed) + * Incident author name (as webhook author) + * Incident author avatar (as webhook avatar) + * Resolution signal `outcome` (as embed colour & footer) + * Moderator `actioned_by` (name & discriminator shown in footer) + + If `incident` contains an attachment, we try to add it to the archive embed. There is + no handing of extensions / file types - we simply dispatch the attachment file with the + webhook, and try to display it in the embed. Testing indicates that if the attachment + cannot be displayed (e.g. a text file), it's invisible in the embed, with no error. + + Return True if the relay finishes successfully. If anything goes wrong, meaning + not all information was relayed, return False. This signals that the original + message is not safe to be deleted, as we will lose some information. + """ + log.info(f"Archiving incident: {incident.id} (outcome: {outcome}, actioned by: {actioned_by})") + embed, attachment_file = await make_embed(incident, outcome, actioned_by) + + try: + webhook = await self.bot.fetch_webhook(Webhooks.incidents_archive) + await webhook.send( + embed=embed, + username=sub_clyde(incident.author.name), + avatar_url=incident.author.avatar_url, + file=attachment_file, + ) + except Exception: + log.exception(f"Failed to archive incident {incident.id} to #incidents-archive") + return False + else: + log.trace("Message archived successfully!") + return True + + def make_confirmation_task(self, incident: discord.Message, timeout: int = 5) -> asyncio.Task: + """ + Create a task to wait `timeout` seconds for `incident` to be deleted. + + If `timeout` passes, this will raise `asyncio.TimeoutError`, signaling that we haven't + been able to confirm that the message was deleted. 
+ """ + log.trace(f"Confirmation task will wait {timeout=} seconds for {incident.id=} to be deleted") + + def check(payload: discord.RawReactionActionEvent) -> bool: + return payload.message_id == incident.id + + coroutine = self.bot.wait_for(event="raw_message_delete", check=check, timeout=timeout) + return self.bot.loop.create_task(coroutine) + + async def process_event(self, reaction: str, incident: discord.Message, member: discord.Member) -> None: + """ + Process a `reaction_add` event in #incidents. + + First, we check that the reaction is a recognized `Signal` member, and that it was sent by + a permitted user (at least one role in `ALLOWED_ROLES`). If not, the reaction is removed. + + If the reaction was either `Signal.ACTIONED` or `Signal.NOT_ACTIONED`, we attempt to relay + the report to #incidents-archive. If successful, the original message is deleted. + + We do not release `event_lock` until we receive the corresponding `message_delete` event. + This ensures that if there is a racing event awaiting the lock, it will fail to find the + message, and will abort. There is a `timeout` to ensure that this doesn't hold the lock + forever should something go wrong. + """ + members_roles: t.Set[int] = {role.id for role in member.roles} + if not members_roles & ALLOWED_ROLES: # Intersection is truthy on at least 1 common element + log.debug(f"Removing invalid reaction: user {member} is not permitted to send signals") + await incident.remove_reaction(reaction, member) + return + + try: + signal = Signal(reaction) + except ValueError: + log.debug(f"Removing invalid reaction: emoji {reaction} is not a valid signal") + await incident.remove_reaction(reaction, member) + return + + log.trace(f"Received signal: {signal}") + + if signal not in (Signal.ACTIONED, Signal.NOT_ACTIONED): + log.debug("Reaction was valid, but no action is currently defined for it") + return + + relay_successful = await self.archive(incident, signal, actioned_by=member) + if not relay_successful: + log.trace("Original message will not be deleted as we failed to relay it to the archive") + return + + timeout = 5 # Seconds + confirmation_task = self.make_confirmation_task(incident, timeout) + + log.trace("Deleting original message") + await incident.delete() + + log.trace(f"Awaiting deletion confirmation: {timeout=} seconds") + try: + await confirmation_task + except asyncio.TimeoutError: + log.info(f"Did not receive incident deletion confirmation within {timeout} seconds!") + else: + log.trace("Deletion was confirmed") + + async def resolve_message(self, message_id: int) -> t.Optional[discord.Message]: + """ + Get `discord.Message` for `message_id` from cache, or API. + + We first look into the local cache to see if the message is present. + + If not, we try to fetch the message from the API. This is necessary for messages + which were sent before the bot's current session. + + In an edge-case, it is also possible that the message was already deleted, and + the API will respond with a 404. In such a case, None will be returned. + This signals that the event for `message_id` should be ignored. 
+ """ + await self.bot.wait_until_guild_available() # First make sure that the cache is ready + log.trace(f"Resolving message for: {message_id=}") + message: t.Optional[discord.Message] = self.bot._connection._get_message(message_id) + + if message is not None: + log.trace("Message was found in cache") + return message + + log.trace("Message not found, attempting to fetch") + try: + message = await self.bot.get_channel(Channels.incidents).fetch_message(message_id) + except discord.NotFound: + log.trace("Message doesn't exist, it was likely already relayed") + except Exception: + log.exception(f"Failed to fetch message {message_id}!") + else: + log.trace("Message fetched successfully!") + return message + + @Cog.listener() + async def on_raw_reaction_add(self, payload: discord.RawReactionActionEvent) -> None: + """ + Pre-process `payload` and pass it to `process_event` if appropriate. + + We abort instantly if `payload` doesn't relate to a message sent in #incidents, + or if it was sent by a bot. + + If `payload` relates to a message in #incidents, we first ensure that `crawl_task` has + finished, to make sure we don't mutate channel state as we're crawling it. + + Next, we acquire `event_lock` - to prevent racing, events are processed one at a time. + + Once we have the lock, the `discord.Message` object for this event must be resolved. + If the lock was previously held by an event which successfully relayed the incident, + this will fail and we abort the current event. + + Finally, with both the lock and the `discord.Message` instance in our hands, we delegate + to `process_event` to handle the event. + + The justification for using a raw listener is the need to receive events for messages + which were not cached in the current session. As a result, a certain amount of + complexity is introduced, but at the moment this doesn't appear to be avoidable. 
+ """ + if payload.channel_id != Channels.incidents or payload.member.bot: + return + + log.trace(f"Received reaction add event in #incidents, waiting for crawler: {self.crawl_task.done()=}") + await self.crawl_task + + log.trace(f"Acquiring event lock: {self.event_lock.locked()=}") + async with self.event_lock: + message = await self.resolve_message(payload.message_id) + + if message is None: + log.debug("Listener will abort as related message does not exist!") + return + + if not is_incident(message): + log.debug("Ignoring event for a non-incident message") + return + + await self.process_event(str(payload.emoji), message, payload.member) + log.trace("Releasing event lock") + + @Cog.listener() + async def on_message(self, message: discord.Message) -> None: + """Pass `message` to `add_signals` if and only if it satisfies `is_incident`.""" + if is_incident(message): + await add_signals(message) + + +def setup(bot: Bot) -> None: + """Load the Incidents cog.""" + bot.add_cog(Incidents(bot)) diff --git a/bot/exts/moderation/infraction/__init__.py b/bot/exts/moderation/infraction/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/bot/exts/moderation/infraction/__init__.py diff --git a/bot/cogs/moderation/scheduler.py b/bot/exts/moderation/infraction/_scheduler.py index c0de0e4da..242b2d30f 100644 --- a/bot/cogs/moderation/scheduler.py +++ b/bot/exts/moderation/infraction/_scheduler.py @@ -12,25 +12,29 @@ from discord.ext.commands import Context from bot import constants from bot.api import ResponseCodeError from bot.bot import Bot -from bot.constants import Colours, STAFF_CHANNELS -from bot.utils import time -from bot.utils.scheduling import Scheduler -from . import utils -from .modlog import ModLog -from .utils import UserSnowflake +from bot.constants import Colours +from bot.exts.moderation.infraction import _utils +from bot.exts.moderation.infraction._utils import UserSnowflake +from bot.exts.moderation.modlog import ModLog +from bot.utils import messages, scheduling, time +from bot.utils.channel import is_mod_channel log = logging.getLogger(__name__) -class InfractionScheduler(Scheduler): +class InfractionScheduler: """Handles the application, pardoning, and expiration of infractions.""" def __init__(self, bot: Bot, supported_infractions: t.Container[str]): - super().__init__() - self.bot = bot + self.scheduler = scheduling.Scheduler(self.__class__.__name__) + self.bot.loop.create_task(self.reschedule_infractions(supported_infractions)) + def cog_unload(self) -> None: + """Cancel scheduled tasks.""" + self.scheduler.cancel_all() + @property def mod_log(self) -> ModLog: """Get the currently loaded ModLog cog instance.""" @@ -38,7 +42,7 @@ class InfractionScheduler(Scheduler): async def reschedule_infractions(self, supported_infractions: t.Container[str]) -> None: """Schedule expiration for previous infractions.""" - await self.bot.wait_until_ready() + await self.bot.wait_until_guild_available() log.trace(f"Rescheduling infractions for {self.__class__.__name__}.") @@ -48,11 +52,11 @@ class InfractionScheduler(Scheduler): ) for infraction in infractions: if infraction["expires_at"] is not None and infraction["type"] in supported_infractions: - self.schedule_task(self.bot.loop, infraction["id"], infraction) + self.schedule_expiration(infraction) async def reapply_infraction( self, - infraction: utils.Infraction, + infraction: _utils.Infraction, apply_coro: t.Optional[t.Awaitable] ) -> None: """Reapply an infraction if it's still active or deactivate it if less than 
60 sec left.""" @@ -70,27 +74,52 @@ class InfractionScheduler(Scheduler): return # Allowing mod log since this is a passive action that should be logged. - await apply_coro - log.info(f"Re-applied {infraction['type']} to user {infraction['user']} upon rejoining.") + try: + await apply_coro + except discord.HTTPException as e: + # When user joined and then right after this left again before action completed, this can't apply roles + if e.code == 10007 or e.status == 404: + log.info( + f"Can't reapply {infraction['type']} to user {infraction['user']} because user left the guild." + ) + else: + log.exception( + f"Got unexpected HTTPException (HTTP {e.status}, Discord code {e.code})" + f"when awaiting {infraction['type']} coroutine for {infraction['user']}." + ) + else: + log.info(f"Re-applied {infraction['type']} to user {infraction['user']} upon rejoining.") async def apply_infraction( self, ctx: Context, - infraction: utils.Infraction, + infraction: _utils.Infraction, user: UserSnowflake, - action_coro: t.Optional[t.Awaitable] = None - ) -> None: - """Apply an infraction to the user, log the infraction, and optionally notify the user.""" + action_coro: t.Optional[t.Awaitable] = None, + user_reason: t.Optional[str] = None, + additional_info: str = "", + ) -> bool: + """ + Apply an infraction to the user, log the infraction, and optionally notify the user. + + `user_reason`, if provided, will be sent to the user in place of the infraction reason. + `additional_info` will be attached to the text field in the mod-log embed. + + Returns whether or not the infraction succeeded. + """ infr_type = infraction["type"] - icon = utils.INFRACTION_ICONS[infr_type][0] + icon = _utils.INFRACTION_ICONS[infr_type][0] reason = infraction["reason"] expiry = time.format_infraction_with_duration(infraction["expires_at"]) id_ = infraction['id'] + if user_reason is None: + user_reason = reason + log.trace(f"Applying {infr_type} infraction #{id_} to {user}.") # Default values for the confirmation message and mod log. - confirm_msg = f":ok_hand: applied" + confirm_msg = ":ok_hand: applied" # Specifying an expiry for a note or warning makes no sense. if infr_type in ("note", "warning"): @@ -100,11 +129,16 @@ class InfractionScheduler(Scheduler): dm_result = "" dm_log_text = "" - expiry_log_text = f"Expires: {expiry}" if expiry else "" + expiry_log_text = f"\nExpires: {expiry}" if expiry else "" log_title = "applied" log_content = None + failed = False # DM the user about the infraction if it's not a shadow/hidden infraction. + # This needs to happen before we apply the infraction, as the bot cannot + # send DMs to user that it doesn't share a guild with. If we were to + # apply kick/ban infractions first, this would mean that we'd make it + # impossible for us to deliver a DM. See python-discord/bot#982. if not infraction["hidden"]: dm_result = f"{constants.Emojis.failmail} " dm_log_text = "\nDM: **Failed**" @@ -117,23 +151,18 @@ class InfractionScheduler(Scheduler): log.error(f"Failed to DM {user.id}: could not fetch user (status {e.status})") else: # Accordingly display whether the user was successfully notified via DM. - if await utils.notify_infraction(user, infr_type, expiry, reason, icon): + if await _utils.notify_infraction(user, infr_type.replace("_", " ").title(), expiry, user_reason, icon): dm_result = ":incoming_envelope: " dm_log_text = "\nDM: Sent" + end_msg = "" if infraction["actor"] == self.bot.user.id: log.trace( f"Infraction #{id_} actor is bot; including the reason in the confirmation message." 
) - - end_msg = f" (reason: {infraction['reason']})" - elif ctx.channel.id not in STAFF_CHANNELS: - log.trace( - f"Infraction #{id_} context is not in a staff channel; omitting infraction count." - ) - - end_msg = "" - else: + if reason: + end_msg = f" (reason: {textwrap.shorten(reason, width=1500, placeholder='...')})" + elif is_mod_channel(ctx.channel): log.trace(f"Fetching total infraction count for {user}.") infractions = await self.bot.api_client.get( @@ -141,7 +170,7 @@ class InfractionScheduler(Scheduler): params={"user__id": str(user.id)} ) total = len(infractions) - end_msg = f" ({total} infraction{ngettext('', 's', total)} total)" + end_msg = f" (#{id_} ; {total} infraction{ngettext('', 's', total)} total)" # Execute the necessary actions to apply the infraction on Discord. if action_coro: @@ -150,47 +179,76 @@ class InfractionScheduler(Scheduler): await action_coro if expiry: # Schedule the expiration of the infraction. - self.schedule_task(ctx.bot.loop, infraction["id"], infraction) + self.schedule_expiration(infraction) except discord.HTTPException as e: # Accordingly display that applying the infraction failed. - confirm_msg = f":x: failed to apply" + # Don't use ctx.message.author; antispam only patches ctx.author. + confirm_msg = ":x: failed to apply" expiry_msg = "" log_content = ctx.author.mention log_title = "failed to apply" - log_msg = f"Failed to apply {infr_type} infraction #{id_} to {user}" + log_msg = f"Failed to apply {' '.join(infr_type.split('_'))} infraction #{id_} to {user}" if isinstance(e, discord.Forbidden): log.warning(f"{log_msg}: bot lacks permissions.") + elif e.code == 10007 or e.status == 404: + log.info( + f"Can't apply {infraction['type']} to user {infraction['user']} because user left from guild." + ) else: log.exception(log_msg) + failed = True + + if failed: + log.trace(f"Deleted infraction {infraction['id']} from database because applying infraction failed.") + try: + await self.bot.api_client.delete(f"bot/infractions/{id_}") + except ResponseCodeError as e: + confirm_msg += " and failed to delete" + log_title += " and failed to delete" + log.error(f"Deletion of {infr_type} infraction #{id_} failed with error code {e.status}.") + infr_message = "" + else: + infr_message = f" **{' '.join(infr_type.split('_'))}** to {user.mention}{expiry_msg}{end_msg}" # Send a confirmation message to the invoking context. log.trace(f"Sending infraction #{id_} confirmation message.") - await ctx.send( - f"{dm_result}{confirm_msg} **{infr_type}** to {user.mention}{expiry_msg}{end_msg}." - ) + await ctx.send(f"{dm_result}{confirm_msg}{infr_message}.") # Send a log message to the mod log. + # Don't use ctx.message.author for the actor; antispam only patches ctx.author. 
log.trace(f"Sending apply mod log for infraction #{id_}.") await self.mod_log.send_log_message( icon_url=icon, colour=Colours.soft_red, - title=f"Infraction {log_title}: {infr_type}", + title=f"Infraction {log_title}: {' '.join(infr_type.split('_'))}", thumbnail=user.avatar_url_as(static_format="png"), text=textwrap.dedent(f""" - Member: {user.mention} (`{user.id}`) - Actor: {ctx.message.author}{dm_log_text} + Member: {messages.format_user(user)} + Actor: {ctx.author.mention}{dm_log_text}{expiry_log_text} Reason: {reason} - {expiry_log_text} + {additional_info} """), content=log_content, footer=f"ID {infraction['id']}" ) log.info(f"Applied {infr_type} infraction #{id_} to {user}.") + return not failed + + async def pardon_infraction( + self, + ctx: Context, + infr_type: str, + user: UserSnowflake, + send_msg: bool = True + ) -> None: + """ + Prematurely end an infraction for a user and log the action in the mod log. - async def pardon_infraction(self, ctx: Context, infr_type: str, user: UserSnowflake) -> None: - """Prematurely end an infraction for a user and log the action in the mod log.""" + If `send_msg` is True, then a pardoning confirmation message will be sent to + the context channel. Otherwise, no such message will be sent. + """ log.trace(f"Pardoning {infr_type} infraction for {user}.") # Check the current active infraction @@ -212,48 +270,12 @@ class InfractionScheduler(Scheduler): # Deactivate the infraction and cancel its scheduled expiration task. log_text = await self.deactivate_infraction(response[0], send_log=False) - log_text["Member"] = f"{user.mention}(`{user.id}`)" - log_text["Actor"] = str(ctx.message.author) + log_text["Member"] = messages.format_user(user) + log_text["Actor"] = ctx.author.mention log_content = None id_ = response[0]['id'] footer = f"ID: {id_}" - # If multiple active infractions were found, mark them as inactive in the database - # and cancel their expiration tasks. - if len(response) > 1: - log.warning( - f"Found more than one active {infr_type} infraction for user {user.id}; " - "deactivating the extra active infractions too." - ) - - footer = f"Infraction IDs: {', '.join(str(infr['id']) for infr in response)}" - - log_note = f"Found multiple **active** {infr_type} infractions in the database." - if "Note" in log_text: - log_text["Note"] = f" {log_note}" - else: - log_text["Note"] = log_note - - # deactivate_infraction() is not called again because: - # 1. Discord cannot store multiple active bans or assign multiples of the same role - # 2. It would send a pardon DM for each active infraction, which is redundant - for infraction in response[1:]: - id_ = infraction['id'] - try: - # Mark infraction as inactive in the database. - await self.bot.api_client.patch( - f"bot/infractions/{id_}", - json={"active": False} - ) - except ResponseCodeError: - log.exception(f"Failed to deactivate infraction #{id_} ({infr_type})") - # This is simpler and cleaner than trying to concatenate all the errors. - log_text["Failure"] = "See bot's logs for details." - - # Cancel pending expiration task. - if infraction["expires_at"] is not None: - self.cancel_task(infraction["id"]) - # Accordingly display whether the user was successfully notified via DM. 
dm_emoji = "" if log_text.get("DM") == "Sent": @@ -269,23 +291,27 @@ class InfractionScheduler(Scheduler): log.warning(f"Failed to pardon {infr_type} infraction #{id_} for {user}.") else: - confirm_msg = f":ok_hand: pardoned" + confirm_msg = ":ok_hand: pardoned" log_title = "pardoned" log.info(f"Pardoned {infr_type} infraction #{id_} for {user}.") # Send a confirmation message to the invoking context. - log.trace(f"Sending infraction #{id_} pardon confirmation message.") - await ctx.send( - f"{dm_emoji}{confirm_msg} infraction **{infr_type}** for {user.mention}. " - f"{log_text.get('Failure', '')}" - ) + if send_msg: + log.trace(f"Sending infraction #{id_} pardon confirmation message.") + await ctx.send( + f"{dm_emoji}{confirm_msg} infraction **{' '.join(infr_type.split('_'))}** for {user.mention}. " + f"{log_text.get('Failure', '')}" + ) + + # Move reason to end of entry to avoid cutting out some keys + log_text["Reason"] = log_text.pop("Reason") # Send a log message to the mod log. await self.mod_log.send_log_message( - icon_url=utils.INFRACTION_ICONS[infr_type][1], + icon_url=_utils.INFRACTION_ICONS[infr_type][1], colour=Colours.soft_green, - title=f"Infraction {log_title}: {infr_type}", + title=f"Infraction {log_title}: {' '.join(infr_type.split('_'))}", thumbnail=user.avatar_url_as(static_format="png"), text="\n".join(f"{k}: {v}" for k, v in log_text.items()), footer=footer, @@ -294,7 +320,7 @@ class InfractionScheduler(Scheduler): async def deactivate_infraction( self, - infraction: utils.Infraction, + infraction: _utils.Infraction, send_log: bool = True ) -> t.Dict[str, str]: """ @@ -307,7 +333,7 @@ class InfractionScheduler(Scheduler): Infractions of unsupported types will raise a ValueError. """ guild = self.bot.get_guild(constants.Guild.id) - mod_role = guild.get_role(constants.Roles.moderator) + mod_role = guild.get_role(constants.Roles.moderators) user_id = infraction["user"] actor = infraction["actor"] type_ = infraction["type"] @@ -323,7 +349,7 @@ class InfractionScheduler(Scheduler): log_content = None log_text = { "Member": f"<@{user_id}>", - "Actor": str(self.bot.get_user(actor) or actor), + "Actor": f"<@{actor}>", "Reason": infraction["reason"], "Created": created, } @@ -340,12 +366,19 @@ class InfractionScheduler(Scheduler): ) except discord.Forbidden: log.warning(f"Failed to deactivate infraction #{id_} ({type_}): bot lacks permissions.") - log_text["Failure"] = f"The bot lacks permissions to do this (role hierarchy?)" + log_text["Failure"] = "The bot lacks permissions to do this (role hierarchy?)" log_content = mod_role.mention except discord.HTTPException as e: - log.exception(f"Failed to deactivate infraction #{id_} ({type_})") - log_text["Failure"] = f"HTTPException with status {e.status} and code {e.code}." - log_content = mod_role.mention + if e.code == 10007 or e.status == 404: + log.info( + f"Can't pardon {infraction['type']} for user {infraction['user']} because user left the guild." + ) + log_text["Failure"] = "User left the guild." + log_content = mod_role.mention + else: + log.exception(f"Failed to deactivate infraction #{id_} ({type_})") + log_text["Failure"] = f"HTTPException with status {e.status} and code {e.code}." + log_content = mod_role.mention # Check if the user is currently being watched by Big Brother. try: @@ -385,31 +418,33 @@ class InfractionScheduler(Scheduler): # Cancel the expiration task. 
if infraction["expires_at"] is not None: - self.cancel_task(infraction["id"]) + self.scheduler.cancel(infraction["id"]) # Send a log message to the mod log. if send_log: - log_title = f"expiration failed" if "Failure" in log_text else "expired" + log_title = "expiration failed" if "Failure" in log_text else "expired" user = self.bot.get_user(user_id) avatar = user.avatar_url_as(static_format="png") if user else None + # Move reason to end so when reason is too long, this is not gonna cut out required items. + log_text["Reason"] = log_text.pop("Reason") + log.trace(f"Sending deactivation mod log for infraction #{id_}.") await self.mod_log.send_log_message( - icon_url=utils.INFRACTION_ICONS[type_][1], + icon_url=_utils.INFRACTION_ICONS[type_][1], colour=Colours.soft_green, title=f"Infraction {log_title}: {type_}", thumbnail=avatar, text="\n".join(f"{k}: {v}" for k, v in log_text.items()), footer=f"ID: {id_}", content=log_content, - ) return log_text @abstractmethod - async def _pardon_action(self, infraction: utils.Infraction) -> t.Optional[t.Dict[str, str]]: + async def _pardon_action(self, infraction: _utils.Infraction) -> t.Optional[t.Dict[str, str]]: """ Execute deactivation steps specific to the infraction's type and return a log dict. @@ -417,7 +452,7 @@ class InfractionScheduler(Scheduler): """ raise NotImplementedError - async def _scheduled_task(self, infraction: utils.Infraction) -> None: + def schedule_expiration(self, infraction: _utils.Infraction) -> None: """ Marks an infraction expired after the delay from time of scheduling to time of expiration. @@ -425,6 +460,4 @@ class InfractionScheduler(Scheduler): expiration task is cancelled. """ expiry = dateutil.parser.isoparse(infraction["expires_at"]).replace(tzinfo=None) - await time.wait_until(expiry) - - await self.deactivate_infraction(infraction) + self.scheduler.schedule_at(expiry, infraction["id"], self.deactivate_infraction(infraction)) diff --git a/bot/cogs/moderation/utils.py b/bot/exts/moderation/infraction/_utils.py index 5052b9048..d0dc3f0a1 100644 --- a/bot/cogs/moderation/utils.py +++ b/bot/exts/moderation/infraction/_utils.py @@ -1,5 +1,4 @@ import logging -import textwrap import typing as t from datetime import datetime @@ -19,15 +18,28 @@ INFRACTION_ICONS = { "note": (Icons.user_warn, None), "superstar": (Icons.superstarify, Icons.unsuperstarify), "warning": (Icons.user_warn, None), + "voice_ban": (Icons.voice_state_red, Icons.voice_state_green), } RULES_URL = "https://pythondiscord.com/pages/rules" -APPEALABLE_INFRACTIONS = ("ban", "mute") +APPEALABLE_INFRACTIONS = ("ban", "mute", "voice_ban") # Type aliases UserObject = t.Union[discord.Member, discord.User] UserSnowflake = t.Union[UserObject, discord.Object] Infraction = t.Dict[str, t.Union[str, int, bool]] +APPEAL_EMAIL = "[email protected]" + +INFRACTION_TITLE = f"Please review our rules over at {RULES_URL}" +INFRACTION_APPEAL_FOOTER = f"To appeal this infraction, send an e-mail to {APPEAL_EMAIL}" +INFRACTION_AUTHOR_NAME = "Infraction information" + +INFRACTION_DESCRIPTION_TEMPLATE = ( + "**Type:** {type}\n" + "**Expires:** {expires}\n" + "**Reason:** {reason}\n" +) + async def post_user(ctx: Context, user: UserSnowflake) -> t.Optional[dict]: """ @@ -38,10 +50,9 @@ async def post_user(ctx: Context, user: UserSnowflake) -> t.Optional[dict]: log.trace(f"Attempting to add user {user.id} to the database.") if not isinstance(user, (discord.Member, discord.User)): - log.warning("The user being added to the DB is not a Member or User object.") + log.debug("The user 
being added to the DB is not a Member or User object.") payload = { - 'avatar_hash': getattr(user, 'avatar', 0), 'discriminator': int(getattr(user, 'discriminator', 0)), 'id': user.id, 'in_guild': False, @@ -71,7 +82,7 @@ async def post_infraction( log.trace(f"Posting {infr_type} infraction for {user} to the API.") payload = { - "actor": ctx.message.author.id, + "actor": ctx.author.id, # Don't use ctx.message.author; antispam only patches ctx.author. "hidden": hidden, "reason": reason, "type": infr_type, @@ -97,8 +108,19 @@ async def post_infraction( return -async def has_active_infraction(ctx: Context, user: UserSnowflake, infr_type: str) -> bool: - """Checks if a user already has an active infraction of the given type.""" +async def get_active_infraction( + ctx: Context, + user: UserSnowflake, + infr_type: str, + send_msg: bool = True +) -> t.Optional[dict]: + """ + Retrieves an active infraction of the given type for the user. + + If `send_msg` is True and the user has an active infraction matching the `infr_type` parameter, + then a message for the moderator will be sent to the context channel letting them know. + Otherwise, no message will be sent. + """ log.trace(f"Checking if {user} has active infractions of type {infr_type}.") active_infractions = await ctx.bot.api_client.get( @@ -110,15 +132,16 @@ async def has_active_infraction(ctx: Context, user: UserSnowflake, infr_type: st } ) if active_infractions: - log.trace(f"{user} has active infractions of type {infr_type}.") - await ctx.send( - f":x: According to my records, this user already has a {infr_type} infraction. " - f"See infraction **#{active_infractions[0]['id']}**." - ) - return True + # Checks to see if the moderator should be told there is an active infraction + if send_msg: + log.trace(f"{user} has active infractions of type {infr_type}.") + await ctx.send( + f":x: According to my records, this user already has a {infr_type} infraction. " + f"See infraction **#{active_infractions[0]['id']}**." + ) + return active_infractions[0] else: log.trace(f"{user} does not have active infractions of type {infr_type}.") - return False async def notify_infraction( @@ -131,23 +154,27 @@ async def notify_infraction( """DM a user about their new infraction and return True if the DM is successful.""" log.trace(f"Sending {user} a DM about their {infr_type} infraction.") + text = INFRACTION_DESCRIPTION_TEMPLATE.format( + type=infr_type.title(), + expires=expires_at or "N/A", + reason=reason or "No reason provided." + ) + + # For case when other fields than reason is too long and this reach limit, then force-shorten string + if len(text) > 2048: + text = f"{text[:2045]}..." 
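The 2048-character cap enforced here is the embed-description limit this code targets; a very long reason would otherwise risk the DM send failing. The same clamp can be expressed as a small standalone helper (hypothetical name, shown for illustration only):

    def clamp_description(text: str, limit: int = 2048) -> str:
        """Trim `text` so it fits in an embed description, ending with an ellipsis when shortened."""
        if len(text) <= limit:
            return text
        return f"{text[:limit - 3]}..."

    # Example: an oversized reason still yields a description within the limit.
    description = clamp_description(
        "**Type:** Ban\n**Expires:** N/A\n**Reason:** " + "x" * 5000
    )
    assert len(description) <= 2048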
+ embed = discord.Embed( - description=textwrap.dedent(f""" - **Type:** {infr_type.capitalize()} - **Expires:** {expires_at or "N/A"} - **Reason:** {reason or "No reason provided."} - """), + description=text, colour=Colours.soft_red ) - embed.set_author(name="Infraction information", icon_url=icon_url, url=RULES_URL) - embed.title = f"Please review our rules over at {RULES_URL}" + embed.set_author(name=INFRACTION_AUTHOR_NAME, icon_url=icon_url, url=RULES_URL) + embed.title = INFRACTION_TITLE embed.url = RULES_URL if infr_type in APPEALABLE_INFRACTIONS: - embed.set_footer( - text="To appeal this infraction, send an e-mail to [email protected]" - ) + embed.set_footer(text=INFRACTION_APPEAL_FOOTER) return await send_private_embed(user, embed) diff --git a/bot/cogs/moderation/infractions.py b/bot/exts/moderation/infraction/infractions.py index f4e296df9..18e937e87 100644 --- a/bot/cogs/moderation/infractions.py +++ b/bot/exts/moderation/infraction/infractions.py @@ -1,4 +1,5 @@ import logging +import textwrap import typing as t import discord @@ -9,12 +10,12 @@ from discord.ext.commands import Context, command from bot import constants from bot.bot import Bot from bot.constants import Event -from bot.converters import Expiry, FetchedMember +from bot.converters import Duration, Expiry, FetchedMember from bot.decorators import respect_role_hierarchy -from bot.utils.checks import with_role_check -from . import utils -from .scheduler import InfractionScheduler -from .utils import UserSnowflake +from bot.exts.moderation.infraction import _utils +from bot.exts.moderation.infraction._scheduler import InfractionScheduler +from bot.exts.moderation.infraction._utils import UserSnowflake +from bot.utils.messages import format_user log = logging.getLogger(__name__) @@ -26,10 +27,11 @@ class Infractions(InfractionScheduler, commands.Cog): category_description = "Server moderation tools." 
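For reference, the DM embed assembled in notify_infraction above fits together roughly as follows. This is a condensed sketch with the module constants inlined, a stock colour standing in for Colours.soft_red, and a placeholder appeals address; it is not the code added by this commit.

    import discord

    RULES_URL = "https://pythondiscord.com/pages/rules"
    APPEAL_EMAIL = "<appeals address>"  # placeholder; the real address is defined in _utils.py
    APPEALABLE = ("ban", "mute", "voice_ban")


    def build_infraction_embed(infr_type: str, expires_at: str, reason: str, icon_url: str) -> discord.Embed:
        text = (
            f"**Type:** {infr_type.title()}\n"
            f"**Expires:** {expires_at or 'N/A'}\n"
            f"**Reason:** {reason or 'No reason provided.'}\n"
        )
        embed = discord.Embed(description=text, colour=discord.Colour.red())  # soft_red in the real cog
        embed.set_author(name="Infraction information", icon_url=icon_url, url=RULES_URL)
        embed.title = f"Please review our rules over at {RULES_URL}"
        embed.url = RULES_URL
        if infr_type in APPEALABLE:
            embed.set_footer(text=f"To appeal this infraction, send an e-mail to {APPEAL_EMAIL}")
        return embed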
def __init__(self, bot: Bot): - super().__init__(bot, supported_infractions={"ban", "kick", "mute", "note", "warning"}) + super().__init__(bot, supported_infractions={"ban", "kick", "mute", "note", "warning", "voice_ban"}) self.category = "Moderation" self._muted_role = discord.Object(constants.Roles.muted) + self._voice_verified_role = discord.Object(constants.Roles.voice_verified) @commands.Cog.listener() async def on_member_join(self, member: Member) -> None: @@ -52,29 +54,57 @@ class Infractions(InfractionScheduler, commands.Cog): # region: Permanent infractions @command() - async def warn(self, ctx: Context, user: Member, *, reason: str = None) -> None: + async def warn(self, ctx: Context, user: Member, *, reason: t.Optional[str] = None) -> None: """Warn a user for the given reason.""" - infraction = await utils.post_infraction(ctx, user, "warning", reason, active=False) + infraction = await _utils.post_infraction(ctx, user, "warning", reason, active=False) if infraction is None: return await self.apply_infraction(ctx, infraction, user) @command() - async def kick(self, ctx: Context, user: Member, *, reason: str = None) -> None: + async def kick(self, ctx: Context, user: Member, *, reason: t.Optional[str] = None) -> None: """Kick a user for the given reason.""" - await self.apply_kick(ctx, user, reason, active=False) + await self.apply_kick(ctx, user, reason) @command() - async def ban(self, ctx: Context, user: FetchedMember, *, reason: str = None) -> None: - """Permanently ban a user for the given reason.""" + async def ban(self, ctx: Context, user: FetchedMember, *, reason: t.Optional[str] = None) -> None: + """Permanently ban a user for the given reason and stop watching them with Big Brother.""" await self.apply_ban(ctx, user, reason) + @command(aliases=('pban',)) + async def purgeban( + self, + ctx: Context, + user: FetchedMember, + purge_days: t.Optional[int] = 1, + *, + reason: t.Optional[str] = None + ) -> None: + """ + Same as ban but removes all their messages for the given number of days, default being 1. + + `purge_days` can only be values between 0 and 7. + Anything outside these bounds are automatically adjusted to their respective limits. + """ + await self.apply_ban(ctx, user, reason, max(min(purge_days, 7), 0)) + + @command(aliases=('vban',)) + async def voiceban(self, ctx: Context, user: FetchedMember, *, reason: t.Optional[str]) -> None: + """Permanently ban user from using voice channels.""" + await self.apply_voice_ban(ctx, user, reason) + # endregion # region: Temporary infractions @command(aliases=["mute"]) - async def tempmute(self, ctx: Context, user: Member, duration: Expiry, *, reason: str = None) -> None: + async def tempmute( + self, ctx: Context, + user: Member, + duration: t.Optional[Expiry] = None, + *, + reason: t.Optional[str] = None + ) -> None: """ Temporarily mute a user for the given reason and duration. @@ -89,11 +119,22 @@ class Infractions(InfractionScheduler, commands.Cog): \u2003`s` - seconds Alternatively, an ISO 8601 timestamp can be provided for the duration. + + If no duration is given, a one hour duration is used by default. 
""" + if duration is None: + duration = await Duration().convert(ctx, "1h") await self.apply_mute(ctx, user, reason, expires_at=duration) @command() - async def tempban(self, ctx: Context, user: FetchedMember, duration: Expiry, *, reason: str = None) -> None: + async def tempban( + self, + ctx: Context, + user: FetchedMember, + duration: Expiry, + *, + reason: t.Optional[str] = None + ) -> None: """ Temporarily ban a user for the given reason and duration. @@ -111,50 +152,52 @@ class Infractions(InfractionScheduler, commands.Cog): """ await self.apply_ban(ctx, user, reason, expires_at=duration) + @command(aliases=("tempvban", "tvban")) + async def tempvoiceban( + self, + ctx: Context, + user: FetchedMember, + duration: Expiry, + *, + reason: t.Optional[str] + ) -> None: + """ + Temporarily voice ban a user for the given reason and duration. + + A unit of time should be appended to the duration. + Units (∗case-sensitive): + \u2003`y` - years + \u2003`m` - months∗ + \u2003`w` - weeks + \u2003`d` - days + \u2003`h` - hours + \u2003`M` - minutes∗ + \u2003`s` - seconds + + Alternatively, an ISO 8601 timestamp can be provided for the duration. + """ + await self.apply_voice_ban(ctx, user, reason, expires_at=duration) + # endregion # region: Permanent shadow infractions @command(hidden=True) - async def note(self, ctx: Context, user: FetchedMember, *, reason: str = None) -> None: + async def note(self, ctx: Context, user: FetchedMember, *, reason: t.Optional[str] = None) -> None: """Create a private note for a user with the given reason without notifying the user.""" - infraction = await utils.post_infraction(ctx, user, "note", reason, hidden=True, active=False) + infraction = await _utils.post_infraction(ctx, user, "note", reason, hidden=True, active=False) if infraction is None: return await self.apply_infraction(ctx, infraction, user) - @command(hidden=True, aliases=['shadowkick', 'skick']) - async def shadow_kick(self, ctx: Context, user: Member, *, reason: str = None) -> None: - """Kick a user for the given reason without notifying the user.""" - await self.apply_kick(ctx, user, reason, hidden=True, active=False) - @command(hidden=True, aliases=['shadowban', 'sban']) - async def shadow_ban(self, ctx: Context, user: FetchedMember, *, reason: str = None) -> None: + async def shadow_ban(self, ctx: Context, user: FetchedMember, *, reason: t.Optional[str] = None) -> None: """Permanently ban a user for the given reason without notifying the user.""" await self.apply_ban(ctx, user, reason, hidden=True) # endregion # region: Temporary shadow infractions - @command(hidden=True, aliases=["shadowtempmute, stempmute", "shadowmute", "smute"]) - async def shadow_tempmute(self, ctx: Context, user: Member, duration: Expiry, *, reason: str = None) -> None: - """ - Temporarily mute a user for the given reason and duration without notifying the user. - - A unit of time should be appended to the duration. - Units (∗case-sensitive): - \u2003`y` - years - \u2003`m` - months∗ - \u2003`w` - weeks - \u2003`d` - days - \u2003`h` - hours - \u2003`M` - minutes∗ - \u2003`s` - seconds - - Alternatively, an ISO 8601 timestamp can be provided for the duration. 
- """ - await self.apply_mute(ctx, user, reason, expires_at=duration, hidden=True) - @command(hidden=True, aliases=["shadowtempban, stempban"]) async def shadow_tempban( self, @@ -162,7 +205,7 @@ class Infractions(InfractionScheduler, commands.Cog): user: FetchedMember, duration: Expiry, *, - reason: str = None + reason: t.Optional[str] = None ) -> None: """ Temporarily ban a user for the given reason and duration without notifying the user. @@ -194,15 +237,20 @@ class Infractions(InfractionScheduler, commands.Cog): """Prematurely end the active ban infraction for the user.""" await self.pardon_infraction(ctx, "ban", user) + @command(aliases=("uvban",)) + async def unvoiceban(self, ctx: Context, user: FetchedMember) -> None: + """Prematurely end the active voice ban infraction for the user.""" + await self.pardon_infraction(ctx, "voice_ban", user) + # endregion # region: Base apply functions - async def apply_mute(self, ctx: Context, user: Member, reason: str, **kwargs) -> None: + async def apply_mute(self, ctx: Context, user: Member, reason: t.Optional[str], **kwargs) -> None: """Apply a mute infraction with kwargs passed to `post_infraction`.""" - if await utils.has_active_infraction(ctx, user, "mute"): + if await _utils.get_active_infraction(ctx, user, "mute"): return - infraction = await utils.post_infraction(ctx, user, "mute", reason, active=True, **kwargs) + infraction = await _utils.post_infraction(ctx, user, "mute", reason, active=True, **kwargs) if infraction is None: return @@ -216,37 +264,102 @@ class Infractions(InfractionScheduler, commands.Cog): await self.apply_infraction(ctx, infraction, user, action()) - @respect_role_hierarchy() - async def apply_kick(self, ctx: Context, user: Member, reason: str, **kwargs) -> None: + @respect_role_hierarchy(member_arg=2) + async def apply_kick(self, ctx: Context, user: Member, reason: t.Optional[str], **kwargs) -> None: """Apply a kick infraction with kwargs passed to `post_infraction`.""" - infraction = await utils.post_infraction(ctx, user, "kick", reason, active=False, **kwargs) + infraction = await _utils.post_infraction(ctx, user, "kick", reason, active=False, **kwargs) if infraction is None: return self.mod_log.ignore(Event.member_remove, user.id) + if reason: + reason = textwrap.shorten(reason, width=512, placeholder="...") + action = user.kick(reason=reason) await self.apply_infraction(ctx, infraction, user, action) - @respect_role_hierarchy() - async def apply_ban(self, ctx: Context, user: UserSnowflake, reason: str, **kwargs) -> None: - """Apply a ban infraction with kwargs passed to `post_infraction`.""" - if await utils.has_active_infraction(ctx, user, "ban"): - return + @respect_role_hierarchy(member_arg=2) + async def apply_ban( + self, + ctx: Context, + user: UserSnowflake, + reason: t.Optional[str], + purge_days: t.Optional[int] = 0, + **kwargs + ) -> None: + """ + Apply a ban infraction with kwargs passed to `post_infraction`. + + Will also remove the banned user from the Big Brother watch list if applicable. 
+ """ + # In the case of a permanent ban, we don't need get_active_infractions to tell us if one is active + is_temporary = kwargs.get("expires_at") is not None + active_infraction = await _utils.get_active_infraction(ctx, user, "ban", is_temporary) - infraction = await utils.post_infraction(ctx, user, "ban", reason, active=True, **kwargs) + if active_infraction: + if is_temporary: + log.trace("Tempban ignored as it cannot overwrite an active ban.") + return + + if active_infraction.get('expires_at') is None: + log.trace("Permaban already exists, notify.") + await ctx.send(f":x: User is already permanently banned (#{active_infraction['id']}).") + return + + log.trace("Old tempban is being replaced by new permaban.") + await self.pardon_infraction(ctx, "ban", user, is_temporary) + + infraction = await _utils.post_infraction(ctx, user, "ban", reason, active=True, **kwargs) if infraction is None: return self.mod_log.ignore(Event.member_remove, user.id) - action = ctx.guild.ban(user, reason=reason, delete_message_days=0) + if reason: + reason = textwrap.shorten(reason, width=512, placeholder="...") + + action = ctx.guild.ban(user, reason=reason, delete_message_days=purge_days) + await self.apply_infraction(ctx, infraction, user, action) + + if infraction.get('expires_at') is not None: + log.trace(f"Ban isn't permanent; user {user} won't be unwatched by Big Brother.") + return + + bb_cog = self.bot.get_cog("Big Brother") + if not bb_cog: + log.error(f"Big Brother cog not loaded; perma-banned user {user} won't be unwatched.") + return + + log.trace(f"Big Brother cog loaded; attempting to unwatch perma-banned user {user}.") + + bb_reason = "User has been permanently banned from the server. Automatically removed." + await bb_cog.apply_unwatch(ctx, user, bb_reason, send_message=False) + + @respect_role_hierarchy(member_arg=2) + async def apply_voice_ban(self, ctx: Context, user: UserSnowflake, reason: t.Optional[str], **kwargs) -> None: + """Apply a voice ban infraction with kwargs passed to `post_infraction`.""" + if await _utils.get_active_infraction(ctx, user, "voice_ban"): + return + + infraction = await _utils.post_infraction(ctx, user, "voice_ban", reason, active=True, **kwargs) + if infraction is None: + return + + self.mod_log.ignore(Event.member_update, user.id) + + if reason: + reason = textwrap.shorten(reason, width=512, placeholder="...") + + await user.move_to(None, reason="Disconnected from voice to apply voiceban.") + + action = user.remove_roles(self._voice_verified_role, reason=reason) await self.apply_infraction(ctx, infraction, user, action) # endregion # region: Base pardon functions - async def pardon_mute(self, user_id: int, guild: discord.Guild, reason: str) -> t.Dict[str, str]: + async def pardon_mute(self, user_id: int, guild: discord.Guild, reason: t.Optional[str]) -> t.Dict[str, str]: """Remove a user's muted role, DM them a notification, and return a log dict.""" user = guild.get_member(user_id) log_text = {} @@ -257,14 +370,14 @@ class Infractions(InfractionScheduler, commands.Cog): await user.remove_roles(self._muted_role, reason=reason) # DM the user about the expiration. 
- notified = await utils.notify_pardon( + notified = await _utils.notify_pardon( user=user, title="You have been unmuted", content="You may now send messages in the server.", - icon_url=utils.INFRACTION_ICONS["mute"][1] + icon_url=_utils.INFRACTION_ICONS["mute"][1] ) - log_text["Member"] = f"{user.mention}(`{user.id}`)" + log_text["Member"] = format_user(user) log_text["DM"] = "Sent" if notified else "**Failed**" else: log.info(f"Failed to unmute user {user_id}: user not found") @@ -272,7 +385,7 @@ class Infractions(InfractionScheduler, commands.Cog): return log_text - async def pardon_ban(self, user_id: int, guild: discord.Guild, reason: str) -> t.Dict[str, str]: + async def pardon_ban(self, user_id: int, guild: discord.Guild, reason: t.Optional[str]) -> t.Dict[str, str]: """Remove a user's ban on the Discord guild and return a log dict.""" user = discord.Object(user_id) log_text = {} @@ -287,7 +400,28 @@ class Infractions(InfractionScheduler, commands.Cog): return log_text - async def _pardon_action(self, infraction: utils.Infraction) -> t.Optional[t.Dict[str, str]]: + async def pardon_voice_ban(self, user_id: int, guild: discord.Guild, reason: t.Optional[str]) -> t.Dict[str, str]: + """Add Voice Verified role back to user, DM them a notification, and return a log dict.""" + user = guild.get_member(user_id) + log_text = {} + + if user: + # DM user about infraction expiration + notified = await _utils.notify_pardon( + user=user, + title="Voice ban ended", + content="You have been unbanned and can verify yourself again in the server.", + icon_url=_utils.INFRACTION_ICONS["voice_ban"][1] + ) + + log_text["Member"] = format_user(user) + log_text["DM"] = "Sent" if notified else "**Failed**" + else: + log_text["Info"] = "User was not found in the guild." + + return log_text + + async def _pardon_action(self, infraction: _utils.Infraction) -> t.Optional[t.Dict[str, str]]: """ Execute deactivation steps specific to the infraction's type and return a log dict. @@ -301,18 +435,25 @@ class Infractions(InfractionScheduler, commands.Cog): return await self.pardon_mute(user_id, guild, reason) elif infraction["type"] == "ban": return await self.pardon_ban(user_id, guild, reason) + elif infraction["type"] == "voice_ban": + return await self.pardon_voice_ban(user_id, guild, reason) # endregion # This cannot be static (must have a __func__ attribute). - def cog_check(self, ctx: Context) -> bool: + async def cog_check(self, ctx: Context) -> bool: """Only allow moderators to invoke the commands in this cog.""" - return with_role_check(ctx, *constants.MODERATION_ROLES) + return await commands.has_any_role(*constants.MODERATION_ROLES).predicate(ctx) # This cannot be static (must have a __func__ attribute). 
async def cog_command_error(self, ctx: Context, error: Exception) -> None: """Send a notification to the invoking context on a Union failure.""" if isinstance(error, commands.BadUnionArgument): - if discord.User in error.converters: + if discord.User in error.converters or discord.Member in error.converters: await ctx.send(str(error.errors[0])) error.handled = True + + +def setup(bot: Bot) -> None: + """Load the Infractions cog.""" + bot.add_cog(Infractions(bot)) diff --git a/bot/cogs/moderation/management.py b/bot/exts/moderation/infraction/management.py index f2964cd78..b3783cd60 100644 --- a/bot/cogs/moderation/management.py +++ b/bot/exts/moderation/infraction/management.py @@ -1,4 +1,3 @@ -import asyncio import logging import textwrap import typing as t @@ -7,16 +6,16 @@ from datetime import datetime import discord from discord.ext import commands from discord.ext.commands import Context +from discord.utils import escape_markdown from bot import constants from bot.bot import Bot -from bot.converters import Expiry, InfractionSearchQuery, allowed_strings, proxy_user +from bot.converters import Expiry, Infraction, Snowflake, UserMention, allowed_strings, proxy_user +from bot.exts.moderation.infraction.infractions import Infractions +from bot.exts.moderation.modlog import ModLog from bot.pagination import LinePaginator -from bot.utils import time -from bot.utils.checks import in_channel_check, with_role_check -from . import utils -from .infractions import Infractions -from .modlog import ModLog +from bot.utils import messages, time +from bot.utils.channel import is_mod_channel log = logging.getLogger(__name__) @@ -41,17 +40,56 @@ class ModManagement(commands.Cog): # region: Edit infraction commands - @commands.group(name='infraction', aliases=('infr', 'infractions', 'inf'), invoke_without_command=True) + @commands.group(name='infraction', aliases=('infr', 'infractions', 'inf', 'i'), invoke_without_command=True) async def infraction_group(self, ctx: Context) -> None: """Infraction manipulation commands.""" - await ctx.invoke(self.bot.get_command("help"), "infraction") + await ctx.send_help(ctx.command) - @infraction_group.command(name='edit') + @infraction_group.command(name="append", aliases=("amend", "add", "a")) + async def infraction_append( + self, + ctx: Context, + infraction: Infraction, + duration: t.Union[Expiry, allowed_strings("p", "permanent"), None], # noqa: F821 + *, + reason: str = None + ) -> None: + """ + Append text and/or edit the duration of an infraction. + + Durations are relative to the time of updating and should be appended with a unit of time. + Units (∗case-sensitive): + \u2003`y` - years + \u2003`m` - months∗ + \u2003`w` - weeks + \u2003`d` - days + \u2003`h` - hours + \u2003`M` - minutes∗ + \u2003`s` - seconds + + Use "l", "last", or "recent" as the infraction ID to specify that the most recent infraction + authored by the command invoker should be edited. + + Use "p" or "permanent" to mark the infraction as permanent. Alternatively, an ISO 8601 + timestamp can be provided for the duration. + + If a previous infraction reason does not end with an ending punctuation mark, this automatically + adds a period before the amended reason. + """ + old_reason = infraction["reason"] + + if old_reason is not None: + add_period = not old_reason.endswith((".", "!", "?")) + reason = old_reason + (". 
" if add_period else " ") + reason + + await self.infraction_edit(ctx, infraction, duration, reason=reason) + + @infraction_group.command(name='edit', aliases=('e',)) async def infraction_edit( self, ctx: Context, - infraction_id: t.Union[int, allowed_strings("l", "last", "recent")], - duration: t.Union[Expiry, allowed_strings("p", "permanent"), None], + infraction: Infraction, + duration: t.Union[Expiry, allowed_strings("p", "permanent"), None], # noqa: F821 *, reason: str = None ) -> None: @@ -78,30 +116,18 @@ class ModManagement(commands.Cog): # Unlike UserInputError, the error handler will show a specified message for BadArgument raise commands.BadArgument("Neither a new expiry nor a new reason was specified.") - # Retrieve the previous infraction for its information. - if isinstance(infraction_id, str): - params = { - "actor__id": ctx.author.id, - "ordering": "-inserted_at" - } - infractions = await self.bot.api_client.get(f"bot/infractions", params=params) - - if infractions: - old_infraction = infractions[0] - infraction_id = old_infraction["id"] - else: - await ctx.send( - f":x: Couldn't find most recent infraction; you have never given an infraction." - ) - return - else: - old_infraction = await self.bot.api_client.get(f"bot/infractions/{infraction_id}") + infraction_id = infraction["id"] request_data = {} confirm_messages = [] log_text = "" - if isinstance(duration, str): + if duration is not None and not infraction['active']: + if reason is None: + await ctx.send(":x: Cannot edit the expiration of an expired infraction.") + return + confirm_messages.append("expiry unchanged (infraction already expired)") + elif isinstance(duration, str): request_data['expires_at'] = None confirm_messages.append("marked as permanent") elif duration is not None: @@ -115,7 +141,7 @@ class ModManagement(commands.Cog): request_data['reason'] = reason confirm_messages.append("set a new reason") log_text += f""" - Previous reason: {old_infraction['reason']} + Previous reason: {infraction['reason']} New reason: {reason} """.rstrip() else: @@ -129,15 +155,16 @@ class ModManagement(commands.Cog): # Re-schedule infraction if the expiration has been updated if 'expires_at' in request_data: - self.infractions_cog.cancel_task(new_infraction['id']) + # A scheduled task should only exist if the old infraction wasn't permanent + if infraction['expires_at']: + self.infractions_cog.scheduler.cancel(new_infraction['id']) # If the infraction was not marked as permanent, schedule a new expiration task if request_data['expires_at']: - loop = asyncio.get_event_loop() - self.infractions_cog.schedule_task(loop, new_infraction['id'], new_infraction) + self.infractions_cog.schedule_expiration(new_infraction) log_text += f""" - Previous expiry: {old_infraction['expires_at'] or "Permanent"} + Previous expiry: {infraction['expires_at'] or "Permanent"} New expiry: {new_infraction['expires_at'] or "Permanent"} """.rstrip() @@ -149,16 +176,12 @@ class ModManagement(commands.Cog): user = ctx.guild.get_member(user_id) if user: - user_text = f"{user.mention} (`{user.id}`)" + user_text = messages.format_user(user) thumbnail = user.avatar_url_as(static_format="png") else: - user_text = f"`{user_id}`" + user_text = f"<@{user_id}>" thumbnail = None - # The infraction's actor - actor_id = new_infraction['actor'] - actor = ctx.guild.get_member(actor_id) or f"`{actor_id}`" - await self.mod_log.send_log_message( icon_url=constants.Icons.pencil, colour=discord.Colour.blurple(), @@ -166,29 +189,36 @@ class ModManagement(commands.Cog): 
thumbnail=thumbnail, text=textwrap.dedent(f""" Member: {user_text} - Actor: {actor} - Edited by: {ctx.message.author}{log_text} + Actor: <@{new_infraction['actor']}> + Edited by: {ctx.message.author.mention}{log_text} """) ) # endregion # region: Search infractions - @infraction_group.group(name="search", invoke_without_command=True) - async def infraction_search_group(self, ctx: Context, query: InfractionSearchQuery) -> None: + @infraction_group.group(name="search", aliases=('s',), invoke_without_command=True) + async def infraction_search_group(self, ctx: Context, query: t.Union[UserMention, Snowflake, str]) -> None: """Searches for infractions in the database.""" - if isinstance(query, discord.User): - await ctx.invoke(self.search_user, query) + if isinstance(query, int): + await self.search_user(ctx, discord.Object(query)) else: - await ctx.invoke(self.search_reason, query) + await self.search_reason(ctx, query) @infraction_search_group.command(name="user", aliases=("member", "id")) async def search_user(self, ctx: Context, user: t.Union[discord.User, proxy_user]) -> None: """Search for infractions by member.""" infraction_list = await self.bot.api_client.get( - 'bot/infractions', + 'bot/infractions/expanded', params={'user__id': str(user.id)} ) + + user = self.bot.get_user(user.id) + if not user and infraction_list: + # Use the user data retrieved from the DB for the username. + user = infraction_list[0]["user"] + user = escape_markdown(user["name"]) + f"#{user['discriminator']:04}" + embed = discord.Embed( title=f"Infractions for {user} ({len(infraction_list)} total)", colour=discord.Colour.orange() @@ -199,7 +229,7 @@ class ModManagement(commands.Cog): async def search_reason(self, ctx: Context, reason: str) -> None: """Search for infractions by their reason. Use Re2 for matching.""" infraction_list = await self.bot.api_client.get( - 'bot/infractions', + 'bot/infractions/expanded', params={'search': reason} ) embed = discord.Embed( @@ -215,11 +245,11 @@ class ModManagement(commands.Cog): self, ctx: Context, embed: discord.Embed, - infractions: t.Iterable[utils.Infraction] + infractions: t.Iterable[t.Dict[str, t.Any]] ) -> None: """Send a paginated embed of infractions for the specified user.""" if not infractions: - await ctx.send(f":warning: No infractions could be found for that query.") + await ctx.send(":warning: No infractions could be found for that query.") return lines = tuple( @@ -236,39 +266,45 @@ class ModManagement(commands.Cog): max_size=1000 ) - def infraction_to_string(self, infraction: utils.Infraction) -> str: + def infraction_to_string(self, infraction: t.Dict[str, t.Any]) -> str: """Convert the infraction object to a string representation.""" - actor_id = infraction["actor"] - guild = self.bot.get_guild(constants.Guild.id) - actor = guild.get_member(actor_id) active = infraction["active"] - user_id = infraction["user"] - hidden = infraction["hidden"] + user = infraction["user"] + expires_at = infraction["expires_at"] created = time.format_infraction(infraction["inserted_at"]) + # Format the user string. + if user_obj := self.bot.get_user(user["id"]): + # The user is in the cache. + user_str = messages.format_user(user_obj) + else: + # Use the user data retrieved from the DB. 
+ name = escape_markdown(user['name']) + user_str = f"<@{user['id']}> ({name}#{user['discriminator']:04})" + if active: - remaining = time.until_expiration(infraction["expires_at"]) or "Expired" + remaining = time.until_expiration(expires_at) or "Expired" else: remaining = "Inactive" - if infraction["expires_at"] is None: + if expires_at is None: expires = "*Permanent*" else: date_from = datetime.strptime(created, time.INFRACTION_FORMAT) - expires = time.format_infraction_with_duration(infraction["expires_at"], date_from) + expires = time.format_infraction_with_duration(expires_at, date_from) lines = textwrap.dedent(f""" {"**===============**" if active else "==============="} Status: {"__**Active**__" if active else "Inactive"} - User: {self.bot.get_user(user_id)} (`{user_id}`) + User: {user_str} Type: **{infraction["type"]}** - Shadow: {hidden} - Reason: {infraction["reason"] or "*None*"} + Shadow: {infraction["hidden"]} Created: {created} Expires: {expires} Remaining: {remaining} - Actor: {actor.mention if actor else actor_id} + Actor: <@{infraction["actor"]["id"]}> ID: `{infraction["id"]}` + Reason: {infraction["reason"] or "*None*"} {"**===============**" if active else "==============="} """) @@ -277,11 +313,11 @@ class ModManagement(commands.Cog): # endregion # This cannot be static (must have a __func__ attribute). - def cog_check(self, ctx: Context) -> bool: - """Only allow moderators from moderator channels to invoke the commands in this cog.""" + async def cog_check(self, ctx: Context) -> bool: + """Only allow moderators inside moderator channels to invoke the commands in this cog.""" checks = [ - with_role_check(ctx, *constants.MODERATION_ROLES), - in_channel_check(ctx, *constants.MODERATION_CHANNELS) + await commands.has_any_role(*constants.MODERATION_ROLES).predicate(ctx), + is_mod_channel(ctx.channel) ] return all(checks) @@ -292,3 +328,8 @@ class ModManagement(commands.Cog): if discord.User in error.converters: await ctx.send(str(error.errors[0])) error.handled = True + + +def setup(bot: Bot) -> None: + """Load the ModManagement cog.""" + bot.add_cog(ModManagement(bot)) diff --git a/bot/cogs/moderation/superstarify.py b/bot/exts/moderation/infraction/superstarify.py index 050c847ac..ffc470c54 100644 --- a/bot/cogs/moderation/superstarify.py +++ b/bot/exts/moderation/infraction/superstarify.py @@ -5,16 +5,17 @@ import textwrap import typing as t from pathlib import Path -from discord import Colour, Embed, Member -from discord.ext.commands import Cog, Context, command +from discord import Embed, Member +from discord.ext.commands import Cog, Context, command, has_any_role +from discord.utils import escape_markdown from bot import constants from bot.bot import Bot from bot.converters import Expiry -from bot.utils.checks import with_role_check +from bot.exts.moderation.infraction import _utils +from bot.exts.moderation.infraction._scheduler import InfractionScheduler +from bot.utils.messages import format_user from bot.utils.time import format_infraction -from . import utils -from .scheduler import InfractionScheduler log = logging.getLogger(__name__) NICKNAME_POLICY_URL = "https://pythondiscord.com/pages/rules/#nickname-policy" @@ -59,7 +60,7 @@ class Superstarify(InfractionScheduler, Cog): return # Nick change was triggered by this event. Ignore. log.info( - f"{after.display_name} is currently in superstar-prison. " + f"{after.display_name} ({after.id}) tried to escape superstar prison. " f"Changing the nick back to {before.display_name}." 
) await after.edit( @@ -67,7 +68,7 @@ class Superstarify(InfractionScheduler, Cog): reason=f"Superstarified member tried to escape the prison: {infraction['id']}" ) - notified = await utils.notify_infraction( + notified = await _utils.notify_infraction( user=after, infr_type="Superstarify", expires_at=format_infraction(infraction["expires_at"]), @@ -76,11 +77,11 @@ class Superstarify(InfractionScheduler, Cog): f"from **{before.display_name}** to **{after.display_name}**, but as you " "are currently in superstar-prison, you do not have permission to do so." ), - icon_url=utils.INFRACTION_ICONS["superstar"][0] + icon_url=_utils.INFRACTION_ICONS["superstar"][0] ) if not notified: - log.warning("Failed to DM user about why they cannot change their nickname.") + log.info("Failed to DM user about why they cannot change their nickname.") @Cog.listener() async def on_member_join(self, member: Member) -> None: @@ -103,13 +104,14 @@ class Superstarify(InfractionScheduler, Cog): await self.reapply_infraction(infraction, action) - @command(name="superstarify", aliases=("force_nick", "star")) + @command(name="superstarify", aliases=("force_nick", "star", "starify")) async def superstarify( self, ctx: Context, member: Member, duration: Expiry, - reason: str = None + *, + reason: str = '', ) -> None: """ Temporarily force a random superstar name (like Taylor Swift) to be the user's nickname. @@ -126,76 +128,67 @@ class Superstarify(InfractionScheduler, Cog): Alternatively, an ISO 8601 timestamp can be provided for the duration. - An optional reason can be provided. If no reason is given, the original name will be shown - in a generated reason. + An optional reason can be provided, which would be added to a message stating their old nickname + and linking to the nickname policy. """ - if await utils.has_active_infraction(ctx, member, "superstar"): + if await _utils.get_active_infraction(ctx, member, "superstar"): return # Post the infraction to the API - reason = reason or f"old nick: {member.display_name}" - infraction = await utils.post_infraction(ctx, member, "superstar", reason, duration, active=True) + old_nick = member.display_name + infraction_reason = f'Old nickname: {old_nick}. {reason}' + infraction = await _utils.post_infraction(ctx, member, "superstar", infraction_reason, duration, active=True) id_ = infraction["id"] - old_nick = member.display_name forced_nick = self.get_nick(id_, member.id) expiry_str = format_infraction(infraction["expires_at"]) - # Apply the infraction and schedule the expiration task. - log.debug(f"Changing nickname of {member} to {forced_nick}.") - self.mod_log.ignore(constants.Event.member_update, member.id) - await member.edit(nick=forced_nick, reason=reason) - self.schedule_task(ctx.bot.loop, id_, infraction) - - # Send a DM to the user to notify them of their new infraction. - await utils.notify_infraction( - user=member, - infr_type="Superstarify", - expires_at=expiry_str, - icon_url=utils.INFRACTION_ICONS["superstar"][0], - reason=f"Your nickname didn't comply with our [nickname policy]({NICKNAME_POLICY_URL})." 
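In the new flow below, the forced nickname comes from get_nick(id_, member.id), which pairs the infraction ID with the member ID before picking from STAR_NAMES (the tail of that helper is visible further down). A rough sketch of the idea; the seeding strategy and the placeholder name list here are assumptions for illustration, not the repository's code:

import random

# Placeholder names purely for this sketch; the real STAR_NAMES list lives elsewhere in the repository.
STAR_NAMES = ("Taylor Swift", "Dolly Parton", "Freddie Mercury")

def get_nick(infraction_id: int, member_id: int) -> str:
    """Pick a superstar nickname for this infraction/member pair (deterministic seeding assumed)."""
    rng = random.Random(str(infraction_id) + str(member_id))
    return rng.choice(STAR_NAMES)

print(get_nick(1234, 95872159741644800))  # The same pair always yields the same nickname.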
+ # Apply the infraction + async def action() -> None: + log.debug(f"Changing nickname of {member} to {forced_nick}.") + self.mod_log.ignore(constants.Event.member_update, member.id) + await member.edit(nick=forced_nick, reason=reason) + + old_nick = escape_markdown(old_nick) + forced_nick = escape_markdown(forced_nick) + + nickname_info = textwrap.dedent(f""" + Old nickname: `{old_nick}` + New nickname: `{forced_nick}` + """).strip() + + user_message = ( + f"Your previous nickname, **{old_nick}**, " + f"was so bad that we have decided to change it. " + f"Your new nickname will be **{forced_nick}**.\n\n" + "{reason}" + f"You will be unable to change your nickname until **{expiry_str}**. " + "If you're confused by this, please read our " + f"[official nickname policy]({NICKNAME_POLICY_URL})." + ).format + + successful = await self.apply_infraction( + ctx, infraction, member, action(), + user_reason=user_message(reason=f'**Additional details:** {reason}\n\n' if reason else ''), + additional_info=nickname_info ) - # Send an embed with the infraction information to the invoking context. - log.trace(f"Sending superstar #{id_} embed.") - embed = Embed( - title="Congratulations!", - colour=constants.Colours.soft_orange, - description=( - f"Your previous nickname, **{old_nick}**, " - f"was so bad that we have decided to change it. " - f"Your new nickname will be **{forced_nick}**.\n\n" - f"You will be unable to change your nickname until **{expiry_str}**.\n\n" - "If you're confused by this, please read our " - f"[official nickname policy]({NICKNAME_POLICY_URL})." + # Send an embed with to the invoking context if superstar was successful. + if successful: + log.trace(f"Sending superstar #{id_} embed.") + embed = Embed( + title="Superstarified!", + colour=constants.Colours.soft_orange, + description=user_message(reason='') ) - ) - await ctx.send(embed=embed) - - # Log to the mod log channel. - log.trace(f"Sending apply mod log for superstar #{id_}.") - await self.mod_log.send_log_message( - icon_url=utils.INFRACTION_ICONS["superstar"][0], - colour=Colour.gold(), - title="Member achieved superstardom", - thumbnail=member.avatar_url_as(static_format="png"), - text=textwrap.dedent(f""" - Member: {member.mention} (`{member.id}`) - Actor: {ctx.message.author} - Reason: {reason} - Expires: {expiry_str} - Old nickname: `{old_nick}` - New nickname: `{forced_nick}` - """), - footer=f"ID {id_}" - ) + await ctx.send(embed=embed) - @command(name="unsuperstarify", aliases=("release_nick", "unstar")) + @command(name="unsuperstarify", aliases=("release_nick", "unstar", "unstarify")) async def unsuperstarify(self, ctx: Context, member: Member) -> None: """Remove the superstarify infraction and allow the user to change their nickname.""" await self.pardon_infraction(ctx, "superstar", member) - async def _pardon_action(self, infraction: utils.Infraction) -> t.Optional[t.Dict[str, str]]: + async def _pardon_action(self, infraction: _utils.Infraction) -> t.Optional[t.Dict[str, str]]: """Pardon a superstar infraction and return a log dict.""" if infraction["type"] != "superstar": return @@ -212,15 +205,15 @@ class Superstarify(InfractionScheduler, Cog): return {} # DM the user about the expiration. 
- notified = await utils.notify_pardon( + notified = await _utils.notify_pardon( user=user, title="You are no longer superstarified", content="You may now change your nickname on the server.", - icon_url=utils.INFRACTION_ICONS["superstar"][1] + icon_url=_utils.INFRACTION_ICONS["superstar"][1] ) return { - "Member": f"{user.mention}(`{user.id}`)", + "Member": format_user(user), "DM": "Sent" if notified else "**Failed**" } @@ -233,6 +226,11 @@ class Superstarify(InfractionScheduler, Cog): return rng.choice(STAR_NAMES) # This cannot be static (must have a __func__ attribute). - def cog_check(self, ctx: Context) -> bool: + async def cog_check(self, ctx: Context) -> bool: """Only allow moderators to invoke the commands in this cog.""" - return with_role_check(ctx, *constants.MODERATION_ROLES) + return await has_any_role(*constants.MODERATION_ROLES).predicate(ctx) + + +def setup(bot: Bot) -> None: + """Load the Superstarify cog.""" + bot.add_cog(Superstarify(bot)) diff --git a/bot/cogs/moderation/modlog.py b/bot/exts/moderation/modlog.py index e8ae0dbe6..b01de0ee3 100644 --- a/bot/cogs/moderation/modlog.py +++ b/bot/exts/moderation/modlog.py @@ -14,7 +14,8 @@ from discord.abc import GuildChannel from discord.ext.commands import Cog, Context from bot.bot import Bot -from bot.constants import Channels, Colours, Emojis, Event, Guild as GuildConstant, Icons, URLs +from bot.constants import Categories, Channels, Colours, Emojis, Event, Guild as GuildConstant, Icons, URLs +from bot.utils.messages import format_user from bot.utils.time import humanize_delta log = logging.getLogger(__name__) @@ -23,7 +24,6 @@ GUILD_CHANNEL = t.Union[discord.CategoryChannel, discord.TextChannel, discord.Vo CHANNEL_CHANGES_UNSUPPORTED = ("permissions",) CHANNEL_CHANGES_SUPPRESSED = ("_overwrites", "position") -MEMBER_CHANGES_SUPPRESSED = ("status", "activities", "_client_status", "nick") ROLE_CHANGES_UNSUPPORTED = ("colour", "permissions") VOICE_STATE_ATTRIBUTES = { @@ -63,11 +63,11 @@ class ModLog(Cog, name="ModLog"): 'id': message.id, 'author': message.author.id, 'channel_id': message.channel.id, - 'content': message.content, + 'content': message.content.replace("\0", ""), # Null chars cause 400. 'embeds': [embed.to_dict() for embed in message.embeds], 'attachments': attachment, } - for message, attachment in zip_longest(messages, attachments) + for message, attachment in zip_longest(messages, attachments, fillvalue=[]) ] } ) @@ -87,7 +87,7 @@ class ModLog(Cog, name="ModLog"): title: t.Optional[str], text: str, thumbnail: t.Optional[t.Union[str, discord.Asset]] = None, - channel_id: int = Channels.modlog, + channel_id: int = Channels.mod_log, ping_everyone: bool = False, files: t.Optional[t.List[discord.File]] = None, content: t.Optional[str] = None, @@ -97,7 +97,10 @@ class ModLog(Cog, name="ModLog"): footer: t.Optional[str] = None, ) -> Context: """Generate log embed and send to logging channel.""" - embed = discord.Embed(description=text) + # Truncate string directly here to avoid removing newlines + embed = discord.Embed( + description=text[:2045] + "..." if len(text) > 2048 else text + ) if title and icon_url: embed.set_author(name=title, icon_url=icon_url) @@ -117,8 +120,17 @@ class ModLog(Cog, name="ModLog"): else: content = "@everyone" + # Truncate content to 2000 characters and append an ellipsis. + if content and len(content) > 2000: + content = content[:2000 - 3] + "..." 
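send_log_message above slices the embed description down to 2048 characters (slicing rather than textwrap so newlines survive) and trims the message content to 2000 characters, the limits the cog assumes for those Discord fields. The same truncation pattern in isolation:

def truncate_embed_description(text: str) -> str:
    """Cap an embed description at 2048 characters, marking the cut with an ellipsis."""
    return text[:2045] + "..." if len(text) > 2048 else text

def truncate_content(content: str) -> str:
    """Cap plain message content at 2000 characters."""
    if content and len(content) > 2000:
        content = content[:2000 - 3] + "..."
    return content

print(len(truncate_embed_description("x" * 5000)))  # 2048
print(len(truncate_content("y" * 5000)))            # 2000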
+ channel = self.bot.get_channel(channel_id) - log_message = await channel.send(content=content, embed=embed, files=files) + log_message = await channel.send( + content=content, + embed=embed, + files=files, + allowed_mentions=discord.AllowedMentions(everyone=True) + ) if additional_embeds: if additional_embeds_msg: @@ -187,6 +199,12 @@ class ModLog(Cog, name="ModLog"): self._ignored[Event.guild_channel_update].remove(before.id) return + # Two channel updates are sent for a single edit: 1 for topic and 1 for category change. + # TODO: remove once support is added for ignoring multiple occurrences for the same channel. + help_categories = (Categories.help_available, Categories.help_dormant, Categories.help_in_use) + if after.category and after.category.id in help_categories: + return + diff = DeepDiff(before, after) changes = [] done = [] @@ -215,7 +233,10 @@ class ModLog(Cog, name="ModLog"): new = value["new_value"] old = value["old_value"] - changes.append(f"**{key.title()}:** `{old}` **→** `{new}`") + # Discord does not treat consecutive backticks ("``") as an empty inline code block, so the markdown + # formatting is broken when `new` and/or `old` are empty values. "None" is used for these cases so + # formatting is preserved. + changes.append(f"**{key.title()}:** `{old or 'None'}` **→** `{new or 'None'}`") done.append(key) @@ -375,9 +396,9 @@ class ModLog(Cog, name="ModLog"): await self.send_log_message( Icons.user_ban, Colours.soft_red, - "User banned", f"{member} (`{member.id}`)", + "User banned", format_user(member), thumbnail=member.avatar_url_as(static_format="png"), - channel_id=Channels.userlog + channel_id=Channels.user_log ) @Cog.listener() @@ -386,11 +407,10 @@ class ModLog(Cog, name="ModLog"): if member.guild.id != GuildConstant.id: return - message = f"{member} (`{member.id}`)" now = datetime.utcnow() difference = abs(relativedelta(now, member.created_at)) - message += "\n\n**Account age:** " + humanize_delta(difference) + message = format_user(member) + "\n\n**Account age:** " + humanize_delta(difference) if difference.days < 1 and difference.months < 1 and difference.years < 1: # New user account! 
message = f"{Emojis.new} {message}" @@ -399,7 +419,7 @@ class ModLog(Cog, name="ModLog"): Icons.sign_in, Colours.soft_green, "User joined", message, thumbnail=member.avatar_url_as(static_format="png"), - channel_id=Channels.userlog + channel_id=Channels.user_log ) @Cog.listener() @@ -414,9 +434,9 @@ class ModLog(Cog, name="ModLog"): await self.send_log_message( Icons.sign_out, Colours.soft_red, - "User left", f"{member} (`{member.id}`)", + "User left", format_user(member), thumbnail=member.avatar_url_as(static_format="png"), - channel_id=Channels.userlog + channel_id=Channels.user_log ) @Cog.listener() @@ -431,11 +451,26 @@ class ModLog(Cog, name="ModLog"): await self.send_log_message( Icons.user_unban, Colour.blurple(), - "User unbanned", f"{member} (`{member.id}`)", + "User unbanned", format_user(member), thumbnail=member.avatar_url_as(static_format="png"), - channel_id=Channels.modlog + channel_id=Channels.mod_log ) + @staticmethod + def get_role_diff(before: t.List[discord.Role], after: t.List[discord.Role]) -> t.List[str]: + """Return a list of strings describing the roles added and removed.""" + changes = [] + before_roles = set(before) + after_roles = set(after) + + for role in (before_roles - after_roles): + changes.append(f"**Role removed:** {role.name} (`{role.id}`)") + + for role in (after_roles - before_roles): + changes.append(f"**Role added:** {role.name} (`{role.id}`)") + + return changes + @Cog.listener() async def on_member_update(self, before: discord.Member, after: discord.Member) -> None: """Log member update event to user log.""" @@ -446,74 +481,27 @@ class ModLog(Cog, name="ModLog"): self._ignored[Event.member_update].remove(before.id) return - diff = DeepDiff(before, after) - changes = [] - done = [] + changes = self.get_role_diff(before.roles, after.roles) - diff_values = {} - - diff_values.update(diff.get("values_changed", {})) - diff_values.update(diff.get("type_changes", {})) - diff_values.update(diff.get("iterable_item_removed", {})) - diff_values.update(diff.get("iterable_item_added", {})) - - diff_user = DeepDiff(before._user, after._user) - - diff_values.update(diff_user.get("values_changed", {})) - diff_values.update(diff_user.get("type_changes", {})) - diff_values.update(diff_user.get("iterable_item_removed", {})) - diff_values.update(diff_user.get("iterable_item_added", {})) - - for key, value in diff_values.items(): - if not key: # Not sure why, but it happens - continue - - key = key[5:] # Remove "root." prefix - - if "[" in key: - key = key.split("[", 1)[0] + # The regex is a simple way to exclude all sequence and mapping types. + diff = DeepDiff(before, after, exclude_regex_paths=r".*\[.*") - if "." in key: - key = key.split(".", 1)[0] + # A type change seems to always take precedent over a value change. Furthermore, it will + # include the value change along with the type change anyway. Therefore, it's OK to + # "overwrite" values_changed; in practice there will never even be anything to overwrite. + diff_values = {**diff.get("values_changed", {}), **diff.get("type_changes", {})} - if key in done or key in MEMBER_CHANGES_SUPPRESSED: + for attr, value in diff_values.items(): + if not attr: # Not sure why, but it happens. 
continue - if key == "_roles": - new_roles = after.roles - old_roles = before.roles - - for role in old_roles: - if role not in new_roles: - changes.append(f"**Role removed:** {role.name} (`{role.id}`)") - - for role in new_roles: - if role not in old_roles: - changes.append(f"**Role added:** {role.name} (`{role.id}`)") - - else: - new = value.get("new_value") - old = value.get("old_value") - - if new and old: - changes.append(f"**{key.title()}:** `{old}` **→** `{new}`") - - done.append(key) + attr = attr[5:] # Remove "root." prefix. + attr = attr.replace("_", " ").replace(".", " ").capitalize() - if before.name != after.name: - changes.append( - f"**Username:** `{before.name}` **→** `{after.name}`" - ) + new = value.get("new_value") + old = value.get("old_value") - if before.discriminator != after.discriminator: - changes.append( - f"**Discriminator:** `{before.discriminator}` **→** `{after.discriminator}`" - ) - - if before.display_name != after.display_name: - changes.append( - f"**Display name:** `{before.display_name}` **→** `{after.display_name}`" - ) + changes.append(f"**{attr}:** `{old}` **→** `{new}`") if not changes: return @@ -523,13 +511,15 @@ class ModLog(Cog, name="ModLog"): for item in sorted(changes): message += f"{Emojis.bullet} {item}\n" - message = f"**{after}** (`{after.id}`)\n{message}" + message = f"{format_user(after)}\n{message}" await self.send_log_message( - Icons.user_update, Colour.blurple(), - "Member updated", message, + icon_url=Icons.user_update, + colour=Colour.blurple(), + title="Member updated", + text=message, thumbnail=after.avatar_url_as(static_format="png"), - channel_id=Channels.userlog + channel_id=Channels.user_log ) @Cog.listener() @@ -538,7 +528,11 @@ class ModLog(Cog, name="ModLog"): channel = message.channel author = message.author - if message.guild.id != GuildConstant.id or channel.id in GuildConstant.ignored: + # Ignore DMs. 
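The rewritten on_member_update relies on DeepDiff reporting changed attributes under root.<attribute> paths, which is why the code strips the "root." prefix and excludes bracketed paths for sequences and mappings. A minimal standalone example of what that diff looks like, using a stand-in dataclass instead of a discord.Member:

from dataclasses import dataclass

from deepdiff import DeepDiff

@dataclass
class FakeMember:
    nick: str
    pending: bool
    roles: list

before = FakeMember(nick="lemon", pending=True, roles=[1, 2])
after = FakeMember(nick="lemoncake", pending=False, roles=[1, 2, 3])

# Bracketed paths such as root.roles[2] cover sequence/mapping items; the cog excludes them
# because role changes are reported separately by get_role_diff.
diff = DeepDiff(before, after, exclude_regex_paths=r".*\[.*")
for path, change in diff.get("values_changed", {}).items():
    attr = path[5:]  # Strip the "root." prefix, as the cog does.
    print(f"{attr}: {change['old_value']} -> {change['new_value']}")
# Prints roughly: nick: lemon -> lemoncake, and pending: True -> False.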
+ if not message.guild: + return + + if message.guild.id != GuildConstant.id or channel.id in GuildConstant.modlog_blacklist: return self._cached_deletes.append(message.id) @@ -552,14 +546,14 @@ class ModLog(Cog, name="ModLog"): if channel.category: response = ( - f"**Author:** {author} (`{author.id}`)\n" + f"**Author:** {format_user(author)}\n" f"**Channel:** {channel.category}/#{channel.name} (`{channel.id}`)\n" f"**Message ID:** `{message.id}`\n" "\n" ) else: response = ( - f"**Author:** {author} (`{author.id}`)\n" + f"**Author:** {format_user(author)}\n" f"**Channel:** #{channel.name} (`{channel.id}`)\n" f"**Message ID:** `{message.id}`\n" "\n" @@ -591,7 +585,7 @@ class ModLog(Cog, name="ModLog"): @Cog.listener() async def on_raw_message_delete(self, event: discord.RawMessageDeleteEvent) -> None: """Log raw message delete event to message change log.""" - if event.guild_id != GuildConstant.id or event.channel_id in GuildConstant.ignored: + if event.guild_id != GuildConstant.id or event.channel_id in GuildConstant.modlog_blacklist: return await asyncio.sleep(1) # Wait here in case the normal event was fired @@ -635,7 +629,7 @@ class ModLog(Cog, name="ModLog"): if ( not msg_before.guild or msg_before.guild.id != GuildConstant.id - or msg_before.channel.id in GuildConstant.ignored + or msg_before.channel.id in GuildConstant.modlog_blacklist or msg_before.author.bot ): return @@ -645,7 +639,6 @@ class ModLog(Cog, name="ModLog"): if msg_before.content == msg_after.content: return - author = msg_before.author channel = msg_before.channel channel_name = f"{channel.category}/#{channel.name}" if channel.category else f"#{channel.name}" @@ -677,7 +670,7 @@ class ModLog(Cog, name="ModLog"): content_after.append(sub) response = ( - f"**Author:** {author} (`{author.id}`)\n" + f"**Author:** {format_user(msg_before.author)}\n" f"**Channel:** {channel_name} (`{channel.id}`)\n" f"**Message ID:** `{msg_before.id}`\n" "\n" @@ -717,7 +710,7 @@ class ModLog(Cog, name="ModLog"): if ( not message.guild or message.guild.id != GuildConstant.id - or message.channel.id in GuildConstant.ignored + or message.channel.id in GuildConstant.modlog_blacklist or message.author.bot ): return @@ -729,12 +722,11 @@ class ModLog(Cog, name="ModLog"): self._cached_edits.remove(event.message_id) return - author = message.author channel = message.channel channel_name = f"{channel.category}/#{channel.name}" if channel.category else f"#{channel.name}" before_response = ( - f"**Author:** {author} (`{author.id}`)\n" + f"**Author:** {format_user(message.author)}\n" f"**Channel:** {channel_name} (`{channel.id}`)\n" f"**Message ID:** `{message.id}`\n" "\n" @@ -742,7 +734,7 @@ class ModLog(Cog, name="ModLog"): ) after_response = ( - f"**Author:** {author} (`{author.id}`)\n" + f"**Author:** {format_user(message.author)}\n" f"**Channel:** {channel_name} (`{channel.id}`)\n" f"**Message ID:** `{message.id}`\n" "\n" @@ -769,7 +761,7 @@ class ModLog(Cog, name="ModLog"): """Log member voice state changes to the voice log channel.""" if ( member.guild.id != GuildConstant.id - or (before.channel and before.channel.id in GuildConstant.ignored) + or (before.channel and before.channel.id in GuildConstant.modlog_blacklist) ): return @@ -821,7 +813,7 @@ class ModLog(Cog, name="ModLog"): return message = "\n".join(f"{Emojis.bullet} {item}" for item in sorted(changes)) - message = f"**{member}** (`{member.id}`)\n{message}" + message = f"{format_user(member)}\n{message}" await self.send_log_message( icon_url=icon, @@ -831,3 +823,8 @@ class ModLog(Cog, 
name="ModLog"): thumbnail=member.avatar_url_as(static_format="png"), channel_id=Channels.voice_log ) + + +def setup(bot: Bot) -> None: + """Load the ModLog cog.""" + bot.add_cog(ModLog(bot)) diff --git a/bot/exts/moderation/silence.py b/bot/exts/moderation/silence.py new file mode 100644 index 000000000..a942d5294 --- /dev/null +++ b/bot/exts/moderation/silence.py @@ -0,0 +1,255 @@ +import json +import logging +from contextlib import suppress +from datetime import datetime, timedelta, timezone +from operator import attrgetter +from typing import Optional + +from async_rediscache import RedisCache +from discord import TextChannel +from discord.ext import commands, tasks +from discord.ext.commands import Context + +from bot.bot import Bot +from bot.constants import Channels, Emojis, Guild, MODERATION_ROLES, Roles +from bot.converters import HushDurationConverter +from bot.utils.lock import LockedResourceError, lock_arg +from bot.utils.scheduling import Scheduler + +log = logging.getLogger(__name__) + +LOCK_NAMESPACE = "silence" + +MSG_SILENCE_FAIL = f"{Emojis.cross_mark} current channel is already silenced." +MSG_SILENCE_PERMANENT = f"{Emojis.check_mark} silenced current channel indefinitely." +MSG_SILENCE_SUCCESS = f"{Emojis.check_mark} silenced current channel for {{duration}} minute(s)." + +MSG_UNSILENCE_FAIL = f"{Emojis.cross_mark} current channel was not silenced." +MSG_UNSILENCE_MANUAL = ( + f"{Emojis.cross_mark} current channel was not unsilenced because the current overwrites were " + f"set manually or the cache was prematurely cleared. " + f"Please edit the overwrites manually to unsilence." +) +MSG_UNSILENCE_SUCCESS = f"{Emojis.check_mark} unsilenced current channel." + + +class SilenceNotifier(tasks.Loop): + """Loop notifier for posting notices to `alert_channel` containing added channels.""" + + def __init__(self, alert_channel: TextChannel): + super().__init__(self._notifier, seconds=1, minutes=0, hours=0, count=None, reconnect=True, loop=None) + self._silenced_channels = {} + self._alert_channel = alert_channel + + def add_channel(self, channel: TextChannel) -> None: + """Add channel to `_silenced_channels` and start loop if not launched.""" + if not self._silenced_channels: + self.start() + log.info("Starting notifier loop.") + self._silenced_channels[channel] = self._current_loop + + def remove_channel(self, channel: TextChannel) -> None: + """Remove channel from `_silenced_channels` and stop loop if no channels remain.""" + with suppress(KeyError): + del self._silenced_channels[channel] + if not self._silenced_channels: + self.stop() + log.info("Stopping notifier loop.") + + async def _notifier(self) -> None: + """Post notice of `_silenced_channels` with their silenced duration to `_alert_channel` periodically.""" + # Wait for 15 minutes between notices with pause at start of loop. + if self._current_loop and not self._current_loop/60 % 15: + log.debug( + f"Sending notice with channels: " + f"{', '.join(f'#{channel} ({channel.id})' for channel in self._silenced_channels)}." + ) + channels_text = ', '.join( + f"{channel.mention} for {(self._current_loop-start)//60} min" + for channel, start in self._silenced_channels.items() + ) + await self._alert_channel.send(f"<@&{Roles.moderators}> currently silenced channels: {channels_text}") + + +class Silence(commands.Cog): + """Commands for stopping channel messages for `verified` role in a channel.""" + + # Maps muted channel IDs to their previous overwrites for send_message and add_reactions. + # Overwrites are stored as JSON. 
+ previous_overwrites = RedisCache() + + # Maps muted channel IDs to POSIX timestamps of when they'll be unsilenced. + # A timestamp equal to -1 means it's indefinite. + unsilence_timestamps = RedisCache() + + def __init__(self, bot: Bot): + self.bot = bot + self.scheduler = Scheduler(self.__class__.__name__) + + self._init_task = self.bot.loop.create_task(self._async_init()) + + async def _async_init(self) -> None: + """Set instance attributes once the guild is available and reschedule unsilences.""" + await self.bot.wait_until_guild_available() + + guild = self.bot.get_guild(Guild.id) + self._everyone_role = guild.default_role + self._mod_alerts_channel = self.bot.get_channel(Channels.mod_alerts) + self.notifier = SilenceNotifier(self.bot.get_channel(Channels.mod_log)) + await self._reschedule() + + @commands.command(aliases=("hush",)) + @lock_arg(LOCK_NAMESPACE, "ctx", attrgetter("channel"), raise_error=True) + async def silence(self, ctx: Context, duration: HushDurationConverter = 10) -> None: + """ + Silence the current channel for `duration` minutes or `forever`. + + Duration is capped at 15 minutes, passing forever makes the silence indefinite. + Indefinitely silenced channels get added to a notifier which posts notices every 15 minutes from the start. + """ + await self._init_task + + channel_info = f"#{ctx.channel} ({ctx.channel.id})" + log.debug(f"{ctx.author} is silencing channel {channel_info}.") + + if not await self._set_silence_overwrites(ctx.channel): + log.info(f"Tried to silence channel {channel_info} but the channel was already silenced.") + await ctx.send(MSG_SILENCE_FAIL) + return + + await self._schedule_unsilence(ctx, duration) + + if duration is None: + self.notifier.add_channel(ctx.channel) + log.info(f"Silenced {channel_info} indefinitely.") + await ctx.send(MSG_SILENCE_PERMANENT) + else: + log.info(f"Silenced {channel_info} for {duration} minute(s).") + await ctx.send(MSG_SILENCE_SUCCESS.format(duration=duration)) + + @commands.command(aliases=("unhush",)) + async def unsilence(self, ctx: Context) -> None: + """ + Unsilence the current channel. + + If the channel was silenced indefinitely, notifications for the channel will stop. 
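unsilence_timestamps maps each silenced channel to the POSIX timestamp at which it should be unsilenced, with -1 marking an indefinite silence; _reschedule (below) later turns those values back into scheduler delays. A condensed sketch of that round trip, using a plain dict in place of the RedisCache purely for illustration:

from datetime import datetime, timedelta, timezone
from typing import Optional

unsilence_timestamps = {}  # Stand-in for the RedisCache: channel_id -> POSIX timestamp, or -1 for indefinite.

def record_silence(channel_id: int, duration_minutes: Optional[int]) -> None:
    """Store when a channel should be unsilenced; -1 means it stays silenced until unsilenced manually."""
    if duration_minutes is None:
        unsilence_timestamps[channel_id] = -1
    else:
        unsilence_time = datetime.now(tz=timezone.utc) + timedelta(minutes=duration_minutes)
        unsilence_timestamps[channel_id] = unsilence_time.timestamp()

def reschedule_delay(channel_id: int) -> Optional[float]:
    """Return seconds until the channel should be unsilenced, or None if the silence is indefinite."""
    timestamp = unsilence_timestamps[channel_id]
    if timestamp == -1:
        return None
    dt = datetime.fromtimestamp(timestamp, tz=timezone.utc)
    return (dt - datetime.now(tz=timezone.utc)).total_seconds()

record_silence(channel_id=12345, duration_minutes=10)
print(reschedule_delay(12345))   # About 600 seconds; a value <= 0 would mean the silence is already overdue.
record_silence(channel_id=67890, duration_minutes=None)
print(reschedule_delay(67890))   # None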
+ """ + await self._init_task + log.debug(f"Unsilencing channel #{ctx.channel} from {ctx.author}'s command.") + await self._unsilence_wrapper(ctx.channel) + + @lock_arg(LOCK_NAMESPACE, "channel", raise_error=True) + async def _unsilence_wrapper(self, channel: TextChannel) -> None: + """Unsilence `channel` and send a success/failure message.""" + if not await self._unsilence(channel): + overwrite = channel.overwrites_for(self._everyone_role) + if overwrite.send_messages is False or overwrite.add_reactions is False: + await channel.send(MSG_UNSILENCE_MANUAL) + else: + await channel.send(MSG_UNSILENCE_FAIL) + else: + await channel.send(MSG_UNSILENCE_SUCCESS) + + async def _set_silence_overwrites(self, channel: TextChannel) -> bool: + """Set silence permission overwrites for `channel` and return True if successful.""" + overwrite = channel.overwrites_for(self._everyone_role) + prev_overwrites = dict(send_messages=overwrite.send_messages, add_reactions=overwrite.add_reactions) + + if channel.id in self.scheduler or all(val is False for val in prev_overwrites.values()): + return False + + overwrite.update(send_messages=False, add_reactions=False) + await channel.set_permissions(self._everyone_role, overwrite=overwrite) + await self.previous_overwrites.set(channel.id, json.dumps(prev_overwrites)) + + return True + + async def _schedule_unsilence(self, ctx: Context, duration: Optional[int]) -> None: + """Schedule `ctx.channel` to be unsilenced if `duration` is not None.""" + if duration is None: + await self.unsilence_timestamps.set(ctx.channel.id, -1) + else: + self.scheduler.schedule_later(duration * 60, ctx.channel.id, ctx.invoke(self.unsilence)) + unsilence_time = datetime.now(tz=timezone.utc) + timedelta(minutes=duration) + await self.unsilence_timestamps.set(ctx.channel.id, unsilence_time.timestamp()) + + async def _unsilence(self, channel: TextChannel) -> bool: + """ + Unsilence `channel`. + + If `channel` has a silence task scheduled or has its previous overwrites cached, unsilence + it, cancel the task, and remove it from the notifier. Notify admins if it has a task but + not cached overwrites. + + Return `True` if channel permissions were changed, `False` otherwise. + """ + prev_overwrites = await self.previous_overwrites.get(channel.id) + if channel.id not in self.scheduler and prev_overwrites is None: + log.info(f"Tried to unsilence channel #{channel} ({channel.id}) but the channel was not silenced.") + return False + + overwrite = channel.overwrites_for(self._everyone_role) + if prev_overwrites is None: + log.info(f"Missing previous overwrites for #{channel} ({channel.id}); defaulting to None.") + overwrite.update(send_messages=None, add_reactions=None) + else: + overwrite.update(**json.loads(prev_overwrites)) + + await channel.set_permissions(self._everyone_role, overwrite=overwrite) + log.info(f"Unsilenced channel #{channel} ({channel.id}).") + + self.scheduler.cancel(channel.id) + self.notifier.remove_channel(channel) + await self.previous_overwrites.delete(channel.id) + await self.unsilence_timestamps.delete(channel.id) + + if prev_overwrites is None: + await self._mod_alerts_channel.send( + f"<@&{Roles.admins}> Restored overwrites with default values after unsilencing " + f"{channel.mention}. Please check that the `Send Messages` and `Add Reactions` " + f"overwrites for {self._everyone_role.mention} are at their desired values." 
+ ) + + return True + + async def _reschedule(self) -> None: + """Reschedule unsilencing of active silences and add permanent ones to the notifier.""" + for channel_id, timestamp in await self.unsilence_timestamps.items(): + channel = self.bot.get_channel(channel_id) + if channel is None: + log.info(f"Can't reschedule silence for {channel_id}: channel not found.") + continue + + if timestamp == -1: + log.info(f"Adding permanent silence for #{channel} ({channel.id}) to the notifier.") + self.notifier.add_channel(channel) + continue + + dt = datetime.fromtimestamp(timestamp, tz=timezone.utc) + delta = (dt - datetime.now(tz=timezone.utc)).total_seconds() + if delta <= 0: + # Suppress the error since it's not being invoked by a user via the command. + with suppress(LockedResourceError): + await self._unsilence_wrapper(channel) + else: + log.info(f"Rescheduling silence for #{channel} ({channel.id}).") + self.scheduler.schedule_later(delta, channel_id, self._unsilence_wrapper(channel)) + + def cog_unload(self) -> None: + """Cancel the init task and scheduled tasks.""" + # It's important to wait for _init_task (specifically for _reschedule) to be cancelled + # before cancelling scheduled tasks. Otherwise, it's possible for _reschedule to schedule + # more tasks after cancel_all has finished, despite _init_task.cancel being called first. + # This is cause cancel() on its own doesn't block until the task is cancelled. + self._init_task.cancel() + self._init_task.add_done_callback(lambda _: self.scheduler.cancel_all()) + + # This cannot be static (must have a __func__ attribute). + async def cog_check(self, ctx: Context) -> bool: + """Only allow moderators to invoke the commands in this cog.""" + return await commands.has_any_role(*MODERATION_ROLES).predicate(ctx) + + +def setup(bot: Bot) -> None: + """Load the Silence cog.""" + bot.add_cog(Silence(bot)) diff --git a/bot/exts/moderation/slowmode.py b/bot/exts/moderation/slowmode.py new file mode 100644 index 000000000..efd862aa5 --- /dev/null +++ b/bot/exts/moderation/slowmode.py @@ -0,0 +1,96 @@ +import logging +from datetime import datetime +from typing import Optional + +from dateutil.relativedelta import relativedelta +from discord import TextChannel +from discord.ext.commands import Cog, Context, group, has_any_role + +from bot.bot import Bot +from bot.constants import Emojis, MODERATION_ROLES +from bot.converters import DurationDelta +from bot.utils import time + +log = logging.getLogger(__name__) + +SLOWMODE_MAX_DELAY = 21600 # seconds + + +class Slowmode(Cog): + """Commands for getting and setting slowmode delays of text channels.""" + + def __init__(self, bot: Bot) -> None: + self.bot = bot + + @group(name='slowmode', aliases=['sm'], invoke_without_command=True) + async def slowmode_group(self, ctx: Context) -> None: + """Get or set the slowmode delay for the text channel this was invoked in or a given text channel.""" + await ctx.send_help(ctx.command) + + @slowmode_group.command(name='get', aliases=['g']) + async def get_slowmode(self, ctx: Context, channel: Optional[TextChannel]) -> None: + """Get the slowmode delay for a text channel.""" + # Use the channel this command was invoked in if one was not given + if channel is None: + channel = ctx.channel + + delay = relativedelta(seconds=channel.slowmode_delay) + humanized_delay = time.humanize_delta(delay) + + await ctx.send(f'The slowmode delay for {channel.mention} is {humanized_delay}.') + + @slowmode_group.command(name='set', aliases=['s']) + async def set_slowmode(self, ctx: Context, 
channel: Optional[TextChannel], delay: DurationDelta) -> None: + """Set the slowmode delay for a text channel.""" + # Use the channel this command was invoked in if one was not given + if channel is None: + channel = ctx.channel + + # Convert `dateutil.relativedelta.relativedelta` to `datetime.timedelta` + # Must do this to get the delta in a particular unit of time + utcnow = datetime.utcnow() + slowmode_delay = (utcnow + delay - utcnow).total_seconds() + + humanized_delay = time.humanize_delta(delay) + + # Ensure the delay is within discord's limits + if slowmode_delay <= SLOWMODE_MAX_DELAY: + log.info(f'{ctx.author} set the slowmode delay for #{channel} to {humanized_delay}.') + + await channel.edit(slowmode_delay=slowmode_delay) + await ctx.send( + f'{Emojis.check_mark} The slowmode delay for {channel.mention} is now {humanized_delay}.' + ) + + else: + log.info( + f'{ctx.author} tried to set the slowmode delay of #{channel} to {humanized_delay}, ' + 'which is not between 0 and 6 hours.' + ) + + await ctx.send( + f'{Emojis.cross_mark} The slowmode delay must be between 0 and 6 hours.' + ) + + @slowmode_group.command(name='reset', aliases=['r']) + async def reset_slowmode(self, ctx: Context, channel: Optional[TextChannel]) -> None: + """Reset the slowmode delay for a text channel to 0 seconds.""" + # Use the channel this command was invoked in if one was not given + if channel is None: + channel = ctx.channel + + log.info(f'{ctx.author} reset the slowmode delay for #{channel} to 0 seconds.') + + await channel.edit(slowmode_delay=0) + await ctx.send( + f'{Emojis.check_mark} The slowmode delay for {channel.mention} has been reset to 0 seconds.' + ) + + async def cog_check(self, ctx: Context) -> bool: + """Only allow moderators to invoke the commands in this cog.""" + return await has_any_role(*MODERATION_ROLES).predicate(ctx) + + +def setup(bot: Bot) -> None: + """Load the Slowmode cog.""" + bot.add_cog(Slowmode(bot)) diff --git a/bot/exts/moderation/verification.py b/bot/exts/moderation/verification.py new file mode 100644 index 000000000..ce91dcb15 --- /dev/null +++ b/bot/exts/moderation/verification.py @@ -0,0 +1,858 @@ +import asyncio +import logging +import typing as t +from contextlib import suppress +from datetime import datetime, timedelta + +import discord +from async_rediscache import RedisCache +from discord.ext import tasks +from discord.ext.commands import Cog, Context, command, group, has_any_role +from discord.utils import snowflake_time + +from bot import constants +from bot.api import ResponseCodeError +from bot.bot import Bot +from bot.decorators import has_no_roles, in_whitelist +from bot.exts.moderation.modlog import ModLog +from bot.utils.checks import InWhitelistCheckFailure, has_no_roles_check +from bot.utils.messages import format_user + +log = logging.getLogger(__name__) + +# Sent via DMs once user joins the guild +ON_JOIN_MESSAGE = f""" +Welcome to Python Discord! + +To show you what kind of community we are, we've created this video: +https://youtu.be/ZH26PuX3re0 + +As a new user, you have read-only access to a few select channels to give you a taste of what our server is like. \ +In order to see the rest of the channels and to send messages, you first have to accept our rules. + +Please visit <#{constants.Channels.verification}> to get started. Thank you! +""" + +# Sent via DMs once user verifies +VERIFIED_MESSAGE = f""" +Thanks for verifying yourself! 
+ +For your records, these are the documents you accepted: + +`1)` Our rules, here: <https://pythondiscord.com/pages/rules> +`2)` Our privacy policy, here: <https://pythondiscord.com/pages/privacy> - you can find information on how to have \ +your information removed here as well. + +Feel free to review them at any point! + +Additionally, if you'd like to receive notifications for the announcements \ +we post in <#{constants.Channels.announcements}> +from time to time, you can send `!subscribe` to <#{constants.Channels.bot_commands}> at any time \ +to assign yourself the **Announcements** role. We'll mention this role every time we make an announcement. + +If you'd like to unsubscribe from the announcement notifications, simply send `!unsubscribe` to \ +<#{constants.Channels.bot_commands}>. +""" + +ALTERNATE_VERIFIED_MESSAGE = f""" +You are now verified! + +You can find a copy of our rules for reference at <https://pythondiscord.com/pages/rules>. + +Additionally, if you'd like to receive notifications for the announcements \ +we post in <#{constants.Channels.announcements}> +from time to time, you can send `!subscribe` to <#{constants.Channels.bot_commands}> at any time \ +to assign yourself the **Announcements** role. We'll mention this role every time we make an announcement. + +If you'd like to unsubscribe from the announcement notifications, simply send `!unsubscribe` to \ +<#{constants.Channels.bot_commands}>. + +To introduce you to our community, we've made the following video: +https://youtu.be/ZH26PuX3re0 +""" + +# Sent via DMs to users kicked for failing to verify +KICKED_MESSAGE = f""" +Hi! You have been automatically kicked from Python Discord as you have failed to accept our rules \ +within `{constants.Verification.kicked_after}` days. If this was an accident, please feel free to join us again! + +{constants.Guild.invite} +""" + +# Sent periodically in the verification channel +REMINDER_MESSAGE = f""" +<@&{constants.Roles.unverified}> + +Welcome to Python Discord! Please read the documents mentioned above and type `!accept` to gain permissions \ +to send messages in the community! + +You will be kicked if you don't verify within `{constants.Verification.kicked_after}` days. +""".strip() + +# An async function taking a Member param +Request = t.Callable[[discord.Member], t.Awaitable] + + +class StopExecution(Exception): + """Signals that a task should halt immediately & alert admins.""" + + def __init__(self, reason: discord.HTTPException) -> None: + super().__init__() + self.reason = reason + + +class Limit(t.NamedTuple): + """Composition over config for throttling requests.""" + + batch_size: int # Amount of requests after which to pause + sleep_secs: int # Sleep this many seconds after each batch + + +def mention_role(role_id: int) -> discord.AllowedMentions: + """Construct an allowed mentions instance that allows pinging `role_id`.""" + return discord.AllowedMentions(roles=[discord.Object(role_id)]) + + +def is_verified(member: discord.Member) -> bool: + """ + Check whether `member` is considered verified. + + Members are considered verified if they have at least 1 role other than + the default role (@everyone) and the @Unverified role. + """ + unverified_roles = { + member.guild.get_role(constants.Roles.unverified), + member.guild.default_role, + } + return len(set(member.roles) - unverified_roles) > 0 + + +async def safe_dm(coro: t.Coroutine) -> None: + """ + Execute `coro` ignoring disabled DM warnings. 
+
+    The 50_007 error code indicates that the target user does not accept DMs.
+    As it turns out, this error code can appear on both 400 and 403 statuses,
+    so we catch any Discord exception.
+
+    If the request fails on any other error code, the exception propagates,
+    and must be handled by the caller.
+    """
+    try:
+        await coro
+    except discord.HTTPException as discord_exc:
+        log.trace(f"DM dispatch failed on status {discord_exc.status} with code: {discord_exc.code}")
+        if discord_exc.code != 50_007:  # If any reason other than disabled DMs
+            raise
+
+
+class Verification(Cog):
+    """
+    User verification and role management.
+
+    There are two internal tasks in this cog:
+
+    * `update_unverified_members`
+        * Unverified members are given the @Unverified role after configured `unverified_after` days
+        * Unverified members are kicked after configured `kicked_after` days
+    * `ping_unverified`
+        * Periodically ping the @Unverified role in the verification channel
+
+    Statistics are collected in the 'verification.' namespace.
+
+    Moderators+ can use the `verification` command group to start or stop both internal
+    tasks, if necessary. Settings are persisted in Redis across sessions.
+
+    Additionally, this cog offers the !accept, !subscribe and !unsubscribe commands,
+    and keeps the verification channel clean by deleting messages.
+    """
+
+    # Persist task settings & last sent `REMINDER_MESSAGE` id
+    # RedisCache[
+    #     "tasks_running": int (0 or 1),
+    #     "last_reminder": int (discord.Message.id),
+    # ]
+    task_cache = RedisCache()
+
+    def __init__(self, bot: Bot) -> None:
+        """Start internal tasks."""
+        self.bot = bot
+        self.bot.loop.create_task(self._maybe_start_tasks())
+
+        self.pending_members = set()
+
+    def cog_unload(self) -> None:
+        """
+        Cancel internal tasks.
+
+        This is necessary, as tasks are not automatically cancelled on cog unload.
+        """
+        self._stop_tasks(gracefully=False)
+
+    @property
+    def mod_log(self) -> ModLog:
+        """Get currently loaded ModLog cog instance."""
+        return self.bot.get_cog("ModLog")
+
+    async def _maybe_start_tasks(self) -> None:
+        """
+        Poll Redis to check whether internal tasks should start.
+
+        Redis must be interfaced with from an async function.
+        """
+        log.trace("Checking whether background tasks should begin")
+        setting: t.Optional[int] = await self.task_cache.get("tasks_running")  # This can be None if never set
+
+        if setting:
+            log.trace("Background tasks will be started")
+            self.update_unverified_members.start()
+            self.ping_unverified.start()
+
+    def _stop_tasks(self, *, gracefully: bool) -> None:
+        """
+        Stop the update users & ping @Unverified tasks.
+
+        If `gracefully` is True, the tasks will be able to finish their current iteration.
+        Otherwise, they are cancelled immediately.
+        """
+        log.info(f"Stopping internal tasks ({gracefully=})")
+        if gracefully:
+            self.update_unverified_members.stop()
+            self.ping_unverified.stop()
+        else:
+            self.update_unverified_members.cancel()
+            self.ping_unverified.cancel()
+
+    # region: automatically update unverified users
+
+    async def _verify_kick(self, n_members: int) -> bool:
+        """
+        Determine whether `n_members` is a reasonable number of members to kick.
+
+        First, `n_members` is checked against the size of the PyDis guild. If `n_members` is
+        more than the configured `kick_confirmation_threshold` fraction of the guild's population,
+        the operation must be confirmed by staff in #core-dev. Otherwise, the operation is considered safe.
+ """ + log.debug(f"Checking whether {n_members} members are safe to kick") + + await self.bot.wait_until_guild_available() # Ensure cache is populated before we grab the guild + pydis = self.bot.get_guild(constants.Guild.id) + + percentage = n_members / len(pydis.members) + if percentage < constants.Verification.kick_confirmation_threshold: + log.debug(f"Kicking {percentage:.2%} of the guild's population is seen as safe") + return True + + # Since `n_members` is a suspiciously large number, we will ask for confirmation + log.debug("Amount of users is too large, requesting staff confirmation") + + core_dev_channel = pydis.get_channel(constants.Channels.dev_core) + core_dev_ping = f"<@&{constants.Roles.core_developers}>" + + confirmation_msg = await core_dev_channel.send( + f"{core_dev_ping} Verification determined that `{n_members}` members should be kicked as they haven't " + f"verified in `{constants.Verification.kicked_after}` days. This is `{percentage:.2%}` of the guild's " + f"population. Proceed?", + allowed_mentions=mention_role(constants.Roles.core_developers), + ) + + options = (constants.Emojis.incident_actioned, constants.Emojis.incident_unactioned) + for option in options: + await confirmation_msg.add_reaction(option) + + core_dev_ids = [member.id for member in pydis.get_role(constants.Roles.core_developers).members] + + def check(reaction: discord.Reaction, user: discord.User) -> bool: + """Check whether `reaction` is a valid reaction to `confirmation_msg`.""" + return ( + reaction.message.id == confirmation_msg.id # Reacted to `confirmation_msg` + and str(reaction.emoji) in options # With one of `options` + and user.id in core_dev_ids # By a core developer + ) + + timeout = 60 * 5 # Seconds, i.e. 5 minutes + try: + choice, _ = await self.bot.wait_for("reaction_add", check=check, timeout=timeout) + except asyncio.TimeoutError: + log.debug("Staff prompt not answered, aborting operation") + return False + finally: + with suppress(discord.HTTPException): + await confirmation_msg.clear_reactions() + + result = str(choice) == constants.Emojis.incident_actioned + log.debug(f"Received answer: {choice}, result: {result}") + + # Edit the prompt message to reflect the final choice + if result is True: + result_msg = f":ok_hand: {core_dev_ping} Request to kick `{n_members}` members was authorized!" + else: + result_msg = f":warning: {core_dev_ping} Request to kick `{n_members}` members was denied!" + + with suppress(discord.HTTPException): + await confirmation_msg.edit(content=result_msg) + + return result + + async def _alert_admins(self, exception: discord.HTTPException) -> None: + """ + Ping @Admins with information about `exception`. + + This is used when a critical `exception` caused a verification task to abort. + """ + await self.bot.wait_until_guild_available() + log.info(f"Sending admin alert regarding exception: {exception}") + + admins_channel = self.bot.get_guild(constants.Guild.id).get_channel(constants.Channels.admins) + ping = f"<@&{constants.Roles.admins}>" + + await admins_channel.send( + f"{ping} Aborted updating unverified users due to the following exception:\n" + f"```{exception}```\n" + f"Internal tasks will be stopped.", + allowed_mentions=mention_role(constants.Roles.admins), + ) + + async def _send_requests(self, members: t.Collection[discord.Member], request: Request, limit: Limit) -> int: + """ + Pass `members` one by one to `request` handling Discord exceptions. 
+ + This coroutine serves as a generic `request` executor for kicking members and adding + roles, as it allows us to define the error handling logic in one place only. + + Any `request` has the ability to completely abort the execution by raising `StopExecution`. + In such a case, the @Admins will be alerted of the reason attribute. + + To avoid rate-limits, pass a `limit` configuring the batch size and the amount of seconds + to sleep between batches. + + Returns the amount of successful requests. Failed requests are logged at info level. + """ + log.trace(f"Sending {len(members)} requests") + n_success, bad_statuses = 0, set() + + for progress, member in enumerate(members, start=1): + if is_verified(member): # Member could have verified in the meantime + continue + try: + await request(member) + except StopExecution as stop_execution: + await self._alert_admins(stop_execution.reason) + await self.task_cache.set("tasks_running", 0) + self._stop_tasks(gracefully=True) # Gracefully finish current iteration, then stop + break + except discord.HTTPException as http_exc: + bad_statuses.add(http_exc.status) + else: + n_success += 1 + + if progress % limit.batch_size == 0: + log.trace(f"Processed {progress} requests, pausing for {limit.sleep_secs} seconds") + await asyncio.sleep(limit.sleep_secs) + + if bad_statuses: + log.info(f"Failed to send {len(members) - n_success} requests due to following statuses: {bad_statuses}") + + return n_success + + async def _add_kick_note(self, member: discord.Member) -> None: + """ + Post a note regarding `member` being kicked to site. + + Allows keeping track of kicked members for auditing purposes. + """ + payload = { + "active": False, + "actor": self.bot.user.id, # Bot actions this autonomously + "expires_at": None, + "hidden": True, + "reason": "Verification kick", + "type": "note", + "user": member.id, + } + + log.trace(f"Posting kick note for member {member} ({member.id})") + try: + await self.bot.api_client.post("bot/infractions", json=payload) + except ResponseCodeError as api_exc: + log.warning("Failed to post kick note", exc_info=api_exc) + + async def _kick_members(self, members: t.Collection[discord.Member]) -> int: + """ + Kick `members` from the PyDis guild. + + Due to strict ratelimits on sending messages (120 requests / 60 secs), we sleep for a second + after each 2 requests to allow breathing room for other features. + + Note that this is a potentially destructive operation. Returns the amount of successful requests. + """ + log.info(f"Kicking {len(members)} members (not verified after {constants.Verification.kicked_after} days)") + + async def kick_request(member: discord.Member) -> None: + """Send `KICKED_MESSAGE` to `member` and kick them from the guild.""" + try: + await safe_dm(member.send(KICKED_MESSAGE)) # Suppress disabled DMs + except discord.HTTPException as suspicious_exception: + raise StopExecution(reason=suspicious_exception) + await member.kick(reason=f"User has not verified in {constants.Verification.kicked_after} days") + await self._add_kick_note(member) + + n_kicked = await self._send_requests(members, kick_request, Limit(batch_size=2, sleep_secs=1)) + self.bot.stats.incr("verification.kicked", count=n_kicked) + + return n_kicked + + async def _give_role(self, members: t.Collection[discord.Member], role: discord.Role) -> int: + """ + Give `role` to all `members`. + + We pause for a second after batches of 25 requests to ensure ratelimits aren't exceeded. + + Returns the amount of successful requests. 
+ """ + log.info( + f"Assigning {role} role to {len(members)} members (not verified " + f"after {constants.Verification.unverified_after} days)" + ) + + async def role_request(member: discord.Member) -> None: + """Add `role` to `member`.""" + await member.add_roles(role, reason=f"Not verified after {constants.Verification.unverified_after} days") + + return await self._send_requests(members, role_request, Limit(batch_size=25, sleep_secs=1)) + + async def _check_members(self) -> t.Tuple[t.Set[discord.Member], t.Set[discord.Member]]: + """ + Check in on the verification status of PyDis members. + + This coroutine finds two sets of users: + * Not verified after configured `unverified_after` days, should be given the @Unverified role + * Not verified after configured `kicked_after` days, should be kicked from the guild + + These sets are always disjoint, i.e. share no common members. + """ + await self.bot.wait_until_guild_available() # Ensure cache is ready + pydis = self.bot.get_guild(constants.Guild.id) + + unverified = pydis.get_role(constants.Roles.unverified) + current_dt = datetime.utcnow() # Discord timestamps are UTC + + # Users to be given the @Unverified role, and those to be kicked, these should be entirely disjoint + for_role, for_kick = set(), set() + + log.debug("Checking verification status of guild members") + for member in pydis.members: + + # Skip verified members, bots, and members for which we do not know their join date, + # this should be extremely rare but docs mention that it can happen + if is_verified(member) or member.bot or member.joined_at is None: + continue + + # At this point, we know that `member` is an unverified user, and we will decide what + # to do with them based on time passed since their join date + since_join = current_dt - member.joined_at + + if since_join > timedelta(days=constants.Verification.kicked_after): + for_kick.add(member) # User should be removed from the guild + + elif ( + since_join > timedelta(days=constants.Verification.unverified_after) + and unverified not in member.roles + ): + for_role.add(member) # User should be given the @Unverified role + + log.debug(f"Found {len(for_role)} users for {unverified} role, {len(for_kick)} users to be kicked") + return for_role, for_kick + + @tasks.loop(minutes=30) + async def update_unverified_members(self) -> None: + """ + Periodically call `_check_members` and update unverified members accordingly. + + After each run, a summary will be sent to the modlog channel. If a suspiciously high + amount of members to be kicked is found, the operation is guarded by `_verify_kick`. + """ + log.info("Updating unverified guild members") + + await self.bot.wait_until_guild_available() + unverified = self.bot.get_guild(constants.Guild.id).get_role(constants.Roles.unverified) + + for_role, for_kick = await self._check_members() + + if not for_role: + role_report = f"Found no users to be assigned the {unverified.mention} role." + else: + n_roles = await self._give_role(for_role, unverified) + role_report = f"Assigned {unverified.mention} role to `{n_roles}`/`{len(for_role)}` members." + + if not for_kick: + kick_report = "Found no users to be kicked." + elif not await self._verify_kick(len(for_kick)): + kick_report = f"Not authorized to kick `{len(for_kick)}` members." + else: + n_kicks = await self._kick_members(for_kick) + kick_report = f"Kicked `{n_kicks}`/`{len(for_kick)}` members from the guild." 
+
+        await self.mod_log.send_log_message(
+            icon_url=self.bot.user.avatar_url,
+            colour=discord.Colour.blurple(),
+            title="Verification system",
+            text=f"{kick_report}\n{role_report}",
+        )
+
+    # endregion
+    # region: periodically ping @Unverified
+
+    @tasks.loop(hours=constants.Verification.reminder_frequency)
+    async def ping_unverified(self) -> None:
+        """
+        Delete latest `REMINDER_MESSAGE` and send it again.
+
+        This utilizes RedisCache to persist the latest reminder message id.
+        """
+        await self.bot.wait_until_guild_available()
+        verification = self.bot.get_guild(constants.Guild.id).get_channel(constants.Channels.verification)
+
+        last_reminder: t.Optional[int] = await self.task_cache.get("last_reminder")
+
+        if last_reminder is not None:
+            log.trace(f"Found verification reminder message in cache, deleting: {last_reminder}")
+
+            with suppress(discord.HTTPException):  # If something goes wrong, just ignore it
+                await self.bot.http.delete_message(verification.id, last_reminder)
+
+        log.trace("Sending verification reminder")
+        new_reminder = await verification.send(
+            REMINDER_MESSAGE, allowed_mentions=mention_role(constants.Roles.unverified),
+        )
+
+        await self.task_cache.set("last_reminder", new_reminder.id)
+
+    @ping_unverified.before_loop
+    async def _before_first_ping(self) -> None:
+        """
+        Sleep until `REMINDER_MESSAGE` should be sent again.
+
+        If the latest reminder is not cached, exit instantly. Otherwise, wait until the
+        configured `reminder_frequency` has passed.
+        """
+        last_reminder: t.Optional[int] = await self.task_cache.get("last_reminder")
+
+        if last_reminder is None:
+            log.trace("Latest verification reminder message not cached, task will not wait")
+            return
+
+        # Convert cached message id into a timestamp
+        time_since = datetime.utcnow() - snowflake_time(last_reminder)
+        log.trace(f"Time since latest verification reminder: {time_since}")
+
+        to_sleep = timedelta(hours=constants.Verification.reminder_frequency) - time_since
+        log.trace(f"Time to sleep until next ping: {to_sleep}")
+
+        # Delta can be negative if `reminder_frequency` has already passed
+        secs = max(to_sleep.total_seconds(), 0)
+        await asyncio.sleep(secs)
+
+    # endregion
+    # region: listeners
+
+    @Cog.listener()
+    async def on_member_join(self, member: discord.Member) -> None:
+        """Attempt to send initial direct message to each new member."""
+        if member.guild.id != constants.Guild.id:
+            return  # Only listen for PyDis events
+
+        raw_member = await self.bot.http.get_member(member.guild.id, member.id)
+
+        # If the user has the pending flag set, they will be using the alternate
+        # gate and will not need a welcome DM with verification instructions.
+        # We will send them an alternate DM once they verify with the welcome
+        # video when they pass the gate.
+        if raw_member.get("pending"):
+            return
+
+        log.trace(f"Sending on join message to new member: {member.id}")
+        try:
+            await safe_dm(member.send(ON_JOIN_MESSAGE))
+        except discord.HTTPException:
+            log.exception("DM dispatch failed on unexpected error code")
+
+    @Cog.listener()
+    async def on_member_update(self, before: discord.Member, after: discord.Member) -> None:
+        """Check if we need to send a verification DM to a gated user."""
+        if before.pending is True and after.pending is False:
+            try:
+                # If the member has not received a DM from our !accept command
+                # and has gone through the alternate gating system, we should send
+                # our alternate welcome DM which includes info such as our welcome
+                # video.
+ await safe_dm(after.send(ALTERNATE_VERIFIED_MESSAGE)) + except discord.HTTPException: + log.exception("DM dispatch failed on unexpected error code") + + @Cog.listener() + async def on_message(self, message: discord.Message) -> None: + """Check new message event for messages to the checkpoint channel & process.""" + if message.channel.id != constants.Channels.verification: + return # Only listen for #checkpoint messages + + if message.content == REMINDER_MESSAGE: + return # Ignore bots own verification reminder + + if message.author.bot: + # They're a bot, delete their message after the delay. + await message.delete(delay=constants.Verification.bot_message_delete_delay) + return + + # if a user mentions a role or guild member + # alert the mods in mod-alerts channel + if message.mentions or message.role_mentions: + log.debug( + f"{message.author} mentioned one or more users " + f"and/or roles in {message.channel.name}" + ) + + embed_text = ( + f"{format_user(message.author)} sent a message in " + f"{message.channel.mention} that contained user and/or role mentions." + f"\n\n**Original message:**\n>>> {message.content}" + ) + + # Send pretty mod log embed to mod-alerts + await self.mod_log.send_log_message( + icon_url=constants.Icons.filtering, + colour=discord.Colour(constants.Colours.soft_red), + title=f"User/Role mentioned in {message.channel.name}", + text=embed_text, + thumbnail=message.author.avatar_url_as(static_format="png"), + channel_id=constants.Channels.mod_alerts, + ) + + ctx: Context = await self.bot.get_context(message) + if ctx.command is not None and ctx.command.name == "accept": + return + + if any(r.id == constants.Roles.verified for r in ctx.author.roles): + log.info( + f"{ctx.author} posted '{ctx.message.content}' " + "in the verification channel, but is already verified." + ) + return + + log.debug( + f"{ctx.author} posted '{ctx.message.content}' in the verification " + "channel. We are providing instructions how to verify." + ) + await ctx.send( + f"{ctx.author.mention} Please type `!accept` to verify that you accept our rules, " + f"and gain access to the rest of the server.", + delete_after=20 + ) + + log.trace(f"Deleting the message posted by {ctx.author}") + with suppress(discord.NotFound): + await ctx.message.delete() + + # endregion + # region: task management commands + + @has_any_role(*constants.MODERATION_ROLES) + @group(name="verification") + async def verification_group(self, ctx: Context) -> None: + """Manage internal verification tasks.""" + if ctx.invoked_subcommand is None: + await ctx.send_help(ctx.command) + + @verification_group.command(name="status") + async def status_cmd(self, ctx: Context) -> None: + """Check whether verification tasks are running.""" + log.trace("Checking status of verification tasks") + + if self.update_unverified_members.is_running(): + update_status = f"{constants.Emojis.incident_actioned} Member update task is running." + else: + update_status = f"{constants.Emojis.incident_unactioned} Member update task is **not** running." + + mention = f"<@&{constants.Roles.unverified}>" + if self.ping_unverified.is_running(): + ping_status = f"{constants.Emojis.incident_actioned} Ping {mention} task is running." + else: + ping_status = f"{constants.Emojis.incident_unactioned} Ping {mention} task is **not** running." 
+ + embed = discord.Embed( + title="Verification system", + description=f"{update_status}\n{ping_status}", + colour=discord.Colour.blurple(), + ) + await ctx.send(embed=embed) + + @verification_group.command(name="start") + async def start_cmd(self, ctx: Context) -> None: + """Start verification tasks if they are not already running.""" + log.info("Starting verification tasks") + + if not self.update_unverified_members.is_running(): + self.update_unverified_members.start() + + if not self.ping_unverified.is_running(): + self.ping_unverified.start() + + await self.task_cache.set("tasks_running", 1) + + colour = discord.Colour.blurple() + await ctx.send(embed=discord.Embed(title="Verification system", description="Done. :ok_hand:", colour=colour)) + + @verification_group.command(name="stop", aliases=["kill"]) + async def stop_cmd(self, ctx: Context) -> None: + """Stop verification tasks.""" + log.info("Stopping verification tasks") + + self._stop_tasks(gracefully=False) + await self.task_cache.set("tasks_running", 0) + + colour = discord.Colour.blurple() + await ctx.send(embed=discord.Embed(title="Verification system", description="Tasks canceled.", colour=colour)) + + # endregion + # region: accept and subscribe commands + + def _bump_verified_stats(self, verified_member: discord.Member) -> None: + """ + Increment verification stats for `verified_member`. + + Each member falls into one of the three categories: + * Verified within 24 hours after joining + * Does not have @Unverified role yet + * Does have @Unverified role + + Stats for member kicking are handled separately. + """ + if verified_member.joined_at is None: # Docs mention this can happen + return + + if (datetime.utcnow() - verified_member.joined_at) < timedelta(hours=24): + category = "accepted_on_day_one" + elif constants.Roles.unverified not in [role.id for role in verified_member.roles]: + category = "accepted_before_unverified" + else: + category = "accepted_after_unverified" + + log.trace(f"Bumping verification stats in category: {category}") + self.bot.stats.incr(f"verification.{category}") + + @command(name='accept', aliases=('verified', 'accepted'), hidden=True) + @has_no_roles(constants.Roles.verified) + @in_whitelist(channels=(constants.Channels.verification,)) + async def accept_command(self, ctx: Context, *_) -> None: # We don't actually care about the args + """Accept our rules and gain access to the rest of the server.""" + log.debug(f"{ctx.author} called !accept. 
Assigning the 'Developer' role.") + await ctx.author.add_roles(discord.Object(constants.Roles.verified), reason="Accepted the rules") + + self._bump_verified_stats(ctx.author) # This checks for @Unverified so make sure it's not yet removed + + if constants.Roles.unverified in [role.id for role in ctx.author.roles]: + log.debug(f"Removing Unverified role from: {ctx.author}") + await ctx.author.remove_roles(discord.Object(constants.Roles.unverified)) + + try: + await safe_dm(ctx.author.send(VERIFIED_MESSAGE)) + except discord.HTTPException: + log.exception(f"Sending welcome message failed for {ctx.author}.") + finally: + log.trace(f"Deleting accept message by {ctx.author}.") + with suppress(discord.NotFound): + self.mod_log.ignore(constants.Event.message_delete, ctx.message.id) + await ctx.message.delete() + + @command(name='subscribe') + @in_whitelist(channels=(constants.Channels.bot_commands,)) + async def subscribe_command(self, ctx: Context, *_) -> None: # We don't actually care about the args + """Subscribe to announcement notifications by assigning yourself the role.""" + has_role = False + + for role in ctx.author.roles: + if role.id == constants.Roles.announcements: + has_role = True + break + + if has_role: + await ctx.send(f"{ctx.author.mention} You're already subscribed!") + return + + log.debug(f"{ctx.author} called !subscribe. Assigning the 'Announcements' role.") + await ctx.author.add_roles(discord.Object(constants.Roles.announcements), reason="Subscribed to announcements") + + log.trace(f"Deleting the message posted by {ctx.author}.") + + await ctx.send( + f"{ctx.author.mention} Subscribed to <#{constants.Channels.announcements}> notifications.", + ) + + @command(name='unsubscribe') + @in_whitelist(channels=(constants.Channels.bot_commands,)) + async def unsubscribe_command(self, ctx: Context, *_) -> None: # We don't actually care about the args + """Unsubscribe from announcement notifications by removing the role from yourself.""" + has_role = False + + for role in ctx.author.roles: + if role.id == constants.Roles.announcements: + has_role = True + break + + if not has_role: + await ctx.send(f"{ctx.author.mention} You're already unsubscribed!") + return + + log.debug(f"{ctx.author} called !unsubscribe. Removing the 'Announcements' role.") + await ctx.author.remove_roles( + discord.Object(constants.Roles.announcements), reason="Unsubscribed from announcements" + ) + + log.trace(f"Deleting the message posted by {ctx.author}.") + + await ctx.send( + f"{ctx.author.mention} Unsubscribed from <#{constants.Channels.announcements}> notifications." + ) + + # endregion + # region: miscellaneous + + # This cannot be static (must have a __func__ attribute). 
+ async def cog_command_error(self, ctx: Context, error: Exception) -> None: + """Check for & ignore any InWhitelistCheckFailure.""" + if isinstance(error, InWhitelistCheckFailure): + error.handled = True + + @staticmethod + async def bot_check(ctx: Context) -> bool: + """Block any command within the verification channel that is not !accept.""" + is_verification = ctx.channel.id == constants.Channels.verification + if is_verification and await has_no_roles_check(ctx, *constants.MODERATION_ROLES): + return ctx.command.name == "accept" + else: + return True + + @command(name='verify') + @has_any_role(*constants.MODERATION_ROLES) + async def perform_manual_verification(self, ctx: Context, user: discord.Member) -> None: + """Command for moderators to verify any user.""" + log.trace(f'verify command called by {ctx.author} for {user.id}.') + + if not user.pending: + log.trace(f'{user.id} is already verified, aborting.') + await ctx.send(f'{constants.Emojis.cross_mark} {user.mention} is already verified.') + return + + # Adding a role automatically verifies the user, so we add and remove the Announcements role. + temporary_role = self.bot.get_guild(constants.Guild.id).get_role(constants.Roles.announcements) + await user.add_roles(temporary_role) + await user.remove_roles(temporary_role) + log.trace(f'{user.id} manually verified.') + await ctx.send(f'{constants.Emojis.check_mark} {user.mention} is now verified.') + + # endregion + + +def setup(bot: Bot) -> None: + """Load the Verification cog.""" + bot.add_cog(Verification(bot)) diff --git a/bot/exts/moderation/voice_gate.py b/bot/exts/moderation/voice_gate.py new file mode 100644 index 000000000..0cbce6a51 --- /dev/null +++ b/bot/exts/moderation/voice_gate.py @@ -0,0 +1,258 @@ +import asyncio +import logging +from contextlib import suppress +from datetime import datetime, timedelta + +import discord +from async_rediscache import RedisCache +from discord import Colour, Member, VoiceState +from discord.ext.commands import Cog, Context, command + +from bot.api import ResponseCodeError +from bot.bot import Bot +from bot.constants import Channels, Event, MODERATION_ROLES, Roles, VoiceGate as GateConf +from bot.decorators import has_no_roles, in_whitelist +from bot.exts.moderation.modlog import ModLog +from bot.utils.checks import InWhitelistCheckFailure + +log = logging.getLogger(__name__) + +# Flag written to the cog's RedisCache as a value when the Member's (key) notification +# was already removed ~ this signals both that no further notifications should be sent, +# and that the notification does not need to be removed. The implementation relies on +# this being falsey! +NO_MSG = 0 + +FAILED_MESSAGE = ( + """You are not currently eligible to use voice inside Python Discord for the following reasons:\n\n{reasons}""" +) + +MESSAGE_FIELD_MAP = { + "joined_at": f"have been on the server for less than {GateConf.minimum_days_member} days", + "voice_banned": "have an active voice ban infraction", + "total_messages": f"have sent less than {GateConf.minimum_messages} messages", + "activity_blocks": f"have been active for fewer than {GateConf.minimum_activity_blocks} ten-minute blocks", +} + +VOICE_PING = ( + "Wondering why you can't talk in the voice channels? " + "Use the `!voiceverify` command in here to verify. " + "If you don't yet qualify, you'll be told why!" 
+) + + +class VoiceGate(Cog): + """Voice channels verification management.""" + + # RedisCache[t.Union[discord.User.id, discord.Member.id], t.Union[discord.Message.id, int]] + # The cache's keys are the IDs of members who are verified or have joined a voice channel + # The cache's values are either the message ID of the ping message or 0 (NO_MSG) if no message is present + redis_cache = RedisCache() + + def __init__(self, bot: Bot) -> None: + self.bot = bot + + @property + def mod_log(self) -> ModLog: + """Get the currently loaded ModLog cog instance.""" + return self.bot.get_cog("ModLog") + + @redis_cache.atomic_transaction # Fully process each call until starting the next + async def _delete_ping(self, member_id: int) -> None: + """ + If `redis_cache` holds a message ID for `member_id`, delete the message. + + If the message was deleted, the value under the `member_id` key is then set to `NO_MSG`. + When `member_id` is not in the cache, or has a value of `NO_MSG` already, this function + does nothing. + """ + if message_id := await self.redis_cache.get(member_id): + log.trace(f"Removing voice gate reminder message for user: {member_id}") + with suppress(discord.NotFound): + await self.bot.http.delete_message(Channels.voice_gate, message_id) + await self.redis_cache.set(member_id, NO_MSG) + else: + log.trace(f"Voice gate reminder message for user {member_id} was already removed") + + @redis_cache.atomic_transaction + async def _ping_newcomer(self, member: discord.Member) -> bool: + """ + See if `member` should be sent a voice verification notification, and send it if so. + + Returns False if the notification was not sent. This happens when: + * The `member` has already received the notification + * The `member` is already voice-verified + + Otherwise, the notification message ID is stored in `redis_cache` and True is returned. + """ + if await self.redis_cache.contains(member.id): + log.trace("User already in cache. Ignore.") + return False + + log.trace("User not in cache and is in a voice channel.") + verified = any(Roles.voice_verified == role.id for role in member.roles) + if verified: + log.trace("User is verified, add to the cache and ignore.") + await self.redis_cache.set(member.id, NO_MSG) + return False + + log.trace("User is unverified. Send ping.") + await self.bot.wait_until_guild_available() + voice_verification_channel = self.bot.get_channel(Channels.voice_gate) + + message = await voice_verification_channel.send(f"Hello, {member.mention}! {VOICE_PING}") + await self.redis_cache.set(member.id, message.id) + + return True + + @command(aliases=('voiceverify',)) + @has_no_roles(Roles.voice_verified) + @in_whitelist(channels=(Channels.voice_gate,), redirect=None) + async def voice_verify(self, ctx: Context, *_) -> None: + """ + Apply to be able to use voice within the Discord server. 
+
+        In order to use voice you must meet all of the following criteria:
+        - You must have over a certain number of messages within the Discord server
+        - You must have accepted our rules over a certain number of days ago
+        - You must not be actively banned from using our voice channels
+        - You must have been active for over a certain number of 10-minute blocks
+        """
+        await self._delete_ping(ctx.author.id)  # If user has received a ping in voice_verification, delete the message
+
+        try:
+            data = await self.bot.api_client.get(f"bot/users/{ctx.author.id}/metricity_data")
+        except ResponseCodeError as e:
+            if e.status == 404:
+                embed = discord.Embed(
+                    title="Not found",
+                    description=(
+                        "We were unable to find user data for you. "
+                        "Please try again shortly. "
+                        "If this problem persists, please contact the server staff through Modmail."
+                    ),
+                    color=Colour.red()
+                )
+                log.info(f"Unable to find Metricity data about {ctx.author} ({ctx.author.id})")
+            else:
+                embed = discord.Embed(
+                    title="Unexpected response",
+                    description=(
+                        "We encountered an error while attempting to find data for your user. "
+                        "Please try again and let us know if the problem persists."
+                    ),
+                    color=Colour.red()
+                )
+                log.warning(f"Got response code {e.status} while trying to get {ctx.author.id} Metricity data.")
+
+            await ctx.author.send(embed=embed)
+            return
+
+        checks = {
+            "joined_at": ctx.author.joined_at > datetime.utcnow() - timedelta(days=GateConf.minimum_days_member),
+            "total_messages": data["total_messages"] < GateConf.minimum_messages,
+            "voice_banned": data["voice_banned"],
+            "activity_blocks": data["activity_blocks"] < GateConf.minimum_activity_blocks
+        }
+        failed = any(checks.values())
+        failed_reasons = [MESSAGE_FIELD_MAP[key] for key, value in checks.items() if value is True]
+        [self.bot.stats.incr(f"voice_gate.failed.{key}") for key, value in checks.items() if value is True]
+
+        if failed:
+            embed = discord.Embed(
+                title="Voice Gate failed",
+                description=FAILED_MESSAGE.format(reasons="\n".join(f'• You {reason}.' for reason in failed_reasons)),
+                color=Colour.red()
+            )
+            try:
+                await ctx.author.send(embed=embed)
+                await ctx.send(f"{ctx.author}, please check your DMs.")
+            except discord.Forbidden:
+                await ctx.channel.send(ctx.author.mention, embed=embed)
+            return
+
+        self.mod_log.ignore(Event.member_update, ctx.author.id)
+        embed = discord.Embed(
+            title="Voice gate passed",
+            description="You have been granted permission to use voice channels in Python Discord.",
+            color=Colour.green()
+        )
+
+        if ctx.author.voice:
+            embed.description += "\n\nPlease reconnect to your voice channel to be granted your new permissions."
+
+        try:
+            await ctx.author.send(embed=embed)
+            await ctx.send(f"{ctx.author}, please check your DMs.")
+        except discord.Forbidden:
+            await ctx.channel.send(ctx.author.mention, embed=embed)
+
+        # wait a little bit so those who don't get DMs see the response in-channel before losing perms to see it.
+        await asyncio.sleep(3)
+        await ctx.author.add_roles(discord.Object(Roles.voice_verified), reason="Voice Gate passed")
+
+        self.bot.stats.incr("voice_gate.passed")
+
+    @Cog.listener()
+    async def on_message(self, message: discord.Message) -> None:
+        """Delete all non-staff messages from the voice gate channel that don't invoke the voice verify command."""
+        # Check if the channel is the voice gate
+        if message.channel.id != Channels.voice_gate:
+            return
+
+        ctx = await self.bot.get_context(message)
+        is_verify_command = ctx.command is not None and ctx.command.name == "voice_verify"
+
+        # When it's a bot-sent message, delete it after a delay
+        if message.author.bot:
+            # Compare the message with the voice ping constant
+            if message.content.endswith(VOICE_PING):
+                log.trace("Message is the voice verification ping. Ignore.")
+                return
+            with suppress(discord.NotFound):
+                await message.delete(delay=GateConf.bot_message_delete_delay)
+            return
+
+        # Then check if the member is a moderator+, since we don't want to delete their messages.
+        if any(role.id in MODERATION_ROLES for role in message.author.roles) and is_verify_command is False:
+            log.trace(f"Excluding moderator message {message.id} from deletion in #{message.channel}.")
+            return
+
+        # Ignore deleted voice verification messages
+        if ctx.command is not None and ctx.command.name == "voice_verify":
+            self.mod_log.ignore(Event.message_delete, message.id)
+
+        with suppress(discord.NotFound):
+            await message.delete()
+
+    @Cog.listener()
+    async def on_voice_state_update(self, member: Member, before: VoiceState, after: VoiceState) -> None:
+        """Pings a user if they've never joined the voice chat before and aren't voice verified."""
+        if member.bot:
+            log.trace("User is a bot. Ignore.")
+            return
+
+        # member.voice will return None if the user is not in a voice channel
+        if member.voice is None:
+            log.trace("User not in a voice channel. 
Ignore.") + return + + # To avoid race conditions, checking if the user should receive a notification + # and sending it if appropriate is delegated to an atomic helper + notification_sent = await self._ping_newcomer(member) + + # Schedule the notification to be deleted after the configured delay, which is + # again delegated to an atomic helper + if notification_sent: + await asyncio.sleep(GateConf.voice_ping_delete_delay) + await self._delete_ping(member.id) + + async def cog_command_error(self, ctx: Context, error: Exception) -> None: + """Check for & ignore any InWhitelistCheckFailure.""" + if isinstance(error, InWhitelistCheckFailure): + error.handled = True + + +def setup(bot: Bot) -> None: + """Loads the VoiceGate cog.""" + bot.add_cog(VoiceGate(bot)) diff --git a/bot/exts/moderation/watchchannels/__init__.py b/bot/exts/moderation/watchchannels/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/bot/exts/moderation/watchchannels/__init__.py diff --git a/bot/cogs/watchchannels/watchchannel.py b/bot/exts/moderation/watchchannels/_watchchannel.py index eb787b083..f9fc12dc3 100644 --- a/bot/cogs/watchchannels/watchchannel.py +++ b/bot/exts/moderation/watchchannels/_watchchannel.py @@ -9,13 +9,15 @@ from typing import Optional import dateutil.parser import discord -from discord import Color, Embed, HTTPException, Message, errors +from discord import Color, DMChannel, Embed, HTTPException, Message, errors from discord.ext.commands import Cog, Context from bot.api import ResponseCodeError from bot.bot import Bot -from bot.cogs.moderation import ModLog from bot.constants import BigBrother as BigBrotherConfig, Guild as GuildConfig, Icons +from bot.exts.filters.token_remover import TokenRemover +from bot.exts.filters.webhook_remover import WEBHOOK_URL_RE +from bot.exts.moderation.modlog import ModLog from bot.pagination import LinePaginator from bot.utils import CogABCMeta, messages from bot.utils.time import time_since @@ -82,7 +84,7 @@ class WatchChannel(metaclass=CogABCMeta): exc = self._consume_task.exception() if exc: self.log.exception( - f"The message queue consume task has failed with:", + "The message queue consume task has failed with:", exc_info=exc ) return False @@ -91,7 +93,7 @@ class WatchChannel(metaclass=CogABCMeta): async def start_watchchannel(self) -> None: """Starts the watch channel by getting the channel, webhook, and user cache ready.""" - await self.bot.wait_until_ready() + await self.bot.wait_until_guild_available() try: self.channel = await self.bot.fetch_channel(self.destination) @@ -146,7 +148,7 @@ class WatchChannel(metaclass=CogABCMeta): try: data = await self.bot.api_client.get(self.api_endpoint, params=self.api_default_params) except ResponseCodeError as err: - self.log.exception(f"Failed to fetch the watched users from the API", exc_info=err) + self.log.exception("Failed to fetch the watched users from the API", exc_info=err) return False self.watched_users = defaultdict(dict) @@ -173,7 +175,7 @@ class WatchChannel(metaclass=CogABCMeta): self.log.trace(f"Sleeping {BigBrotherConfig.log_delay} seconds before consuming message queue") await asyncio.sleep(BigBrotherConfig.log_delay) - self.log.trace(f"Started consuming the message queue") + self.log.trace("Started consuming the message queue") # If the previous consumption Task failed, first consume the existing comsumption_queue if not self.consumption_queue: @@ -204,11 +206,12 @@ class WatchChannel(metaclass=CogABCMeta): embed: Optional[Embed] = None, ) -> None: """Sends a message to 
the webhook with the specified kwargs.""" + username = messages.sub_clyde(username) try: await self.webhook.send(content=content, username=username, avatar_url=avatar_url, embed=embed) except discord.HTTPException as exc: self.log.exception( - f"Failed to send a message to the webhook", + "Failed to send a message to the webhook", exc_info=exc ) @@ -225,14 +228,16 @@ class WatchChannel(metaclass=CogABCMeta): await self.send_header(msg) - cleaned_content = msg.clean_content - - if cleaned_content: + if TokenRemover.find_token_in_message(msg) or WEBHOOK_URL_RE.search(msg.content): + cleaned_content = "Content is censored because it contains a bot or webhook token." + elif cleaned_content := msg.clean_content: # Put all non-media URLs in a code block to prevent embeds media_urls = {embed.url for embed in msg.embeds if embed.type in ("image", "video")} for url in URL_RE.findall(cleaned_content): if url not in media_urls: cleaned_content = cleaned_content.replace(url, f"`{url}`") + + if cleaned_content: await self.webhook_send( cleaned_content, username=msg.author.display_name, @@ -254,7 +259,7 @@ class WatchChannel(metaclass=CogABCMeta): ) except discord.HTTPException as exc: self.log.exception( - f"Failed to send an attachment to the webhook", + "Failed to send an attachment to the webhook", exc_info=exc ) @@ -273,15 +278,27 @@ class WatchChannel(metaclass=CogABCMeta): reason = self.watched_users[user_id]['reason'] - embed = Embed(description=f"{msg.author.mention} in [#{msg.channel.name}]({msg.jump_url})") - embed.set_footer(text=f"Added {time_delta} by {actor} | Reason: {reason}") + if isinstance(msg.channel, DMChannel): + # If a watched user DMs the bot there won't be a channel name or jump URL + # This could technically include a GroupChannel but bot's can't be in those + message_jump = "via DM" + else: + message_jump = f"in [#{msg.channel.name}]({msg.jump_url})" + + footer = f"Added {time_delta} by {actor} | Reason: {reason}" + embed = Embed(description=f"{msg.author.mention} {message_jump}") + embed.set_footer(text=textwrap.shorten(footer, width=128, placeholder="...")) await self.webhook_send(embed=embed, username=msg.author.display_name, avatar_url=msg.author.avatar_url) - async def list_watched_users(self, ctx: Context, update_cache: bool = True) -> None: + async def list_watched_users( + self, ctx: Context, oldest_first: bool = False, update_cache: bool = True + ) -> None: """ Gives an overview of the watched user list for this channel. + The optional kwarg `oldest_first` orders the list by oldest entry. + The optional kwarg `update_cache` specifies whether the cache should be refreshed by polling the API. 
""" @@ -296,7 +313,11 @@ class WatchChannel(metaclass=CogABCMeta): time_delta = self._get_time_delta(inserted_at) lines.append(f"• <@{user_id}> (added {time_delta})") + if oldest_first: + lines.reverse() + lines = lines or ("There's nothing here yet.",) + embed = Embed( title=f"{self.__class__.__name__} watched users ({'updated' if update_cache else 'cached'})", color=Color.blue() @@ -319,13 +340,16 @@ class WatchChannel(metaclass=CogABCMeta): def cog_unload(self) -> None: """Takes care of unloading the cog and canceling the consumption task.""" - self.log.trace(f"Unloading the cog") + self.log.trace("Unloading the cog") if self._consume_task and not self._consume_task.done(): + def done_callback(task: asyncio.Task) -> None: + """Send exception when consuming task have been cancelled.""" + try: + task.result() + except asyncio.CancelledError: + self.log.info( + f"The consume task of {type(self).__name__} was canceled. Messages may be lost." + ) + + self._consume_task.add_done_callback(done_callback) self._consume_task.cancel() - try: - self._consume_task.result() - except asyncio.CancelledError as e: - self.log.exception( - f"The consume task was canceled. Messages may be lost.", - exc_info=e - ) diff --git a/bot/cogs/watchchannels/bigbrother.py b/bot/exts/moderation/watchchannels/bigbrother.py index c601e0d4d..3b44056d3 100644 --- a/bot/cogs/watchchannels/bigbrother.py +++ b/bot/exts/moderation/watchchannels/bigbrother.py @@ -1,14 +1,14 @@ import logging +import textwrap from collections import ChainMap -from discord.ext.commands import Cog, Context, group +from discord.ext.commands import Cog, Context, group, has_any_role from bot.bot import Bot -from bot.cogs.moderation.utils import post_infraction from bot.constants import Channels, MODERATION_ROLES, Webhooks from bot.converters import FetchedMember -from bot.decorators import with_role -from .watchchannel import WatchChannel +from bot.exts.moderation.infraction._utils import post_infraction +from bot.exts.moderation.watchchannels._watchchannel import WatchChannel log = logging.getLogger(__name__) @@ -27,24 +27,39 @@ class BigBrother(WatchChannel, Cog, name="Big Brother"): ) @group(name='bigbrother', aliases=('bb',), invoke_without_command=True) - @with_role(*MODERATION_ROLES) + @has_any_role(*MODERATION_ROLES) async def bigbrother_group(self, ctx: Context) -> None: """Monitors users by relaying their messages to the Big Brother watch channel.""" - await ctx.invoke(self.bot.get_command("help"), "bigbrother") + await ctx.send_help(ctx.command) @bigbrother_group.command(name='watched', aliases=('all', 'list')) - @with_role(*MODERATION_ROLES) - async def watched_command(self, ctx: Context, update_cache: bool = True) -> None: + @has_any_role(*MODERATION_ROLES) + async def watched_command( + self, ctx: Context, oldest_first: bool = False, update_cache: bool = True + ) -> None: """ Shows the users that are currently being monitored by Big Brother. + The optional kwarg `oldest_first` can be used to order the list by oldest watched. + + The optional kwarg `update_cache` can be used to update the user + cache using the API before listing the users. + """ + await self.list_watched_users(ctx, oldest_first=oldest_first, update_cache=update_cache) + + @bigbrother_group.command(name='oldest') + @has_any_role(*MODERATION_ROLES) + async def oldest_command(self, ctx: Context, update_cache: bool = True) -> None: + """ + Shows Big Brother monitored users ordered by oldest watched. 
+ The optional kwarg `update_cache` can be used to update the user cache using the API before listing the users. """ - await self.list_watched_users(ctx, update_cache) + await ctx.invoke(self.watched_command, oldest_first=True, update_cache=update_cache) - @bigbrother_group.command(name='watch', aliases=('w',)) - @with_role(*MODERATION_ROLES) + @bigbrother_group.command(name='watch', aliases=('w',), root_aliases=('watch',)) + @has_any_role(*MODERATION_ROLES) async def watch_command(self, ctx: Context, user: FetchedMember, *, reason: str) -> None: """ Relay messages sent by the given `user` to the `#big-brother` channel. @@ -52,6 +67,21 @@ class BigBrother(WatchChannel, Cog, name="Big Brother"): A `reason` for adding the user to Big Brother is required and will be displayed in the header when relaying messages of this user to the watchchannel. """ + await self.apply_watch(ctx, user, reason) + + @bigbrother_group.command(name='unwatch', aliases=('uw',), root_aliases=('unwatch',)) + @has_any_role(*MODERATION_ROLES) + async def unwatch_command(self, ctx: Context, user: FetchedMember, *, reason: str) -> None: + """Stop relaying messages by the given `user`.""" + await self.apply_unwatch(ctx, user, reason) + + async def apply_watch(self, ctx: Context, user: FetchedMember, reason: str) -> None: + """ + Add `user` to watched users and apply a watch infraction with `reason`. + + A message indicating the result of the operation is sent to `ctx`. + The message will include `user`'s previous watch infraction history, if it exists. + """ if user.bot: await ctx.send(f":x: I'm sorry {ctx.author}, I'm afraid I can't do that. I only watch humans.") return @@ -82,26 +112,30 @@ class BigBrother(WatchChannel, Cog, name="Big Brother"): if len(history) > 1: total = f"({len(history) // 2} previous infractions in total)" - end_reason = history[0]["reason"] - start_reason = f"Watched: {history[1]['reason']}" + end_reason = textwrap.shorten(history[0]["reason"], width=500, placeholder="...") + start_reason = f"Watched: {textwrap.shorten(history[1]['reason'], width=500, placeholder='...')}" msg += f"\n\nUser's previous watch reasons {total}:```{start_reason}\n\n{end_reason}```" else: msg = ":x: Failed to post the infraction: response was empty." await ctx.send(msg) - @bigbrother_group.command(name='unwatch', aliases=('uw',)) - @with_role(*MODERATION_ROLES) - async def unwatch_command(self, ctx: Context, user: FetchedMember, *, reason: str) -> None: - """Stop relaying messages by the given `user`.""" + async def apply_unwatch(self, ctx: Context, user: FetchedMember, reason: str, send_message: bool = True) -> None: + """ + Remove `user` from watched users and mark their infraction as inactive with `reason`. + + If `send_message` is True, a message indicating the result of the operation is sent to + `ctx`. + """ active_watches = await self.bot.api_client.get( self.api_endpoint, params=ChainMap( + {"user__id": str(user.id)}, self.api_default_params, - {"user__id": str(user.id)} ) ) if active_watches: + log.trace("Active watches for user found. 
Attempting to remove.") [infraction] = active_watches await self.bot.api_client.patch( @@ -111,8 +145,25 @@ class BigBrother(WatchChannel, Cog, name="Big Brother"): await post_infraction(ctx, user, 'watch', f"Unwatched: {reason}", hidden=True, active=False) - await ctx.send(f":white_check_mark: Messages sent by {user} will no longer be relayed.") - self._remove_user(user.id) + + if not send_message: # Prevents a message being sent to the channel if part of a permanent ban + log.debug(f"Perma-banned user {user} was unwatched.") + return + log.trace("User is not banned. Sending message to channel") + message = f":white_check_mark: Messages sent by {user} will no longer be relayed." + else: - await ctx.send(":x: The specified user is currently not being watched.") + log.trace("No active watches found for user.") + if not send_message: # Prevents a message being sent to the channel if part of a permanent ban + log.debug(f"{user} was not on the watch list; no removal necessary.") + return + log.trace("User is not perma banned. Send the error message.") + message = ":x: The specified user is currently not being watched." + + await ctx.send(message) + + +def setup(bot: Bot) -> None: + """Load the BigBrother cog.""" + bot.add_cog(BigBrother(bot)) diff --git a/bot/cogs/watchchannels/talentpool.py b/bot/exts/moderation/watchchannels/talentpool.py index ad0c51fa6..a77dbe156 100644 --- a/bot/cogs/watchchannels/talentpool.py +++ b/bot/exts/moderation/watchchannels/talentpool.py @@ -1,18 +1,18 @@ import logging import textwrap from collections import ChainMap +from typing import Union -from discord import Color, Embed, Member -from discord.ext.commands import Cog, Context, group +from discord import Color, Embed, Member, User +from discord.ext.commands import Cog, Context, group, has_any_role from bot.api import ResponseCodeError from bot.bot import Bot from bot.constants import Channels, Guild, MODERATION_ROLES, STAFF_ROLES, Webhooks from bot.converters import FetchedMember -from bot.decorators import with_role +from bot.exts.moderation.watchchannels._watchchannel import WatchChannel from bot.pagination import LinePaginator from bot.utils import time -from .watchchannel import WatchChannel log = logging.getLogger(__name__) @@ -31,24 +31,39 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): ) @group(name='talentpool', aliases=('tp', 'talent', 'nomination', 'n'), invoke_without_command=True) - @with_role(*MODERATION_ROLES) + @has_any_role(*MODERATION_ROLES) async def nomination_group(self, ctx: Context) -> None: """Highlights the activity of helper nominees by relaying their messages to the talent pool channel.""" - await ctx.invoke(self.bot.get_command("help"), "talentpool") + await ctx.send_help(ctx.command) - @nomination_group.command(name='watched', aliases=('all', 'list')) - @with_role(*MODERATION_ROLES) - async def watched_command(self, ctx: Context, update_cache: bool = True) -> None: + @nomination_group.command(name='watched', aliases=('all', 'list'), root_aliases=("nominees",)) + @has_any_role(*MODERATION_ROLES) + async def watched_command( + self, ctx: Context, oldest_first: bool = False, update_cache: bool = True + ) -> None: """ Shows the users that are currently being monitored in the talent pool. + The optional kwarg `oldest_first` can be used to order the list by oldest nomination. + The optional kwarg `update_cache` can be used to update the user cache using the API before listing the users. 
""" - await self.list_watched_users(ctx, update_cache) + await self.list_watched_users(ctx, oldest_first=oldest_first, update_cache=update_cache) + + @nomination_group.command(name='oldest') + @has_any_role(*MODERATION_ROLES) + async def oldest_command(self, ctx: Context, update_cache: bool = True) -> None: + """ + Shows talent pool monitored users ordered by oldest nomination. - @nomination_group.command(name='watch', aliases=('w', 'add', 'a')) - @with_role(*STAFF_ROLES) + The optional kwarg `update_cache` can be used to update the user + cache using the API before listing the users. + """ + await ctx.invoke(self.watched_command, oldest_first=True, update_cache=update_cache) + + @nomination_group.command(name='watch', aliases=('w', 'add', 'a'), root_aliases=("nominate",)) + @has_any_role(*STAFF_ROLES) async def watch_command(self, ctx: Context, user: FetchedMember, *, reason: str) -> None: """ Relay messages sent by the given `user` to the `#talent-pool` channel. @@ -61,7 +76,7 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): return if isinstance(user, Member) and any(role.id in STAFF_ROLES for role in user.roles): - await ctx.send(f":x: Nominating staff members, eh? Here's a cookie :cookie:") + await ctx.send(":x: Nominating staff members, eh? Here's a cookie :cookie:") return if not await self.fetch_user_cache(): @@ -106,14 +121,14 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): if history: total = f"({len(history)} previous nominations in total)" - start_reason = f"Watched: {history[0]['reason']}" - end_reason = f"Unwatched: {history[0]['end_reason']}" + start_reason = f"Watched: {textwrap.shorten(history[0]['reason'], width=500, placeholder='...')}" + end_reason = f"Unwatched: {textwrap.shorten(history[0]['end_reason'], width=500, placeholder='...')}" msg += f"\n\nUser's previous watch reasons {total}:```{start_reason}\n\n{end_reason}```" await ctx.send(msg) @nomination_group.command(name='history', aliases=('info', 'search')) - @with_role(*MODERATION_ROLES) + @has_any_role(*MODERATION_ROLES) async def history_command(self, ctx: Context, user: FetchedMember) -> None: """Shows the specified user's nomination history.""" result = await self.bot.api_client.get( @@ -141,42 +156,27 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): max_size=1000 ) - @nomination_group.command(name='unwatch', aliases=('end', )) - @with_role(*MODERATION_ROLES) + @nomination_group.command(name='unwatch', aliases=('end', ), root_aliases=("unnominate",)) + @has_any_role(*MODERATION_ROLES) async def unwatch_command(self, ctx: Context, user: FetchedMember, *, reason: str) -> None: """ Ends the active nomination of the specified user with the given reason. Providing a `reason` is required. 
""" - active_nomination = await self.bot.api_client.get( - self.api_endpoint, - params=ChainMap( - self.api_default_params, - {"user__id": str(user.id)} - ) - ) - - if not active_nomination: + if await self.unwatch(user.id, reason): + await ctx.send(f":white_check_mark: Messages sent by {user} will no longer be relayed") + else: await ctx.send(":x: The specified user does not have an active nomination") - return - - [nomination] = active_nomination - await self.bot.api_client.patch( - f"{self.api_endpoint}/{nomination['id']}", - json={'end_reason': reason, 'active': False} - ) - await ctx.send(f":white_check_mark: Messages sent by {user} will no longer be relayed") - self._remove_user(user.id) @nomination_group.group(name='edit', aliases=('e',), invoke_without_command=True) - @with_role(*MODERATION_ROLES) + @has_any_role(*MODERATION_ROLES) async def nomination_edit_group(self, ctx: Context) -> None: """Commands to edit nominations.""" - await ctx.invoke(self.bot.get_command("help"), "talentpool", "edit") + await ctx.send_help(ctx.command) @nomination_edit_group.command(name='reason') - @with_role(*MODERATION_ROLES) + @has_any_role(*MODERATION_ROLES) async def edit_reason_command(self, ctx: Context, nomination_id: int, *, reason: str) -> None: """ Edits the reason/unnominate reason for the nomination with the given `id` depending on the status. @@ -205,6 +205,36 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): await ctx.send(f":white_check_mark: Updated the {field} of the nomination!") + @Cog.listener() + async def on_member_ban(self, guild: Guild, user: Union[User, Member]) -> None: + """Remove `user` from the talent pool after they are banned.""" + await self.unwatch(user.id, "User was banned.") + + async def unwatch(self, user_id: int, reason: str) -> bool: + """End the active nomination of a user with the given reason and return True on success.""" + active_nomination = await self.bot.api_client.get( + self.api_endpoint, + params=ChainMap( + {"user__id": str(user_id)}, + self.api_default_params, + ) + ) + + if not active_nomination: + log.debug(f"No active nominate exists for {user_id=}") + return False + + log.info(f"Ending nomination: {user_id=} {reason=}") + + nomination = active_nomination[0] + await self.bot.api_client.patch( + f"{self.api_endpoint}/{nomination['id']}", + json={'end_reason': reason, 'active': False} + ) + self._remove_user(user_id) + + return True + def _nomination_to_string(self, nomination_object: dict) -> str: """Creates a string representation of a nomination.""" guild = self.bot.get_guild(Guild.id) @@ -247,3 +277,8 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"): ) return lines.strip() + + +def setup(bot: Bot) -> None: + """Load the TalentPool cog.""" + bot.add_cog(TalentPool(bot)) diff --git a/bot/exts/utils/__init__.py b/bot/exts/utils/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/bot/exts/utils/__init__.py diff --git a/bot/exts/utils/bot.py b/bot/exts/utils/bot.py new file mode 100644 index 000000000..a4c828f95 --- /dev/null +++ b/bot/exts/utils/bot.py @@ -0,0 +1,64 @@ +import logging +from typing import Optional + +from discord import Embed, TextChannel +from discord.ext.commands import Cog, Context, command, group, has_any_role + +from bot.bot import Bot +from bot.constants import Guild, MODERATION_ROLES, URLs + +log = logging.getLogger(__name__) + + +class BotCog(Cog, name="Bot"): + """Bot information commands.""" + + def __init__(self, bot: Bot): + self.bot = bot + + 
@group(invoke_without_command=True, name="bot", hidden=True) + async def botinfo_group(self, ctx: Context) -> None: + """Bot informational commands.""" + await ctx.send_help(ctx.command) + + @botinfo_group.command(name='about', aliases=('info',), hidden=True) + async def about_command(self, ctx: Context) -> None: + """Get information about the bot.""" + embed = Embed( + description="A utility bot designed just for the Python server! Try `!help` for more info.", + url="https://github.com/python-discord/bot" + ) + + embed.add_field(name="Total Users", value=str(len(self.bot.get_guild(Guild.id).members))) + embed.set_author( + name="Python Bot", + url="https://github.com/python-discord/bot", + icon_url=URLs.bot_avatar + ) + + await ctx.send(embed=embed) + + @command(name='echo', aliases=('print',)) + @has_any_role(*MODERATION_ROLES) + async def echo_command(self, ctx: Context, channel: Optional[TextChannel], *, text: str) -> None: + """Repeat the given message in either a specified channel or the current channel.""" + if channel is None: + await ctx.send(text) + else: + await channel.send(text) + + @command(name='embed') + @has_any_role(*MODERATION_ROLES) + async def embed_command(self, ctx: Context, channel: Optional[TextChannel], *, text: str) -> None: + """Send the input within an embed to either a specified channel or the current channel.""" + embed = Embed(description=text) + + if channel is None: + await ctx.send(embed=embed) + else: + await channel.send(embed=embed) + + +def setup(bot: Bot) -> None: + """Load the Bot cog.""" + bot.add_cog(BotCog(bot)) diff --git a/bot/cogs/clean.py b/bot/exts/utils/clean.py index 2104efe57..8acaf9131 100644 --- a/bot/cogs/clean.py +++ b/bot/exts/utils/clean.py @@ -1,18 +1,17 @@ import logging import random import re -from typing import Optional +from typing import Iterable, Optional from discord import Colour, Embed, Message, TextChannel, User -from discord.ext.commands import Cog, Context, group +from discord.ext import commands +from discord.ext.commands import Cog, Context, group, has_any_role from bot.bot import Bot -from bot.cogs.moderation import ModLog from bot.constants import ( - Channels, CleanMessages, Colours, Event, - Icons, MODERATION_ROLES, NEGATIVE_REPLIES + Channels, CleanMessages, Colours, Event, Icons, MODERATION_ROLES, NEGATIVE_REPLIES ) -from bot.decorators import with_role +from bot.exts.moderation.modlog import ModLog log = logging.getLogger(__name__) @@ -41,10 +40,11 @@ class Clean(Cog): self, amount: int, ctx: Context, + channels: Iterable[TextChannel], bots_only: bool = False, user: User = None, regex: Optional[str] = None, - channel: Optional[TextChannel] = None + until_message: Optional[Message] = None, ) -> None: """A helper function that does the actual message cleaning.""" def predicate_bots_only(message: Message) -> bool: @@ -110,48 +110,60 @@ class Clean(Cog): predicate = None # Delete all messages # Default to using the invoking context's channel - if not channel: - channel = ctx.channel + if not channels: + channels = [ctx.channel] + + # Delete the invocation first + self.mod_log.ignore(Event.message_delete, ctx.message.id) + await ctx.message.delete() - # Look through the history and retrieve message data messages = [] message_ids = [] self.cleaning = True - invocation_deleted = False - # To account for the invocation message, we index `amount + 1` messages. - async for message in channel.history(limit=amount + 1): + # Find the IDs of the messages to delete. IDs are needed in order to ignore mod log events. 
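# Annotation (editorial, not part of the patch): the rewritten loop below walks each target
# channel's history, remembers the IDs of matching messages so the mod log can ignore their
# deletion events, and only afterwards bulk-deletes them: via purge() normally, or via
# delete_messages() in batches of at most 100 when cleaning up to a specific message.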
+ for channel in channels: + async for message in channel.history(limit=amount): + + # If at any point the cancel command is invoked, we should stop. + if not self.cleaning: + return - # If at any point the cancel command is invoked, we should stop. - if not self.cleaning: - return + # If we are looking for specific message. + if until_message: - # Always start by deleting the invocation - if not invocation_deleted: - self.mod_log.ignore(Event.message_delete, message.id) - await message.delete() - invocation_deleted = True - continue + # we could use ID's here however in case if the message we are looking for gets deleted, + # we won't have a way to figure that out thus checking for datetime should be more reliable + if message.created_at < until_message.created_at: + # means we have found the message until which we were supposed to be deleting. + break - # If the message passes predicate, let's save it. - if predicate is None or predicate(message): - message_ids.append(message.id) - messages.append(message) + # Since we will be using `delete_messages` method of a TextChannel and we need message objects to + # use it as well as to send logs we will start appending messages here instead adding them from + # purge. + messages.append(message) + + # If the message passes predicate, let's save it. + if predicate is None or predicate(message): + message_ids.append(message.id) self.cleaning = False - # We should ignore the ID's we stored, so we don't get mod-log spam. + # Now let's delete the actual messages with purge. self.mod_log.ignore(Event.message_delete, *message_ids) - - # Use bulk delete to actually do the cleaning. It's far faster. - await channel.purge( - limit=amount, - check=predicate - ) + for channel in channels: + if until_message: + for i in range(0, len(messages), 100): + # while purge automatically handles the amount of messages + # delete_messages only allows for up to 100 messages at once + # thus we need to paginate the amount to always be <= 100 + await channel.delete_messages(messages[i:i + 100]) + else: + messages += await channel.purge(limit=amount, check=predicate) # Reverse the list to restore chronological order if messages: - messages = list(reversed(messages)) + messages = reversed(messages) log_url = await self.mod_log.upload_log(messages, ctx.author.id) else: # Can't build an embed, nothing to clean! @@ -163,8 +175,11 @@ class Clean(Cog): return # Build the embed and send it + target_channels = ", ".join(channel.mention for channel in channels) + message = ( - f"**{len(message_ids)}** messages deleted in <#{channel.id}> by **{ctx.author.name}**\n\n" + f"**{len(message_ids)}** messages deleted in {target_channels} by " + f"{ctx.author.mention}\n\n" f"A log of the deleted messages can be found [here]({log_url})." 
) @@ -173,63 +188,74 @@ class Clean(Cog): colour=Colour(Colours.soft_red), title="Bulk message delete", text=message, - channel_id=Channels.modlog, + channel_id=Channels.mod_log, ) - @group(invoke_without_command=True, name="clean", aliases=["purge"]) - @with_role(*MODERATION_ROLES) + @group(invoke_without_command=True, name="clean", aliases=["clear", "purge"]) + @has_any_role(*MODERATION_ROLES) async def clean_group(self, ctx: Context) -> None: """Commands for cleaning messages in channels.""" - await ctx.invoke(self.bot.get_command("help"), "clean") + await ctx.send_help(ctx.command) @clean_group.command(name="user", aliases=["users"]) - @with_role(*MODERATION_ROLES) + @has_any_role(*MODERATION_ROLES) async def clean_user( self, ctx: Context, user: User, amount: Optional[int] = 10, - channel: TextChannel = None + channels: commands.Greedy[TextChannel] = None ) -> None: """Delete messages posted by the provided user, stop cleaning after traversing `amount` messages.""" - await self._clean_messages(amount, ctx, user=user, channel=channel) + await self._clean_messages(amount, ctx, user=user, channels=channels) @clean_group.command(name="all", aliases=["everything"]) - @with_role(*MODERATION_ROLES) + @has_any_role(*MODERATION_ROLES) async def clean_all( self, ctx: Context, amount: Optional[int] = 10, - channel: TextChannel = None + channels: commands.Greedy[TextChannel] = None ) -> None: """Delete all messages, regardless of poster, stop cleaning after traversing `amount` messages.""" - await self._clean_messages(amount, ctx, channel=channel) + await self._clean_messages(amount, ctx, channels=channels) @clean_group.command(name="bots", aliases=["bot"]) - @with_role(*MODERATION_ROLES) + @has_any_role(*MODERATION_ROLES) async def clean_bots( self, ctx: Context, amount: Optional[int] = 10, - channel: TextChannel = None + channels: commands.Greedy[TextChannel] = None ) -> None: """Delete all messages posted by a bot, stop cleaning after traversing `amount` messages.""" - await self._clean_messages(amount, ctx, bots_only=True, channel=channel) + await self._clean_messages(amount, ctx, bots_only=True, channels=channels) @clean_group.command(name="regex", aliases=["word", "expression"]) - @with_role(*MODERATION_ROLES) + @has_any_role(*MODERATION_ROLES) async def clean_regex( self, ctx: Context, regex: str, amount: Optional[int] = 10, - channel: TextChannel = None + channels: commands.Greedy[TextChannel] = None ) -> None: """Delete all messages that match a certain regex, stop cleaning after traversing `amount` messages.""" - await self._clean_messages(amount, ctx, regex=regex, channel=channel) + await self._clean_messages(amount, ctx, regex=regex, channels=channels) + + @clean_group.command(name="message", aliases=["messages"]) + @has_any_role(*MODERATION_ROLES) + async def clean_message(self, ctx: Context, message: Message) -> None: + """Delete all messages until certain message, stop cleaning after hitting the `message`.""" + await self._clean_messages( + CleanMessages.message_limit, + ctx, + channels=[message.channel], + until_message=message + ) @clean_group.command(name="stop", aliases=["cancel", "abort"]) - @with_role(*MODERATION_ROLES) + @has_any_role(*MODERATION_ROLES) async def clean_cancel(self, ctx: Context) -> None: """If there is an ongoing cleaning process, attempt to immediately cancel it.""" self.cleaning = False diff --git a/bot/cogs/extensions.py b/bot/exts/utils/extensions.py index f16e79fb7..418db0150 100644 --- a/bot/cogs/extensions.py +++ b/bot/exts/utils/extensions.py @@ 
-2,25 +2,22 @@ import functools import logging import typing as t from enum import Enum -from pkgutil import iter_modules from discord import Colour, Embed from discord.ext import commands from discord.ext.commands import Context, group +from bot import exts from bot.bot import Bot from bot.constants import Emojis, MODERATION_ROLES, Roles, URLs from bot.pagination import LinePaginator -from bot.utils.checks import with_role_check +from bot.utils.extensions import EXTENSIONS, unqualify log = logging.getLogger(__name__) -UNLOAD_BLACKLIST = {"bot.cogs.extensions", "bot.cogs.modlog"} -EXTENSIONS = frozenset( - ext.name - for ext in iter_modules(("bot/cogs",), "bot.cogs.") - if ext.name[-1] != "_" -) + +UNLOAD_BLACKLIST = {f"{exts.__name__}.utils.extensions", f"{exts.__name__}.moderation.modlog"} +BASE_PATH_LEN = len(exts.__name__.split(".")) class Action(Enum): @@ -47,11 +44,25 @@ class Extension(commands.Converter): argument = argument.lower() - if "." not in argument: - argument = f"bot.cogs.{argument}" - if argument in EXTENSIONS: return argument + elif (qualified_arg := f"{exts.__name__}.{argument}") in EXTENSIONS: + return qualified_arg + + matches = [] + for ext in EXTENSIONS: + if argument == unqualify(ext): + matches.append(ext) + + if len(matches) > 1: + matches.sort() + names = "\n".join(matches) + raise commands.BadArgument( + f":x: `{argument}` is an ambiguous extension name. " + f"Please use one of the following fully-qualified names.```\n{names}```" + ) + elif matches: + return matches[0] else: raise commands.BadArgument(f":x: Could not find the extension `{argument}`.") @@ -65,17 +76,17 @@ class Extensions(commands.Cog): @group(name="extensions", aliases=("ext", "exts", "c", "cogs"), invoke_without_command=True) async def extensions_group(self, ctx: Context) -> None: """Load, unload, reload, and list loaded extensions.""" - await ctx.invoke(self.bot.get_command("help"), "extensions") + await ctx.send_help(ctx.command) @extensions_group.command(name="load", aliases=("l",)) async def load_command(self, ctx: Context, *extensions: Extension) -> None: - """ + r""" Load extensions given their fully qualified or unqualified names. If '\*' or '\*\*' is given as the name, all unloaded extensions will be loaded. """ # noqa: W605 if not extensions: - await ctx.invoke(self.bot.get_command("help"), "extensions load") + await ctx.send_help(ctx.command) return if "*" in extensions or "**" in extensions: @@ -86,13 +97,13 @@ class Extensions(commands.Cog): @extensions_group.command(name="unload", aliases=("ul",)) async def unload_command(self, ctx: Context, *extensions: Extension) -> None: - """ + r""" Unload currently loaded extensions given their fully qualified or unqualified names. If '\*' or '\*\*' is given as the name, all loaded extensions will be unloaded. """ # noqa: W605 if not extensions: - await ctx.invoke(self.bot.get_command("help"), "extensions unload") + await ctx.send_help(ctx.command) return blacklisted = "\n".join(UNLOAD_BLACKLIST & set(extensions)) @@ -107,9 +118,9 @@ class Extensions(commands.Cog): await ctx.send(msg) - @extensions_group.command(name="reload", aliases=("r",)) + @extensions_group.command(name="reload", aliases=("r",), root_aliases=("reload",)) async def reload_command(self, ctx: Context, *extensions: Extension) -> None: - """ + r""" Reload extensions given their fully qualified or unqualified names. If an extension fails to be reloaded, it will be rolled-back to the prior working state. 
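A reading aid for the rewritten Extension converter above: it accepts fully qualified names, names relative to the exts package, or bare module names, and rejects bare names that match more than one extension. A minimal sketch of that resolution order, using invented extension names and a plausible unqualify() helper rather than the repository's own code:

EXTENSIONS = {"bot.exts.utils.ping", "bot.exts.utils.clean", "bot.exts.moderation.clean"}
EXTS_PACKAGE = "bot.exts"


def unqualify(name: str) -> str:
    """Keep only the last component of a dotted module path."""
    return name.rsplit(".", maxsplit=1)[-1]


def resolve(argument: str) -> str:
    argument = argument.lower()
    if argument in EXTENSIONS:                          # already fully qualified
        return argument
    if (qualified := f"{EXTS_PACKAGE}.{argument}") in EXTENSIONS:
        return qualified                                # e.g. "utils.ping"
    matches = sorted(ext for ext in EXTENSIONS if unqualify(ext) == argument)
    if len(matches) > 1:                                # a bare "clean" would match two modules here
        raise ValueError(f"`{argument}` is ambiguous; use one of {matches}")
    if matches:
        return matches[0]
    raise ValueError(f"Could not find the extension `{argument}`.")


print(resolve("ping"))         # -> bot.exts.utils.ping
print(resolve("utils.clean"))  # -> bot.exts.utils.clean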
@@ -118,7 +129,7 @@ class Extensions(commands.Cog): If '\*\*' is given as the name, all extensions, including unloaded ones, will be reloaded. """ # noqa: W605 if not extensions: - await ctx.invoke(self.bot.get_command("help"), "extensions reload") + await ctx.send_help(ctx.command) return if "**" in extensions: @@ -139,27 +150,44 @@ class Extensions(commands.Cog): Grey indicates that the extension is unloaded. Green indicates that the extension is currently loaded. """ - embed = Embed() - lines = [] - - embed.colour = Colour.blurple() + embed = Embed(colour=Colour.blurple()) embed.set_author( name="Extensions List", url=URLs.github_bot_repo, icon_url=URLs.bot_avatar ) - for ext in sorted(list(EXTENSIONS)): + lines = [] + categories = self.group_extension_statuses() + for category, extensions in sorted(categories.items()): + # Treat each category as a single line by concatenating everything. + # This ensures the paginator will not cut off a page in the middle of a category. + category = category.replace("_", " ").title() + extensions = "\n".join(sorted(extensions)) + lines.append(f"**{category}**\n{extensions}\n") + + log.debug(f"{ctx.author} requested a list of all cogs. Returning a paginated list.") + await LinePaginator.paginate(lines, ctx, embed, scale_to_size=700, empty=False) + + def group_extension_statuses(self) -> t.Mapping[str, str]: + """Return a mapping of extension names and statuses to their categories.""" + categories = {} + + for ext in EXTENSIONS: if ext in self.bot.extensions: status = Emojis.status_online else: status = Emojis.status_offline - ext = ext.rsplit(".", 1)[1] - lines.append(f"{status} {ext}") + path = ext.split(".") + if len(path) > BASE_PATH_LEN + 1: + category = " - ".join(path[BASE_PATH_LEN:-1]) + else: + category = "uncategorised" - log.debug(f"{ctx.author} requested a list of all cogs. Returning a paginated list.") - await LinePaginator.paginate(lines, ctx, embed, max_size=300, empty=False) + categories.setdefault(category, []).append(f"{status} {path[-1]}") + + return categories def batch_manage(self, action: Action, *extensions: str) -> str: """ @@ -219,9 +247,9 @@ class Extensions(commands.Cog): return msg, error_msg # This cannot be static (must have a __func__ attribute). - def cog_check(self, ctx: Context) -> bool: + async def cog_check(self, ctx: Context) -> bool: """Only allow moderators and core developers to invoke the commands in this cog.""" - return with_role_check(ctx, *MODERATION_ROLES, Roles.core_developer) + return await commands.has_any_role(*MODERATION_ROLES, Roles.core_developers).predicate(ctx) # This cannot be static (must have a __func__ attribute). 
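Similarly, a small sketch (again editorial, not code from the patch) of how group_extension_statuses() derives a category from an extension's dotted path; BASE_PATH_LEN is assumed to be 2 for the bot.exts package, and the module paths are illustrative:

BASE_PATH_LEN = 2  # len("bot.exts".split("."))


def category_of(ext: str) -> str:
    path = ext.split(".")
    if len(path) > BASE_PATH_LEN + 1:
        return " - ".join(path[BASE_PATH_LEN:-1])
    return "uncategorised"


print(category_of("bot.exts.moderation.infraction.infractions"))  # moderation - infraction
print(category_of("bot.exts.utils.ping"))                         # utils
print(category_of("bot.exts.hypothetical_module"))                # uncategorised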
async def cog_command_error(self, ctx: Context, error: Exception) -> None: diff --git a/bot/cogs/eval.py b/bot/exts/utils/internal.py index 9c729f28a..3521c8fd4 100644 --- a/bot/cogs/eval.py +++ b/bot/exts/utils/internal.py @@ -5,22 +5,24 @@ import pprint import re import textwrap import traceback +from collections import Counter +from datetime import datetime from io import StringIO from typing import Any, Optional, Tuple import discord -from discord.ext.commands import Cog, Context, group +from discord.ext.commands import Cog, Context, group, has_any_role from bot.bot import Bot from bot.constants import Roles -from bot.decorators import with_role from bot.interpreter import Interpreter +from bot.utils import find_nth_occurrence, send_to_paste_service log = logging.getLogger(__name__) -class CodeEval(Cog): - """Owner and admin feature that evaluates code and returns the result to the channel.""" +class Internal(Cog): + """Administrator and Core Developer commands.""" def __init__(self, bot: Bot): self.bot = bot @@ -28,7 +30,18 @@ class CodeEval(Cog): self.ln = 0 self.stdout = StringIO() - self.interpreter = Interpreter(bot) + self.interpreter = Interpreter() + + self.socket_since = datetime.utcnow() + self.socket_event_total = 0 + self.socket_events = Counter() + + @Cog.listener() + async def on_socket_response(self, msg: dict) -> None: + """When a websocket event is received, increase our counters.""" + if event_type := msg.get("t"): + self.socket_event_total += 1 + self.socket_events[event_type] += 1 def _format(self, inp: str, out: Any) -> Tuple[str, Optional[discord.Embed]]: """Format the eval output into a string & attempt to format it into an Embed.""" @@ -171,17 +184,41 @@ async def func(): # (None,) -> Any res = traceback.format_exc() out, embed = self._format(code, res) + out = out.rstrip("\n") # Strip empty lines from output + + # Truncate output to max 15 lines or 1500 characters + newline_truncate_index = find_nth_occurrence(out, "\n", 15) + + if newline_truncate_index is None or newline_truncate_index > 1500: + truncate_index = 1500 + else: + truncate_index = newline_truncate_index + + if len(out) > truncate_index: + paste_link = await send_to_paste_service(out, extension="py") + if paste_link is not None: + paste_text = f"full contents at {paste_link}" + else: + paste_text = "failed to upload contents to paste service." + + await ctx.send( + f"```py\n{out[:truncate_index]}\n```" + f"... response truncated; {paste_text}", + embed=embed + ) + return + await ctx.send(f"```py\n{out}```", embed=embed) @group(name='internal', aliases=('int',)) - @with_role(Roles.owner, Roles.admin) + @has_any_role(Roles.owners, Roles.admins, Roles.core_developers) async def internal_group(self, ctx: Context) -> None: """Internal commands. 
Top secret!""" if not ctx.invoked_subcommand: - await ctx.invoke(self.bot.get_command("help"), "internal") + await ctx.send_help(ctx.command) @internal_group.command(name='eval', aliases=('e',)) - @with_role(Roles.admin, Roles.owner) + @has_any_role(Roles.admins, Roles.owners) async def eval(self, ctx: Context, *, code: str) -> None: """Run eval in a REPL-like format.""" code = code.strip("`") @@ -196,7 +233,26 @@ async def func(): # (None,) -> Any await self._eval(ctx, code) + @internal_group.command(name='socketstats', aliases=('socket', 'stats')) + @has_any_role(Roles.admins, Roles.owners, Roles.core_developers) + async def socketstats(self, ctx: Context) -> None: + """Fetch information on the socket events received from Discord.""" + running_s = (datetime.utcnow() - self.socket_since).total_seconds() + + per_s = self.socket_event_total / running_s + + stats_embed = discord.Embed( + title="WebSocket statistics", + description=f"Receiving {per_s:0.2f} event per second.", + color=discord.Color.blurple() + ) + + for event_type, count in self.socket_events.most_common(25): + stats_embed.add_field(name=event_type, value=count, inline=False) + + await ctx.send(embed=stats_embed) + def setup(bot: Bot) -> None: - """Load the CodeEval cog.""" - bot.add_cog(CodeEval(bot)) + """Load the Internal cog.""" + bot.add_cog(Internal(bot)) diff --git a/bot/cogs/jams.py b/bot/exts/utils/jams.py index 985f28ce5..1c0988343 100644 --- a/bot/cogs/jams.py +++ b/bot/exts/utils/jams.py @@ -1,15 +1,18 @@ import logging +import typing as t -from discord import Member, PermissionOverwrite, utils +from discord import CategoryChannel, Guild, Member, PermissionOverwrite, Role from discord.ext import commands from more_itertools import unique_everseen from bot.bot import Bot from bot.constants import Roles -from bot.decorators import with_role log = logging.getLogger(__name__) +MAX_CHANNELS = 50 +CATEGORY_NAME = "Code Jam" + class CodeJams(commands.Cog): """Manages the code-jam related parts of our server.""" @@ -18,7 +21,7 @@ class CodeJams(commands.Cog): self.bot = bot @commands.command() - @with_role(Roles.admin) + @commands.has_any_role(Roles.admins) async def createteam(self, ctx: commands.Context, team_name: str, members: commands.Greedy[Member]) -> None: """ Create team channels (voice and text) in the Code Jams category, assign roles, and add overwrites for the team. @@ -40,22 +43,47 @@ class CodeJams(commands.Cog): ) return - code_jam_category = utils.get(ctx.guild.categories, name="Code Jam") + team_channel = await self.create_channels(ctx.guild, team_name, members) + await self.add_roles(ctx.guild, members) - if code_jam_category is None: - log.info("Code Jam category not found, creating it.") + await ctx.send( + f":ok_hand: Team created: {team_channel}\n" + f"**Team Leader:** {members[0].mention}\n" + f"**Team Members:** {' '.join(member.mention for member in members[1:])}" + ) - category_overwrites = { - ctx.guild.default_role: PermissionOverwrite(read_messages=False), - ctx.guild.me: PermissionOverwrite(read_messages=True) - } + async def get_category(self, guild: Guild) -> CategoryChannel: + """ + Return a code jam category. - code_jam_category = await ctx.guild.create_category_channel( - "Code Jam", - overwrites=category_overwrites, - reason="It's code jam time!" - ) + If all categories are full or none exist, create a new category. + """ + for category in guild.categories: + # Need 2 available spaces: one for the text channel and one for voice. 
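# Illustrative note (not from the patch): with MAX_CHANNELS == 50, a "Code Jam" category
# that already holds 49 channels is skipped, since one free slot is not enough for both a
# text and a voice channel, and get_category() falls back to creating a fresh category.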
+ if category.name == CATEGORY_NAME and MAX_CHANNELS - len(category.channels) >= 2: + return category + + return await self.create_category(guild) + + @staticmethod + async def create_category(guild: Guild) -> CategoryChannel: + """Create a new code jam category and return it.""" + log.info("Creating a new code jam category.") + + category_overwrites = { + guild.default_role: PermissionOverwrite(read_messages=False), + guild.me: PermissionOverwrite(read_messages=True) + } + + return await guild.create_category_channel( + CATEGORY_NAME, + overwrites=category_overwrites, + reason="It's code jam time!" + ) + @staticmethod + def get_overwrites(members: t.List[Member], guild: Guild) -> t.Dict[t.Union[Member, Role], PermissionOverwrite]: + """Get code jam team channels permission overwrites.""" # First member is always the team leader team_channel_overwrites = { members[0]: PermissionOverwrite( @@ -64,8 +92,8 @@ class CodeJams(commands.Cog): manage_webhooks=True, connect=True ), - ctx.guild.default_role: PermissionOverwrite(read_messages=False, connect=False), - ctx.guild.get_role(Roles.verified): PermissionOverwrite( + guild.default_role: PermissionOverwrite(read_messages=False, connect=False), + guild.get_role(Roles.verified): PermissionOverwrite( read_messages=False, connect=False ) @@ -78,8 +106,16 @@ class CodeJams(commands.Cog): connect=True ) + return team_channel_overwrites + + async def create_channels(self, guild: Guild, team_name: str, members: t.List[Member]) -> str: + """Create team text and voice channels. Return the mention for the text channel.""" + # Get permission overwrites and category + team_channel_overwrites = self.get_overwrites(members, guild) + code_jam_category = await self.get_category(guild) + # Create a text channel for the team - team_channel = await ctx.guild.create_text_channel( + team_channel = await guild.create_text_channel( team_name, overwrites=team_channel_overwrites, category=code_jam_category @@ -88,26 +124,25 @@ class CodeJams(commands.Cog): # Create a voice channel for the team team_voice_name = " ".join(team_name.split("-")).title() - await ctx.guild.create_voice_channel( + await guild.create_voice_channel( team_voice_name, overwrites=team_channel_overwrites, category=code_jam_category ) + return team_channel.mention + + @staticmethod + async def add_roles(guild: Guild, members: t.List[Member]) -> None: + """Assign team leader and jammer roles.""" # Assign team leader role - await members[0].add_roles(ctx.guild.get_role(Roles.team_leader)) + await members[0].add_roles(guild.get_role(Roles.team_leaders)) # Assign rest of roles - jammer_role = ctx.guild.get_role(Roles.jammer) + jammer_role = guild.get_role(Roles.jammers) for member in members: await member.add_roles(jammer_role) - await ctx.send( - f":ok_hand: Team created: {team_channel.mention}\n" - f"**Team Leader:** {members[0].mention}\n" - f"**Team Members:** {' '.join(member.mention for member in members[1:])}" - ) - def setup(bot: Bot) -> None: """Load the CodeJams cog.""" diff --git a/bot/exts/utils/ping.py b/bot/exts/utils/ping.py new file mode 100644 index 000000000..572fc934b --- /dev/null +++ b/bot/exts/utils/ping.py @@ -0,0 +1,59 @@ +import socket +from datetime import datetime + +import aioping +from discord import Embed +from discord.ext import commands + +from bot.bot import Bot +from bot.constants import Channels, Emojis, STAFF_ROLES, URLs +from bot.decorators import in_whitelist + +DESCRIPTIONS = ( + "Command processing time", + "Python Discord website latency", + "Discord API latency" 
+) +ROUND_LATENCY = 3 + + +class Latency(commands.Cog): + """Getting the latency between the bot and websites.""" + + def __init__(self, bot: Bot) -> None: + self.bot = bot + + @commands.command() + @in_whitelist(channels=(Channels.bot_commands,), roles=STAFF_ROLES) + async def ping(self, ctx: commands.Context) -> None: + """ + Gets different measures of latency within the bot. + + Returns bot, Python Discord Site, Discord Protocol latency. + """ + # datetime.datetime objects do not have the "milliseconds" attribute. + # It must be converted to seconds before converting to milliseconds. + bot_ping = (datetime.utcnow() - ctx.message.created_at).total_seconds() * 1000 + bot_ping = f"{bot_ping:.{ROUND_LATENCY}f} ms" + + try: + delay = await aioping.ping(URLs.site, family=socket.AddressFamily.AF_INET) * 1000 + site_ping = f"{delay:.{ROUND_LATENCY}f} ms" + + except TimeoutError: + site_ping = f"{Emojis.cross_mark} Connection timed out." + + # Discord Protocol latency return value is in seconds, must be multiplied by 1000 to get milliseconds. + discord_ping = f"{self.bot.latency * 1000:.{ROUND_LATENCY}f} ms" + + embed = Embed(title="Pong!") + + for desc, latency in zip(DESCRIPTIONS, [bot_ping, site_ping, discord_ping]): + embed.add_field(name=desc, value=latency, inline=False) + + await ctx.send(embed=embed) + + +def setup(bot: Bot) -> None: + """Load the Latency cog.""" + bot.add_cog(Latency(bot)) diff --git a/bot/exts/utils/reminders.py b/bot/exts/utils/reminders.py new file mode 100644 index 000000000..3113a1149 --- /dev/null +++ b/bot/exts/utils/reminders.py @@ -0,0 +1,438 @@ +import asyncio +import logging +import random +import textwrap +import typing as t +from datetime import datetime, timedelta +from operator import itemgetter + +import discord +from dateutil.parser import isoparse +from dateutil.relativedelta import relativedelta +from discord.ext.commands import Cog, Context, Greedy, group + +from bot.bot import Bot +from bot.constants import Guild, Icons, MODERATION_ROLES, POSITIVE_REPLIES, Roles, STAFF_ROLES +from bot.converters import Duration +from bot.pagination import LinePaginator +from bot.utils.checks import has_any_role_check, has_no_roles_check +from bot.utils.lock import lock_arg +from bot.utils.messages import send_denial +from bot.utils.scheduling import Scheduler +from bot.utils.time import humanize_delta + +log = logging.getLogger(__name__) + +LOCK_NAMESPACE = "reminder" +WHITELISTED_CHANNELS = Guild.reminder_whitelist +MAXIMUM_REMINDERS = 5 + +Mentionable = t.Union[discord.Member, discord.Role] + + +class Reminders(Cog): + """Provide in-channel reminder functionality.""" + + def __init__(self, bot: Bot): + self.bot = bot + self.scheduler = Scheduler(self.__class__.__name__) + + self.bot.loop.create_task(self.reschedule_reminders()) + + def cog_unload(self) -> None: + """Cancel scheduled tasks.""" + self.scheduler.cancel_all() + + async def reschedule_reminders(self) -> None: + """Get all current reminders from the API and reschedule them.""" + await self.bot.wait_until_guild_available() + response = await self.bot.api_client.get( + 'bot/reminders', + params={'active': 'true'} + ) + + now = datetime.utcnow() + + for reminder in response: + is_valid, *_ = self.ensure_valid_reminder(reminder) + if not is_valid: + continue + + remind_at = isoparse(reminder['expiration']).replace(tzinfo=None) + + # If the reminder is already overdue ... 
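# Annotation (editorial): reminders whose expiration is already in the past are delivered
# immediately, with a relativedelta describing how late they are (used for the
# "Sorry it arrived ... late!" header), while future reminders are handed to the Scheduler.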
+ if remind_at < now: + late = relativedelta(now, remind_at) + await self.send_reminder(reminder, late) + else: + self.schedule_reminder(reminder) + + def ensure_valid_reminder(self, reminder: dict) -> t.Tuple[bool, discord.User, discord.TextChannel]: + """Ensure reminder author and channel can be fetched otherwise delete the reminder.""" + user = self.bot.get_user(reminder['author']) + channel = self.bot.get_channel(reminder['channel_id']) + is_valid = True + if not user or not channel: + is_valid = False + log.info( + f"Reminder {reminder['id']} invalid: " + f"User {reminder['author']}={user}, Channel {reminder['channel_id']}={channel}." + ) + asyncio.create_task(self.bot.api_client.delete(f"bot/reminders/{reminder['id']}")) + + return is_valid, user, channel + + @staticmethod + async def _send_confirmation( + ctx: Context, + on_success: str, + reminder_id: t.Union[str, int], + delivery_dt: t.Optional[datetime], + ) -> None: + """Send an embed confirming the reminder change was made successfully.""" + embed = discord.Embed() + embed.colour = discord.Colour.green() + embed.title = random.choice(POSITIVE_REPLIES) + embed.description = on_success + + footer_str = f"ID: {reminder_id}" + if delivery_dt: + # Reminder deletion will have a `None` `delivery_dt` + footer_str = f"{footer_str}, Due: {delivery_dt.strftime('%Y-%m-%dT%H:%M:%S')}" + + embed.set_footer(text=footer_str) + + await ctx.send(embed=embed) + + @staticmethod + async def _check_mentions(ctx: Context, mentions: t.Iterable[Mentionable]) -> t.Tuple[bool, str]: + """ + Returns whether or not the list of mentions is allowed. + + Conditions: + - Role reminders are Mods+ + - Reminders for other users are Helpers+ + + If mentions aren't allowed, also return the type of mention(s) disallowed. + """ + if await has_no_roles_check(ctx, *STAFF_ROLES): + return False, "members/roles" + elif await has_no_roles_check(ctx, *MODERATION_ROLES): + return all(isinstance(mention, discord.Member) for mention in mentions), "roles" + else: + return True, "" + + @staticmethod + async def validate_mentions(ctx: Context, mentions: t.Iterable[Mentionable]) -> bool: + """ + Filter mentions to see if the user can mention, and sends a denial if not allowed. + + Returns whether or not the validation is successful. + """ + mentions_allowed, disallowed_mentions = await Reminders._check_mentions(ctx, mentions) + + if not mentions or mentions_allowed: + return True + else: + await send_denial(ctx, f"You can't mention other {disallowed_mentions} in your reminder!") + return False + + def get_mentionables(self, mention_ids: t.List[int]) -> t.Iterator[Mentionable]: + """Converts Role and Member ids to their corresponding objects if possible.""" + guild = self.bot.get_guild(Guild.id) + for mention_id in mention_ids: + if (mentionable := (guild.get_member(mention_id) or guild.get_role(mention_id))): + yield mentionable + + def schedule_reminder(self, reminder: dict) -> None: + """A coroutine which sends the reminder once the time is reached, and cancels the running task.""" + reminder_datetime = isoparse(reminder['expiration']).replace(tzinfo=None) + self.scheduler.schedule_at(reminder_datetime, reminder["id"], self.send_reminder(reminder)) + + async def _edit_reminder(self, reminder_id: int, payload: dict) -> dict: + """ + Edits a reminder in the database given the ID and payload. + + Returns the edited reminder. 
+ """ + # Send the request to update the reminder in the database + reminder = await self.bot.api_client.patch( + 'bot/reminders/' + str(reminder_id), + json=payload + ) + return reminder + + async def _reschedule_reminder(self, reminder: dict) -> None: + """Reschedule a reminder object.""" + log.trace(f"Cancelling old task #{reminder['id']}") + self.scheduler.cancel(reminder["id"]) + + log.trace(f"Scheduling new task #{reminder['id']}") + self.schedule_reminder(reminder) + + @lock_arg(LOCK_NAMESPACE, "reminder", itemgetter("id"), raise_error=True) + async def send_reminder(self, reminder: dict, late: relativedelta = None) -> None: + """Send the reminder.""" + is_valid, user, channel = self.ensure_valid_reminder(reminder) + if not is_valid: + # No need to cancel the task too; it'll simply be done once this coroutine returns. + return + + embed = discord.Embed() + embed.colour = discord.Colour.blurple() + embed.set_author( + icon_url=Icons.remind_blurple, + name="It has arrived!" + ) + + embed.description = f"Here's your reminder: `{reminder['content']}`." + + if reminder.get("jump_url"): # keep backward compatibility + embed.description += f"\n[Jump back to when you created the reminder]({reminder['jump_url']})" + + if late: + embed.colour = discord.Colour.red() + embed.set_author( + icon_url=Icons.remind_red, + name=f"Sorry it arrived {humanize_delta(late, max_units=2)} late!" + ) + + additional_mentions = ' '.join( + mentionable.mention for mentionable in self.get_mentionables(reminder["mentions"]) + ) + + await channel.send(content=f"{user.mention} {additional_mentions}", embed=embed) + + log.debug(f"Deleting reminder #{reminder['id']} (the user has been reminded).") + await self.bot.api_client.delete(f"bot/reminders/{reminder['id']}") + + @group(name="remind", aliases=("reminder", "reminders", "remindme"), invoke_without_command=True) + async def remind_group( + self, ctx: Context, mentions: Greedy[Mentionable], expiration: Duration, *, content: str + ) -> None: + """Commands for managing your reminders.""" + await self.new_reminder(ctx, mentions=mentions, expiration=expiration, content=content) + + @remind_group.command(name="new", aliases=("add", "create")) + async def new_reminder( + self, ctx: Context, mentions: Greedy[Mentionable], expiration: Duration, *, content: str + ) -> None: + """ + Set yourself a simple reminder. + + Expiration is parsed per: http://strftime.org/ + """ + # If the user is not staff, we need to verify whether or not to make a reminder at all. + if await has_no_roles_check(ctx, *STAFF_ROLES): + + # If they don't have permission to set a reminder in this channel + if ctx.channel.id not in WHITELISTED_CHANNELS: + await send_denial(ctx, "Sorry, you can't do that here!") + return + + # Get their current active reminders + active_reminders = await self.bot.api_client.get( + 'bot/reminders', + params={ + 'author__id': str(ctx.author.id) + } + ) + + # Let's limit this, so we don't get 10 000 + # reminders from kip or something like that :P + if len(active_reminders) > MAXIMUM_REMINDERS: + await send_denial(ctx, "You have too many active reminders!") + return + + # Remove duplicate mentions + mentions = set(mentions) + mentions.discard(ctx.author) + + # Filter mentions to see if the user can mention members/roles + if not await self.validate_mentions(ctx, mentions): + return + + mention_ids = [mention.id for mention in mentions] + + # Now we can attempt to actually set the reminder. 
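# Note (editorial, not from the patch): the POST below persists everything needed to deliver
# the reminder later (author and channel IDs, a jump URL back to the invoking message, the
# content, the ISO-8601 expiration, and any extra member/role IDs to mention), after which a
# confirmation embed is sent and the reminder is scheduled.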
+ reminder = await self.bot.api_client.post( + 'bot/reminders', + json={ + 'author': ctx.author.id, + 'channel_id': ctx.message.channel.id, + 'jump_url': ctx.message.jump_url, + 'content': content, + 'expiration': expiration.isoformat(), + 'mentions': mention_ids, + } + ) + + now = datetime.utcnow() - timedelta(seconds=1) + humanized_delta = humanize_delta(relativedelta(expiration, now)) + mention_string = f"Your reminder will arrive in {humanized_delta}" + + if mentions: + mention_string += f" and will mention {len(mentions)} other(s)" + mention_string += "!" + + # Confirm to the user that it worked. + await self._send_confirmation( + ctx, + on_success=mention_string, + reminder_id=reminder["id"], + delivery_dt=expiration, + ) + + self.schedule_reminder(reminder) + + @remind_group.command(name="list") + async def list_reminders(self, ctx: Context) -> None: + """View a paginated embed of all reminders for your user.""" + # Get all the user's reminders from the database. + data = await self.bot.api_client.get( + 'bot/reminders', + params={'author__id': str(ctx.author.id)} + ) + + now = datetime.utcnow() + + # Make a list of tuples so it can be sorted by time. + reminders = sorted( + ( + (rem['content'], rem['expiration'], rem['id'], rem['mentions']) + for rem in data + ), + key=itemgetter(1) + ) + + lines = [] + + for content, remind_at, id_, mentions in reminders: + # Parse and humanize the time, make it pretty :D + remind_datetime = isoparse(remind_at).replace(tzinfo=None) + time = humanize_delta(relativedelta(remind_datetime, now)) + + mentions = ", ".join( + # Both Role and User objects have the `name` attribute + mention.name for mention in self.get_mentionables(mentions) + ) + mention_string = f"\n**Mentions:** {mentions}" if mentions else "" + + text = textwrap.dedent(f""" + **Reminder #{id_}:** *expires in {time}* (ID: {id_}){mention_string} + {content} + """).strip() + + lines.append(text) + + embed = discord.Embed() + embed.colour = discord.Colour.blurple() + embed.title = f"Reminders for {ctx.author}" + + # Remind the user that they have no reminders :^) + if not lines: + embed.description = "No active reminders could be found." + await ctx.send(embed=embed) + return + + # Construct the embed and paginate it. + embed.colour = discord.Colour.blurple() + + await LinePaginator.paginate( + lines, + ctx, embed, + max_lines=3, + empty=True + ) + + @remind_group.group(name="edit", aliases=("change", "modify"), invoke_without_command=True) + async def edit_reminder_group(self, ctx: Context) -> None: + """Commands for modifying your current reminders.""" + await ctx.send_help(ctx.command) + + @edit_reminder_group.command(name="duration", aliases=("time",)) + async def edit_reminder_duration(self, ctx: Context, id_: int, expiration: Duration) -> None: + """ + Edit one of your reminder's expiration. 
+ + Expiration is parsed per: http://strftime.org/ + """ + await self.edit_reminder(ctx, id_, {'expiration': expiration.isoformat()}) + + @edit_reminder_group.command(name="content", aliases=("reason",)) + async def edit_reminder_content(self, ctx: Context, id_: int, *, content: str) -> None: + """Edit one of your reminder's content.""" + await self.edit_reminder(ctx, id_, {"content": content}) + + @edit_reminder_group.command(name="mentions", aliases=("pings",)) + async def edit_reminder_mentions(self, ctx: Context, id_: int, mentions: Greedy[Mentionable]) -> None: + """Edit one of your reminder's mentions.""" + # Remove duplicate mentions + mentions = set(mentions) + mentions.discard(ctx.author) + + # Filter mentions to see if the user can mention members/roles + if not await self.validate_mentions(ctx, mentions): + return + + mention_ids = [mention.id for mention in mentions] + await self.edit_reminder(ctx, id_, {"mentions": mention_ids}) + + @lock_arg(LOCK_NAMESPACE, "id_", raise_error=True) + async def edit_reminder(self, ctx: Context, id_: int, payload: dict) -> None: + """Edits a reminder with the given payload, then sends a confirmation message.""" + if not await self._can_modify(ctx, id_): + return + reminder = await self._edit_reminder(id_, payload) + + # Parse the reminder expiration back into a datetime + expiration = isoparse(reminder["expiration"]).replace(tzinfo=None) + + # Send a confirmation message to the channel + await self._send_confirmation( + ctx, + on_success="That reminder has been edited successfully!", + reminder_id=id_, + delivery_dt=expiration, + ) + await self._reschedule_reminder(reminder) + + @remind_group.command("delete", aliases=("remove", "cancel")) + @lock_arg(LOCK_NAMESPACE, "id_", raise_error=True) + async def delete_reminder(self, ctx: Context, id_: int) -> None: + """Delete one of your active reminders.""" + if not await self._can_modify(ctx, id_): + return + + await self.bot.api_client.delete(f"bot/reminders/{id_}") + self.scheduler.cancel(id_) + + await self._send_confirmation( + ctx, + on_success="That reminder has been deleted successfully!", + reminder_id=id_, + delivery_dt=None, + ) + + async def _can_modify(self, ctx: Context, reminder_id: t.Union[str, int]) -> bool: + """ + Check whether the reminder can be modified by the ctx author. + + The check passes when the user is an admin, or if they created the reminder. 
+ """ + if await has_any_role_check(ctx, Roles.admins): + return True + + api_response = await self.bot.api_client.get(f"bot/reminders/{reminder_id}") + if not api_response["author"] == ctx.author.id: + log.debug(f"{ctx.author} is not the reminder author and does not pass the check.") + await send_denial(ctx, "You can't modify reminders of other users!") + return False + + log.debug(f"{ctx.author} is the reminder author and passes the check.") + return True + + +def setup(bot: Bot) -> None: + """Load the Reminders cog.""" + bot.add_cog(Reminders(bot)) diff --git a/bot/exts/utils/snekbox.py b/bot/exts/utils/snekbox.py new file mode 100644 index 000000000..9f480c067 --- /dev/null +++ b/bot/exts/utils/snekbox.py @@ -0,0 +1,346 @@ +import asyncio +import contextlib +import datetime +import logging +import re +import textwrap +from functools import partial +from signal import Signals +from typing import Optional, Tuple + +from discord import HTTPException, Message, NotFound, Reaction, User +from discord.ext.commands import Cog, Context, command, guild_only + +from bot.bot import Bot +from bot.constants import Categories, Channels, Roles, URLs +from bot.decorators import in_whitelist +from bot.utils import send_to_paste_service +from bot.utils.messages import wait_for_deletion + +log = logging.getLogger(__name__) + +ESCAPE_REGEX = re.compile("[`\u202E\u200B]{3,}") +FORMATTED_CODE_REGEX = re.compile( + r"(?P<delim>(?P<block>```)|``?)" # code delimiter: 1-3 backticks; (?P=block) only matches if it's a block + r"(?(block)(?:(?P<lang>[a-z]+)\n)?)" # if we're in a block, match optional language (only letters plus newline) + r"(?:[ \t]*\n)*" # any blank (empty or tabs/spaces only) lines before the code + r"(?P<code>.*?)" # extract all code inside the markup + r"\s*" # any more whitespace before the end of the code markup + r"(?P=delim)", # match the exact same delimiter from the start again + re.DOTALL | re.IGNORECASE # "." also matches newlines, case insensitive +) +RAW_CODE_REGEX = re.compile( + r"^(?:[ \t]*\n)*" # any blank (empty or tabs/spaces only) lines before the code + r"(?P<code>.*?)" # extract all the rest as code + r"\s*$", # any trailing whitespace until the end of the string + re.DOTALL # "." 
also matches newlines +) + +MAX_PASTE_LEN = 10000 + +# `!eval` command whitelists +EVAL_CHANNELS = (Channels.bot_commands, Channels.esoteric) +EVAL_CATEGORIES = (Categories.help_available, Categories.help_in_use, Categories.voice) +EVAL_ROLES = (Roles.helpers, Roles.moderators, Roles.admins, Roles.owners, Roles.python_community, Roles.partners) + +SIGKILL = 9 + +REEVAL_EMOJI = '\U0001f501' # :repeat: +REEVAL_TIMEOUT = 30 + + +class Snekbox(Cog): + """Safe evaluation of Python code using Snekbox.""" + + def __init__(self, bot: Bot): + self.bot = bot + self.jobs = {} + + async def post_eval(self, code: str) -> dict: + """Send a POST request to the Snekbox API to evaluate code and return the results.""" + url = URLs.snekbox_eval_api + data = {"input": code} + async with self.bot.http_session.post(url, json=data, raise_for_status=True) as resp: + return await resp.json() + + async def upload_output(self, output: str) -> Optional[str]: + """Upload the eval output to a paste service and return a URL to it if successful.""" + log.trace("Uploading full output to paste service...") + + if len(output) > MAX_PASTE_LEN: + log.info("Full output is too long to upload") + return "too long to upload" + return await send_to_paste_service(output, extension="txt") + + @staticmethod + def prepare_input(code: str) -> str: + """ + Extract code from the Markdown, format it, and insert it into the code template. + + If there is any code block, ignore text outside the code block. + Use the first code block, but prefer a fenced code block. + If there are several fenced code blocks, concatenate only the fenced code blocks. + """ + if match := list(FORMATTED_CODE_REGEX.finditer(code)): + blocks = [block for block in match if block.group("block")] + + if len(blocks) > 1: + code = '\n'.join(block.group("code") for block in blocks) + info = "several code blocks" + else: + match = match[0] if len(blocks) == 0 else blocks[0] + code, block, lang, delim = match.group("code", "block", "lang", "delim") + if block: + info = (f"'{lang}' highlighted" if lang else "plain") + " code block" + else: + info = f"{delim}-enclosed inline code" + else: + code = RAW_CODE_REGEX.fullmatch(code).group("code") + info = "unformatted or badly formatted code" + + code = textwrap.dedent(code) + log.trace(f"Extracted {info} for evaluation:\n{code}") + return code + + @staticmethod + def get_results_message(results: dict) -> Tuple[str, str]: + """Return a user-friendly message and error corresponding to the process's return code.""" + stdout, returncode = results["stdout"], results["returncode"] + msg = f"Your eval job has completed with return code {returncode}" + error = "" + + if returncode is None: + msg = "Your eval job has failed" + error = stdout.strip() + elif returncode == 128 + SIGKILL: + msg = "Your eval job timed out or ran out of memory" + elif returncode == 255: + msg = "Your eval job has failed" + error = "A fatal NsJail error occurred" + else: + # Try to append signal's name if one exists + try: + name = Signals(returncode - 128).name + msg = f"{msg} ({name})" + except ValueError: + pass + + return msg, error + + @staticmethod + def get_status_emoji(results: dict) -> str: + """Return an emoji corresponding to the status code or lack of output in result.""" + if not results["stdout"].strip(): # No output + return ":warning:" + elif results["returncode"] == 0: # No error + return ":white_check_mark:" + else: # Exception + return ":x:" + + async def format_output(self, output: str) -> Tuple[str, Optional[str]]: + """ + Format the 
output and return a tuple of the formatted output and a URL to the full output. + + Prepend each line with a line number. Truncate if there are over 10 lines or 1000 characters + and upload the full output to a paste service. + """ + log.trace("Formatting output...") + + output = output.rstrip("\n") + original_output = output # To be uploaded to a pasting service if needed + paste_link = None + + if "<@" in output: + output = output.replace("<@", "<@\u200B") # Zero-width space + + if "<!@" in output: + output = output.replace("<!@", "<!@\u200B") # Zero-width space + + if ESCAPE_REGEX.findall(output): + paste_link = await self.upload_output(original_output) + return "Code block escape attempt detected; will not output result", paste_link + + truncated = False + lines = output.count("\n") + + if lines > 0: + output = [f"{i:03d} | {line}" for i, line in enumerate(output.split('\n'), 1)] + output = output[:11] # Limiting to only 11 lines + output = "\n".join(output) + + if lines > 10: + truncated = True + if len(output) >= 1000: + output = f"{output[:1000]}\n... (truncated - too long, too many lines)" + else: + output = f"{output}\n... (truncated - too many lines)" + elif len(output) >= 1000: + truncated = True + output = f"{output[:1000]}\n... (truncated - too long)" + + if truncated: + paste_link = await self.upload_output(original_output) + + output = output or "[No output]" + + return output, paste_link + + async def send_eval(self, ctx: Context, code: str) -> Message: + """ + Evaluate code, format it, and send the output to the corresponding channel. + + Return the bot response. + """ + async with ctx.typing(): + results = await self.post_eval(code) + msg, error = self.get_results_message(results) + + if error: + output, paste_link = error, None + else: + output, paste_link = await self.format_output(results["stdout"]) + + icon = self.get_status_emoji(results) + msg = f"{ctx.author.mention} {icon} {msg}.\n\n```\n{output}\n```" + if paste_link: + msg = f"{msg}\nFull output: {paste_link}" + + # Collect stats of eval fails + successes + if icon == ":x:": + self.bot.stats.incr("snekbox.python.fail") + else: + self.bot.stats.incr("snekbox.python.success") + + filter_cog = self.bot.get_cog("Filtering") + filter_triggered = False + if filter_cog: + filter_triggered = await filter_cog.filter_eval(msg, ctx.message) + if filter_triggered: + response = await ctx.send("Attempt to circumvent filter detected. Moderator team has been alerted.") + else: + response = await ctx.send(msg) + self.bot.loop.create_task(wait_for_deletion(response, (ctx.author.id,))) + + log.info(f"{ctx.author}'s job had a return code of {results['returncode']}") + return response + + async def continue_eval(self, ctx: Context, response: Message) -> Optional[str]: + """ + Check if the eval session should continue. + + Return the new code to evaluate or None if the eval session should be terminated. 
+ """ + _predicate_eval_message_edit = partial(predicate_eval_message_edit, ctx) + _predicate_emoji_reaction = partial(predicate_eval_emoji_reaction, ctx) + + with contextlib.suppress(NotFound): + try: + _, new_message = await self.bot.wait_for( + 'message_edit', + check=_predicate_eval_message_edit, + timeout=REEVAL_TIMEOUT + ) + await ctx.message.add_reaction(REEVAL_EMOJI) + await self.bot.wait_for( + 'reaction_add', + check=_predicate_emoji_reaction, + timeout=10 + ) + + code = await self.get_code(new_message) + await ctx.message.clear_reaction(REEVAL_EMOJI) + with contextlib.suppress(HTTPException): + await response.delete() + + except asyncio.TimeoutError: + await ctx.message.clear_reaction(REEVAL_EMOJI) + return None + + return code + + async def get_code(self, message: Message) -> Optional[str]: + """ + Return the code from `message` to be evaluated. + + If the message is an invocation of the eval command, return the first argument or None if it + doesn't exist. Otherwise, return the full content of the message. + """ + log.trace(f"Getting context for message {message.id}.") + new_ctx = await self.bot.get_context(message) + + if new_ctx.command is self.eval_command: + log.trace(f"Message {message.id} invokes eval command.") + split = message.content.split(maxsplit=1) + code = split[1] if len(split) > 1 else None + else: + log.trace(f"Message {message.id} does not invoke eval command.") + code = message.content + + return code + + @command(name="eval", aliases=("e",)) + @guild_only() + @in_whitelist(channels=EVAL_CHANNELS, categories=EVAL_CATEGORIES, roles=EVAL_ROLES) + async def eval_command(self, ctx: Context, *, code: str = None) -> None: + """ + Run Python code and get the results. + + This command supports multiple lines of code, including code wrapped inside a formatted code + block. Code can be re-evaluated by editing the original message within 10 seconds and + clicking the reaction that subsequently appears. + + We've done our best to make this sandboxed, but do let us know if you manage to find an + issue with it! + """ + if ctx.author.id in self.jobs: + await ctx.send( + f"{ctx.author.mention} You've already got a job running - " + "please wait for it to finish!" 
+ ) + return + + if not code: # None or empty string + await ctx.send_help(ctx.command) + return + + if Roles.helpers in (role.id for role in ctx.author.roles): + self.bot.stats.incr("snekbox_usages.roles.helpers") + else: + self.bot.stats.incr("snekbox_usages.roles.developers") + + if ctx.channel.category_id == Categories.help_in_use: + self.bot.stats.incr("snekbox_usages.channels.help") + elif ctx.channel.id == Channels.bot_commands: + self.bot.stats.incr("snekbox_usages.channels.bot_commands") + else: + self.bot.stats.incr("snekbox_usages.channels.topical") + + log.info(f"Received code from {ctx.author} for evaluation:\n{code}") + + while True: + self.jobs[ctx.author.id] = datetime.datetime.now() + code = self.prepare_input(code) + try: + response = await self.send_eval(ctx, code) + finally: + del self.jobs[ctx.author.id] + + code = await self.continue_eval(ctx, response) + if not code: + break + log.info(f"Re-evaluating code from message {ctx.message.id}:\n{code}") + + +def predicate_eval_message_edit(ctx: Context, old_msg: Message, new_msg: Message) -> bool: + """Return True if the edited message is the context message and the content was indeed modified.""" + return new_msg.id == ctx.message.id and old_msg.content != new_msg.content + + +def predicate_eval_emoji_reaction(ctx: Context, reaction: Reaction, user: User) -> bool: + """Return True if the reaction REEVAL_EMOJI was added by the context message author on this message.""" + return reaction.message.id == ctx.message.id and user.id == ctx.author.id and str(reaction) == REEVAL_EMOJI + + +def setup(bot: Bot) -> None: + """Load the Snekbox cog.""" + bot.add_cog(Snekbox(bot)) diff --git a/bot/exts/utils/utils.py b/bot/exts/utils/utils.py new file mode 100644 index 000000000..eb92dfca7 --- /dev/null +++ b/bot/exts/utils/utils.py @@ -0,0 +1,198 @@ +import difflib +import logging +import re +import unicodedata +from typing import Tuple, Union + +from discord import Colour, Embed, utils +from discord.ext.commands import BadArgument, Cog, Context, clean_content, command, has_any_role +from discord.utils import snowflake_time + +from bot.bot import Bot +from bot.constants import Channels, MODERATION_ROLES, STAFF_ROLES +from bot.converters import Snowflake +from bot.decorators import in_whitelist +from bot.pagination import LinePaginator +from bot.utils import messages +from bot.utils.time import time_since + +log = logging.getLogger(__name__) + +ZEN_OF_PYTHON = """\ +Beautiful is better than ugly. +Explicit is better than implicit. +Simple is better than complex. +Complex is better than complicated. +Flat is better than nested. +Sparse is better than dense. +Readability counts. +Special cases aren't special enough to break the rules. +Although practicality beats purity. +Errors should never pass silently. +Unless explicitly silenced. +In the face of ambiguity, refuse the temptation to guess. +There should be one-- and preferably only one --obvious way to do it. +Although that way may not be obvious at first unless you're Dutch. +Now is better than never. +Although never is often better than *right* now. +If the implementation is hard to explain, it's a bad idea. +If the implementation is easy to explain, it may be a good idea. +Namespaces are one honking great idea -- let's do more of those! 
+""" + + +class Utils(Cog): + """A selection of utilities which don't have a clear category.""" + + def __init__(self, bot: Bot): + self.bot = bot + + @command() + @in_whitelist(channels=(Channels.bot_commands,), roles=STAFF_ROLES) + async def charinfo(self, ctx: Context, *, characters: str) -> None: + """Shows you information on up to 50 unicode characters.""" + match = re.match(r"<(a?):(\w+):(\d+)>", characters) + if match: + return await messages.send_denial( + ctx, + "**Non-Character Detected**\n" + "Only unicode characters can be processed, but a custom Discord emoji " + "was found. Please remove it and try again." + ) + + if len(characters) > 50: + return await messages.send_denial(ctx, f"Too many characters ({len(characters)}/50)") + + def get_info(char: str) -> Tuple[str, str]: + digit = f"{ord(char):x}" + if len(digit) <= 4: + u_code = f"\\u{digit:>04}" + else: + u_code = f"\\U{digit:>08}" + url = f"https://www.compart.com/en/unicode/U+{digit:>04}" + name = f"[{unicodedata.name(char, '')}]({url})" + info = f"`{u_code.ljust(10)}`: {name} - {utils.escape_markdown(char)}" + return info, u_code + + char_list, raw_list = zip(*(get_info(c) for c in characters)) + embed = Embed().set_author(name="Character Info") + + if len(characters) > 1: + # Maximum length possible is 502 out of 1024, so there's no need to truncate. + embed.add_field(name='Full Raw Text', value=f"`{''.join(raw_list)}`", inline=False) + + await LinePaginator.paginate(char_list, ctx, embed, max_lines=10, max_size=2000, empty=False) + + @command() + async def zen(self, ctx: Context, *, search_value: Union[int, str, None] = None) -> None: + """ + Show the Zen of Python. + + Without any arguments, the full Zen will be produced. + If an integer is provided, the line with that index will be produced. + If a string is provided, the line which matches best will be produced. + """ + embed = Embed( + colour=Colour.blurple(), + title="The Zen of Python", + description=ZEN_OF_PYTHON + ) + + if search_value is None: + embed.title += ", by Tim Peters" + await ctx.send(embed=embed) + return + + zen_lines = ZEN_OF_PYTHON.splitlines() + + # handle if it's an index int + if isinstance(search_value, int): + upper_bound = len(zen_lines) - 1 + lower_bound = -1 * upper_bound + if not (lower_bound <= search_value <= upper_bound): + raise BadArgument(f"Please provide an index between {lower_bound} and {upper_bound}.") + + embed.title += f" (line {search_value % len(zen_lines)}):" + embed.description = zen_lines[search_value] + await ctx.send(embed=embed) + return + + # Try to handle first exact word due difflib.SequenceMatched may use some other similar word instead + # exact word. + for i, line in enumerate(zen_lines): + for word in line.split(): + if word.lower() == search_value.lower(): + embed.title += f" (line {i}):" + embed.description = line + await ctx.send(embed=embed) + return + + # handle if it's a search string and not exact word + matcher = difflib.SequenceMatcher(None, search_value.lower()) + + best_match = "" + match_index = 0 + best_ratio = 0 + + for index, line in enumerate(zen_lines): + matcher.set_seq2(line.lower()) + + # the match ratio needs to be adjusted because, naturally, + # longer lines will have worse ratios than shorter lines when + # fuzzy searching for keywords. this seems to work okay. 
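# Worked example (editorial, not from the patch): searching for "readability" against the
# 19-character line "Readability counts." gives a raw ratio of roughly 0.73; scaling by
# (19 - 5) ** 0.5, about 3.74, yields an adjusted score near 2.7. The length factor
# compensates for longer lines, whose raw ratios are naturally lower when matching a short
# keyword.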
+ adjusted_ratio = (len(line) - 5) ** 0.5 * matcher.ratio() + + if adjusted_ratio > best_ratio: + best_ratio = adjusted_ratio + best_match = line + match_index = index + + if not best_match: + raise BadArgument("I didn't get a match! Please try again with a different search term.") + + embed.title += f" (line {match_index}):" + embed.description = best_match + await ctx.send(embed=embed) + + @command(aliases=("snf", "snfl", "sf")) + @in_whitelist(channels=(Channels.bot_commands,), roles=STAFF_ROLES) + async def snowflake(self, ctx: Context, snowflake: Snowflake) -> None: + """Get Discord snowflake creation time.""" + created_at = snowflake_time(snowflake) + embed = Embed( + description=f"**Created at {created_at}** ({time_since(created_at, max_units=3)}).", + colour=Colour.blue() + ) + embed.set_author( + name=f"Snowflake: {snowflake}", + icon_url="https://github.com/twitter/twemoji/blob/master/assets/72x72/2744.png?raw=true" + ) + await ctx.send(embed=embed) + + @command(aliases=("poll",)) + @has_any_role(*MODERATION_ROLES) + async def vote(self, ctx: Context, title: clean_content(fix_channel_mentions=True), *options: str) -> None: + """ + Build a quick voting poll with matching reactions with the provided options. + + A maximum of 20 options can be provided, as Discord supports a max of 20 + reactions on a single message. + """ + if len(title) > 256: + raise BadArgument("The title cannot be longer than 256 characters.") + if len(options) < 2: + raise BadArgument("Please provide at least 2 options.") + if len(options) > 20: + raise BadArgument("I can only handle 20 options!") + + codepoint_start = 127462 # represents "regional_indicator_a" unicode value + options = {chr(i): f"{chr(i)} - {v}" for i, v in enumerate(options, start=codepoint_start)} + embed = Embed(title=title, description="\n".join(options.values())) + message = await ctx.send(embed=embed) + for reaction in options: + await message.add_reaction(reaction) + + +def setup(bot: Bot) -> None: + """Load the Utils cog.""" + bot.add_cog(Utils(bot)) diff --git a/bot/interpreter.py b/bot/interpreter.py index 8b7268746..b58f7a6b0 100644 --- a/bot/interpreter.py +++ b/bot/interpreter.py @@ -4,7 +4,7 @@ from typing import Any from discord.ext.commands import Context -from bot.bot import Bot +import bot CODE_TEMPLATE = """ async def _func(): @@ -21,8 +21,8 @@ class Interpreter(InteractiveInterpreter): write_callable = None - def __init__(self, bot: Bot): - locals_ = {"bot": bot} + def __init__(self): + locals_ = {"bot": bot.instance} super().__init__(locals_) async def run(self, code: str, ctx: Context, io: StringIO, *args, **kwargs) -> Any: diff --git a/bot/log.py b/bot/log.py new file mode 100644 index 000000000..0935666d1 --- /dev/null +++ b/bot/log.py @@ -0,0 +1,85 @@ +import logging +import os +import sys +from logging import Logger, handlers +from pathlib import Path + +import coloredlogs +import sentry_sdk +from sentry_sdk.integrations.logging import LoggingIntegration +from sentry_sdk.integrations.redis import RedisIntegration + +from bot import constants + +TRACE_LEVEL = 5 + + +def setup() -> None: + """Set up loggers.""" + logging.TRACE = TRACE_LEVEL + logging.addLevelName(TRACE_LEVEL, "TRACE") + Logger.trace = _monkeypatch_trace + + log_level = TRACE_LEVEL if constants.DEBUG_MODE else logging.INFO + format_string = "%(asctime)s | %(name)s | %(levelname)s | %(message)s" + log_format = logging.Formatter(format_string) + + log_file = Path("logs", "bot.log") + log_file.parent.mkdir(exist_ok=True) + file_handler = 
handlers.RotatingFileHandler(log_file, maxBytes=5242880, backupCount=7, encoding="utf8") + file_handler.setFormatter(log_format) + + root_log = logging.getLogger() + root_log.setLevel(log_level) + root_log.addHandler(file_handler) + + if "COLOREDLOGS_LEVEL_STYLES" not in os.environ: + coloredlogs.DEFAULT_LEVEL_STYLES = { + **coloredlogs.DEFAULT_LEVEL_STYLES, + "trace": {"color": 246}, + "critical": {"background": "red"}, + "debug": coloredlogs.DEFAULT_LEVEL_STYLES["info"] + } + + if "COLOREDLOGS_LOG_FORMAT" not in os.environ: + coloredlogs.DEFAULT_LOG_FORMAT = format_string + + if "COLOREDLOGS_LOG_LEVEL" not in os.environ: + coloredlogs.DEFAULT_LOG_LEVEL = log_level + + coloredlogs.install(logger=root_log, stream=sys.stdout) + + logging.getLogger("discord").setLevel(logging.WARNING) + logging.getLogger("websockets").setLevel(logging.WARNING) + logging.getLogger("chardet").setLevel(logging.WARNING) + logging.getLogger("async_rediscache").setLevel(logging.WARNING) + + +def setup_sentry() -> None: + """Set up the Sentry logging integrations.""" + sentry_logging = LoggingIntegration( + level=logging.DEBUG, + event_level=logging.WARNING + ) + + sentry_sdk.init( + dsn=constants.Bot.sentry_dsn, + integrations=[ + sentry_logging, + RedisIntegration(), + ], + release=f"bot@{constants.GIT_SHA}" + ) + + +def _monkeypatch_trace(self: logging.Logger, msg: str, *args, **kwargs) -> None: + """ + Log 'msg % args' with severity 'TRACE'. + + To pass exception information, use the keyword argument exc_info with + a true value, e.g. + + logger.trace("Houston, we have an %s", "interesting problem", exc_info=1) + """ + if self.isEnabledFor(TRACE_LEVEL): + self._log(TRACE_LEVEL, msg, args, **kwargs) diff --git a/bot/pagination.py b/bot/pagination.py index e82763912..182b2fa76 100644 --- a/bot/pagination.py +++ b/bot/pagination.py @@ -1,8 +1,9 @@ import asyncio import logging -from typing import Iterable, List, Optional, Tuple +import typing as t +from contextlib import suppress -from discord import Embed, Member, Message, Reaction +import discord from discord.abc import User from discord.ext.commands import Context, Paginator @@ -14,7 +15,7 @@ RIGHT_EMOJI = "\u27A1" # [:arrow_right:] LAST_EMOJI = "\u23ED" # [:track_next:] DELETE_EMOJI = constants.Emojis.trashcan # [:trashcan:] -PAGINATION_EMOJI = [FIRST_EMOJI, LEFT_EMOJI, RIGHT_EMOJI, LAST_EMOJI, DELETE_EMOJI] +PAGINATION_EMOJI = (FIRST_EMOJI, LEFT_EMOJI, RIGHT_EMOJI, LAST_EMOJI, DELETE_EMOJI) log = logging.getLogger(__name__) @@ -36,12 +37,19 @@ class LinePaginator(Paginator): The suffix appended at the end of every page. e.g. three backticks. * max_size: `int` The maximum amount of codepoints allowed in a page. + * scale_to_size: `int` + The maximum amount of characters a single line can scale up to. * max_lines: `int` The maximum amount of lines allowed in a page. """ def __init__( - self, prefix: str = '```', suffix: str = '```', max_size: int = 2000, max_lines: int = None + self, + prefix: str = '```', + suffix: str = '```', + max_size: int = 2000, + scale_to_size: int = 2000, + max_lines: t.Optional[int] = None ) -> None: """ This function overrides the Paginator.__init__ from inside discord.ext.commands. @@ -50,7 +58,21 @@ class LinePaginator(Paginator): """ self.prefix = prefix self.suffix = suffix + + # Embeds that exceed 2048 characters will result in an HTTPException + # (Discord API limit), so we've set a limit of 2000 + if max_size > 2000: + raise ValueError(f"max_size must be <= 2,000 characters. 
({max_size} > 2000)") + self.max_size = max_size - len(suffix) + + if scale_to_size < max_size: + raise ValueError(f"scale_to_size must be >= max_size. ({scale_to_size} < {max_size})") + + if scale_to_size > 2000: + raise ValueError(f"scale_to_size must be <= 2,000 characters. ({scale_to_size} > 2000)") + + self.scale_to_size = scale_to_size - len(suffix) self.max_lines = max_lines self._current_page = [prefix] self._linecount = 0 @@ -61,23 +83,38 @@ class LinePaginator(Paginator): """ Adds a line to the current page. - If the line exceeds the `self.max_size` then an exception is raised. + If a line on a page exceeds `max_size` characters, then `max_size` will go up to + `scale_to_size` for a single line before creating a new page for the overflow words. If it + is still exceeded, the excess characters are stored and placed on the next pages unti + there are none remaining (by word boundary). The line is truncated if `scale_to_size` is + still exceeded after attempting to continue onto the next page. + + In the case that the page already contains one or more lines and the new lines would cause + `max_size` to be exceeded, a new page is created. This is done in order to make a best + effort to avoid breaking up single lines across pages, while keeping the total length of the + page at a reasonable size. This function overrides the `Paginator.add_line` from inside `discord.ext.commands`. It overrides in order to allow us to configure the maximum number of lines per page. """ - if len(line) > self.max_size - len(self.prefix) - 2: - raise RuntimeError('Line exceeds maximum page size %s' % (self.max_size - len(self.prefix) - 2)) - - if self.max_lines is not None: - if self._linecount >= self.max_lines: - self._linecount = 0 - self.close_page() - - self._linecount += 1 - if self._count + len(line) + 1 > self.max_size: - self.close_page() + remaining_words = None + if len(line) > (max_chars := self.max_size - len(self.prefix) - 2): + if len(line) > self.scale_to_size: + line, remaining_words = self._split_remaining_words(line, max_chars) + if len(line) > self.scale_to_size: + log.debug("Could not continue to next page, truncating line.") + line = line[:self.scale_to_size] + + # Check if we should start a new page or continue the line on the current one + if self.max_lines is not None and self._linecount >= self.max_lines: + log.debug("max_lines exceeded, creating new page.") + self._new_page() + elif self._count + len(line) + 1 > self.max_size and self._linecount > 0: + log.debug("max_size exceeded on page with lines, creating new page.") + self._new_page() + + self._linecount += 1 self._count += len(line) + 1 self._current_page.append(line) @@ -86,23 +123,83 @@ class LinePaginator(Paginator): self._current_page.append('') self._count += 1 + # Start a new page if there were any overflow words + if remaining_words: + self._new_page() + self.add_line(remaining_words) + + def _new_page(self) -> None: + """ + Internal: start a new page for the paginator. + + This closes the current page and resets the counters for the new page's line count and + character count. + """ + self._linecount = 0 + self._count = len(self.prefix) + 1 + self.close_page() + + def _split_remaining_words(self, line: str, max_chars: int) -> t.Tuple[str, t.Optional[str]]: + """ + Internal: split a line into two strings -- reduced_words and remaining_words. + + reduced_words: the remaining words in `line`, after attempting to remove all words that + exceed `max_chars` (rounding down to the nearest word boundary). 
+ + remaining_words: the words in `line` which exceed `max_chars`. This value is None if + no words could be split from `line`. + + If there are any remaining_words, an ellipses is appended to reduced_words and a + continuation header is inserted before remaining_words to visually communicate the line + continuation. + + Return a tuple in the format (reduced_words, remaining_words). + """ + reduced_words = [] + remaining_words = [] + + # "(Continued)" is used on a line by itself to indicate the continuation of last page + continuation_header = "(Continued)\n-----------\n" + reduced_char_count = 0 + is_full = False + + for word in line.split(" "): + if not is_full: + if len(word) + reduced_char_count <= max_chars: + reduced_words.append(word) + reduced_char_count += len(word) + 1 + else: + # If reduced_words is empty, we were unable to split the words across pages + if not reduced_words: + return line, None + is_full = True + remaining_words.append(word) + else: + remaining_words.append(word) + + return ( + " ".join(reduced_words) + "..." if remaining_words else "", + continuation_header + " ".join(remaining_words) if remaining_words else None + ) + @classmethod async def paginate( cls, - lines: Iterable[str], + lines: t.List[str], ctx: Context, - embed: Embed, + embed: discord.Embed, prefix: str = "", suffix: str = "", - max_lines: Optional[int] = None, + max_lines: t.Optional[int] = None, max_size: int = 500, + scale_to_size: int = 2000, empty: bool = True, restrict_to_user: User = None, timeout: int = 300, footer_text: str = None, url: str = None, - exception_on_empty_embed: bool = False - ) -> Optional[Message]: + exception_on_empty_embed: bool = False, + ) -> t.Optional[discord.Message]: """ Use a paginator and set of reactions to provide pagination over a set of lines. @@ -114,11 +211,11 @@ class LinePaginator(Paginator): Pagination will also be removed automatically if no reaction is added for five minutes (300 seconds). 
Example: - >>> embed = Embed() + >>> embed = discord.Embed() >>> embed.set_author(name="Some Operation", url=url, icon_url=icon) - >>> await LinePaginator.paginate((line for line in lines), ctx, embed) + >>> await LinePaginator.paginate([line for line in lines], ctx, embed) """ - def event_check(reaction_: Reaction, user_: Member) -> bool: + def event_check(reaction_: discord.Reaction, user_: discord.Member) -> bool: """Make sure that this reaction is what we want to operate on.""" no_restrictions = ( # Pagination is not restricted @@ -141,12 +238,13 @@ class LinePaginator(Paginator): )) ) - paginator = cls(prefix=prefix, suffix=suffix, max_size=max_size, max_lines=max_lines) + paginator = cls(prefix=prefix, suffix=suffix, max_size=max_size, max_lines=max_lines, + scale_to_size=scale_to_size) current_page = 0 if not lines: if exception_on_empty_embed: - log.exception(f"Pagination asked for empty lines iterable") + log.exception("Pagination asked for empty lines iterable") raise EmptyPaginatorEmbed("No lines to paginate") log.debug("No lines to add to paginator, adding '(nothing to display)' message") @@ -215,8 +313,6 @@ class LinePaginator(Paginator): log.debug(f"Got first page reaction - changing to page 1/{len(paginator.pages)}") - embed.description = "" - await message.edit(embed=embed) embed.description = paginator.pages[current_page] if footer_text: embed.set_footer(text=f"{footer_text} (Page {current_page + 1}/{len(paginator.pages)})") @@ -230,8 +326,6 @@ class LinePaginator(Paginator): log.debug(f"Got last page reaction - changing to page {current_page + 1}/{len(paginator.pages)}") - embed.description = "" - await message.edit(embed=embed) embed.description = paginator.pages[current_page] if footer_text: embed.set_footer(text=f"{footer_text} (Page {current_page + 1}/{len(paginator.pages)})") @@ -249,8 +343,6 @@ class LinePaginator(Paginator): current_page -= 1 log.debug(f"Got previous page reaction - changing to page {current_page + 1}/{len(paginator.pages)}") - embed.description = "" - await message.edit(embed=embed) embed.description = paginator.pages[current_page] if footer_text: @@ -270,8 +362,6 @@ class LinePaginator(Paginator): current_page += 1 log.debug(f"Got next page reaction - changing to page {current_page + 1}/{len(paginator.pages)}") - embed.description = "" - await message.edit(embed=embed) embed.description = paginator.pages[current_page] if footer_text: @@ -281,169 +371,6 @@ class LinePaginator(Paginator): await message.edit(embed=embed) - log.debug("Ending pagination and removing all reactions...") - await message.clear_reactions() - - -class ImagePaginator(Paginator): - """ - Helper class that paginates images for embeds in messages. - - Close resemblance to LinePaginator, except focuses on images over text. - - Refer to ImagePaginator.paginate for documentation on how to use. 
- """ - - def __init__(self, prefix: str = "", suffix: str = ""): - super().__init__(prefix, suffix) - self._current_page = [prefix] - self.images = [] - self._pages = [] - - def add_line(self, line: str = '', *, empty: bool = False) -> None: - """Adds a line to each page.""" - if line: - self._count = len(line) - else: - self._count = 0 - self._current_page.append(line) - self.close_page() - - def add_image(self, image: str = None) -> None: - """Adds an image to a page.""" - self.images.append(image) - - @classmethod - async def paginate( - cls, - pages: List[Tuple[str, str]], - ctx: Context, embed: Embed, - prefix: str = "", - suffix: str = "", - timeout: int = 300, - exception_on_empty_embed: bool = False - ) -> Optional[Message]: - """ - Use a paginator and set of reactions to provide pagination over a set of title/image pairs. - - The reactions are used to switch page, or to finish with pagination. - - When used, this will send a message using `ctx.send()` and apply a set of reactions to it. These reactions may - be used to change page, or to remove pagination from the message. - - Note: Pagination will be removed automatically if no reaction is added for five minutes (300 seconds). - - Example: - >>> embed = Embed() - >>> embed.set_author(name="Some Operation", url=url, icon_url=icon) - >>> await ImagePaginator.paginate(pages, ctx, embed) - """ - def check_event(reaction_: Reaction, member: Member) -> bool: - """Checks each reaction added, if it matches our conditions pass the wait_for.""" - return all(( - # Reaction is on the same message sent - reaction_.message.id == message.id, - # The reaction is part of the navigation menu - str(reaction_.emoji) in PAGINATION_EMOJI, - # The reactor is not a bot - not member.bot - )) - - paginator = cls(prefix=prefix, suffix=suffix) - current_page = 0 - - if not pages: - if exception_on_empty_embed: - log.exception(f"Pagination asked for empty image list") - raise EmptyPaginatorEmbed("No images to paginate") - - log.debug("No images to add to paginator, adding '(no images to display)' message") - pages.append(("(no images to display)", "")) - - for text, image_url in pages: - paginator.add_line(text) - paginator.add_image(image_url) - - embed.description = paginator.pages[current_page] - image = paginator.images[current_page] - - if image: - embed.set_image(url=image) - - if len(paginator.pages) <= 1: - return await ctx.send(embed=embed) - - embed.set_footer(text=f"Page {current_page + 1}/{len(paginator.pages)}") - message = await ctx.send(embed=embed) - - for emoji in PAGINATION_EMOJI: - await message.add_reaction(emoji) - - while True: - # Start waiting for reactions - try: - reaction, user = await ctx.bot.wait_for("reaction_add", timeout=timeout, check=check_event) - except asyncio.TimeoutError: - log.debug("Timed out waiting for a reaction") - break # We're done, no reactions for the last 5 minutes - - # Deletes the users reaction - await message.remove_reaction(reaction.emoji, user) - - # Delete reaction press - [:trashcan:] - if str(reaction.emoji) == DELETE_EMOJI: - log.debug("Got delete reaction") - return await message.delete() - - # First reaction press - [:track_previous:] - if reaction.emoji == FIRST_EMOJI: - if current_page == 0: - log.debug("Got first page reaction, but we're on the first page - ignoring") - continue - - current_page = 0 - reaction_type = "first" - - # Last reaction press - [:track_next:] - if reaction.emoji == LAST_EMOJI: - if current_page >= len(paginator.pages) - 1: - log.debug("Got last page reaction, but we're 
on the last page - ignoring") - continue - - current_page = len(paginator.pages) - 1 - reaction_type = "last" - - # Previous reaction press - [:arrow_left: ] - if reaction.emoji == LEFT_EMOJI: - if current_page <= 0: - log.debug("Got previous page reaction, but we're on the first page - ignoring") - continue - - current_page -= 1 - reaction_type = "previous" - - # Next reaction press - [:arrow_right:] - if reaction.emoji == RIGHT_EMOJI: - if current_page >= len(paginator.pages) - 1: - log.debug("Got next page reaction, but we're on the last page - ignoring") - continue - - current_page += 1 - reaction_type = "next" - - # Magic happens here, after page and reaction_type is set - embed.description = "" - await message.edit(embed=embed) - embed.description = paginator.pages[current_page] - - image = paginator.images[current_page] - if image: - embed.set_image(url=image) - - embed.set_footer(text=f"Page {current_page + 1}/{len(paginator.pages)}") - log.debug(f"Got {reaction_type} page reaction - changing to page {current_page + 1}/{len(paginator.pages)}") - - await message.edit(embed=embed) - - log.debug("Ending pagination and removing all reactions...") - await message.clear_reactions() + log.debug("Ending pagination and clearing reactions.") + with suppress(discord.NotFound): + await message.clear_reactions() diff --git a/bot/patches/__init__.py b/bot/patches/__init__.py deleted file mode 100644 index 60f6becaa..000000000 --- a/bot/patches/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Subpackage that contains patches for discord.py.""" -from . import message_edited_at - -__all__ = [ - message_edited_at, -] diff --git a/bot/patches/message_edited_at.py b/bot/patches/message_edited_at.py deleted file mode 100644 index a0154f12d..000000000 --- a/bot/patches/message_edited_at.py +++ /dev/null @@ -1,32 +0,0 @@ -""" -# message_edited_at patch. - -Date: 2019-09-16 -Author: Scragly -Added by: Ves Zappa - -Due to a bug in our current version of discord.py (1.2.3), the edited_at timestamp of -`discord.Messages` are not being handled correctly. This patch fixes that until a new -release of discord.py is released (and we've updated to it). 
-""" -import logging - -from discord import message, utils - -log = logging.getLogger(__name__) - - -def _handle_edited_timestamp(self: message.Message, value: str) -> None: - """Helper function that takes care of parsing the edited timestamp.""" - self._edited_timestamp = utils.parse_time(value) - - -def apply_patch() -> None: - """Applies the `edited_at` patch to the `discord.message.Message` class.""" - message.Message._handle_edited_timestamp = _handle_edited_timestamp - message.Message._HANDLERS['edited_timestamp'] = message.Message._handle_edited_timestamp - log.info("Patch applied: message_edited_at") - - -if __name__ == "__main__": - apply_patch() diff --git a/bot/resources/elements.json b/bot/resources/elements.json new file mode 100644 index 000000000..a3ac5b99f --- /dev/null +++ b/bot/resources/elements.json @@ -0,0 +1,119 @@ +[ + "hydrogen", + "helium", + "lithium", + "beryllium", + "boron", + "carbon", + "nitrogen", + "oxygen", + "fluorine", + "neon", + "sodium", + "magnesium", + "aluminium", + "silicon", + "phosphorus", + "sulfur", + "chlorine", + "argon", + "potassium", + "calcium", + "scandium", + "titanium", + "vanadium", + "chromium", + "manganese", + "iron", + "cobalt", + "nickel", + "copper", + "zinc", + "gallium", + "germanium", + "arsenic", + "bromine", + "krypton", + "rubidium", + "strontium", + "yttrium", + "zirconium", + "niobium", + "molybdenum", + "technetium", + "ruthenium", + "rhodium", + "palladium", + "silver", + "cadmium", + "indium", + "tin", + "antimony", + "tellurium", + "iodine", + "xenon", + "caesium", + "barium", + "lanthanum", + "cerium", + "praseodymium", + "neodymium", + "promethium", + "samarium", + "europium", + "gadolinium", + "terbium", + "dysprosium", + "holmium", + "erbium", + "thulium", + "ytterbium", + "lutetium", + "hafnium", + "tantalum", + "tungsten", + "rhenium", + "osmium", + "iridium", + "platinum", + "gold", + "mercury", + "thallium", + "lead", + "bismuth", + "polonium", + "astatine", + "radon", + "francium", + "radium", + "actinium", + "thorium", + "protactinium", + "uranium", + "neptunium", + "plutonium", + "americium", + "curium", + "berkelium", + "californium", + "einsteinium", + "fermium", + "mendelevium", + "nobelium", + "lawrencium", + "rutherfordium", + "dubnium", + "seaborgium", + "bohrium", + "hassium", + "meitnerium", + "darmstadtium", + "roentgenium", + "copernicium", + "nihonium", + "flerovium", + "moscovium", + "livermorium", + "tennessine", + "oganesson" +] diff --git a/bot/resources/tags/args-kwargs.md b/bot/resources/tags/args-kwargs.md new file mode 100644 index 000000000..b440a2346 --- /dev/null +++ b/bot/resources/tags/args-kwargs.md @@ -0,0 +1,17 @@ +`*args` and `**kwargs` + +These special parameters allow functions to take arbitrary amounts of positional and keyword arguments. The names `args` and `kwargs` are purely convention, and could be named any other valid variable name. The special functionality comes from the single and double asterisks (`*`). If both are used in a function signature, `*args` **must** appear before `**kwargs`. + +**Single asterisk** +`*args` will ingest an arbitrary amount of **positional arguments**, and store it in a tuple. If there are parameters after `*args` in the parameter list with no default value, they will become **required** keyword arguments by default. + +**Double asterisk** +`**kwargs` will ingest an arbitrary amount of **keyword arguments**, and store it in a dictionary. There can be **no** additional parameters **after** `**kwargs` in the parameter list. 
+ +**Use cases** +• **Decorators** (see `!tags decorators`) +• **Inheritance** (overriding methods) +• **Future proofing** (in the case of the first two bullet points, if the parameters change, your code won't break) +• **Flexibility** (writing functions that behave like `dict()` or `print()`) + +*See* `!tags positional-keyword` *for information about positional and keyword arguments* diff --git a/bot/resources/tags/class.md b/bot/resources/tags/class.md new file mode 100644 index 000000000..4f73fc974 --- /dev/null +++ b/bot/resources/tags/class.md @@ -0,0 +1,25 @@ +**Classes** + +Classes are used to create objects that have specific behavior. + +Every object in python has a class, including `list`s, `dict`ionaries and even numbers. Using a class to group code and data like this is the foundation of Object Oriented Programming. Classes allow you to expose a simple, consistent interface while hiding the more complicated details. This simplifies the rest of your program and makes it easier to separately maintain and debug each component. + +Here is an example class: + +```python +class Foo: + def __init__(self, somedata): + self.my_attrib = somedata + + def show(self): + print(self.my_attrib) +``` + +To use a class, you need to instantiate it. The following creates a new object named `bar`, with `Foo` as its class. + +```python +bar = Foo('data') +bar.show() +``` + +We can access any of `Foo`'s methods via `bar.my_method()`, and access any of `bar`s data via `bar.my_attribute`. diff --git a/bot/resources/tags/classmethod.md b/bot/resources/tags/classmethod.md new file mode 100644 index 000000000..a4e803093 --- /dev/null +++ b/bot/resources/tags/classmethod.md @@ -0,0 +1,20 @@ +Although most methods are tied to an _object instance_, it can sometimes be useful to create a method that does something with _the class itself_. To achieve this in Python, you can use the `@classmethod` decorator. This is often used to provide alternative constructors for a class. + +For example, you may be writing a class that takes some magic token (like an API key) as a constructor argument, but you sometimes read this token from a configuration file. You could make use of a `@classmethod` to create an alternate constructor for when you want to read from the configuration file. +```py +class Bot: + def __init__(self, token: str): + self._token = token + + @classmethod + def from_config(cls, config: dict) -> Bot: + token = config['token'] + return cls(token) + +# now we can create the bot instance like this +alternative_bot = Bot.from_config(default_config) + +# but this still works, too +regular_bot = Bot("tokenstring") +``` +This is just one of the many use cases of `@classmethod`. A more in-depth explanation can be found [here](https://stackoverflow.com/questions/12179271/meaning-of-classmethod-and-staticmethod-for-beginner#12179752). diff --git a/bot/resources/tags/codeblock.md b/bot/resources/tags/codeblock.md new file mode 100644 index 000000000..ac64656e5 --- /dev/null +++ b/bot/resources/tags/codeblock.md @@ -0,0 +1,7 @@ +Here's how to format Python code on Discord: + +\`\`\`py +print('Hello world!') +\`\`\` + +**These are backticks, not quotes.** Check [this](https://superuser.com/questions/254076/how-do-i-type-the-tick-and-backtick-characters-on-windows/254077#254077) out if you can't find the backtick key. 
diff --git a/bot/resources/tags/customcooldown.md b/bot/resources/tags/customcooldown.md new file mode 100644 index 000000000..ac7e70aee --- /dev/null +++ b/bot/resources/tags/customcooldown.md @@ -0,0 +1,20 @@ +**Cooldowns in discord.py** + +Cooldowns can be used in discord.py to rate-limit. In this example, we're using it in an on_message. + +```python +from discord.ext import commands + +message_cooldown = commands.CooldownMapping.from_cooldown(1.0, 60.0, commands.BucketType.user) + +async def on_message(message): + bucket = message_cooldown.get_bucket(message) + retry_after = bucket.update_rate_limit() + if retry_after: + await message.channel.send(f"Slow down! Try again in {retry_after} seconds.") + else: + await message.channel.send("Not ratelimited!") +``` + +`from_cooldown` takes the amount of `update_rate_limit()`s needed to trigger the cooldown, the time in which the cooldown is triggered, and a [`BucketType`](https://discordpy.readthedocs.io/en/latest/ext/commands/api.html#discord.discord.ext.commands.BucketType). diff --git a/bot/resources/tags/decorators.md b/bot/resources/tags/decorators.md new file mode 100644 index 000000000..39c943f0a --- /dev/null +++ b/bot/resources/tags/decorators.md @@ -0,0 +1,31 @@ +**Decorators** + +A decorator is a function that modifies another function. + +Consider the following example of a timer decorator: +```py +>>> import time +>>> def timer(f): +... def inner(*args, **kwargs): +... start = time.time() +... result = f(*args, **kwargs) +... print('Time elapsed:', time.time() - start) +... return result +... return inner +... +>>> @timer +... def slow(delay=1): +... time.sleep(delay) +... return 'Finished!' +... +>>> print(slow()) +Time elapsed: 1.0011568069458008 +Finished! +>>> print(slow(3)) +Time elapsed: 3.000307321548462 +Finished! +``` + +More information: +• [Corey Schafer's video on decorators](https://youtu.be/FsAPt_9Bf3U) +• [Real python article](https://realpython.com/primer-on-python-decorators/) diff --git a/bot/resources/tags/dictcomps.md b/bot/resources/tags/dictcomps.md new file mode 100644 index 000000000..11867d77b --- /dev/null +++ b/bot/resources/tags/dictcomps.md @@ -0,0 +1,20 @@ +**Dictionary Comprehensions** + +Like lists, there is a convenient way of creating dictionaries: +```py +>>> ftoc = {f: round((5/9)*(f-32)) for f in range(-40,101,20)} +>>> print(ftoc) +{-40: -40, -20: -29, 0: -18, 20: -7, 40: 4, 60: 16, 80: 27, 100: 38} +``` +In the example above, I created a dictionary of temperatures in Fahrenheit, that are mapped to (*roughly*) their Celsius counterpart within a small range. These comprehensions are useful for succinctly creating dictionaries from some other sequence. + +They are also very useful for inverting the key value pairs of a dictionary that already exists, such that the value in the old dictionary is now the key, and the corresponding key is now its value: +```py +>>> ctof = {v:k for k, v in ftoc.items()} +>>> print(ctof) +{-40: -40, -29: -20, -18: 0, -7: 20, 4: 40, 16: 60, 27: 80, 38: 100} +``` + +Also like list comprehensions, you can add a conditional to it in order to filter out items you don't want. + +For more information and examples, check [PEP 274](https://www.python.org/dev/peps/pep-0274/) diff --git a/bot/resources/tags/enumerate.md b/bot/resources/tags/enumerate.md new file mode 100644 index 000000000..dd984af52 --- /dev/null +++ b/bot/resources/tags/enumerate.md @@ -0,0 +1,13 @@ +Ever find yourself in need of the current iteration number of your `for` loop? You should use **enumerate**! 
Using `enumerate`, you can turn code that looks like this: +```py +index = 0 +for item in my_list: + print(f"{index}: {item}") + index += 1 +``` +into beautiful, _pythonic_ code: +```py +for index, item in enumerate(my_list): + print(f"{index}: {item}") +``` +For more information, check out [the official docs](https://docs.python.org/3/library/functions.html#enumerate), or [PEP 279](https://www.python.org/dev/peps/pep-0279/). diff --git a/bot/resources/tags/except.md b/bot/resources/tags/except.md new file mode 100644 index 000000000..8f0abf156 --- /dev/null +++ b/bot/resources/tags/except.md @@ -0,0 +1,17 @@ +A key part of the Python philosophy is to ask for forgiveness, not permission. This means that it's okay to write code that may produce an error, as long as you specify how that error should be handled. Code written this way is readable and resilient. +```py +try: + number = int(user_input) +except ValueError: + print("failed to convert user_input to a number. setting number to 0.") + number = 0 +``` +You should always specify the exception type if it is possible to do so, and your `try` block should be as short as possible. Attempting to handle broad categories of unexpected exceptions can silently hide serious problems. +```py +try: + number = int(user_input) + item = some_list[number] +except: + print("An exception was raised, but we have no idea if it was a ValueError or an IndexError.") +``` +For more information about exception handling, see [the official Python docs](https://docs.python.org/3/tutorial/errors.html), or watch [Corey Schafer's video on exception handling](https://www.youtube.com/watch?v=NIWwJbo-9_8). diff --git a/bot/resources/tags/exit().md b/bot/resources/tags/exit().md new file mode 100644 index 000000000..27da9f866 --- /dev/null +++ b/bot/resources/tags/exit().md @@ -0,0 +1,8 @@ +**Exiting Programmatically** + +If you want to exit your code programmatically, you might think to use the functions `exit()` or `quit()`, however this is bad practice. These functions are constants added by the [`site`](https://docs.python.org/3/library/site.html#module-site) module as a convenient method for exiting the interactive interpreter shell, and should not be used in programs. + +You should use either [`SystemExit`](https://docs.python.org/3/library/exceptions.html#SystemExit) or [`sys.exit()`](https://docs.python.org/3/library/sys.html#sys.exit) instead. +There's not much practical difference between these two other than having to `import sys` for the latter. Both take an optional argument to provide an exit status. + +[Official documentation](https://docs.python.org/3/library/constants.html#exit) with the warning not to use `exit()` or `quit()` in source code. diff --git a/bot/resources/tags/f-strings.md b/bot/resources/tags/f-strings.md new file mode 100644 index 000000000..69bc82487 --- /dev/null +++ b/bot/resources/tags/f-strings.md @@ -0,0 +1,17 @@ +In Python, there are several ways to do string interpolation, including using `%s`\'s and by using the `+` operator to concatenate strings together. However, because some of these methods offer poor readability and require typecasting to prevent errors, you should for the most part be using a feature called format strings. 
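+
+For reference, the older approaches being discouraged look roughly like this:
+```py
+snake = "Pythons"
+print("%s are some of the largest snakes in the world" % snake)
+print(snake + " are some of the largest snakes in the world")
+```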
+ +**In Python 3.6 or later, we can use f-strings like this:** +```py +snake = "Pythons" +print(f"{snake} are some of the largest snakes in the world") +``` +**In earlier versions of Python or in projects where backwards compatibility is very important, use str.format() like this:** +```py +snake = "Pythons" + +# With str.format() you can either use indexes +print("{0} are some of the largest snakes in the world".format(snake)) + +# Or keyword arguments +print("{family} are some of the largest snakes in the world".format(family=snake)) +``` diff --git a/bot/resources/tags/foo.md b/bot/resources/tags/foo.md new file mode 100644 index 000000000..98529bfc0 --- /dev/null +++ b/bot/resources/tags/foo.md @@ -0,0 +1,10 @@ +**Metasyntactic variables** + +A specific word or set of words identified as a placeholder used in programming. They are used to name entities such as variables, functions, etc, whose exact identity is unimportant and serve only to demonstrate a concept, which is useful for teaching programming. + +Common examples include `foobar`, `foo`, `bar`, `baz`, and `qux`. +Python has its own metasyntactic variables, namely `spam`, `eggs`, and `bacon`. This is a reference to a [Monty Python](https://en.wikipedia.org/wiki/Monty_Python) sketch (the eponym of the language). + +More information: +• [History of foobar](https://en.wikipedia.org/wiki/Foobar) +• [Monty Python sketch](https://en.wikipedia.org/wiki/Spam_%28Monty_Python%29) diff --git a/bot/resources/tags/free.md b/bot/resources/tags/free.md new file mode 100644 index 000000000..1493076c7 --- /dev/null +++ b/bot/resources/tags/free.md @@ -0,0 +1,5 @@ +**We have a new help channel system!** + +Please see <#704250143020417084> for further information. + +A more detailed guide can be found on [our website](https://pythondiscord.com/pages/resources/guides/help-channels/). diff --git a/bot/resources/tags/functions-are-objects.md b/bot/resources/tags/functions-are-objects.md new file mode 100644 index 000000000..01af7a721 --- /dev/null +++ b/bot/resources/tags/functions-are-objects.md @@ -0,0 +1,39 @@ +**Calling vs. Referencing functions** + +When assigning a new name to a function, storing it in a container, or passing it as an argument, a common mistake made is to call the function. Instead of getting the actual function, you'll get its return value. + +In Python you can treat function names just like any other variable. Assume there was a function called `now` that returns the current time. If you did `x = now()`, the current time would be assigned to `x`, but if you did `x = now`, the function `now` itself would be assigned to `x`. `x` and `now` would both equally reference the function. + +**Examples** +```py +# assigning new name + +def foo(): + return 'bar' + +def spam(): + return 'eggs' + +baz = foo +baz() # returns 'bar' + +ham = spam +ham() # returns 'eggs' +``` +```py +# storing in container + +import math +functions = [math.sqrt, math.factorial, math.log] +functions[0](25) # returns 5.0 +# the above equivalent to math.sqrt(25) +``` +```py +# passing as argument + +class C: + builtin_open = staticmethod(open) + +# open function is passed +# to the staticmethod class +``` diff --git a/bot/resources/tags/global.md b/bot/resources/tags/global.md new file mode 100644 index 000000000..64c316b62 --- /dev/null +++ b/bot/resources/tags/global.md @@ -0,0 +1,16 @@ +When adding functions or classes to a program, it can be tempting to reference inaccessible variables by declaring them as global. 
Doing this can result in code that is harder to read, debug and test. Instead of using globals, pass variables or objects as parameters and receive return values. + +Instead of writing +```py +def update_score(): + global score, roll + score = score + roll +update_score() +``` +do this instead +```py +def update_score(score, roll): + return score + roll +score = update_score(score, roll) +``` +For in-depth explanations on why global variables are bad news in a variety of situations, see [this Stack Overflow answer](https://stackoverflow.com/questions/19158339/why-are-global-variables-evil/19158418#19158418). diff --git a/bot/resources/tags/guilds.md b/bot/resources/tags/guilds.md new file mode 100644 index 000000000..571abb99b --- /dev/null +++ b/bot/resources/tags/guilds.md @@ -0,0 +1,3 @@ +**Communities** + +The [communities page](https://pythondiscord.com/pages/resources/communities/) on our website contains a number of communities we have partnered with as well as a [curated list](https://github.com/mhxion/awesome-discord-communities) of other communities relating to programming and technology. diff --git a/bot/resources/tags/if-name-main.md b/bot/resources/tags/if-name-main.md new file mode 100644 index 000000000..9d88bb897 --- /dev/null +++ b/bot/resources/tags/if-name-main.md @@ -0,0 +1,26 @@ +`if __name__ == '__main__'` + +This is a statement that is only true if the module (your source code) it appears in is being run directly, as opposed to being imported into another module. When you run your module, the `__name__` special variable is automatically set to the string `'__main__'`. Conversely, when you import that same module into a different one, and run that, `__name__` is instead set to the filename of your module minus the `.py` extension. + +**Example** +```py +# foo.py + +print('spam') + +if __name__ == '__main__': + print('eggs') +``` +If you run the above module `foo.py` directly, both `'spam'`and `'eggs'` will be printed. Now consider this next example: +```py +# bar.py + +import foo +``` +If you run this module named `bar.py`, it will execute the code in `foo.py`. First it will print `'spam'`, and then the `if` statement will fail, because `__name__` will now be the string `'foo'`. + +**Why would I do this?** + +• Your module is a library, but also has a special case where it can be run directly +• Your module is a library and you want to safeguard it against people running it directly (like what `pip` does) +• Your module is the main program, but has unit tests and the testing framework works by importing your module, and you want to avoid having your main code run during the test diff --git a/bot/resources/tags/indent.md b/bot/resources/tags/indent.md new file mode 100644 index 000000000..dec8407b0 --- /dev/null +++ b/bot/resources/tags/indent.md @@ -0,0 +1,24 @@ +**Indentation** + +Indentation is leading whitespace (spaces and tabs) at the beginning of a line of code. In the case of Python, they are used to determine the grouping of statements. + +Spaces should be preferred over tabs. To be clear, this is in reference to the character itself, not the keys on a keyboard. Your editor/IDE should be configured to insert spaces when the TAB key is pressed. The amount of spaces should be a multiple of 4, except optionally in the case of continuation lines. + +**Example** +```py +def foo(): + bar = 'baz' # indented one level + if bar == 'baz': + print('ham') # indented two levels + return bar # indented one level +``` +The first line is not indented. 
The next two lines are indented to be inside of the function definition. They will only run when the function is called. The fourth line is indented to be inside the `if` statement, and will only run if the `if` statement evaluates to `True`. The fifth and last line is like the 2nd and 3rd and will always run when the function is called. It effectively closes the `if` statement above as no more lines can be inside the `if` statement below that line. + +**Indentation is used after:** +**1.** [Compound statements](https://docs.python.org/3/reference/compound_stmts.html) (eg. `if`, `while`, `for`, `try`, `with`, `def`, `class`, and their counterparts) +**2.** [Continuation lines](https://www.python.org/dev/peps/pep-0008/#indentation) + +**More Info** +**1.** [Indentation style guide](https://www.python.org/dev/peps/pep-0008/#indentation) +**2.** [Tabs or Spaces?](https://www.python.org/dev/peps/pep-0008/#tabs-or-spaces) +**3.** [Official docs on indentation](https://docs.python.org/3/reference/lexical_analysis.html#indentation) diff --git a/bot/resources/tags/inline.md b/bot/resources/tags/inline.md new file mode 100644 index 000000000..a6a7c35d6 --- /dev/null +++ b/bot/resources/tags/inline.md @@ -0,0 +1,16 @@ +**Inline codeblocks** + +In addition to multi-line codeblocks, discord has support for inline codeblocks as well. These are small codeblocks that are usually a single line, that can fit between non-codeblocks on the same line. + +The following is an example of how it's done: + +The \`\_\_init\_\_\` method customizes the newly created instance. + +And results in the following: + +The `__init__` method customizes the newly created instance. + +**Note:** +• These are **backticks** not quotes +• Avoid using them for multiple lines +• Useful for negating formatting you don't want diff --git a/bot/resources/tags/iterate-dict.md b/bot/resources/tags/iterate-dict.md new file mode 100644 index 000000000..78c067b20 --- /dev/null +++ b/bot/resources/tags/iterate-dict.md @@ -0,0 +1,10 @@ +There are two common ways to iterate over a dictionary in Python. To iterate over the keys: +```py +for key in my_dict: + print(key) +``` +To iterate over both the keys and values: +```py +for key, val in my_dict.items(): + print(key, val) +``` diff --git a/bot/resources/tags/kindling-projects.md b/bot/resources/tags/kindling-projects.md new file mode 100644 index 000000000..54ed8c961 --- /dev/null +++ b/bot/resources/tags/kindling-projects.md @@ -0,0 +1,3 @@ +**Kindling Projects** + +The [Kindling projects page](https://nedbatchelder.com/text/kindling.html) on Ned Batchelder's website contains a list of projects and ideas programmers can tackle to build their skills and knowledge. diff --git a/bot/resources/tags/listcomps.md b/bot/resources/tags/listcomps.md new file mode 100644 index 000000000..0003b9bb8 --- /dev/null +++ b/bot/resources/tags/listcomps.md @@ -0,0 +1,14 @@ +Do you ever find yourself writing something like: +```py +even_numbers = [] +for n in range(20): + if n % 2 == 0: + even_numbers.append(n) +``` +Using list comprehensions can simplify this significantly, and greatly improve code readability. If we rewrite the example above to use list comprehensions, it would look like this: +```py +even_numbers = [n for n in range(20) if n % 2 == 0] +``` +This also works for generators, dicts and sets by using `()` or `{}` instead of `[]`. 
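+
+For example, roughly:
+```py
+even_set = {n for n in range(20) if n % 2 == 0}   # set comprehension
+even_gen = (n for n in range(20) if n % 2 == 0)   # generator expression
+```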
+ +For more info, see [this pythonforbeginners.com post](http://www.pythonforbeginners.com/basics/list-comprehensions-in-python) or [PEP 202](https://www.python.org/dev/peps/pep-0202/). diff --git a/bot/resources/tags/microsoft-build-tools.md b/bot/resources/tags/microsoft-build-tools.md new file mode 100644 index 000000000..7c702e296 --- /dev/null +++ b/bot/resources/tags/microsoft-build-tools.md @@ -0,0 +1,15 @@ +**Microsoft Visual C++ Build Tools** + +When you install a library through `pip` on Windows, sometimes you may encounter this error: + +``` +error: Microsoft Visual C++ 14.0 or greater is required. Get it with "Microsoft C++ Build Tools": https://visualstudio.microsoft.com/visual-cpp-build-tools/ +``` + +This means the library you're installing has code written in other languages and needs additional tools to install. To install these tools, follow the following steps: (Requires 6GB+ disk space) + +**1.** Open [https://visualstudio.microsoft.com/visual-cpp-build-tools/](https://visualstudio.microsoft.com/visual-cpp-build-tools/). +**2.** Click **`Download Build Tools >`**. A file named `vs_BuildTools` or `vs_BuildTools.exe` should start downloading. If no downloads start after a few seconds, click **`click here to retry`**. +**3.** Run the downloaded file. Click **`Continue`** to proceed. +**4.** Choose **C++ build tools** and press **`Install`**. You may need a reboot after the installation. +**5.** Try installing the library via `pip` again. diff --git a/bot/resources/tags/modmail.md b/bot/resources/tags/modmail.md new file mode 100644 index 000000000..7545419ee --- /dev/null +++ b/bot/resources/tags/modmail.md @@ -0,0 +1,9 @@ +**Contacting the moderation team via ModMail** + +<@!683001325440860340> is a bot that will relay your messages to our moderation team, so that you can start a conversation with the moderation team. Your messages will be relayed to the entire moderator team, who will be able to respond to you via the bot. + +It supports attachments, codeblocks, and reactions. As communication happens over direct messages, the conversation will stay between you and the mod team. + +**To use it, simply send a direct message to the bot.** + +Should there be an urgent and immediate need for a moderator or admin to look at a channel, feel free to ping the <@&267629731250176001> or <@&267628507062992896> role instead. diff --git a/bot/resources/tags/mutability.md b/bot/resources/tags/mutability.md new file mode 100644 index 000000000..bde9b5e7e --- /dev/null +++ b/bot/resources/tags/mutability.md @@ -0,0 +1,37 @@ +**Mutable vs immutable objects** + +Imagine that you want to make all letters in a string upper case. Conveniently, strings have an `.upper()` method. + +You might think that this would work: +```python +>>> greeting = "hello" +>>> greeting.upper() +'HELLO' +>>> greeting +'hello' +``` + +`greeting` didn't change. Why is that so? + +That's because strings in Python are _immutable_. You can't change them, you can only pass around existing strings or create new ones. + +```python +>>> greeting = "hello" +>>> greeting = greeting.upper() +>>> greeting +'HELLO' +``` + +`greeting.upper()` creates and returns a new string which is like the old one, but with all the letters turned to upper case. + +`int`, `float`, `complex`, `tuple`, `frozenset` are other examples of immutable data types in Python. 
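+
+For instance, trying to change a `tuple` in-place fails:
+```python
+>>> point = (1, 2)
+>>> point[0] = 5
+TypeError: 'tuple' object does not support item assignment
+```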
+ +Mutable data types like `list`, on the other hand, can be changed in-place: +```python +>>> my_list = [1, 2, 3] +>>> my_list.append(4) +>>> my_list +[1, 2, 3, 4] +``` + +Other examples of mutable data types in Python are `dict` and `set`. Instances of user-defined classes are also mutable. diff --git a/bot/resources/tags/mutable-default-args.md b/bot/resources/tags/mutable-default-args.md new file mode 100644 index 000000000..a8f0c38b3 --- /dev/null +++ b/bot/resources/tags/mutable-default-args.md @@ -0,0 +1,48 @@ +**Mutable Default Arguments** + +Default arguments in python are evaluated *once* when the function is +**defined**, *not* each time the function is **called**. This means that if +you have a mutable default argument and mutate it, you will have +mutated that object for all future calls to the function as well. + +For example, the following `append_one` function appends `1` to a list +and returns it. `foo` is set to an empty list by default. +```python +>>> def append_one(foo=[]): +... foo.append(1) +... return foo +... +``` +See what happens when we call it a few times: +```python +>>> append_one() +[1] +>>> append_one() +[1, 1] +>>> append_one() +[1, 1, 1] +``` +Each call appends an additional `1` to our list `foo`. It does not +receive a new empty list on each call, it is the same list everytime. + +To avoid this problem, you have to create a new object every time the +function is **called**: +```python +>>> def append_one(foo=None): +... if foo is None: +... foo = [] +... foo.append(1) +... return foo +... +>>> append_one() +[1] +>>> append_one() +[1] +``` + +**Note**: + +• This behavior can be used intentionally to maintain state between +calls of a function (eg. when writing a caching function). +• This behavior is not unique to mutable objects, all default +arguments are evaulated only once when the function is defined. diff --git a/bot/resources/tags/names.md b/bot/resources/tags/names.md new file mode 100644 index 000000000..3e76269f7 --- /dev/null +++ b/bot/resources/tags/names.md @@ -0,0 +1,37 @@ +**Naming and Binding** + +A name is a piece of text that is bound to an object. They are a **reference** to an object. Examples are function names, class names, module names, variables, etc. + +**Note:** Names **cannot** reference other names, and assignment **never** creates a copy. +```py +x = 1 # x is bound to 1 +y = x # y is bound to VALUE of x +x = 2 # x is bound to 2 +print(x, y) # 2 1 +``` +When doing `y = x`, the name `y` is being bound to the *value* of `x` which is `1`. Neither `x` nor `y` are the 'real' name. The object `1` simply has *multiple* names. They are the exact same object. +``` +>>> x = 1 +x ━━ 1 + +>>> y = x +x ━━ 1 +y ━━━┛ + +>>> x = 2 +x ━━ 2 +y ━━ 1 +``` +**Names are created in multiple ways** +You might think that the only way to bind a name to an object is by using assignment, but that isn't the case. All of the following work exactly the same as assignment: +• `import` statements +• `class` and `def` +• `for` loop headers +• `as` keyword when used with `except`, `import`, and `with` +• formal parameters in function headers + +There is also `del` which has the purpose of *unbinding* a name. 
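+
+A small sketch of unbinding:
+```py
+x = 1
+del x      # the name x is now unbound
+print(x)   # raises NameError: name 'x' is not defined
+```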
+ +**More info** +• Please watch [Ned Batchelder's talk](https://youtu.be/_AEJHKGk9ns) on names in python for a detailed explanation with examples +• [Official documentation](https://docs.python.org/3/reference/executionmodel.html#naming-and-binding) diff --git a/bot/resources/tags/off-topic.md b/bot/resources/tags/off-topic.md new file mode 100644 index 000000000..c7f98a813 --- /dev/null +++ b/bot/resources/tags/off-topic.md @@ -0,0 +1,8 @@ +**Off-topic channels** + +There are three off-topic channels: +• <#291284109232308226> +• <#463035241142026251> +• <#463035268514185226> + +Their names change randomly every 24 hours, but you can always find them under the `OFF-TOPIC/GENERAL` category in the channel list. diff --git a/bot/resources/tags/open.md b/bot/resources/tags/open.md new file mode 100644 index 000000000..13b4555b9 --- /dev/null +++ b/bot/resources/tags/open.md @@ -0,0 +1,26 @@ +**Opening files** + +The built-in function `open()` is one of several ways to open files on your computer. It accepts many different parameters, so this tag will only go over two of them (`file` and `mode`). For more extensive documentation on all these parameters, consult the [official documentation](https://docs.python.org/3/library/functions.html#open). The object returned from this function is a [file object or stream](https://docs.python.org/3/glossary.html#term-file-object), for which the full documentation can be found [here](https://docs.python.org/3/library/io.html#io.TextIOBase). + +See also: +• `!tags with` for information on context managers +• `!tags pathlib` for an alternative way of opening files +• `!tags seek` for information on changing your position in a file + +**The `file` parameter** + +This should be a [path-like object](https://docs.python.org/3/glossary.html#term-path-like-object) denoting the name or path (absolute or relative) to the file you want to open. + +An absolute path is the full path from your root directory to the file you want to open. Generally this is the option you should choose so it doesn't matter what directory you're in when you execute your module. + +See `!tags relative-path` for more information on relative paths. + +**The `mode` parameter** + +This is an optional string that specifies the mode in which the file should be opened. There's not enough room to discuss them all, but listed below are some of the more confusing modes. + +`'r+'` Opens for reading and writing (file must already exist) +`'w+'` Opens for reading and writing and truncates (can create files) +`'x'` Creates file and opens for writing (file must **not** already exist) +`'x+'` Creates file and opens for reading and writing (file must **not** already exist) +`'a+'` Opens file for reading and writing at **end of file** (can create files) diff --git a/bot/resources/tags/or-gotcha.md b/bot/resources/tags/or-gotcha.md new file mode 100644 index 000000000..d75a73d78 --- /dev/null +++ b/bot/resources/tags/or-gotcha.md @@ -0,0 +1,17 @@ +When checking if something is equal to one thing or another, you might think that this is possible: +```py +if favorite_fruit == 'grapefruit' or 'lemon': + print("That's a weird favorite fruit to have.") +``` +While this makes sense in English, it may not behave the way you would expect. In Python, you should have _[complete instructions on both sides of the logical operator](https://docs.python.org/3/reference/expressions.html#boolean-operations)_. 
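+
+To see why, here is roughly what that condition evaluates to:
+```py
+>>> favorite_fruit = 'apple'
+>>> favorite_fruit == 'grapefruit' or 'lemon'
+'lemon'
+```
+Since `'lemon'` is a non-empty string it is truthy, so the `if` branch always runs.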
+ +So, if you want to check if something is equal to one thing or another, there are two common ways: +```py +# Like this... +if favorite_fruit == 'grapefruit' or favorite_fruit == 'lemon': + print("That's a weird favorite fruit to have.") + +# ...or like this. +if favorite_fruit in ('grapefruit', 'lemon'): + print("That's a weird favorite fruit to have.") +``` diff --git a/bot/resources/tags/param-arg.md b/bot/resources/tags/param-arg.md new file mode 100644 index 000000000..88069d8bd --- /dev/null +++ b/bot/resources/tags/param-arg.md @@ -0,0 +1,12 @@ +**Parameters vs. Arguments** + +A parameter is a variable defined in a function signature (the line with `def` in it), while arguments are objects passed to a function call. + +```py +def square(n): # n is the parameter + return n*n + +print(square(5)) # 5 is the argument +``` + +Note that `5` is the argument passed to `square`, but `square(5)` in its entirety is the argument passed to `print` diff --git a/bot/resources/tags/paste.md b/bot/resources/tags/paste.md new file mode 100644 index 000000000..2ed51def7 --- /dev/null +++ b/bot/resources/tags/paste.md @@ -0,0 +1,6 @@ +**Pasting large amounts of code** + +If your code is too long to fit in a codeblock in discord, you can paste your code here: +https://paste.pydis.com/ + +After pasting your code, **save** it by clicking the floppy disk icon in the top right, or by typing `ctrl + S`. After doing that, the URL should **change**. Copy the URL and post it here so others can see it. diff --git a/bot/resources/tags/pathlib.md b/bot/resources/tags/pathlib.md new file mode 100644 index 000000000..dfeb7ecac --- /dev/null +++ b/bot/resources/tags/pathlib.md @@ -0,0 +1,21 @@ +**Pathlib** + +Python 3 comes with a new module named `Pathlib`. Since Python 3.6, `pathlib.Path` objects work nearly everywhere that `os.path` can be used, meaning you can integrate your new code directly into legacy code without having to rewrite anything. Pathlib makes working with paths way simpler than `os.path` does. + +**Feature spotlight**: + +• Normalizes file paths for all platforms automatically +• Has glob-like utilites (eg. `Path.glob`, `Path.rglob`) for searching files +• Can read and write files, and close them automatically +• Convenient syntax, utilising the `/` operator (e.g. `Path('~') / 'Documents'`) +• Can easily pick out components of a path (eg. name, parent, stem, suffix, anchor) +• Supports method chaining +• Move and delete files +• And much more + +**More Info**: + +• [**Why you should use pathlib** - Trey Hunner](https://treyhunner.com/2018/12/why-you-should-be-using-pathlib/) +• [**Answering concerns about pathlib** - Trey Hunner](https://treyhunner.com/2019/01/no-really-pathlib-is-great/) +• [**Official Documentation**](https://docs.python.org/3/library/pathlib.html) +• [**PEP 519** - Adding a file system path protocol](https://www.python.org/dev/peps/pep-0519/) diff --git a/bot/resources/tags/pep8.md b/bot/resources/tags/pep8.md new file mode 100644 index 000000000..cab4c4db8 --- /dev/null +++ b/bot/resources/tags/pep8.md @@ -0,0 +1,3 @@ +**PEP 8** is the official style guide for Python. It includes comprehensive guidelines for code formatting, variable naming, and making your code easy to read. Professional Python developers are usually required to follow the guidelines, and will often use code-linters like `flake8` to verify that the code they\'re writing complies with the style guide. + +You can find the PEP 8 document [here](https://www.python.org/dev/peps/pep-0008). 
diff --git a/bot/resources/tags/positional-keyword.md b/bot/resources/tags/positional-keyword.md new file mode 100644 index 000000000..dd6ddfc4b --- /dev/null +++ b/bot/resources/tags/positional-keyword.md @@ -0,0 +1,38 @@ +**Positional vs. Keyword arguments**
+
+Functions can take two different kinds of arguments. A positional argument is just the object itself. A keyword argument is a name assigned to an object.
+
+**Example**
+```py
+>>> print('Hello', 'world!', sep=', ')
+Hello, world!
+```
+The first two strings `'Hello'` and `'world!'` are positional arguments.
+The `sep=', '` is a keyword argument.
+
+**Note**
+A keyword argument can be passed positionally in some cases.
+```py
+def sum(a, b=1):
+    return a + b
+
+sum(1, b=5)
+sum(1, 5)  # same as above
+```
+[Sometimes this is forced](https://www.python.org/dev/peps/pep-0570/#history-of-positional-only-parameter-semantics-in-python), as is the case with the `pow()` function.
+
+The reverse is also true:
+```py
+>>> def foo(a, b):
+...     print(a, b)
+...
+>>> foo(a=1, b=2)
+1 2
+>>> foo(b=1, a=2)
+2 1
+```
+
+**More info**
+• [Keyword only arguments](https://www.python.org/dev/peps/pep-3102/)
+• [Positional only arguments](https://www.python.org/dev/peps/pep-0570/)
+• `!tags param-arg` (Parameters vs. Arguments) diff --git a/bot/resources/tags/precedence.md b/bot/resources/tags/precedence.md new file mode 100644 index 000000000..ed399143c --- /dev/null +++ b/bot/resources/tags/precedence.md @@ -0,0 +1,13 @@ +**Operator Precedence**
+
+Operator precedence is essentially like an order of operations for Python's operators.
+
+**Example 1** (arithmetic)
+`2 * 3 + 1` is `7` because multiplication is first
+`2 * (3 + 1)` is `8` because the parentheses change the precedence, allowing the addition to happen first
+
+**Example 2** (logic)
+`not True or True` is `True` because the `not` is first
+`not (True or True)` is `False` because the `or` is first
+
+The full table of precedence from lowest to highest is [here](https://docs.python.org/3/reference/expressions.html#operator-precedence). diff --git a/bot/resources/tags/quotes.md b/bot/resources/tags/quotes.md new file mode 100644 index 000000000..8421748a1 --- /dev/null +++ b/bot/resources/tags/quotes.md @@ -0,0 +1,20 @@ +**String Quotes**
+
+Single-quoted and double-quoted strings are the **same** in Python. The choice of which one to use is up to you; just make sure that you **stick to that choice**.
+
+With that said, there are exceptions to this that are more important than consistency. If a single or double quote is needed *inside* the string, using the opposite style of quote is better than using escape characters.
+
+Example:
+```py
+'My name is "Guido"'  # good
+"My name is \"Guido\""  # bad
+
+"Don't go in there"  # good
+'Don\'t go in there'  # bad
+```
+**Note:**
+If you need both single and double quotes inside your string, use the version that would result in the least amount of escapes. In the case of a tie, use the style you use the most.
+
+**References:**
+• [PEP 8 on quotes](https://www.python.org/dev/peps/pep-0008/#string-quotes)
+• [Convention for triple-quoted strings](https://www.python.org/dev/peps/pep-0257/) diff --git a/bot/resources/tags/range-len.md b/bot/resources/tags/range-len.md new file mode 100644 index 000000000..65665eccf --- /dev/null +++ b/bot/resources/tags/range-len.md @@ -0,0 +1,11 @@ +Iterating over `range(len(...))` is a common approach to accessing each item in an ordered collection.
+```py +for i in range(len(my_list)): + do_something(my_list[i]) +``` +The pythonic syntax is much simpler, and is guaranteed to produce elements in the same order: +```py +for item in my_list: + do_something(item) +``` +Python has other solutions for cases when the index itself might be needed. To get the element at the same index from two or more lists, use [zip](https://docs.python.org/3/library/functions.html#zip). To get both the index and the element at that index, use [enumerate](https://docs.python.org/3/library/functions.html#enumerate). diff --git a/bot/resources/tags/relative-path.md b/bot/resources/tags/relative-path.md new file mode 100644 index 000000000..6e97b78af --- /dev/null +++ b/bot/resources/tags/relative-path.md @@ -0,0 +1,7 @@ +**Relative Path** + +A relative path is a partial path that is relative to your current working directory. A common misconception is that your current working directory is the location of the module you're executing, **but this is not the case**. Your current working directory is actually the **directory you were in when you ran the python interpreter**. The reason for this misconception is because a common way to run your code is to navigate to the directory your module is stored, and run `python <module>.py`. Thus, in this case your current working directory will be the same as the location of the module. However, if we instead did `python path/to/<module>.py`, our current working directory would no longer be the same as the location of the module we're executing. + +**Why is this important?** + +When opening files in python, relative paths won't always work since it's dependent on what directory you were in when you ran your code. A common issue people face is running their code in an IDE thinking they can open files that are in the same directory as their module, but the current working directory will be different than what they expect and so they won't find the file. The way to avoid this problem is by using absolute paths, which is the full path from your root directory to the file you want to open. diff --git a/bot/resources/tags/repl.md b/bot/resources/tags/repl.md new file mode 100644 index 000000000..875b4ec47 --- /dev/null +++ b/bot/resources/tags/repl.md @@ -0,0 +1,13 @@ +**Read-Eval-Print Loop** + +A REPL is an interactive language shell environment. It first **reads** one or more expressions entered by the user, **evaluates** it, yields the result, and **prints** it out to the user. It will then **loop** back to the **read** step. + +To use python's REPL, execute the interpreter with no arguments. This will drop you into the interactive interpreter shell, print out some relevant information, and then prompt you with the primary prompt `>>>`. At this point it is waiting for your input. + +Firstly you can start typing in some valid python expressions, pressing <return> to either bring you to the **eval** step, or prompting you with the secondary prompt `...` (or no prompt at all depending on your environment), meaning your expression isn't yet terminated and it's waiting for more input. This is useful for code that requires multiple lines like loops, functions, and classes. If you reach the secondary prompt in a clause that can have an arbitrary amount of expressions, you can terminate it by pressing <return> on a blank line. In other words, for the last expression you write in the clause, <return> must be pressed twice in a row. + +Alternatively, you can make use of the builtin `help()` function. 
`help(thing)` to get help on some `thing` object, or `help()` to start an interactive help session. This mode is extremely powerful, read the instructions when first entering the session to learn how to use it. + +Lastly you can run your code with the `-i` flag to execute your code normally, but be dropped into the REPL once execution is finished, giving you access to all your global variables/functions in the REPL. + +To **exit** either a help session, or normal REPL prompt, you must send an EOF signal to the prompt. In *nix systems, this is done with `ctrl + D`, and in windows systems it is `ctrl + Z`. You can also exit the normal REPL prompt with the dedicated functions `exit()` or `quit()`. diff --git a/bot/resources/tags/return.md b/bot/resources/tags/return.md new file mode 100644 index 000000000..e37f0eebc --- /dev/null +++ b/bot/resources/tags/return.md @@ -0,0 +1,35 @@ +**Return Statement** + +When calling a function, you'll often want it to give you a value back. In order to do that, you must `return` it. The reason for this is because functions have their own scope. Any values defined within the function body are inaccessible outside of that function. + +*For more information about scope, see `!tags scope`* + +Consider the following function: +```py +def square(n): + return n*n +``` +If we wanted to store 5 squared in a variable called `x`, we could do that like so: +`x = square(5)`. `x` would now equal `25`. + +**Common Mistakes** +```py +>>> def square(n): +... n*n # calculates then throws away, returns None +... +>>> x = square(5) +>>> print(x) +None +>>> def square(n): +... print(n*n) # calculates and prints, then throws away and returns None +... +>>> x = square(5) +25 +>>> print(x) +None +``` +**Things to note** +• `print()` and `return` do **not** accomplish the same thing. `print()` will only print the value, it will not be accessible outside of the function afterwards. +• A function will return `None` if it ends without reaching an explicit `return` statement. +• When you want to print a value calculated in a function, instead of printing inside the function, it is often better to return the value and print the *function call* instead. +• [Official documentation for `return`](https://docs.python.org/3/reference/simple_stmts.html#the-return-statement) diff --git a/bot/resources/tags/round.md b/bot/resources/tags/round.md new file mode 100644 index 000000000..0392bb41b --- /dev/null +++ b/bot/resources/tags/round.md @@ -0,0 +1,24 @@ +**Round half to even** + +Python 3 uses bankers' rounding (also known by other names), where if the fractional part of a number is `.5`, it's rounded to the nearest **even** result instead of away from zero. + +Example: +```py +>>> round(2.5) +2 +>>> round(1.5) +2 +``` +In the first example, there is a tie between 2 and 3, and since 3 is odd and 2 is even, the result is 2. +In the second example, the tie is between 1 and 2, and so 2 is also the result. + +**Why this is done:** +The round half up technique creates a slight bias towards the larger number. With a large amount of calculations, this can be significant. The round half to even technique eliminates this bias. + +It should be noted that round half to even distorts the distribution by increasing the probability of evens relative to odds, however this is considered less important than the bias explained above. 
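+
+If you do need the classic "round half up" behaviour, the `decimal` module lets you request it explicitly (a brief sketch of one approach; see the last reference below for more):
+```py
+from decimal import Decimal, ROUND_HALF_UP
+
+print(Decimal("2.5").quantize(Decimal("1"), rounding=ROUND_HALF_UP))  # 3
+print(Decimal("1.5").quantize(Decimal("1"), rounding=ROUND_HALF_UP))  # 2
+```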
+ +**References:** +• [Wikipedia article about rounding](https://en.wikipedia.org/wiki/Rounding#Round_half_to_even) +• [Documentation on `round` function](https://docs.python.org/3/library/functions.html#round) +• [`round` in what's new in python 3](https://docs.python.org/3/whatsnew/3.0.html#builtins) (4th bullet down) +• [How to force rounding technique](https://stackoverflow.com/a/10826537/4607272) diff --git a/bot/resources/tags/scope.md b/bot/resources/tags/scope.md new file mode 100644 index 000000000..5c1e64e1c --- /dev/null +++ b/bot/resources/tags/scope.md @@ -0,0 +1,24 @@ +**Scoping Rules** + +A *scope* defines the visibility of a name within a block, where a block is a piece of python code executed as a unit. For simplicity, this would be a module, a function body, and a class definition. A name refers to text bound to an object. + +*For more information about names, see `!tags names`* + +A module is the source code file itself, and encompasses all blocks defined within it. Therefore if a variable is defined at the module level (top-level code block), it is a global variable and can be accessed anywhere in the module as long as the block in which it's referenced is executed after it was defined. + +Alternatively if a variable is defined within a function block for example, it is a local variable. It is not accessible at the module level, as that would be *outside* its scope. This is the purpose of the `return` statement, as it hands an object back to the scope of its caller. Conversely if a function was defined *inside* the previously mentioned block, it *would* have access to that variable, because it is within the first function's scope. +```py +>>> def outer(): +... foo = 'bar' # local variable to outer +... def inner(): +... print(foo) # has access to foo from scope of outer +... return inner # brings inner to scope of caller +... +>>> inner = outer() # get inner function +>>> inner() # prints variable foo without issue +bar +``` +**Official Documentation** +**1.** [Program structure, name binding and resolution](https://docs.python.org/3/reference/executionmodel.html#execution-model) +**2.** [`global` statement](https://docs.python.org/3/reference/simple_stmts.html#the-global-statement) +**3.** [`nonlocal` statement](https://docs.python.org/3/reference/simple_stmts.html#the-nonlocal-statement) diff --git a/bot/resources/tags/seek.md b/bot/resources/tags/seek.md new file mode 100644 index 000000000..bc013fe03 --- /dev/null +++ b/bot/resources/tags/seek.md @@ -0,0 +1,22 @@ +**Seek** + +In the context of a [file object](https://docs.python.org/3/glossary.html#term-file-object), the `seek` function changes the stream position to a given byte offset, with an optional argument of where to offset from. While you can find the official documentation [here](https://docs.python.org/3/library/io.html#io.IOBase.seek), it can be unclear how to actually use this feature, so keep reading to see examples on how to use it. + +File named `example`: +``` +foobar +spam eggs +``` +Open file for reading in byte mode: +```py +f = open('example', 'rb') +``` +Note that stream positions start from 0 in much the same way that the index for a list does. If we do `f.seek(3, 0)`, our stream position will move 3 bytes forward relative to the **beginning** of the stream. Now if we then did `f.read(1)` to read a single byte from where we are in the stream, it would return the string `'b'` from the 'b' in 'foobar'. Notice that the 'b' is the 4th character. 
Also note that after we did `f.read(1)`, we moved the stream position again 1 byte forward relative to the **current** position in the stream. So the stream position is now at position 4.
+
+Now let's do `f.seek(4, 1)`. This will move our stream position 4 bytes forward relative to our **current** position in the stream. Now if we did `f.read(1)`, it would return the string `'p'` from the 'p' in 'spam' on the next line. Note this time that the character at position 6 is the newline character `'\n'`.
+
+Finally, let's do `f.seek(-4, 2)`, moving our stream position *backwards* 4 bytes relative to the **end** of the stream. Now if we did `f.read()` to read everything after our position in the file, it would return the string `'eggs'` and also move our stream position to the end of the file.
+
+**Note**
+• For the second argument in `seek()`, use `os.SEEK_SET`, `os.SEEK_CUR`, and `os.SEEK_END` in place of 0, 1, and 2 respectively.
+• `os.SEEK_CUR` is only usable when the file is in byte mode. diff --git a/bot/resources/tags/self.md b/bot/resources/tags/self.md new file mode 100644 index 000000000..d20154fd5 --- /dev/null +++ b/bot/resources/tags/self.md @@ -0,0 +1,25 @@ +**Class instance**
+
+When calling a method from a class instance (i.e. `instance.method()`), the instance itself will automatically be passed as the first argument implicitly. By convention, we call this `self`, but it could technically be called any valid variable name.
+
+```py
+class Foo:
+    def bar(self):
+        print('bar')
+
+    def spam(self, eggs):
+        print(eggs)
+
+foo = Foo()
+```
+
+If we call `foo.bar()`, it is equivalent to doing `Foo.bar(foo)`. Our instance `foo` is passed for us to the `bar` function, so while we initially gave zero arguments, it is actually called with one.
+
+Similarly, if we call `foo.spam('ham')`, it is equivalent to
+doing `Foo.spam(foo, 'ham')`.
+
+**Why is this useful?**
+
+Methods do not inherently have access to attributes defined in the class. In order for any one method to be able to access other methods or variables defined in the class, it must have access to the instance.
+
+Consider if, outside the class, we tried to do this: `spam(foo, 'ham')`. This would give an error, because we don't have access to the `spam` method directly; we have to call it by doing `foo.spam('ham')`. This is also the case inside the class. If we wanted to call the `bar` method inside the `spam` method, we'd have to do `self.bar()`; just doing `bar()` would give an error. diff --git a/bot/resources/tags/star-imports.md b/bot/resources/tags/star-imports.md new file mode 100644 index 000000000..2be6aab6e --- /dev/null +++ b/bot/resources/tags/star-imports.md @@ -0,0 +1,48 @@ +**Star / Wildcard imports**
+
+Wildcard imports are import statements in the form `from <module_name> import *`. Imports like these pull everything **[1]** from the module into the current module's namespace **[2]**. This allows you to use names defined in the imported module without prefixing the module's name.
+
+Example:
+```python
+>>> from math import *
+>>> sin(pi / 2)
+1.0
+```
+**This is discouraged, for various reasons:**
+
+Example:
+```python
+>>> from custom_sin import sin
+>>> from math import *
+>>> sin(pi / 2)  # uses sin from math rather than your custom sin
+```
+
+• Potential namespace collision. Names defined by a previous import might get shadowed by a wildcard import.
+
+• Causes ambiguity. From the example, it is unclear which `sin` function is actually being used.
From the Zen of Python **[3]**: `Explicit is better than implicit.`
+
+• Makes import order significant, which it shouldn't be. Certain IDEs' `sort import` functionality may end up breaking code due to namespace collisions.
+
+**How should you import?**
+
+• Import the module under the module's namespace (only import the name of the module; names defined in the module can then be used by prefixing them with the module's name)
+
+```python
+>>> import math
+>>> math.sin(math.pi / 2)
+```
+
+• Explicitly import certain names from the module
+
+```python
+>>> from math import sin, pi
+>>> sin(pi / 2)
+```
+
+Conclusion: Namespaces are one honking great idea -- let's do more of those! *[3]*
+
+**[1]** If the module defines the variable `__all__`, the names listed in `__all__` will get imported by the wildcard import; otherwise all the names in the module get imported (except for names with a leading underscore).
+
+**[2]** [Namespaces and scopes](https://www.programiz.com/python-programming/namespace)
+
+**[3]** [Zen of Python](https://www.python.org/dev/peps/pep-0020/) diff --git a/bot/resources/tags/traceback.md b/bot/resources/tags/traceback.md new file mode 100644 index 000000000..e770fa86d --- /dev/null +++ b/bot/resources/tags/traceback.md @@ -0,0 +1,18 @@ +Please provide the full traceback for your exception in order for us to identify your issue.
+
+A full traceback could look like:
+```py
+Traceback (most recent call last):
+  File "tiny", line 3, in
+    do_something()
+  File "tiny", line 2, in do_something
+    a = 6 / 0
+ZeroDivisionError: integer division or modulo by zero
+```
+The best way to read your traceback is bottom to top.
+
+• Identify the exception raised (e.g. ZeroDivisionError).
+• Make note of the line number, and navigate there in your program.
+• Try to understand why the error occurred.
+
+To read more about exceptions and errors, please refer to the [PyDis Wiki](https://pythondiscord.com/pages/asking-good-questions/#examining-tracebacks) or the [official Python tutorial](https://docs.python.org/3.7/tutorial/errors.html). diff --git a/bot/resources/tags/windows-path.md b/bot/resources/tags/windows-path.md new file mode 100644 index 000000000..da8edf685 --- /dev/null +++ b/bot/resources/tags/windows-path.md @@ -0,0 +1,30 @@ +**PATH on Windows**
+
+If you have installed Python but forgot to check the *Add Python to PATH* option during the installation, you may still be able to access your installation with ease.
+
+If you did not uncheck the option to install the Python launcher, then you will find a `py` command on your system. If you want to be able to open your Python installation by running `python`, then your best option is to re-install Python.
+
+Otherwise, you can access your installation using the `py` command in Command Prompt. Where you would normally type something with the `python` command, like:
+```
+C:\Users\Username> python3 my_application_file.py
+```
+
+You can achieve the same result using the `py` command like this:
+```
+C:\Users\Username> py -3 my_application_file.py
+```
+
+You can pass any options to the Python interpreter after you specify a version. For example, to install a Python module using `pip`, you can run:
+```
+C:\Users\Username> py -3 -m pip install numpy
+```
+
+You can also access different versions of Python using the version flag, like so:
+```
+C:\Users\Username> py -3.7
+... Python 3.7 starts ...
+C:\Users\Username> py -3.6
+... Python 3.6 starts ...
+C:\Users\Username> py -2
+... Python 2 (any version installed) starts ...
+``` diff --git a/bot/resources/tags/with.md b/bot/resources/tags/with.md new file mode 100644 index 000000000..62d5612f2 --- /dev/null +++ b/bot/resources/tags/with.md @@ -0,0 +1,8 @@ +The `with` keyword triggers a context manager. Context managers automatically set up and take down data connections, or any other kind of object that implements the magic methods `__enter__` and `__exit__`. +```py +with open("test.txt", "r") as file: + do_things(file) +``` +The above code automatically closes `file` when the `with` block exits, so you never have to manually do a `file.close()`. Most connection types, including file readers and database connections, support this. + +For more information, read [the official docs](https://docs.python.org/3/reference/compound_stmts.html#with), watch [Corey Schafer\'s context manager video](https://www.youtube.com/watch?v=-aKFBoZpiqA), or see [PEP 343](https://www.python.org/dev/peps/pep-0343/). diff --git a/bot/resources/tags/xy-problem.md b/bot/resources/tags/xy-problem.md new file mode 100644 index 000000000..b77bd27e8 --- /dev/null +++ b/bot/resources/tags/xy-problem.md @@ -0,0 +1,7 @@ +**xy-problem** + +Asking about your attempted solution rather than your actual problem. + +Often programmers will get distracted with a potential solution they've come up with, and will try asking for help getting it to work. However, it's possible this solution either wouldn't work as they expect, or there's a much better solution instead. + +For more information and examples: http://xyproblem.info/ diff --git a/bot/resources/tags/ytdl.md b/bot/resources/tags/ytdl.md new file mode 100644 index 000000000..e34ecff44 --- /dev/null +++ b/bot/resources/tags/ytdl.md @@ -0,0 +1,12 @@ +Per [PyDis' Rule 5](https://pythondiscord.com/pages/rules), we are unable to assist with questions related to youtube-dl, commonly used by Discord bots to stream audio, as its use violates YouTube's Terms of Service. + +For reference, this usage is covered by the following clauses in [YouTube's TOS](https://www.youtube.com/static?template=terms), as of 2019-07-22: +``` +The following restrictions apply to your use of the Service. You are not allowed to: + +1. access, reproduce, download, distribute, transmit, broadcast, display, sell, license, alter, modify or otherwise use any part of the Service or any Content except: (a) as specifically permitted by the Service; (b) with prior written permission from YouTube and, if applicable, the respective rights holders; or (c) as permitted by applicable law; + +3. access the Service using any automated means (such as robots, botnets or scrapers) except: (a) in the case of public search engines, in accordance with YouTube’s robots.txt file; (b) with YouTube’s prior written permission; or (c) as permitted by applicable law; + +9. use the Service to view or listen to Content other than for personal, non-commercial use (for example, you may not publicly screen videos or stream music from the Service) +``` diff --git a/bot/resources/tags/zip.md b/bot/resources/tags/zip.md new file mode 100644 index 000000000..6b05f0282 --- /dev/null +++ b/bot/resources/tags/zip.md @@ -0,0 +1,12 @@ +The zip function allows you to iterate through multiple iterables simultaneously. It joins the iterables together, almost like a zipper, so that each new element is a tuple with one element from each iterable. 
+ +```py +letters = 'abc' +numbers = [1, 2, 3] +# zip(letters, numbers) --> [('a', 1), ('b', 2), ('c', 3)] +for letter, number in zip(letters, numbers): + print(letter, number) +``` +The `zip()` iterator is exhausted after the length of the shortest iterable is exceeded. If you would like to retain the other values, consider using [itertools.zip_longest](https://docs.python.org/3/library/itertools.html#itertools.zip_longest). + +For more information on zip, please refer to the [official documentation](https://docs.python.org/3/library/functions.html#zip). diff --git a/bot/rules/burst_shared.py b/bot/rules/burst_shared.py index bbe9271b3..0e66df69c 100644 --- a/bot/rules/burst_shared.py +++ b/bot/rules/burst_shared.py @@ -2,11 +2,20 @@ from typing import Dict, Iterable, List, Optional, Tuple from discord import Member, Message +from bot.constants import Channels + async def apply( last_message: Message, recent_messages: List[Message], config: Dict[str, int] ) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]: - """Detects repeated messages sent by multiple users.""" + """ + Detects repeated messages sent by multiple users. + + This filter never triggers in the verification channel. + """ + if last_message.channel.id == Channels.verification: + return + total_recent = len(recent_messages) if total_recent > config['max']: diff --git a/bot/rules/discord_emojis.py b/bot/rules/discord_emojis.py index 5bab514f2..41faf7ee8 100644 --- a/bot/rules/discord_emojis.py +++ b/bot/rules/discord_emojis.py @@ -2,23 +2,27 @@ import re from typing import Dict, Iterable, List, Optional, Tuple from discord import Member, Message +from emoji import demojize -DISCORD_EMOJI_RE = re.compile(r"<:\w+:\d+>") +DISCORD_EMOJI_RE = re.compile(r"<:\w+:\d+>|:\w+:") +CODE_BLOCK_RE = re.compile(r"```.*?```", flags=re.DOTALL) async def apply( last_message: Message, recent_messages: List[Message], config: Dict[str, int] ) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]: - """Detects total Discord emojis (excluding Unicode emojis) exceeding the limit sent by a single user.""" + """Detects total Discord emojis exceeding the limit sent by a single user.""" relevant_messages = tuple( msg for msg in recent_messages if msg.author == last_message.author ) + # Get rid of code blocks in the message before searching for emojis. + # Convert Unicode emojis to :emoji: format to get their count. total_emojis = sum( - len(DISCORD_EMOJI_RE.findall(msg.content)) + len(DISCORD_EMOJI_RE.findall(demojize(CODE_BLOCK_RE.sub("", msg.content)))) for msg in relevant_messages ) diff --git a/bot/utils/__init__.py b/bot/utils/__init__.py index 8184be824..13533a467 100644 --- a/bot/utils/__init__.py +++ b/bot/utils/__init__.py @@ -1,76 +1,4 @@ -from abc import ABCMeta -from typing import Any, Generator, Hashable, Iterable +from bot.utils.helpers import CogABCMeta, find_nth_occurrence, has_lines, pad_base64 +from bot.utils.services import send_to_paste_service -from discord.ext.commands import CogMeta - - -class CogABCMeta(CogMeta, ABCMeta): - """Metaclass for ABCs meant to be implemented as Cogs.""" - - pass - - -class CaseInsensitiveDict(dict): - """ - We found this class on StackOverflow. Thanks to m000 for writing it! 
- - https://stackoverflow.com/a/32888599/4022104 - """ - - @classmethod - def _k(cls, key: Hashable) -> Hashable: - """Return lowered key if a string-like is passed, otherwise pass key straight through.""" - return key.lower() if isinstance(key, str) else key - - def __init__(self, *args, **kwargs): - super(CaseInsensitiveDict, self).__init__(*args, **kwargs) - self._convert_keys() - - def __getitem__(self, key: Hashable) -> Any: - """Case insensitive __setitem__.""" - return super(CaseInsensitiveDict, self).__getitem__(self.__class__._k(key)) - - def __setitem__(self, key: Hashable, value: Any): - """Case insensitive __setitem__.""" - super(CaseInsensitiveDict, self).__setitem__(self.__class__._k(key), value) - - def __delitem__(self, key: Hashable) -> Any: - """Case insensitive __delitem__.""" - return super(CaseInsensitiveDict, self).__delitem__(self.__class__._k(key)) - - def __contains__(self, key: Hashable) -> bool: - """Case insensitive __contains__.""" - return super(CaseInsensitiveDict, self).__contains__(self.__class__._k(key)) - - def pop(self, key: Hashable, *args, **kwargs) -> Any: - """Case insensitive pop.""" - return super(CaseInsensitiveDict, self).pop(self.__class__._k(key), *args, **kwargs) - - def get(self, key: Hashable, *args, **kwargs) -> Any: - """Case insensitive get.""" - return super(CaseInsensitiveDict, self).get(self.__class__._k(key), *args, **kwargs) - - def setdefault(self, key: Hashable, *args, **kwargs) -> Any: - """Case insensitive setdefault.""" - return super(CaseInsensitiveDict, self).setdefault(self.__class__._k(key), *args, **kwargs) - - def update(self, E: Any = None, **F) -> None: - """Case insensitive update.""" - super(CaseInsensitiveDict, self).update(self.__class__(E)) - super(CaseInsensitiveDict, self).update(self.__class__(**F)) - - def _convert_keys(self) -> None: - """Helper method to lowercase all existing string-like keys.""" - for k in list(self.keys()): - v = super(CaseInsensitiveDict, self).pop(k) - self.__setitem__(k, v) - - -def chunks(iterable: Iterable, size: int) -> Generator[Any, None, None]: - """ - Generator that allows you to iterate over any indexable collection in `size`-length chunks. - - Found: https://stackoverflow.com/a/312464/4022104 - """ - for i in range(0, len(iterable), size): - yield iterable[i:i + size] +__all__ = ['CogABCMeta', 'find_nth_occurrence', 'has_lines', 'pad_base64', 'send_to_paste_service'] diff --git a/bot/utils/cache.py b/bot/utils/cache.py new file mode 100644 index 000000000..68ce15607 --- /dev/null +++ b/bot/utils/cache.py @@ -0,0 +1,41 @@ +import functools +from collections import OrderedDict +from typing import Any, Callable + + +class AsyncCache: + """ + LRU cache implementation for coroutines. + + Once the cache exceeds the maximum size, keys are deleted in FIFO order. + + An offset may be optionally provided to be applied to the coroutine's arguments when creating the cache key. 
+ """ + + def __init__(self, max_size: int = 128): + self._cache = OrderedDict() + self._max_size = max_size + + def __call__(self, arg_offset: int = 0) -> Callable: + """Decorator for async cache.""" + + def decorator(function: Callable) -> Callable: + """Define the async cache decorator.""" + + @functools.wraps(function) + async def wrapper(*args) -> Any: + """Decorator wrapper for the caching logic.""" + key = args[arg_offset:] + + if key not in self._cache: + if len(self._cache) > self._max_size: + self._cache.popitem(last=False) + + self._cache[key] = await function(*args) + return self._cache[key] + return wrapper + return decorator + + def clear(self) -> None: + """Clear cache instance.""" + self._cache.clear() diff --git a/bot/utils/channel.py b/bot/utils/channel.py new file mode 100644 index 000000000..0c072184c --- /dev/null +++ b/bot/utils/channel.py @@ -0,0 +1,50 @@ +import logging + +import discord + +import bot +from bot import constants +from bot.constants import Categories + +log = logging.getLogger(__name__) + + +def is_help_channel(channel: discord.TextChannel) -> bool: + """Return True if `channel` is in one of the help categories (excluding dormant).""" + log.trace(f"Checking if #{channel} is a help channel.") + categories = (Categories.help_available, Categories.help_in_use) + + return any(is_in_category(channel, category) for category in categories) + + +def is_mod_channel(channel: discord.TextChannel) -> bool: + """True if `channel` is considered a mod channel.""" + if channel.id in constants.MODERATION_CHANNELS: + log.trace(f"Channel #{channel} is a configured mod channel") + return True + + elif any(is_in_category(channel, category) for category in constants.MODERATION_CATEGORIES): + log.trace(f"Channel #{channel} is in a configured mod category") + return True + + else: + log.trace(f"Channel #{channel} is not a mod channel") + return False + + +def is_in_category(channel: discord.TextChannel, category_id: int) -> bool: + """Return True if `channel` is within a category with `category_id`.""" + return getattr(channel, "category_id", None) == category_id + + +async def try_get_channel(channel_id: int) -> discord.abc.GuildChannel: + """Attempt to get or fetch a channel and return it.""" + log.trace(f"Getting the channel {channel_id}.") + + channel = bot.instance.get_channel(channel_id) + if not channel: + log.debug(f"Channel {channel_id} is not in cache; fetching from API.") + channel = await bot.instance.fetch_channel(channel_id) + + log.trace(f"Channel #{channel} ({channel_id}) retrieved.") + return channel diff --git a/bot/utils/checks.py b/bot/utils/checks.py index db56c347c..460a937d8 100644 --- a/bot/utils/checks.py +++ b/bot/utils/checks.py @@ -1,49 +1,122 @@ import datetime import logging -from typing import Callable, Iterable - -from discord.ext.commands import BucketType, Cog, Command, CommandOnCooldown, Context, Cooldown, CooldownMapping +from typing import Callable, Container, Iterable, Optional, Union + +from discord.ext.commands import ( + BucketType, + CheckFailure, + Cog, + Command, + CommandOnCooldown, + Context, + Cooldown, + CooldownMapping, + NoPrivateMessage, + has_any_role, +) + +from bot import constants log = logging.getLogger(__name__) -def with_role_check(ctx: Context, *role_ids: int) -> bool: - """Returns True if the user has any one of the roles in role_ids.""" - if not ctx.guild: # Return False in a DM - log.trace(f"{ctx.author} tried to use the '{ctx.command.name}'command from a DM. 
" - "This command is restricted by the with_role decorator. Rejecting request.") - return False +class InWhitelistCheckFailure(CheckFailure): + """Raised when the `in_whitelist` check fails.""" + + def __init__(self, redirect_channel: Optional[int]) -> None: + self.redirect_channel = redirect_channel + + if redirect_channel: + redirect_message = f" here. Please use the <#{redirect_channel}> channel instead" + else: + redirect_message = "" + + error_message = f"You are not allowed to use that command{redirect_message}." + + super().__init__(error_message) + + +def in_whitelist_check( + ctx: Context, + channels: Container[int] = (), + categories: Container[int] = (), + roles: Container[int] = (), + redirect: Optional[int] = constants.Channels.bot_commands, + fail_silently: bool = False, +) -> bool: + """ + Check if a command was issued in a whitelisted context. - for role in ctx.author.roles: - if role.id in role_ids: - log.trace(f"{ctx.author} has the '{role.name}' role, and passes the check.") - return True + The whitelists that can be provided are: - log.trace(f"{ctx.author} does not have the required role to use " - f"the '{ctx.command.name}' command, so the request is rejected.") + - `channels`: a container with channel ids for whitelisted channels + - `categories`: a container with category ids for whitelisted categories + - `roles`: a container with with role ids for whitelisted roles + + If the command was invoked in a context that was not whitelisted, the member is either + redirected to the `redirect` channel that was passed (default: #bot-commands) or simply + told that they're not allowed to use this particular command (if `None` was passed). + """ + if redirect and redirect not in channels: + # It does not make sense for the channel whitelist to not contain the redirection + # channel (if applicable). That's why we add the redirection channel to the `channels` + # container if it's not already in it. As we allow any container type to be passed, + # we first create a tuple in order to safely add the redirection channel. + # + # Note: It's possible for the redirect channel to be in a whitelisted category, but + # there's no easy way to check that and as a channel can easily be moved in and out of + # categories, it's probably not wise to rely on its category in any case. + channels = tuple(channels) + (redirect,) + + if channels and ctx.channel.id in channels: + log.trace(f"{ctx.author} may use the `{ctx.command.name}` command as they are in a whitelisted channel.") + return True + + # Only check the category id if we have a category whitelist and the channel has a `category_id` + if categories and hasattr(ctx.channel, "category_id") and ctx.channel.category_id in categories: + log.trace(f"{ctx.author} may use the `{ctx.command.name}` command as they are in a whitelisted category.") + return True + + # Only check the roles whitelist if we have one and ensure the author's roles attribute returns + # an iterable to prevent breakage in DM channels (for if we ever decide to enable commands there). + if roles and any(r.id in roles for r in getattr(ctx.author, "roles", ())): + log.trace(f"{ctx.author} may use the `{ctx.command.name}` command as they have a whitelisted role.") + return True + + log.trace(f"{ctx.author} may not use the `{ctx.command.name}` command within this context.") + + # Some commands are secret, and should produce no feedback at all. 
+ if not fail_silently: + raise InWhitelistCheckFailure(redirect) return False -def without_role_check(ctx: Context, *role_ids: int) -> bool: - """Returns True if the user does not have any of the roles in role_ids.""" - if not ctx.guild: # Return False in a DM - log.trace(f"{ctx.author} tried to use the '{ctx.command.name}' command from a DM. " - "This command is restricted by the without_role decorator. Rejecting request.") +async def has_any_role_check(ctx: Context, *roles: Union[str, int]) -> bool: + """ + Returns True if the context's author has any of the specified roles. + + `roles` are the names or IDs of the roles for which to check. + False is always returns if the context is outside a guild. + """ + try: + return await has_any_role(*roles).predicate(ctx) + except CheckFailure: return False - author_roles = [role.id for role in ctx.author.roles] - check = all(role not in author_roles for role in role_ids) - log.trace(f"{ctx.author} tried to call the '{ctx.command.name}' command. " - f"The result of the without_role check was {check}.") - return check +async def has_no_roles_check(ctx: Context, *roles: Union[str, int]) -> bool: + """ + Returns True if the context's author doesn't have any of the specified roles. -def in_channel_check(ctx: Context, *channel_ids: int) -> bool: - """Checks if the command was executed inside the list of specified channels.""" - check = ctx.channel.id in channel_ids - log.trace(f"{ctx.author} tried to call the '{ctx.command.name}' command. " - f"The result of the in_channel check was {check}.") - return check + `roles` are the names or IDs of the roles for which to check. + False is always returns if the context is outside a guild. + """ + try: + return not await has_any_role(*roles).predicate(ctx) + except NoPrivateMessage: + return False + except CheckFailure: + return True def cooldown_with_role_bypass(rate: int, per: float, type: BucketType = BucketType.default, *, diff --git a/bot/utils/extensions.py b/bot/utils/extensions.py new file mode 100644 index 000000000..50350ea8d --- /dev/null +++ b/bot/utils/extensions.py @@ -0,0 +1,34 @@ +import importlib +import inspect +import pkgutil +from typing import Iterator, NoReturn + +from bot import exts + + +def unqualify(name: str) -> str: + """Return an unqualified name given a qualified module/package `name`.""" + return name.rsplit(".", maxsplit=1)[-1] + + +def walk_extensions() -> Iterator[str]: + """Yield extension names from the bot.exts subpackage.""" + + def on_error(name: str) -> NoReturn: + raise ImportError(name=name) # pragma: no cover + + for module in pkgutil.walk_packages(exts.__path__, f"{exts.__name__}.", onerror=on_error): + if unqualify(module.name).startswith("_"): + # Ignore module/package names starting with an underscore. + continue + + if module.ispkg: + imported = importlib.import_module(module.name) + if not inspect.isfunction(getattr(imported, "setup", None)): + # If it lacks a setup function, it's not an extension. 
+ continue + + yield module.name + + +EXTENSIONS = frozenset(walk_extensions()) diff --git a/bot/utils/function.py b/bot/utils/function.py new file mode 100644 index 000000000..3ab32fe3c --- /dev/null +++ b/bot/utils/function.py @@ -0,0 +1,75 @@ +"""Utilities for interaction with functions.""" + +import inspect +import typing as t + +Argument = t.Union[int, str] +BoundArgs = t.OrderedDict[str, t.Any] +Decorator = t.Callable[[t.Callable], t.Callable] +ArgValGetter = t.Callable[[BoundArgs], t.Any] + + +def get_arg_value(name_or_pos: Argument, arguments: BoundArgs) -> t.Any: + """ + Return a value from `arguments` based on a name or position. + + `arguments` is an ordered mapping of parameter names to argument values. + + Raise TypeError if `name_or_pos` isn't a str or int. + Raise ValueError if `name_or_pos` does not match any argument. + """ + if isinstance(name_or_pos, int): + # Convert arguments to a tuple to make them indexable. + arg_values = tuple(arguments.items()) + arg_pos = name_or_pos + + try: + name, value = arg_values[arg_pos] + return value + except IndexError: + raise ValueError(f"Argument position {arg_pos} is out of bounds.") + elif isinstance(name_or_pos, str): + arg_name = name_or_pos + try: + return arguments[arg_name] + except KeyError: + raise ValueError(f"Argument {arg_name!r} doesn't exist.") + else: + raise TypeError("'arg' must either be an int (positional index) or a str (keyword).") + + +def get_arg_value_wrapper( + decorator_func: t.Callable[[ArgValGetter], Decorator], + name_or_pos: Argument, + func: t.Callable[[t.Any], t.Any] = None, +) -> Decorator: + """ + Call `decorator_func` with the value of the arg at the given name/position. + + `decorator_func` must accept a callable as a parameter to which it will pass a mapping of + parameter names to argument values of the function it's decorating. + + `func` is an optional callable which will return a new value given the argument's value. + + Return the decorator returned by `decorator_func`. + """ + def wrapper(args: BoundArgs) -> t.Any: + value = get_arg_value(name_or_pos, args) + if func: + value = func(value) + return value + + return decorator_func(wrapper) + + +def get_bound_args(func: t.Callable, args: t.Tuple, kwargs: t.Dict[str, t.Any]) -> BoundArgs: + """ + Bind `args` and `kwargs` to `func` and return a mapping of parameter names to argument values. + + Default parameter values are also set. + """ + sig = inspect.signature(func) + bound_args = sig.bind(*args, **kwargs) + bound_args.apply_defaults() + + return bound_args.arguments diff --git a/bot/utils/helpers.py b/bot/utils/helpers.py new file mode 100644 index 000000000..3501a3933 --- /dev/null +++ b/bot/utils/helpers.py @@ -0,0 +1,32 @@ +from abc import ABCMeta +from typing import Optional + +from discord.ext.commands import CogMeta + + +class CogABCMeta(CogMeta, ABCMeta): + """Metaclass for ABCs meant to be implemented as Cogs.""" + + +def find_nth_occurrence(string: str, substring: str, n: int) -> Optional[int]: + """Return index of `n`th occurrence of `substring` in `string`, or None if not found.""" + index = 0 + for _ in range(n): + index = string.find(substring, index+1) + if index == -1: + return None + return index + + +def has_lines(string: str, count: int) -> bool: + """Return True if `string` has at least `count` lines.""" + # Benchmarks show this is significantly faster than using str.count("\n") or a for loop & break. 
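+    # maxsplit=count-1 makes str.split stop early, so at most `count` parts are produced.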
+ split = string.split("\n", count - 1) + + # Make sure the last part isn't empty, which would happen if there was a final newline. + return split[-1] and len(split) == count + + +def pad_base64(data: str) -> str: + """Return base64 `data` with padding characters to ensure its length is a multiple of 4.""" + return data + "=" * (-len(data) % 4) diff --git a/bot/utils/lock.py b/bot/utils/lock.py new file mode 100644 index 000000000..7aaafbc88 --- /dev/null +++ b/bot/utils/lock.py @@ -0,0 +1,114 @@ +import inspect +import logging +from collections import defaultdict +from functools import partial, wraps +from typing import Any, Awaitable, Callable, Hashable, Union +from weakref import WeakValueDictionary + +from bot.errors import LockedResourceError +from bot.utils import function + +log = logging.getLogger(__name__) +__lock_dicts = defaultdict(WeakValueDictionary) + +_IdCallableReturn = Union[Hashable, Awaitable[Hashable]] +_IdCallable = Callable[[function.BoundArgs], _IdCallableReturn] +ResourceId = Union[Hashable, _IdCallable] + + +class LockGuard: + """ + A context manager which acquires and releases a lock (mutex). + + Raise RuntimeError if trying to acquire a locked lock. + """ + + def __init__(self): + self._locked = False + + @property + def locked(self) -> bool: + """Return True if currently locked or False if unlocked.""" + return self._locked + + def __enter__(self): + if self._locked: + raise RuntimeError("Cannot acquire a locked lock.") + + self._locked = True + + def __exit__(self, _exc_type, _exc_value, _traceback): # noqa: ANN001 + self._locked = False + return False # Indicate any raised exception shouldn't be suppressed. + + +def lock(namespace: Hashable, resource_id: ResourceId, *, raise_error: bool = False) -> Callable: + """ + Turn the decorated coroutine function into a mutually exclusive operation on a `resource_id`. + + If any other mutually exclusive function currently holds the lock for a resource, do not run the + decorated function and return None. If `raise_error` is True, raise `LockedResourceError` if + the lock cannot be acquired. + + `namespace` is an identifier used to prevent collisions among resource IDs. + + `resource_id` identifies a resource on which to perform a mutually exclusive operation. + It may also be a callable or awaitable which will return the resource ID given an ordered + mapping of the parameters' names to arguments' values. + + If decorating a command, this decorator must go before (below) the `command` decorator. + """ + def decorator(func: Callable) -> Callable: + name = func.__name__ + + @wraps(func) + async def wrapper(*args, **kwargs) -> Any: + log.trace(f"{name}: mutually exclusive decorator called") + + if callable(resource_id): + log.trace(f"{name}: binding args to signature") + bound_args = function.get_bound_args(func, args, kwargs) + + log.trace(f"{name}: calling the given callable to get the resource ID") + id_ = resource_id(bound_args) + + if inspect.isawaitable(id_): + log.trace(f"{name}: awaiting to get resource ID") + id_ = await id_ + else: + id_ = resource_id + + log.trace(f"{name}: getting lock for resource {id_!r} under namespace {namespace!r}") + + # Get the lock for the ID. Create a lock if one doesn't exist yet. 
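+            # `locks` is a WeakValueDictionary, so a LockGuard entry disappears automatically
+            # once no in-flight call still holds a reference to it.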
+ locks = __lock_dicts[namespace] + lock_guard = locks.setdefault(id_, LockGuard()) + + if not lock_guard.locked: + log.debug(f"{name}: resource {namespace!r}:{id_!r} is free; acquiring it...") + with lock_guard: + return await func(*args, **kwargs) + else: + log.info(f"{name}: aborted because resource {namespace!r}:{id_!r} is locked") + if raise_error: + raise LockedResourceError(str(namespace), id_) + + return wrapper + return decorator + + +def lock_arg( + namespace: Hashable, + name_or_pos: function.Argument, + func: Callable[[Any], _IdCallableReturn] = None, + *, + raise_error: bool = False, +) -> Callable: + """ + Apply the `lock` decorator using the value of the arg at the given name/position as the ID. + + `func` is an optional callable or awaitable which will return the ID given the argument value. + See `lock` docs for more information. + """ + decorator_func = partial(lock, namespace, raise_error=raise_error) + return function.get_arg_value_wrapper(decorator_func, name_or_pos, func) diff --git a/bot/utils/messages.py b/bot/utils/messages.py index a36edc774..42bde358d 100644 --- a/bot/utils/messages.py +++ b/bot/utils/messages.py @@ -1,45 +1,46 @@ import asyncio import contextlib import logging +import random +import re from io import BytesIO from typing import List, Optional, Sequence, Union -from discord import Client, Embed, File, Member, Message, Reaction, TextChannel, Webhook -from discord.abc import Snowflake +import discord from discord.errors import HTTPException +from discord.ext.commands import Context -from bot.constants import Emojis +import bot +from bot.constants import Emojis, NEGATIVE_REPLIES log = logging.getLogger(__name__) async def wait_for_deletion( - message: Message, - user_ids: Sequence[Snowflake], + message: discord.Message, + user_ids: Sequence[discord.abc.Snowflake], deletion_emojis: Sequence[str] = (Emojis.trashcan,), timeout: float = 60 * 5, attach_emojis: bool = True, - client: Optional[Client] = None ) -> None: """ Wait for up to `timeout` seconds for a reaction by any of the specified `user_ids` to delete the message. An `attach_emojis` bool may be specified to determine whether to attach the given - `deletion_emojis` to the message in the given `context` - - A `client` instance may be optionally specified, otherwise client will be taken from the - guild of the message. + `deletion_emojis` to the message in the given `context`. 
""" - if message.guild is None and client is None: + if message.guild is None: raise ValueError("Message must be sent on a guild") - bot = client or message.guild.me - if attach_emojis: for emoji in deletion_emojis: - await message.add_reaction(emoji) + try: + await message.add_reaction(emoji) + except discord.NotFound: + log.trace(f"Aborting wait_for_deletion: message {message.id} deleted prematurely.") + return - def check(reaction: Reaction, user: Member) -> bool: + def check(reaction: discord.Reaction, user: discord.Member) -> bool: """Check that the deletion emoji is reacted by the appropriate user.""" return ( reaction.message.id == message.id @@ -48,22 +49,31 @@ async def wait_for_deletion( ) with contextlib.suppress(asyncio.TimeoutError): - await bot.wait_for('reaction_add', check=check, timeout=timeout) + await bot.instance.wait_for('reaction_add', check=check, timeout=timeout) await message.delete() async def send_attachments( - message: Message, - destination: Union[TextChannel, Webhook], - link_large: bool = True + message: discord.Message, + destination: Union[discord.TextChannel, discord.Webhook], + link_large: bool = True, + use_cached: bool = False, + **kwargs ) -> List[str]: """ Re-upload the message's attachments to the destination and return a list of their new URLs. Each attachment is sent as a separate message to more easily comply with the request/file size limit. If link_large is True, attachments which are too large are instead grouped into a single - embed which links to them. + embed which links to them. Extra kwargs will be passed to send() when sending the attachment. """ + webhook_send_kwargs = { + 'username': message.author.display_name, + 'avatar_url': message.author.avatar_url, + } + webhook_send_kwargs.update(kwargs) + webhook_send_kwargs['username'] = sub_clyde(webhook_send_kwargs['username']) + large = [] urls = [] for attachment in message.attachments: @@ -77,40 +87,64 @@ async def send_attachments( # but some may get through hence the try-catch. 
if attachment.size <= destination.guild.filesize_limit - 512: with BytesIO() as file: - await attachment.save(file, use_cached=True) - attachment_file = File(file, filename=attachment.filename) + await attachment.save(file, use_cached=use_cached) + attachment_file = discord.File(file, filename=attachment.filename) - if isinstance(destination, TextChannel): - msg = await destination.send(file=attachment_file) + if isinstance(destination, discord.TextChannel): + msg = await destination.send(file=attachment_file, **kwargs) urls.append(msg.attachments[0].url) else: - await destination.send( - file=attachment_file, - username=message.author.display_name, - avatar_url=message.author.avatar_url - ) + await destination.send(file=attachment_file, **webhook_send_kwargs) elif link_large: large.append(attachment) else: - log.warning(f"{failure_msg} because it's too large.") + log.info(f"{failure_msg} because it's too large.") except HTTPException as e: if link_large and e.status == 413: large.append(attachment) else: - log.warning(f"{failure_msg} with status {e.status}.") + log.warning(f"{failure_msg} with status {e.status}.", exc_info=e) if link_large and large: - desc = f"\n".join(f"[{attachment.filename}]({attachment.url})" for attachment in large) - embed = Embed(description=desc) + desc = "\n".join(f"[{attachment.filename}]({attachment.url})" for attachment in large) + embed = discord.Embed(description=desc) embed.set_footer(text="Attachments exceed upload size limit.") - if isinstance(destination, TextChannel): - await destination.send(embed=embed) + if isinstance(destination, discord.TextChannel): + await destination.send(embed=embed, **kwargs) else: - await destination.send( - embed=embed, - username=message.author.display_name, - avatar_url=message.author.avatar_url - ) + await destination.send(embed=embed, **webhook_send_kwargs) return urls + + +def sub_clyde(username: Optional[str]) -> Optional[str]: + """ + Replace "e"/"E" in any "clyde" in `username` with a Cyrillic "е"/"E" and return the new string. + + Discord disallows "clyde" anywhere in the username for webhooks. It will return a 400. + Return None only if `username` is None. + """ + def replace_e(match: re.Match) -> str: + char = "е" if match[2] == "e" else "Е" + return match[1] + char + + if username: + return re.sub(r"(clyd)(e)", replace_e, username, flags=re.I) + else: + return username # Empty string or None + + +async def send_denial(ctx: Context, reason: str) -> None: + """Send an embed denying the user with the given reason.""" + embed = discord.Embed() + embed.colour = discord.Colour.red() + embed.title = random.choice(NEGATIVE_REPLIES) + embed.description = reason + + await ctx.send(embed=embed) + + +def format_user(user: discord.abc.User) -> str: + """Return a string for `user` which has their mention and ID.""" + return f"{user.mention} (`{user.id}`)" diff --git a/bot/utils/regex.py b/bot/utils/regex.py new file mode 100644 index 000000000..0d2068f90 --- /dev/null +++ b/bot/utils/regex.py @@ -0,0 +1,12 @@ +import re + +INVITE_RE = re.compile( + r"(?:discord(?:[\.,]|dot)gg|" # Could be discord.gg/ + r"discord(?:[\.,]|dot)com(?:\/|slash)invite|" # or discord.com/invite/ + r"discordapp(?:[\.,]|dot)com(?:\/|slash)invite|" # or discordapp.com/invite/ + r"discord(?:[\.,]|dot)me|" # or discord.me + r"discord(?:[\.,]|dot)io" # or discord.io. 
+ r")(?:[\/]|slash)" # / or 'slash' + r"([a-zA-Z0-9\-]+)", # the invite code itself + flags=re.IGNORECASE +) diff --git a/bot/utils/scheduling.py b/bot/utils/scheduling.py index ee6c0a8e6..03f31d78f 100644 --- a/bot/utils/scheduling.py +++ b/bot/utils/scheduling.py @@ -1,74 +1,157 @@ import asyncio import contextlib +import inspect import logging -from abc import abstractmethod -from typing import Coroutine, Dict, Union +import typing as t +from datetime import datetime +from functools import partial -from bot.utils import CogABCMeta -log = logging.getLogger(__name__) +class Scheduler: + """ + Schedule the execution of coroutines and keep track of them. + When instantiating a Scheduler, a name must be provided. This name is used to distinguish the + instance's log messages from other instances. Using the name of the class or module containing + the instance is suggested. -class Scheduler(metaclass=CogABCMeta): - """Task scheduler.""" + Coroutines can be scheduled immediately with `schedule` or in the future with `schedule_at` + or `schedule_later`. A unique ID is required to be given in order to keep track of the + resulting Tasks. Any scheduled task can be cancelled prematurely using `cancel` by providing + the same ID used to schedule it. The `in` operator is supported for checking if a task with a + given ID is currently scheduled. - def __init__(self): + Any exception raised in a scheduled task is logged when the task is done. + """ - self.cog_name = self.__class__.__name__ # keep track of the child cog's name so the logs are clear. - self.scheduled_tasks: Dict[str, asyncio.Task] = {} + def __init__(self, name: str): + self.name = name - @abstractmethod - async def _scheduled_task(self, task_object: dict) -> None: - """ - A coroutine which handles the scheduling. + self._log = logging.getLogger(f"{__name__}.{name}") + self._scheduled_tasks: t.Dict[t.Hashable, asyncio.Task] = {} - This is added to the scheduled tasks, and should wait the task duration, execute the desired - code, then clean up the task. + def __contains__(self, task_id: t.Hashable) -> bool: + """Return True if a task with the given `task_id` is currently scheduled.""" + return task_id in self._scheduled_tasks - For example, in Reminders this will wait for the reminder duration, send the reminder, - then make a site API request to delete the reminder from the database. + def schedule(self, task_id: t.Hashable, coroutine: t.Coroutine) -> None: """ + Schedule the execution of a `coroutine`. - def schedule_task(self, loop: asyncio.AbstractEventLoop, task_id: str, task_data: dict) -> None: + If a task with `task_id` already exists, close `coroutine` instead of scheduling it. This + prevents unawaited coroutine warnings. Don't pass a coroutine that'll be re-used elsewhere. """ - Schedules a task. + self._log.trace(f"Scheduling task #{task_id}...") - `task_data` is passed to `Scheduler._scheduled_expiration` - """ - if task_id in self.scheduled_tasks: - log.debug( - f"{self.cog_name}: did not schedule task #{task_id}; task was already scheduled." 
- ) + msg = f"Cannot schedule an already started coroutine for #{task_id}" + assert inspect.getcoroutinestate(coroutine) == "CORO_CREATED", msg + + if task_id in self._scheduled_tasks: + self._log.debug(f"Did not schedule task #{task_id}; task was already scheduled.") + coroutine.close() return - task: asyncio.Task = create_task(loop, self._scheduled_task(task_data)) + task = asyncio.create_task(coroutine, name=f"{self.name}_{task_id}") + task.add_done_callback(partial(self._task_done_callback, task_id)) - self.scheduled_tasks[task_id] = task - log.debug(f"{self.cog_name}: scheduled task #{task_id}.") + self._scheduled_tasks[task_id] = task + self._log.debug(f"Scheduled task #{task_id} {id(task)}.") - def cancel_task(self, task_id: str) -> None: - """Un-schedules a task.""" - task = self.scheduled_tasks.get(task_id) + def schedule_at(self, time: datetime, task_id: t.Hashable, coroutine: t.Coroutine) -> None: + """ + Schedule `coroutine` to be executed at the given naïve UTC `time`. - if task is None: - log.warning(f"{self.cog_name}: Failed to unschedule {task_id} (no task found).") - return + If `time` is in the past, schedule `coroutine` immediately. - task.cancel() - log.debug(f"{self.cog_name}: unscheduled task #{task_id}.") - del self.scheduled_tasks[task_id] + If a task with `task_id` already exists, close `coroutine` instead of scheduling it. This + prevents unawaited coroutine warnings. Don't pass a coroutine that'll be re-used elsewhere. + """ + delay = (time - datetime.utcnow()).total_seconds() + if delay > 0: + coroutine = self._await_later(delay, task_id, coroutine) + self.schedule(task_id, coroutine) -def create_task(loop: asyncio.AbstractEventLoop, coro_or_future: Union[Coroutine, asyncio.Future]) -> asyncio.Task: - """Creates an asyncio.Task object from a coroutine or future object.""" - task: asyncio.Task = asyncio.ensure_future(coro_or_future, loop=loop) + def schedule_later(self, delay: t.Union[int, float], task_id: t.Hashable, coroutine: t.Coroutine) -> None: + """ + Schedule `coroutine` to be executed after the given `delay` number of seconds. - # Silently ignore exceptions in a callback (handles the CancelledError nonsense) - task.add_done_callback(_silent_exception) - return task + If a task with `task_id` already exists, close `coroutine` instead of scheduling it. This + prevents unawaited coroutine warnings. Don't pass a coroutine that'll be re-used elsewhere. + """ + self.schedule(task_id, self._await_later(delay, task_id, coroutine)) + + def cancel(self, task_id: t.Hashable) -> None: + """Unschedule the task identified by `task_id`. Log a warning if the task doesn't exist.""" + self._log.trace(f"Cancelling task #{task_id}...") + + try: + task = self._scheduled_tasks.pop(task_id) + except KeyError: + self._log.warning(f"Failed to unschedule {task_id} (no task found).") + else: + task.cancel() + + self._log.debug(f"Unscheduled task #{task_id} {id(task)}.") + + def cancel_all(self) -> None: + """Unschedule all known tasks.""" + self._log.debug("Unscheduling all tasks") + + for task_id in self._scheduled_tasks.copy(): + self.cancel(task_id) + + async def _await_later(self, delay: t.Union[int, float], task_id: t.Hashable, coroutine: t.Coroutine) -> None: + """Await `coroutine` after the given `delay` number of seconds.""" + try: + self._log.trace(f"Waiting {delay} seconds before awaiting coroutine for #{task_id}.") + await asyncio.sleep(delay) + + # Use asyncio.shield to prevent the coroutine from cancelling itself. 
+ self._log.trace(f"Done waiting for #{task_id}; now awaiting the coroutine.") + await asyncio.shield(coroutine) + finally: + # Close it to prevent unawaited coroutine warnings, + # which would happen if the task was cancelled during the sleep. + # Only close it if it's not been awaited yet. This check is important because the + # coroutine may cancel this task, which would also trigger the finally block. + state = inspect.getcoroutinestate(coroutine) + if state == "CORO_CREATED": + self._log.debug(f"Explicitly closing the coroutine for #{task_id}.") + coroutine.close() + else: + self._log.debug(f"Finally block reached for #{task_id}; {state=}") + + def _task_done_callback(self, task_id: t.Hashable, done_task: asyncio.Task) -> None: + """ + Delete the task and raise its exception if one exists. + If `done_task` and the task associated with `task_id` are different, then the latter + will not be deleted. In this case, a new task was likely rescheduled with the same ID. + """ + self._log.trace(f"Performing done callback for task #{task_id} {id(done_task)}.") + + scheduled_task = self._scheduled_tasks.get(task_id) + + if scheduled_task and done_task is scheduled_task: + # A task for the ID exists and is the same as the done task. + # Since this is the done callback, the task is already done so no need to cancel it. + self._log.trace(f"Deleting task #{task_id} {id(done_task)}.") + del self._scheduled_tasks[task_id] + elif scheduled_task: + # A new task was likely rescheduled with the same ID. + self._log.debug( + f"The scheduled task #{task_id} {id(scheduled_task)} " + f"and the done task {id(done_task)} differ." + ) + elif not done_task.cancelled(): + self._log.warning( + f"Task #{task_id} not found while handling task {id(done_task)}! " + f"A task somehow got unscheduled improperly (i.e. deleted but not cancelled)." + ) -def _silent_exception(future: asyncio.Future) -> None: - """Suppress future's exception.""" - with contextlib.suppress(Exception): - future.exception() + with contextlib.suppress(asyncio.CancelledError): + exception = done_task.exception() + # Log the exception if one exists. + if exception: + self._log.error(f"Error in task #{task_id} {id(done_task)}!", exc_info=exception) diff --git a/bot/utils/services.py b/bot/utils/services.py new file mode 100644 index 000000000..5949c9e48 --- /dev/null +++ b/bot/utils/services.py @@ -0,0 +1,54 @@ +import logging +from typing import Optional + +from aiohttp import ClientConnectorError + +import bot +from bot.constants import URLs + +log = logging.getLogger(__name__) + +FAILED_REQUEST_ATTEMPTS = 3 + + +async def send_to_paste_service(contents: str, *, extension: str = "") -> Optional[str]: + """ + Upload `contents` to the paste service. + + `extension` is added to the output URL + + When an error occurs, `None` is returned, otherwise the generated URL with the suffix. + """ + extension = extension and f".{extension}" + log.debug(f"Sending contents of size {len(contents.encode())} bytes to paste service.") + paste_url = URLs.paste_service.format(key="documents") + for attempt in range(1, FAILED_REQUEST_ATTEMPTS + 1): + try: + async with bot.instance.http_session.post(paste_url, data=contents) as response: + response_json = await response.json() + except ClientConnectorError: + log.warning( + f"Failed to connect to paste service at url {paste_url}, " + f"trying again ({attempt}/{FAILED_REQUEST_ATTEMPTS})." 
+ ) + continue + except Exception: + log.exception( + f"An unexpected error has occurred during handling of the request, " + f"trying again ({attempt}/{FAILED_REQUEST_ATTEMPTS})." + ) + continue + + if "message" in response_json: + log.warning( + f"Paste service returned error {response_json['message']} with status code {response.status}, " + f"trying again ({attempt}/{FAILED_REQUEST_ATTEMPTS})." + ) + continue + elif "key" in response_json: + log.info(f"Successfully uploaded contents to paste service behind key {response_json['key']}.") + return URLs.paste_service.format(key=response_json['key']) + extension + log.warning( + f"Got unexpected JSON response from paste service: {response_json}\n" + f"trying again ({attempt}/{FAILED_REQUEST_ATTEMPTS})." + ) diff --git a/bot/utils/time.py b/bot/utils/time.py index 77060143c..47e49904b 100644 --- a/bot/utils/time.py +++ b/bot/utils/time.py @@ -20,7 +20,9 @@ def _stringify_time_unit(value: int, unit: str) -> str: >>> _stringify_time_unit(0, "minutes") "less than a minute" """ - if value == 1: + if unit == "seconds" and value == 0: + return "0 seconds" + elif value == 1: return f"{value} {unit[:-1]}" elif value == 0: return f"less than a {unit[:-1]}" diff --git a/bot/utils/webhooks.py b/bot/utils/webhooks.py new file mode 100644 index 000000000..66f82ec66 --- /dev/null +++ b/bot/utils/webhooks.py @@ -0,0 +1,34 @@ +import logging +from typing import Optional + +import discord +from discord import Embed + +from bot.utils.messages import sub_clyde + +log = logging.getLogger(__name__) + + +async def send_webhook( + webhook: discord.Webhook, + content: Optional[str] = None, + username: Optional[str] = None, + avatar_url: Optional[str] = None, + embed: Optional[Embed] = None, + wait: Optional[bool] = False +) -> discord.Message: + """ + Send a message using the provided webhook. + + This uses sub_clyde() and tries for an HTTPException to ensure it doesn't crash. + """ + try: + return await webhook.send( + content=content, + username=sub_clyde(username), + avatar_url=avatar_url, + embed=embed, + wait=wait, + ) + except discord.HTTPException: + log.exception("Failed to send a message to the webhook!") diff --git a/config-default.yml b/config-default.yml index 2eaf8ee06..ca89bb639 100644 --- a/config-default.yml +++ b/config-default.yml @@ -3,6 +3,16 @@ bot: token: !ENV "BOT_TOKEN" sentry_dsn: !ENV "BOT_SENTRY_DSN" + redis: + host: "redis.default.svc.cluster.local" + port: 6379 + password: !ENV "REDIS_PASSWORD" + use_fakeredis: false + + stats: + statsd_host: "graphite.default.svc.cluster.local" + presence_update_timeout: 300 + cooldowns: # Per channel, per tag. 
tags: 60 @@ -17,6 +27,7 @@ style: soft_red: 0xcd6d6d soft_green: 0x68c290 soft_orange: 0xf9cb54 + bright_green: 0x01d277 emojis: defcon_disabled: "<:defcondisabled:470326273952972810>" @@ -28,6 +39,21 @@ style: status_dnd: "<:status_dnd:470326272082313216>" status_offline: "<:status_offline:470326266537705472>" + badge_staff: "<:discord_staff:743882896498098226>" + badge_partner: "<:partner:748666453242413136>" + badge_hypesquad: "<:hypesquad_events:743882896892362873>" + badge_bug_hunter: "<:bug_hunter_lvl1:743882896372269137>" + badge_hypesquad_bravery: "<:hypesquad_bravery:743882896745693335>" + badge_hypesquad_brilliance: "<:hypesquad_brilliance:743882896938631248>" + badge_hypesquad_balance: "<:hypesquad_balance:743882896460480625>" + badge_early_supporter: "<:early_supporter:743882896909140058>" + badge_bug_hunter_level_2: "<:bug_hunter_lvl2:743882896611344505>" + badge_verified_bot_developer: "<:verified_bot_dev:743882897299210310>" + + incident_actioned: "<:incident_actioned:719645530128646266>" + incident_unactioned: "<:incident_unactioned:719645583245180960>" + incident_investigating: "<:incident_investigating:719645658671480924>" + failmail: "<:failmail:633660039931887616>" trashcan: "<:trashcan:637136429717389331>" @@ -35,24 +61,12 @@ style: pencil: "\u270F" new: "\U0001F195" cross_mark: "\u274C" + check_mark: "\u2705" - ducky_yellow: &DUCKY_YELLOW 574951975574175744 - ducky_blurple: &DUCKY_BLURPLE 574951975310065675 - ducky_regal: &DUCKY_REGAL 637883439185395712 - ducky_camo: &DUCKY_CAMO 637914731566596096 - ducky_ninja: &DUCKY_NINJA 637923502535606293 - ducky_devil: &DUCKY_DEVIL 637925314982576139 - ducky_tube: &DUCKY_TUBE 637881368008851456 - ducky_hunt: &DUCKY_HUNT 639355090909528084 - ducky_wizard: &DUCKY_WIZARD 639355996954689536 - ducky_party: &DUCKY_PARTY 639468753440210977 - ducky_angel: &DUCKY_ANGEL 640121935610511361 - ducky_maul: &DUCKY_MAUL 640137724958867467 - ducky_santa: &DUCKY_SANTA 655360331002019870 - - upvotes: "<:upvotes:638729835245731840>" - comments: "<:comments:638729835073765387>" - user: "<:user:638729835442602003>" + # emotes used for #reddit + upvotes: "<:reddit_upvotes:755845219890757644>" + comments: "<:reddit_comments:755845255001014384>" + user: "<:reddit_users:755845303822974997>" icons: crown_blurple: "https://cdn.discordapp.com/emojis/469964153289965568.png" @@ -106,172 +120,210 @@ style: voice_state_green: "https://cdn.discordapp.com/emojis/656899770094452754.png" voice_state_red: "https://cdn.discordapp.com/emojis/656899769905709076.png" + green_checkmark: "https://raw.githubusercontent.com/python-discord/branding/master/icons/checkmark/green-checkmark-dist.png" + + guild: id: 267624335836053506 + invite: "https://discord.gg/python" categories: - python_help: 356013061213126657 + help_available: 691405807388196926 + help_in_use: 696958401460043776 + help_dormant: 691405908919451718 + modmail: &MODMAIL 714494672835444826 + logs: &LOGS 468520609152892958 + voice: 356013253765234688 channels: - admins: &ADMINS 365960823622991872 - admin_spam: &ADMIN_SPAM 563594791770914816 - admins_voice: &ADMINS_VOICE 500734494840717332 - announcements: 354619224620138496 - attachment_log: &ATTCH_LOG 649243850006855680 - big_brother_logs: &BBLOGS 468507907357409333 - bot: 267659945086812160 - checkpoint_test: 422077681434099723 - defcon: &DEFCON 464469101889454091 - devlog: &DEVLOG 622895325144940554 - devtest: &DEVTEST 414574275865870337 - esoteric: 470884583684964352 - help_0: 303906576991780866 - help_1: 303906556754395136 - help_2: 303906514266226689 - 
help_3: 439702951246692352 - help_4: 451312046647148554 - help_5: 454941769734422538 - help_6: 587375753306570782 - help_7: 587375768556797982 - helpers: &HELPERS 385474242440986624 - message_log: &MESSAGE_LOG 467752170159079424 - meta: 429409067623251969 - mod_spam: &MOD_SPAM 620607373828030464 - mods: &MODS 305126844661760000 - mod_alerts: 473092532147060736 - modlog: &MODLOG 282638479504965634 - off_topic_0: 291284109232308226 - off_topic_1: 463035241142026251 - off_topic_2: 463035268514185226 - organisation: &ORGANISATION 551789653284356126 - python: 267624335836053506 - reddit: 458224812528238616 - staff_lounge: &STAFF_LOUNGE 464905259261755392 - staff_voice: &STAFF_VOICE 412375055910043655 - talent_pool: &TALENT_POOL 534321732593647616 - userlog: 528976905546760203 - user_event_a: &USER_EVENT_A 592000283102674944 - verification: 352442727016693763 - voice_log: 640292421988646961 - - staff_channels: [*ADMINS, *ADMIN_SPAM, *MOD_SPAM, *MODS, *HELPERS, *ORGANISATION, *DEFCON] - ignored: [*ADMINS, *MESSAGE_LOG, *MODLOG, *ADMINS_VOICE, *STAFF_VOICE, *ATTCH_LOG] + # Public announcement and news channels + change_log: &CHANGE_LOG 748238795236704388 + announcements: &ANNOUNCEMENTS 354619224620138496 + python_news: &PYNEWS_CHANNEL 704372456592506880 + python_events: &PYEVENTS_CHANNEL 729674110270963822 + mailing_lists: &MAILING_LISTS 704372456592506880 + reddit: &REDDIT_CHANNEL 458224812528238616 + user_event_announcements: &USER_EVENT_A 592000283102674944 + + # Development + dev_contrib: &DEV_CONTRIB 635950537262759947 + dev_core: &DEV_CORE 411200599653351425 + dev_log: &DEV_LOG 622895325144940554 + + # Discussion + meta: 429409067623251969 + python_discussion: &PY_DISCUSSION 267624335836053506 + + # Python Help: Available + cooldown: 720603994149486673 + + # Logs + attachment_log: &ATTACH_LOG 649243850006855680 + message_log: &MESSAGE_LOG 467752170159079424 + mod_log: &MOD_LOG 282638479504965634 + user_log: 528976905546760203 + voice_log: 640292421988646961 + dm_log: 653713721625018428 + + # Off-topic + off_topic_0: 291284109232308226 + off_topic_1: 463035241142026251 + off_topic_2: 463035268514185226 + + # Special + bot_commands: &BOT_CMD 267659945086812160 + esoteric: 470884583684964352 + verification: 352442727016693763 + voice_gate: 764802555427029012 + + # Staff + admins: &ADMINS 365960823622991872 + admin_spam: &ADMIN_SPAM 563594791770914816 + defcon: &DEFCON 464469101889454091 + helpers: &HELPERS 385474242440986624 + incidents: 714214212200562749 + incidents_archive: 720668923636351037 + mods: &MODS 305126844661760000 + mod_alerts: 473092532147060736 + mod_spam: &MOD_SPAM 620607373828030464 + organisation: &ORGANISATION 551789653284356126 + staff_lounge: &STAFF_LOUNGE 464905259261755392 + duck_pond: &DUCK_POND 637820308341915648 + + # Staff announcement channels + staff_announcements: &STAFF_ANNOUNCEMENTS 464033278631084042 + mod_announcements: &MOD_ANNOUNCEMENTS 372115205867700225 + admin_announcements: &ADMIN_ANNOUNCEMENTS 749736155569848370 + + # Voice Channels + admins_voice: &ADMINS_VOICE 500734494840717332 + code_help_voice_1: 751592231726481530 + code_help_voice_2: 764232549840846858 + general_voice: 751591688538947646 + staff_voice: &STAFF_VOICE 412375055910043655 + + # Voice Chat + code_help_chat_1: 755154969761677312 + code_help_chat_2: 766330079135268884 + staff_voice_chat: 541638762007101470 + voice_chat: 412357430186344448 + + # Watch + big_brother_logs: &BB_LOGS 468507907357409333 + talent_pool: &TALENT_POOL 534321732593647616 + + moderation_categories: + - *MODMAIL + - 
*LOGS + + moderation_channels: + - *ADMINS + - *ADMIN_SPAM + - *MODS + - *MOD_SPAM + + # Modlog cog ignores events which occur in these channels + modlog_blacklist: + - *ADMINS + - *ADMINS_VOICE + - *ATTACH_LOG + - *MESSAGE_LOG + - *MOD_LOG + - *STAFF_VOICE + + reminder_whitelist: + - *BOT_CMD + - *DEV_CONTRIB roles: - admin: &ADMIN_ROLE 267628507062992896 - announcements: 463658397560995840 - champion: 430492892331769857 - contributor: 295488872404484098 - core_developer: 587606783669829632 - helpers: 267630620367257601 - jammer: 591786436651646989 - moderator: &MOD_ROLE 267629731250176001 - muted: &MUTED_ROLE 277914926603829249 - owner: &OWNER_ROLE 267627879762755584 - partners: 323426753857191936 - rockstars: &ROCKSTARS_ROLE 458226413825294336 - team_leader: 501324292341104650 - verified: 352427296948486144 + announcements: 463658397560995840 + contributors: 295488872404484098 + help_cooldown: 699189276025421825 + muted: &MUTED_ROLE 277914926603829249 + partners: 323426753857191936 + python_community: &PY_COMMUNITY_ROLE 458226413825294336 + sprinters: &SPRINTERS 758422482289426471 + + unverified: 739794855945044069 + verified: 352427296948486144 # @Developers on PyDis + voice_verified: 764802720779337729 + + # Staff + admins: &ADMINS_ROLE 267628507062992896 + core_developers: 587606783669829632 + helpers: &HELPERS_ROLE 267630620367257601 + moderators: &MODS_ROLE 267629731250176001 + owners: &OWNERS_ROLE 267627879762755584 + + # Code Jam + jammers: 737249140966162473 + team_leaders: 737250302834638889 + + moderation_roles: + - *OWNERS_ROLE + - *ADMINS_ROLE + - *MODS_ROLE + + staff_roles: + - *OWNERS_ROLE + - *ADMINS_ROLE + - *MODS_ROLE + - *HELPERS_ROLE webhooks: - talent_pool: 569145364800602132 big_brother: 569133704568373283 - reddit: 635408384794951680 + dev_log: 680501655111729222 + dm_log: 654567640664244225 duck_pond: 637821475327311927 + incidents_archive: 720671599790915702 + python_news: &PYNEWS_WEBHOOK 704381182279942324 + reddit: 635408384794951680 + talent_pool: 569145364800602132 filter: - # What do we filter? - filter_zalgo: false - filter_invites: true - filter_domains: true - watch_rich_embeds: true - watch_words: true - watch_tokens: true + filter_zalgo: false + filter_invites: true + filter_domains: true + filter_everyone_ping: true + watch_regex: true + watch_rich_embeds: true # Notify user on filter? # Notifications are not expected for "watchlist" type filters - notify_user_zalgo: false - notify_user_invites: true - notify_user_domains: false + notify_user_zalgo: false + notify_user_invites: true + notify_user_domains: false + notify_user_everyone_ping: true # Filter configuration - ping_everyone: true # Ping @everyone when we send a mod-alert? - - guild_invite_whitelist: - - 280033776820813825 # Functional Programming - - 267624335836053506 # Python Discord - - 440186186024222721 # Python Discord: ModLog Emojis - - 273944235143593984 # STEM - - 348658686962696195 # RLBot - - 531221516914917387 # Pallets - - 249111029668249601 # Gentoo - - 327254708534116352 # Adafruit - - 544525886180032552 # kennethreitz.org - - 590806733924859943 # Discord Hack Week - - 423249981340778496 # Kivy - - 197038439483310086 # Discord Testers - - 286633898581164032 # Ren'Py - - 349505959032389632 # PyGame - - 438622377094414346 # Pyglet - - 524691714909274162 # Panda3D - - 336642139381301249 # discord.py - - domain_blacklist: - - pornhub.com - - liveleak.com - - word_watchlist: - - goo+ks* - - ky+s+ - - ki+ke+s* - - beaner+s? 
- - coo+ns* - - nig+lets* - - slant-eyes* - - towe?l-?head+s* - - chi*n+k+s* - - spick*s* - - kill* +(?:yo)?urself+ - - jew+s* - - suicide - - rape - - (re+)tar+(d+|t+)(ed)? - - ta+r+d+ - - cunts* - - trann*y - - shemale - - token_watchlist: - - fa+g+s* - - 卐 - - 卍 - - cuck(?!oo+) - - nigg+(?:e*r+|a+h*?|u+h+)s? - - fag+o+t+s* + ping_everyone: true + offensive_msg_delete_days: 7 # How many days before deleting an offensive message? # Censor doesn't apply to these channel_whitelist: - *ADMINS - - *MODLOG + - *MOD_LOG - *MESSAGE_LOG - - *DEVLOG - - *BBLOGS + - *DEV_LOG + - *BB_LOGS - *STAFF_LOUNGE - - *DEVTEST - *TALENT_POOL - *USER_EVENT_A role_whitelist: - - *ADMIN_ROLE - - *MOD_ROLE - - *OWNER_ROLE - - *ROCKSTARS_ROLE + - *ADMINS_ROLE + - *MODS_ROLE + - *OWNERS_ROLE + - *HELPERS_ROLE + - *PY_COMMUNITY_ROLE + - *SPRINTERS keys: site_api: !ENV "BOT_API_KEY" + github: !ENV "GITHUB_API_KEY" urls: @@ -282,28 +334,11 @@ urls: site_staff: &STAFF !JOIN ["staff.", *DOMAIN] site_schema: &SCHEMA "https://" - site_bigbrother_api: !JOIN [*SCHEMA, *API, "/bot/bigbrother"] - site_docs_api: !JOIN [*SCHEMA, *API, "/bot/docs"] - site_superstarify_api: !JOIN [*SCHEMA, *API, "/bot/superstarify"] - site_infractions: !JOIN [*SCHEMA, *API, "/bot/infractions"] - site_infractions_user: !JOIN [*SCHEMA, *API, "/bot/infractions/user/{user_id}"] - site_infractions_type: !JOIN [*SCHEMA, *API, "/bot/infractions/type/{infraction_type}"] - site_infractions_by_id: !JOIN [*SCHEMA, *API, "/bot/infractions/id/{infraction_id}"] - site_infractions_user_type_current: !JOIN [*SCHEMA, *API, "/bot/infractions/user/{user_id}/{infraction_type}/current"] - site_infractions_user_type: !JOIN [*SCHEMA, *API, "/bot/infractions/user/{user_id}/{infraction_type}"] - site_logs_api: !JOIN [*SCHEMA, *API, "/bot/logs"] site_logs_view: !JOIN [*SCHEMA, *STAFF, "/bot/logs"] - site_off_topic_names_api: !JOIN [*SCHEMA, *API, "/bot/off-topic-names"] - site_reminders_api: !JOIN [*SCHEMA, *API, "/bot/reminders"] - site_reminders_user_api: !JOIN [*SCHEMA, *API, "/bot/reminders/user"] - site_settings_api: !JOIN [*SCHEMA, *API, "/bot/settings"] - site_tags_api: !JOIN [*SCHEMA, *API, "/bot/tags"] - site_user_api: !JOIN [*SCHEMA, *API, "/bot/users"] - site_user_complete_api: !JOIN [*SCHEMA, *API, "/bot/users/complete"] paste_service: !JOIN [*SCHEMA, *PASTE, "/{key}"] # Snekbox - snekbox_eval_api: "http://snekbox:8060/eval" + snekbox_eval_api: "http://snekbox.default.svc.cluster.local/eval" # Discord API URLs discord_api: &DISCORD_API "https://discordapp.com/api/v7/" @@ -313,6 +348,7 @@ urls: bot_avatar: "https://raw.githubusercontent.com/discord-python/branding/master/logos/logo_circle/logo_circle.png" github_bot_repo: "https://github.com/python-discord/bot" + anti_spam: # Clean messages that violate a rule. clean_offending: true @@ -331,9 +367,13 @@ anti_spam: interval: 10 max: 7 - burst_shared: - interval: 10 - max: 20 + # Burst shared it (temporarily) disabled to prevent + # the bug that triggers multiple infractions/DMs per + # user. It also tends to catch a lot of innocent users + # now that we're so big. 
+ # burst_shared: + # interval: 10 + # max: 20 chars: interval: 5 @@ -365,36 +405,6 @@ anti_spam: max: 3 -anti_malware: - whitelist: - - '.3gp' - - '.3g2' - - '.avi' - - '.bmp' - - '.gif' - - '.h264' - - '.jpg' - - '.jpeg' - - '.m4v' - - '.mkv' - - '.mov' - - '.mp4' - - '.mpeg' - - '.mpg' - - '.png' - - '.tiff' - - '.wmv' - - '.svg' - - '.psd' # Photoshop - - '.ai' # Illustrator - - '.aep' # After Effects - - '.xcf' # GIMP - - '.mp3' - - '.wav' - - '.ogg' - - '.md' - - reddit: subreddits: - 'r/Python' @@ -402,18 +412,28 @@ reddit: secret: !ENV "REDDIT_SECRET" -wolfram: - # Max requests per day. - user_limit_day: 10 - guild_limit_day: 67 - key: !ENV "WOLFRAM_API_KEY" - - big_brother: log_delay: 15 header_message_limit: 15 +code_block: + # The channels in which code blocks will be detected. They are not subject to a cooldown. + channel_whitelist: + - *BOT_CMD + + # The channels which will be affected by a cooldown. These channels are also whitelisted. + cooldown_channels: + - *PY_DISCUSSION + + # Sending instructions triggers a cooldown on a per-channel basis. + # More instruction messages will not be sent in the same channel until the cooldown has elapsed. + cooldown_seconds: 300 + + # The minimum amount of lines a message or code block must have for instructions to be sent. + minimum_lines: 4 + + free: # Seconds to elapse for a channel # to be considered inactive. @@ -421,17 +441,98 @@ free: cooldown_rate: 1 cooldown_per: 60.0 -mention: - message_timeout: 300 - reset_delay: 5 + +help_channels: + enable: true + + # Minimum interval before allowing a certain user to claim a new help channel + claim_minutes: 15 + + # Roles which are allowed to use the command which makes channels dormant + cmd_whitelist: + - *HELPERS_ROLE + + # Allowed duration of inactivity before making a channel dormant + idle_minutes: 30 + + # Allowed duration of inactivity when channel is empty (due to deleted messages) + # before message making a channel dormant + deleted_idle_minutes: 5 + + # Maximum number of channels to put in the available category + max_available: 2 + + # Maximum number of channels across all 3 categories + # Note Discord has a hard limit of 50 channels per category, so this shouldn't be > 50 + max_total_channels: 32 + + # Prefix for help channel names + name_prefix: 'help-' + + # Notify if more available channels are needed but there are no more dormant ones + notify: true + + # Channel in which to send notifications + notify_channel: *HELPERS + + # Minimum interval between helper notifications + notify_minutes: 15 + + # Mention these roles in notifications + notify_roles: + - *HELPERS_ROLE + redirect_output: delete_invocation: true delete_delay: 15 + duck_pond: - threshold: 5 - custom_emojis: [*DUCKY_YELLOW, *DUCKY_BLURPLE, *DUCKY_CAMO, *DUCKY_DEVIL, *DUCKY_NINJA, *DUCKY_REGAL, *DUCKY_TUBE, *DUCKY_HUNT, *DUCKY_WIZARD, *DUCKY_PARTY, *DUCKY_ANGEL, *DUCKY_MAUL, *DUCKY_SANTA] + threshold: 4 + channel_blacklist: + - *ANNOUNCEMENTS + - *PYNEWS_CHANNEL + - *PYEVENTS_CHANNEL + - *MAILING_LISTS + - *REDDIT_CHANNEL + - *USER_EVENT_A + - *DUCK_POND + - *CHANGE_LOG + - *STAFF_ANNOUNCEMENTS + - *MOD_ANNOUNCEMENTS + - *ADMIN_ANNOUNCEMENTS + + +python_news: + mail_lists: + - 'python-ideas' + - 'python-announce-list' + - 'pypi-announce' + - 'python-dev' + channel: *PYNEWS_CHANNEL + webhook: *PYNEWS_WEBHOOK + + +verification: + unverified_after: 3 # Days after which non-Developers receive the @Unverified role + kicked_after: 30 # Days after which non-Developers get kicked from the guild + reminder_frequency: 28 # 
Hours between @Unverified pings + bot_message_delete_delay: 10 # Seconds before deleting bots response in #verification + + # Number in range [0, 1] determining the percentage of unverified users that are safe + # to be kicked from the guild in one batch, any larger amount will require staff confirmation, + # set this to 0 to require explicit approval for batches of any size + kick_confirmation_threshold: 0.01 # 1% + + +voice_gate: + minimum_days_member: 3 # How many days the user must have been a member for + minimum_messages: 50 # How many messages a user must have to be eligible for voice + bot_message_delete_delay: 10 # Seconds before deleting bot's response in Voice Gate + minimum_activity_blocks: 3 # Number of 10 minute blocks during which a user must have been active + voice_ping_delete_delay: 60 # Seconds before deleting the bot's ping to user in Voice Gate + config: required_keys: ['bot.token'] diff --git a/docker-compose.yml b/docker-compose.yml index 7281c7953..0002d1d56 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -12,8 +12,21 @@ services: POSTGRES_PASSWORD: pysite POSTGRES_USER: pysite + redis: + image: redis:5.0.9 + ports: + - "127.0.0.1:6379:6379" + + snekbox: + image: ghcr.io/python-discord/snekbox:latest + init: true + ipc: none + ports: + - "127.0.0.1:8060:8060" + privileged: true + web: - image: pythondiscord/site:latest + image: ghcr.io/python-discord/site:latest command: ["run", "--debug"] networks: default: @@ -23,10 +36,12 @@ services: - staff.web ports: - "127.0.0.1:8000:8000" + tty: true depends_on: - postgres environment: DATABASE_URL: postgres://pysite:pysite@postgres:5432/pysite + METRICITY_DB_URL: postgres://pysite:pysite@postgres:5432/metricity SECRET_KEY: suitable-for-development-only STATIC_ROOT: /var/www/static @@ -37,8 +52,11 @@ services: volumes: - ./logs:/bot/logs - .:/bot:ro + tty: true depends_on: - web + - redis + - snekbox environment: BOT_TOKEN: ${BOT_TOKEN} BOT_API_KEY: badbot13m0n8f570f942013fc818f234916ca531 diff --git a/tests/README.md b/tests/README.md index be78821bf..4f62edd68 100644 --- a/tests/README.md +++ b/tests/README.md @@ -83,7 +83,7 @@ TagContentConverter should return correct values for valid input. As we are trying to test our "units" of code independently, we want to make sure that we do not rely objects and data generated by "external" code. If we we did, then we wouldn't know if the failure we're observing was caused by the code we are actually trying to test or something external to it. -However, the features that we are trying to test often depend on those objects generated by external pieces of code. It would be difficult to test a bot command without having access to a `Context` instance. Fortunately, there's a solution for that: we use fake objects that act like the true object. We call these fake objects "mocks". +However, the features that we are trying to test often depend on those objects generated by external pieces of code. It would be difficult to test a bot command without having access to a `Context` instance. Fortunately, there's a solution for that: we use fake objects that act like the true object. We call these fake objects "mocks". To create these mock object, we mainly use the [`unittest.mock`](https://docs.python.org/3/library/unittest.mock.html) module. In addition, we have also defined a couple of specialized mock objects that mock specific `discord.py` types (see the section on the below.). 
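The README paragraph above describes faking a `Context` instance with `unittest.mock` so a bot command can be tested in isolation. As a minimal, generic sketch of that idea using only the standard library (Python 3.8+, where `AsyncMock` is built in) — the `greet` coroutine and the `@tester` mention below are invented for illustration and are not part of this repository or its `tests/helpers.py` mocks:

```python
import unittest
from unittest.mock import AsyncMock, MagicMock


async def greet(ctx) -> None:
    """Toy command used only for this sketch: mention the invoking user."""
    await ctx.send(f"Hello, {ctx.author.mention}!")


class GreetTests(unittest.IsolatedAsyncioTestCase):
    async def test_greet_mentions_the_author(self):
        # MagicMock happily fakes chained attribute access (ctx.author.mention),
        # while AsyncMock makes ctx.send awaitable and records how it was called.
        ctx = MagicMock()
        ctx.author.mention = "@tester"
        ctx.send = AsyncMock()

        await greet(ctx)

        ctx.send.assert_awaited_once_with("Hello, @tester!")


if __name__ == "__main__":
    unittest.main()
```

The same pattern scales up to the specialised `discord.py` mocks described below, which add spec checking and sensible defaults on top of these plain `unittest.mock` objects.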
@@ -114,13 +114,13 @@ class BotCogTests(unittest.TestCase): ### Mocking coroutines -By default, the `unittest.mock.Mock` and `unittest.mock.MagicMock` classes cannot mock coroutines, since the `__call__` method they provide is synchronous. In anticipation of the `AsyncMock` that will be [introduced in Python 3.8](https://docs.python.org/3.9/whatsnew/3.8.html#unittest), we have added an `AsyncMock` helper to [`helpers.py`](/tests/helpers.py). Do note that this drop-in replacement only implements an asynchronous `__call__` method, not the additional assertions that will come with the new `AsyncMock` type in Python 3.8. +By default, the `unittest.mock.Mock` and `unittest.mock.MagicMock` classes cannot mock coroutines, since the `__call__` method they provide is synchronous. In anticipation of the `AsyncMock` that will be [introduced in Python 3.8](https://docs.python.org/3.9/whatsnew/3.8.html#unittest), we have added an `AsyncMock` helper to [`helpers.py`](/tests/helpers.py). Do note that this drop-in replacement only implements an asynchronous `__call__` method, not the additional assertions that will come with the new `AsyncMock` type in Python 3.8. ### Special mocks for some `discord.py` types To quote Ned Batchelder, Mock objects are "automatic chameleons". This means that they will happily allow the access to any attribute or method and provide a mocked value in return. One downside to this is that if the code you are testing gets the name of the attribute wrong, your mock object will not complain and the test may still pass. -In order to avoid that, we have defined a number of Mock types in [`helpers.py`](/tests/helpers.py) that follow the specifications of the actual Discord types they are mocking. This means that trying to access an attribute or method on a mocked object that does not exist on the equivalent `discord.py` object will result in an `AttributeError`. In addition, these mocks have some sensible defaults and **pass `isinstance` checks for the types they are mocking**. +In order to avoid that, we have defined a number of Mock types in [`helpers.py`](/tests/helpers.py) that follow the specifications of the actual Discord types they are mocking. This means that trying to access an attribute or method on a mocked object that does not exist on the equivalent `discord.py` object will result in an `AttributeError`. In addition, these mocks have some sensible defaults and **pass `isinstance` checks for the types they are mocking**. These special mocks are added when they are needed, so if you think it would be sensible to add another one, feel free to propose one in your PR. @@ -144,7 +144,7 @@ Finally, there are some considerations to make when writing tests, both for writ ### Test coverage is a starting point -Having test coverage is a good starting point for unit testing: If a part of your code was not covered by a test, we know that we have not tested it properly. The reverse is unfortunately not true: Even if the code we are testing has 100% branch coverage, it does not mean it's fully tested or guaranteed to work. +Having test coverage is a good starting point for unit testing: If a part of your code was not covered by a test, we know that we have not tested it properly. The reverse is unfortunately not true: Even if the code we are testing has 100% branch coverage, it does not mean it's fully tested or guaranteed to work. One problem is that 100% branch coverage may be misleading if we haven't tested our code against all the realistic input it may get in production. 
For instance, take a look at the following `member_information` function and the test we've written for it: @@ -169,7 +169,7 @@ class FunctionsTests(unittest.TestCase): If you were to run this test, not only would the function pass the test, `coverage.py` will also tell us that the test provides 100% branch coverage for the function. Can you spot the bug the test suite did not catch? -The problem here is that we have only tested our function with a member object that had `None` for the `member.joined` attribute. This means that `member.joined.stfptime("%d-%m-%Y")` was never executed during our test, leading to us missing the spelling mistake in `stfptime` (it should be `strftime`). +The problem here is that we have only tested our function with a member object that had `None` for the `member.joined` attribute. This means that `member.joined.stfptime("%d-%m-%Y")` was never executed during our test, leading to us missing the spelling mistake in `stfptime` (it should be `strftime`). Adding another test would not increase the test coverage we have, but it does ensure that we'll notice that this function can fail with realistic data: diff --git a/tests/_autospec.py b/tests/_autospec.py new file mode 100644 index 000000000..ee2fc1973 --- /dev/null +++ b/tests/_autospec.py @@ -0,0 +1,64 @@ +import contextlib +import functools +import unittest.mock +from typing import Callable + + [email protected](unittest.mock._patch.decoration_helper) +def _decoration_helper(self, patched, args, keywargs): + """Skips adding patchings as args if their `dont_pass` attribute is True.""" + # Don't ask what this does. It's just a copy from stdlib, but with the dont_pass check added. + extra_args = [] + with contextlib.ExitStack() as exit_stack: + for patching in patched.patchings: + arg = exit_stack.enter_context(patching) + if not getattr(patching, "dont_pass", False): + # Only add the patching as an arg if dont_pass is False. + if patching.attribute_name is not None: + keywargs.update(arg) + elif patching.new is unittest.mock.DEFAULT: + extra_args.append(arg) + + args += tuple(extra_args) + yield args, keywargs + + [email protected](unittest.mock._patch.copy) +def _copy(self): + """Copy the `dont_pass` attribute along with the standard copy operation.""" + patcher_copy = _copy.original(self) + patcher_copy.dont_pass = getattr(self, "dont_pass", False) + return patcher_copy + + +# Monkey-patch the patcher class :) +_copy.original = unittest.mock._patch.copy +unittest.mock._patch.copy = _copy +unittest.mock._patch.decoration_helper = _decoration_helper + + +def autospec(target, *attributes: str, pass_mocks: bool = True, **patch_kwargs) -> Callable: + """ + Patch multiple `attributes` of a `target` with autospecced mocks and `spec_set` as True. + + If `pass_mocks` is True, pass the autospecced mocks as arguments to the decorated object. + """ + # Caller's kwargs should take priority and overwrite the defaults. + kwargs = dict(spec_set=True, autospec=True) + kwargs.update(patch_kwargs) + + # Import the target if it's a string. + # This is to support both object and string targets like patch.multiple. + if type(target) is str: + target = unittest.mock._importer(target) + + def decorator(func): + for attribute in attributes: + patcher = unittest.mock.patch.object(target, attribute, **kwargs) + if not pass_mocks: + # A custom attribute to keep track of which patchings should be skipped. 
+ patcher.dont_pass = True + func = patcher(func) + return func + return decorator diff --git a/tests/base.py b/tests/base.py index 029a249ed..d99b9ac31 100644 --- a/tests/base.py +++ b/tests/base.py @@ -1,6 +1,12 @@ import logging import unittest from contextlib import contextmanager +from typing import Dict + +import discord +from discord.ext import commands + +from tests import helpers class _CaptureLogHandler(logging.Handler): @@ -16,11 +22,16 @@ class _CaptureLogHandler(logging.Handler): self.records.append(record) -class LoggingTestCase(unittest.TestCase): - """TestCase subclass that adds more logging assertion tools.""" +class LoggingTestsMixin: + """ + A mixin that defines additional test methods for logging behavior. + + This mixin relies on the availability of the `fail` attribute defined by the + test classes included in Python's unittest method to signal test failure. + """ @contextmanager - def assertNotLogs(self, logger=None, level=None, msg=None): + def assertNotLogs(self, logger=None, level=None, msg=None): # noqa: N802 """ Asserts that no logs of `level` and higher were emitted by `logger`. @@ -65,3 +76,30 @@ class LoggingTestCase(unittest.TestCase): standard_message = self._truncateMessage(base_message, record_message) msg = self._formatMessage(msg, standard_message) self.fail(msg) + + +class CommandTestCase(unittest.IsolatedAsyncioTestCase): + """TestCase with additional assertions that are useful for testing Discord commands.""" + + async def assertHasPermissionsCheck( # noqa: N802 + self, + cmd: commands.Command, + permissions: Dict[str, bool], + ) -> None: + """ + Test that `cmd` raises a `MissingPermissions` exception if author lacks `permissions`. + + Every permission in `permissions` is expected to be reported as missing. In other words, do + not include permissions which should not raise an exception along with those which should. + """ + # Invert permission values because it's more intuitive to pass to this assertion the same + # permissions as those given to the check decorator. 
+ permissions = {k: not v for k, v in permissions.items()} + + ctx = helpers.MockContext() + ctx.channel.permissions_for.return_value = discord.Permissions(**permissions) + + with self.assertRaises(commands.MissingPermissions) as cm: + await cmd.can_run(ctx) + + self.assertCountEqual(permissions.keys(), cm.exception.missing_perms) diff --git a/tests/bot/cogs/sync/test_roles.py b/tests/bot/cogs/sync/test_roles.py deleted file mode 100644 index 27ae27639..000000000 --- a/tests/bot/cogs/sync/test_roles.py +++ /dev/null @@ -1,126 +0,0 @@ -import unittest - -from bot.cogs.sync.syncers import Role, get_roles_for_sync - - -class GetRolesForSyncTests(unittest.TestCase): - """Tests constructing the roles to synchronize with the site.""" - - def test_get_roles_for_sync_empty_return_for_equal_roles(self): - """No roles should be synced when no diff is found.""" - api_roles = {Role(id=41, name='name', colour=33, permissions=0x8, position=1)} - guild_roles = {Role(id=41, name='name', colour=33, permissions=0x8, position=1)} - - self.assertEqual( - get_roles_for_sync(guild_roles, api_roles), - (set(), set(), set()) - ) - - def test_get_roles_for_sync_returns_roles_to_update_with_non_id_diff(self): - """Roles to be synced are returned when non-ID attributes differ.""" - api_roles = {Role(id=41, name='old name', colour=35, permissions=0x8, position=1)} - guild_roles = {Role(id=41, name='new name', colour=33, permissions=0x8, position=2)} - - self.assertEqual( - get_roles_for_sync(guild_roles, api_roles), - (set(), guild_roles, set()) - ) - - def test_get_roles_only_returns_roles_that_require_update(self): - """Roles that require an update should be returned as the second tuple element.""" - api_roles = { - Role(id=41, name='old name', colour=33, permissions=0x8, position=1), - Role(id=53, name='other role', colour=55, permissions=0, position=3) - } - guild_roles = { - Role(id=41, name='new name', colour=35, permissions=0x8, position=2), - Role(id=53, name='other role', colour=55, permissions=0, position=3) - } - - self.assertEqual( - get_roles_for_sync(guild_roles, api_roles), - ( - set(), - {Role(id=41, name='new name', colour=35, permissions=0x8, position=2)}, - set(), - ) - ) - - def test_get_roles_returns_new_roles_in_first_tuple_element(self): - """Newly created roles are returned as the first tuple element.""" - api_roles = { - Role(id=41, name='name', colour=35, permissions=0x8, position=1), - } - guild_roles = { - Role(id=41, name='name', colour=35, permissions=0x8, position=1), - Role(id=53, name='other role', colour=55, permissions=0, position=2) - } - - self.assertEqual( - get_roles_for_sync(guild_roles, api_roles), - ( - {Role(id=53, name='other role', colour=55, permissions=0, position=2)}, - set(), - set(), - ) - ) - - def test_get_roles_returns_roles_to_update_and_new_roles(self): - """Newly created and updated roles should be returned together.""" - api_roles = { - Role(id=41, name='old name', colour=35, permissions=0x8, position=1), - } - guild_roles = { - Role(id=41, name='new name', colour=40, permissions=0x16, position=2), - Role(id=53, name='other role', colour=55, permissions=0, position=3) - } - - self.assertEqual( - get_roles_for_sync(guild_roles, api_roles), - ( - {Role(id=53, name='other role', colour=55, permissions=0, position=3)}, - {Role(id=41, name='new name', colour=40, permissions=0x16, position=2)}, - set(), - ) - ) - - def test_get_roles_returns_roles_to_delete(self): - """Roles to be deleted should be returned as the third tuple element.""" - api_roles = { - Role(id=41, 
name='name', colour=35, permissions=0x8, position=1), - Role(id=61, name='to delete', colour=99, permissions=0x9, position=2), - } - guild_roles = { - Role(id=41, name='name', colour=35, permissions=0x8, position=1), - } - - self.assertEqual( - get_roles_for_sync(guild_roles, api_roles), - ( - set(), - set(), - {Role(id=61, name='to delete', colour=99, permissions=0x9, position=2)}, - ) - ) - - def test_get_roles_returns_roles_to_delete_update_and_new_roles(self): - """When roles were added, updated, and removed, all of them are returned properly.""" - api_roles = { - Role(id=41, name='not changed', colour=35, permissions=0x8, position=1), - Role(id=61, name='to delete', colour=99, permissions=0x9, position=2), - Role(id=71, name='to update', colour=99, permissions=0x9, position=3), - } - guild_roles = { - Role(id=41, name='not changed', colour=35, permissions=0x8, position=1), - Role(id=81, name='to create', colour=99, permissions=0x9, position=4), - Role(id=71, name='updated', colour=101, permissions=0x5, position=3), - } - - self.assertEqual( - get_roles_for_sync(guild_roles, api_roles), - ( - {Role(id=81, name='to create', colour=99, permissions=0x9, position=4)}, - {Role(id=71, name='updated', colour=101, permissions=0x5, position=3)}, - {Role(id=61, name='to delete', colour=99, permissions=0x9, position=2)}, - ) - ) diff --git a/tests/bot/cogs/sync/test_users.py b/tests/bot/cogs/sync/test_users.py deleted file mode 100644 index ccaf67490..000000000 --- a/tests/bot/cogs/sync/test_users.py +++ /dev/null @@ -1,84 +0,0 @@ -import unittest - -from bot.cogs.sync.syncers import User, get_users_for_sync - - -def fake_user(**kwargs): - kwargs.setdefault('id', 43) - kwargs.setdefault('name', 'bob the test man') - kwargs.setdefault('discriminator', 1337) - kwargs.setdefault('avatar_hash', None) - kwargs.setdefault('roles', (666,)) - kwargs.setdefault('in_guild', True) - return User(**kwargs) - - -class GetUsersForSyncTests(unittest.TestCase): - """Tests constructing the users to synchronize with the site.""" - - def test_get_users_for_sync_returns_nothing_for_empty_params(self): - """When no users are given, none are returned.""" - self.assertEqual( - get_users_for_sync({}, {}), - (set(), set()) - ) - - def test_get_users_for_sync_returns_nothing_for_equal_users(self): - """When no users are updated, none are returned.""" - api_users = {43: fake_user()} - guild_users = {43: fake_user()} - - self.assertEqual( - get_users_for_sync(guild_users, api_users), - (set(), set()) - ) - - def test_get_users_for_sync_returns_users_to_update_on_non_id_field_diff(self): - """When a non-ID-field differs, the user to update is returned.""" - api_users = {43: fake_user()} - guild_users = {43: fake_user(name='new fancy name')} - - self.assertEqual( - get_users_for_sync(guild_users, api_users), - (set(), {fake_user(name='new fancy name')}) - ) - - def test_get_users_for_sync_returns_users_to_create_with_new_ids_on_guild(self): - """When new users join the guild, they are returned as the first tuple element.""" - api_users = {43: fake_user()} - guild_users = {43: fake_user(), 63: fake_user(id=63)} - - self.assertEqual( - get_users_for_sync(guild_users, api_users), - ({fake_user(id=63)}, set()) - ) - - def test_get_users_for_sync_updates_in_guild_field_on_user_leave(self): - """When a user leaves the guild, the `in_guild` flag is updated to `False`.""" - api_users = {43: fake_user(), 63: fake_user(id=63)} - guild_users = {43: fake_user()} - - self.assertEqual( - get_users_for_sync(guild_users, api_users), - (set(), 
{fake_user(id=63, in_guild=False)}) - ) - - def test_get_users_for_sync_updates_and_creates_users_as_needed(self): - """When one user left and another one was updated, both are returned.""" - api_users = {43: fake_user()} - guild_users = {63: fake_user(id=63)} - - self.assertEqual( - get_users_for_sync(guild_users, api_users), - ({fake_user(id=63)}, {fake_user(in_guild=False)}) - ) - - def test_get_users_for_sync_does_not_duplicate_update_users(self): - """When the API knows a user the guild doesn't, nothing is performed.""" - api_users = {43: fake_user(in_guild=False)} - guild_users = {} - - self.assertEqual( - get_users_for_sync(guild_users, api_users), - (set(), set()) - ) diff --git a/tests/bot/cogs/test_duck_pond.py b/tests/bot/cogs/test_duck_pond.py deleted file mode 100644 index d07b2bce1..000000000 --- a/tests/bot/cogs/test_duck_pond.py +++ /dev/null @@ -1,584 +0,0 @@ -import asyncio -import logging -import typing -import unittest -from unittest.mock import MagicMock, patch - -import discord - -from bot import constants -from bot.cogs import duck_pond -from tests import base -from tests import helpers - -MODULE_PATH = "bot.cogs.duck_pond" - - -class DuckPondTests(base.LoggingTestCase): - """Tests for DuckPond functionality.""" - - @classmethod - def setUpClass(cls): - """Sets up the objects that only have to be initialized once.""" - cls.nonstaff_member = helpers.MockMember(name="Non-staffer") - - cls.staff_role = helpers.MockRole(name="Staff role", id=constants.STAFF_ROLES[0]) - cls.staff_member = helpers.MockMember(name="staffer", roles=[cls.staff_role]) - - cls.checkmark_emoji = "\N{White Heavy Check Mark}" - cls.thumbs_up_emoji = "\N{Thumbs Up Sign}" - cls.unicode_duck_emoji = "\N{Duck}" - cls.duck_pond_emoji = helpers.MockPartialEmoji(id=constants.DuckPond.custom_emojis[0]) - cls.non_duck_custom_emoji = helpers.MockPartialEmoji(id=123) - - def setUp(self): - """Sets up the objects that need to be refreshed before each test.""" - self.bot = helpers.MockBot(user=helpers.MockMember(id=46692)) - self.cog = duck_pond.DuckPond(bot=self.bot) - - def test_duck_pond_correctly_initializes(self): - """`__init__ should set `bot` and `webhook_id` attributes and schedule `fetch_webhook`.""" - bot = helpers.MockBot() - cog = MagicMock() - - duck_pond.DuckPond.__init__(cog, bot) - - self.assertEqual(cog.bot, bot) - self.assertEqual(cog.webhook_id, constants.Webhooks.duck_pond) - bot.loop.create_loop.called_once_with(cog.fetch_webhook()) - - def test_fetch_webhook_succeeds_without_connectivity_issues(self): - """The `fetch_webhook` method waits until `READY` event and sets the `webhook` attribute.""" - self.bot.fetch_webhook.return_value = "dummy webhook" - self.cog.webhook_id = 1 - - asyncio.run(self.cog.fetch_webhook()) - - self.bot.wait_until_ready.assert_called_once() - self.bot.fetch_webhook.assert_called_once_with(1) - self.assertEqual(self.cog.webhook, "dummy webhook") - - def test_fetch_webhook_logs_when_unable_to_fetch_webhook(self): - """The `fetch_webhook` method should log an exception when it fails to fetch the webhook.""" - self.bot.fetch_webhook.side_effect = discord.HTTPException(response=MagicMock(), message="Not found.") - self.cog.webhook_id = 1 - - log = logging.getLogger('bot.cogs.duck_pond') - with self.assertLogs(logger=log, level=logging.ERROR) as log_watcher: - asyncio.run(self.cog.fetch_webhook()) - - self.bot.wait_until_ready.assert_called_once() - self.bot.fetch_webhook.assert_called_once_with(1) - - self.assertEqual(len(log_watcher.records), 1) - - record = 
log_watcher.records[0] - self.assertEqual(record.levelno, logging.ERROR) - - def test_is_staff_returns_correct_values_based_on_instance_passed(self): - """The `is_staff` method should return correct values based on the instance passed.""" - test_cases = ( - (helpers.MockUser(name="User instance"), False), - (helpers.MockMember(name="Member instance without staff role"), False), - (helpers.MockMember(name="Member instance with staff role", roles=[self.staff_role]), True) - ) - - for user, expected_return in test_cases: - actual_return = self.cog.is_staff(user) - with self.subTest(user_type=user.name, expected_return=expected_return, actual_return=actual_return): - self.assertEqual(expected_return, actual_return) - - @helpers.async_test - async def test_has_green_checkmark_correctly_detects_presence_of_green_checkmark_emoji(self): - """The `has_green_checkmark` method should only return `True` if one is present.""" - test_cases = ( - ( - "No reactions", helpers.MockMessage(), False - ), - ( - "No green check mark reactions", - helpers.MockMessage(reactions=[ - helpers.MockReaction(emoji=self.unicode_duck_emoji, users=[self.bot.user]), - helpers.MockReaction(emoji=self.thumbs_up_emoji, users=[self.bot.user]) - ]), - False - ), - ( - "Green check mark reaction, but not from our bot", - helpers.MockMessage(reactions=[ - helpers.MockReaction(emoji=self.unicode_duck_emoji, users=[self.bot.user]), - helpers.MockReaction(emoji=self.checkmark_emoji, users=[self.staff_member]) - ]), - False - ), - ( - "Green check mark reaction, with one from the bot", - helpers.MockMessage(reactions=[ - helpers.MockReaction(emoji=self.unicode_duck_emoji, users=[self.bot.user]), - helpers.MockReaction(emoji=self.checkmark_emoji, users=[self.staff_member, self.bot.user]) - ]), - True - ) - ) - - for description, message, expected_return in test_cases: - actual_return = await self.cog.has_green_checkmark(message) - with self.subTest( - test_case=description, - expected_return=expected_return, - actual_return=actual_return - ): - self.assertEqual(expected_return, actual_return) - - def test_send_webhook_correctly_passes_on_arguments(self): - """The `send_webhook` method should pass the arguments to the webhook correctly.""" - self.cog.webhook = helpers.MockAsyncWebhook() - - content = "fake content" - username = "fake username" - avatar_url = "fake avatar_url" - embed = "fake embed" - - asyncio.run(self.cog.send_webhook(content, username, avatar_url, embed)) - - self.cog.webhook.send.assert_called_once_with( - content=content, - username=username, - avatar_url=avatar_url, - embed=embed - ) - - def test_send_webhook_logs_when_sending_message_fails(self): - """The `send_webhook` method should catch a `discord.HTTPException` and log accordingly.""" - self.cog.webhook = helpers.MockAsyncWebhook() - self.cog.webhook.send.side_effect = discord.HTTPException(response=MagicMock(), message="Something failed.") - - log = logging.getLogger('bot.cogs.duck_pond') - with self.assertLogs(logger=log, level=logging.ERROR) as log_watcher: - asyncio.run(self.cog.send_webhook()) - - self.assertEqual(len(log_watcher.records), 1) - - record = log_watcher.records[0] - self.assertEqual(record.levelno, logging.ERROR) - - def _get_reaction( - self, - emoji: typing.Union[str, helpers.MockEmoji], - staff: int = 0, - nonstaff: int = 0 - ) -> helpers.MockReaction: - staffers = [helpers.MockMember(roles=[self.staff_role]) for _ in range(staff)] - nonstaffers = [helpers.MockMember() for _ in range(nonstaff)] - return helpers.MockReaction(emoji=emoji, 
users=staffers + nonstaffers) - - @helpers.async_test - async def test_count_ducks_correctly_counts_the_number_of_eligible_duck_emojis(self): - """The `count_ducks` method should return the number of unique staffers who gave a duck.""" - test_cases = ( - # Simple test cases - # A message without reactions should return 0 - ( - "No reactions", - helpers.MockMessage(), - 0 - ), - # A message with a non-duck reaction from a non-staffer should return 0 - ( - "Non-duck reaction from non-staffer", - helpers.MockMessage(reactions=[self._get_reaction(emoji=self.thumbs_up_emoji, nonstaff=1)]), - 0 - ), - # A message with a non-duck reaction from a staffer should return 0 - ( - "Non-duck reaction from staffer", - helpers.MockMessage(reactions=[self._get_reaction(emoji=self.non_duck_custom_emoji, staff=1)]), - 0 - ), - # A message with a non-duck reaction from a non-staffer and staffer should return 0 - ( - "Non-duck reaction from staffer + non-staffer", - helpers.MockMessage(reactions=[self._get_reaction(emoji=self.thumbs_up_emoji, staff=1, nonstaff=1)]), - 0 - ), - # A message with a unicode duck reaction from a non-staffer should return 0 - ( - "Unicode Duck Reaction from non-staffer", - helpers.MockMessage(reactions=[self._get_reaction(emoji=self.unicode_duck_emoji, nonstaff=1)]), - 0 - ), - # A message with a unicode duck reaction from a staffer should return 1 - ( - "Unicode Duck Reaction from staffer", - helpers.MockMessage(reactions=[self._get_reaction(emoji=self.unicode_duck_emoji, staff=1)]), - 1 - ), - # A message with a unicode duck reaction from a non-staffer and staffer should return 1 - ( - "Unicode Duck Reaction from staffer + non-staffer", - helpers.MockMessage(reactions=[self._get_reaction(emoji=self.unicode_duck_emoji, staff=1, nonstaff=1)]), - 1 - ), - # A message with a duckpond duck reaction from a non-staffer should return 0 - ( - "Duckpond Duck Reaction from non-staffer", - helpers.MockMessage(reactions=[self._get_reaction(emoji=self.duck_pond_emoji, nonstaff=1)]), - 0 - ), - # A message with a duckpond duck reaction from a staffer should return 1 - ( - "Duckpond Duck Reaction from staffer", - helpers.MockMessage(reactions=[self._get_reaction(emoji=self.duck_pond_emoji, staff=1)]), - 1 - ), - # A message with a duckpond duck reaction from a non-staffer and staffer should return 1 - ( - "Duckpond Duck Reaction from staffer + non-staffer", - helpers.MockMessage(reactions=[self._get_reaction(emoji=self.duck_pond_emoji, staff=1, nonstaff=1)]), - 1 - ), - - # Complex test cases - # A message with duckpond duck reactions from 3 staffers and 2 non-staffers returns 3 - ( - "Duckpond Duck Reaction from 3 staffers + 2 non-staffers", - helpers.MockMessage(reactions=[self._get_reaction(emoji=self.duck_pond_emoji, staff=3, nonstaff=2)]), - 3 - ), - # A staffer with multiple duck reactions only counts once - ( - "Two different duck reactions from the same staffer", - helpers.MockMessage( - reactions=[ - helpers.MockReaction(emoji=self.duck_pond_emoji, users=[self.staff_member]), - helpers.MockReaction(emoji=self.unicode_duck_emoji, users=[self.staff_member]), - ] - ), - 1 - ), - # A non-string emoji does not count (to test the `isinstance(reaction.emoji, str)` elif) - ( - "Reaction with non-Emoji/str emoij from 3 staffers + 2 non-staffers", - helpers.MockMessage(reactions=[self._get_reaction(emoji=100, staff=3, nonstaff=2)]), - 0 - ), - # We correctly sum when multiple reactions are provided. 
- ( - "Duckpond Duck Reaction from 3 staffers + 2 non-staffers", - helpers.MockMessage( - reactions=[ - self._get_reaction(emoji=self.duck_pond_emoji, staff=3, nonstaff=2), - self._get_reaction(emoji=self.unicode_duck_emoji, staff=4, nonstaff=9), - ] - ), - 3 + 4 - ), - ) - - for description, message, expected_count in test_cases: - actual_count = await self.cog.count_ducks(message) - with self.subTest(test_case=description, expected_count=expected_count, actual_count=actual_count): - self.assertEqual(expected_count, actual_count) - - @helpers.async_test - async def test_relay_message_correctly_relays_content_and_attachments(self): - """The `relay_message` method should correctly relay message content and attachments.""" - send_webhook_path = f"{MODULE_PATH}.DuckPond.send_webhook" - send_attachments_path = f"{MODULE_PATH}.send_attachments" - - self.cog.webhook = helpers.MockAsyncWebhook() - - test_values = ( - (helpers.MockMessage(clean_content="", attachments=[]), False, False), - (helpers.MockMessage(clean_content="message", attachments=[]), True, False), - (helpers.MockMessage(clean_content="", attachments=["attachment"]), False, True), - (helpers.MockMessage(clean_content="message", attachments=["attachment"]), True, True), - ) - - for message, expect_webhook_call, expect_attachment_call in test_values: - with patch(send_webhook_path, new_callable=helpers.AsyncMock) as send_webhook: - with patch(send_attachments_path, new_callable=helpers.AsyncMock) as send_attachments: - with self.subTest(clean_content=message.clean_content, attachments=message.attachments): - await self.cog.relay_message(message) - - self.assertEqual(expect_webhook_call, send_webhook.called) - self.assertEqual(expect_attachment_call, send_attachments.called) - - message.add_reaction.assert_called_once_with(self.checkmark_emoji) - - @patch(f"{MODULE_PATH}.send_attachments", new_callable=helpers.AsyncMock) - @helpers.async_test - async def test_relay_message_handles_irretrievable_attachment_exceptions(self, send_attachments): - """The `relay_message` method should handle irretrievable attachments.""" - message = helpers.MockMessage(clean_content="message", attachments=["attachment"]) - side_effects = (discord.errors.Forbidden(MagicMock(), ""), discord.errors.NotFound(MagicMock(), "")) - - self.cog.webhook = helpers.MockAsyncWebhook() - log = logging.getLogger("bot.cogs.duck_pond") - - for side_effect in side_effects: - send_attachments.side_effect = side_effect - with patch(f"{MODULE_PATH}.DuckPond.send_webhook", new_callable=helpers.AsyncMock) as send_webhook: - with self.subTest(side_effect=type(side_effect).__name__): - with self.assertNotLogs(logger=log, level=logging.ERROR): - await self.cog.relay_message(message) - - self.assertEqual(send_webhook.call_count, 2) - - @patch(f"{MODULE_PATH}.DuckPond.send_webhook", new_callable=helpers.AsyncMock) - @patch(f"{MODULE_PATH}.send_attachments", new_callable=helpers.AsyncMock) - @helpers.async_test - async def test_relay_message_handles_attachment_http_error(self, send_attachments, send_webhook): - """The `relay_message` method should handle irretrievable attachments.""" - message = helpers.MockMessage(clean_content="message", attachments=["attachment"]) - - self.cog.webhook = helpers.MockAsyncWebhook() - log = logging.getLogger("bot.cogs.duck_pond") - - side_effect = discord.HTTPException(MagicMock(), "") - send_attachments.side_effect = side_effect - with self.subTest(side_effect=type(side_effect).__name__): - with self.assertLogs(logger=log, level=logging.ERROR) as 
log_watcher: - await self.cog.relay_message(message) - - send_webhook.assert_called_once_with( - content=message.clean_content, - username=message.author.display_name, - avatar_url=message.author.avatar_url - ) - - self.assertEqual(len(log_watcher.records), 1) - - record = log_watcher.records[0] - self.assertEqual(record.levelno, logging.ERROR) - - def _mock_payload(self, label: str, is_custom_emoji: bool, id_: int, emoji_name: str): - """Creates a mock `on_raw_reaction_add` payload with the specified emoji data.""" - payload = MagicMock(name=label) - payload.emoji.is_custom_emoji.return_value = is_custom_emoji - payload.emoji.id = id_ - payload.emoji.name = emoji_name - return payload - - @helpers.async_test - async def test_payload_has_duckpond_emoji_correctly_detects_relevant_emojis(self): - """The `on_raw_reaction_add` event handler should ignore irrelevant emojis.""" - test_values = ( - # Custom Emojis - ( - self._mock_payload( - label="Custom Duckpond Emoji", - is_custom_emoji=True, - id_=constants.DuckPond.custom_emojis[0], - emoji_name="" - ), - True - ), - ( - self._mock_payload( - label="Custom Non-Duckpond Emoji", - is_custom_emoji=True, - id_=123, - emoji_name="" - ), - False - ), - # Unicode Emojis - ( - self._mock_payload( - label="Unicode Duck Emoji", - is_custom_emoji=False, - id_=1, - emoji_name=self.unicode_duck_emoji - ), - True - ), - ( - self._mock_payload( - label="Unicode Non-Duck Emoji", - is_custom_emoji=False, - id_=1, - emoji_name=self.thumbs_up_emoji - ), - False - ), - ) - - for payload, expected_return in test_values: - actual_return = self.cog._payload_has_duckpond_emoji(payload) - with self.subTest(case=payload._mock_name, expected_return=expected_return, actual_return=actual_return): - self.assertEqual(expected_return, actual_return) - - @patch(f"{MODULE_PATH}.discord.utils.get") - @patch(f"{MODULE_PATH}.DuckPond._payload_has_duckpond_emoji", new=MagicMock(return_value=False)) - def test_on_raw_reaction_add_returns_early_with_payload_without_duck_emoji(self, utils_get): - """The `on_raw_reaction_add` method should return early if the payload does not contain a duck emoji.""" - self.assertIsNone(asyncio.run(self.cog.on_raw_reaction_add(payload=MagicMock()))) - - # Ensure we've returned before making an unnecessary API call in the lines of code after the emoji check - utils_get.assert_not_called() - - def _raw_reaction_mocks(self, channel_id, message_id, user_id): - """Sets up mocks for tests of the `on_raw_reaction_add` event listener.""" - channel = helpers.MockTextChannel(id=channel_id) - self.bot.get_all_channels.return_value = (channel,) - - message = helpers.MockMessage(id=message_id) - - channel.fetch_message.return_value = message - - member = helpers.MockMember(id=user_id, roles=[self.staff_role]) - message.guild.members = (member,) - - payload = MagicMock(channel_id=channel_id, message_id=message_id, user_id=user_id) - - return channel, message, member, payload - - @helpers.async_test - async def test_on_raw_reaction_add_returns_for_bot_and_non_staff_members(self): - """The `on_raw_reaction_add` event handler should return for bot users or non-staff members.""" - channel_id = 1234 - message_id = 2345 - user_id = 3456 - - channel, message, _, payload = self._raw_reaction_mocks(channel_id, message_id, user_id) - - test_cases = ( - ("non-staff member", helpers.MockMember(id=user_id)), - ("bot staff member", helpers.MockMember(id=user_id, roles=[self.staff_role], bot=True)), - ) - - payload.emoji = self.duck_pond_emoji - - for description, member in 
test_cases: - message.guild.members = (member, ) - with self.subTest(test_case=description), patch(f"{MODULE_PATH}.DuckPond.has_green_checkmark") as checkmark: - checkmark.side_effect = AssertionError( - "Expected method to return before calling `self.has_green_checkmark`." - ) - self.assertIsNone(await self.cog.on_raw_reaction_add(payload)) - - # Check that we did make it past the payload checks - channel.fetch_message.assert_called_once() - channel.fetch_message.reset_mock() - - @patch(f"{MODULE_PATH}.DuckPond.is_staff") - @patch(f"{MODULE_PATH}.DuckPond.count_ducks", new_callable=helpers.AsyncMock) - def test_on_raw_reaction_add_returns_on_message_with_green_checkmark_placed_by_bot(self, count_ducks, is_staff): - """The `on_raw_reaction_add` event should return when the message has a green check mark placed by the bot.""" - channel_id = 31415926535 - message_id = 27182818284 - user_id = 16180339887 - - channel, message, member, payload = self._raw_reaction_mocks(channel_id, message_id, user_id) - - payload.emoji = helpers.MockPartialEmoji(name=self.unicode_duck_emoji) - payload.emoji.is_custom_emoji.return_value = False - - message.reactions = [helpers.MockReaction(emoji=self.checkmark_emoji, users=[self.bot.user])] - - is_staff.return_value = True - count_ducks.side_effect = AssertionError("Expected method to return before calling `self.count_ducks`") - - self.assertIsNone(asyncio.run(self.cog.on_raw_reaction_add(payload))) - - # Assert that we've made it past `self.is_staff` - is_staff.assert_called_once() - - @helpers.async_test - async def test_on_raw_reaction_add_does_not_relay_below_duck_threshold(self): - """The `on_raw_reaction_add` listener should not relay messages or attachments below the duck threshold.""" - test_cases = ( - (constants.DuckPond.threshold - 1, False), - (constants.DuckPond.threshold, True), - (constants.DuckPond.threshold + 1, True), - ) - - channel, message, member, payload = self._raw_reaction_mocks(channel_id=3, message_id=4, user_id=5) - - payload.emoji = self.duck_pond_emoji - - for duck_count, should_relay in test_cases: - with patch(f"{MODULE_PATH}.DuckPond.relay_message", new_callable=helpers.AsyncMock) as relay_message: - with patch(f"{MODULE_PATH}.DuckPond.count_ducks", new_callable=helpers.AsyncMock) as count_ducks: - count_ducks.return_value = duck_count - with self.subTest(duck_count=duck_count, should_relay=should_relay): - await self.cog.on_raw_reaction_add(payload) - - # Confirm that we've made it past counting - count_ducks.assert_called_once() - - # Did we relay a message? 
- has_relayed = relay_message.called - self.assertEqual(has_relayed, should_relay) - - if should_relay: - relay_message.assert_called_once_with(message) - - @helpers.async_test - async def test_on_raw_reaction_remove_prevents_removal_of_green_checkmark_depending_on_the_duck_count(self): - """The `on_raw_reaction_remove` listener prevents removal of the check mark on messages with enough ducks.""" - checkmark = helpers.MockPartialEmoji(name=self.checkmark_emoji) - - message = helpers.MockMessage(id=1234) - - channel = helpers.MockTextChannel(id=98765) - channel.fetch_message.return_value = message - - self.bot.get_all_channels.return_value = (channel, ) - - payload = MagicMock(channel_id=channel.id, message_id=message.id, emoji=checkmark) - - test_cases = ( - (constants.DuckPond.threshold - 1, False), - (constants.DuckPond.threshold, True), - (constants.DuckPond.threshold + 1, True), - ) - for duck_count, should_re_add_checkmark in test_cases: - with patch(f"{MODULE_PATH}.DuckPond.count_ducks", new_callable=helpers.AsyncMock) as count_ducks: - count_ducks.return_value = duck_count - with self.subTest(duck_count=duck_count, should_re_add_checkmark=should_re_add_checkmark): - await self.cog.on_raw_reaction_remove(payload) - - # Check if we fetched the message - channel.fetch_message.assert_called_once_with(message.id) - - # Check if we actually counted the number of ducks - count_ducks.assert_called_once_with(message) - - has_re_added_checkmark = message.add_reaction.called - self.assertEqual(should_re_add_checkmark, has_re_added_checkmark) - - if should_re_add_checkmark: - message.add_reaction.assert_called_once_with(self.checkmark_emoji) - message.add_reaction.reset_mock() - - # reset mocks - channel.fetch_message.reset_mock() - message.reset_mock() - - def test_on_raw_reaction_remove_ignores_removal_of_non_checkmark_reactions(self): - """The `on_raw_reaction_remove` listener should ignore the removal of non-check mark emojis.""" - channel = helpers.MockTextChannel(id=98765) - - channel.fetch_message.side_effect = AssertionError( - "Expected method to return before calling `channel.fetch_message`" - ) - - self.bot.get_all_channels.return_value = (channel, ) - - payload = MagicMock(emoji=helpers.MockPartialEmoji(name=self.thumbs_up_emoji), channel_id=channel.id) - - self.assertIsNone(asyncio.run(self.cog.on_raw_reaction_remove(payload))) - - channel.fetch_message.assert_not_called() - - -class DuckPondSetupTests(unittest.TestCase): - """Tests setup of the `DuckPond` cog.""" - - def test_setup(self): - """Setup of the extension should call add_cog.""" - bot = helpers.MockBot() - duck_pond.setup(bot) - bot.add_cog.assert_called_once() diff --git a/tests/bot/cogs/test_token_remover.py b/tests/bot/cogs/test_token_remover.py deleted file mode 100644 index a54b839d7..000000000 --- a/tests/bot/cogs/test_token_remover.py +++ /dev/null @@ -1,131 +0,0 @@ -import asyncio -import logging -import unittest -from unittest.mock import MagicMock - -from discord import Colour - -from bot.cogs.token_remover import ( - DELETION_MESSAGE_TEMPLATE, - TokenRemover, - setup as setup_cog, -) -from bot.constants import Channels, Colours, Event, Icons -from tests.helpers import AsyncMock, MockBot, MockMessage - - -class TokenRemoverTests(unittest.TestCase): - """Tests the `TokenRemover` cog.""" - - def setUp(self): - """Adds the cog, a bot, and a message to the instance for usage in tests.""" - self.bot = MockBot() - self.bot.get_cog.return_value = MagicMock() - self.bot.get_cog.return_value.send_log_message = 
AsyncMock() - self.cog = TokenRemover(bot=self.bot) - - self.msg = MockMessage(id=555, content='') - self.msg.author.__str__ = MagicMock() - self.msg.author.__str__.return_value = 'lemon' - self.msg.author.bot = False - self.msg.author.avatar_url_as.return_value = 'picture-lemon.png' - self.msg.author.id = 42 - self.msg.author.mention = '@lemon' - self.msg.channel.mention = "#lemonade-stand" - - def test_is_valid_user_id_is_true_for_numeric_content(self): - """A string decoding to numeric characters is a valid user ID.""" - # MTIz = base64(123) - self.assertTrue(TokenRemover.is_valid_user_id('MTIz')) - - def test_is_valid_user_id_is_false_for_alphabetic_content(self): - """A string decoding to alphabetic characters is not a valid user ID.""" - # YWJj = base64(abc) - self.assertFalse(TokenRemover.is_valid_user_id('YWJj')) - - def test_is_valid_timestamp_is_true_for_valid_timestamps(self): - """A string decoding to a valid timestamp should be recognized as such.""" - self.assertTrue(TokenRemover.is_valid_timestamp('DN9r_A')) - - def test_is_valid_timestamp_is_false_for_invalid_values(self): - """A string not decoding to a valid timestamp should not be recognized as such.""" - # MTIz = base64(123) - self.assertFalse(TokenRemover.is_valid_timestamp('MTIz')) - - def test_mod_log_property(self): - """The `mod_log` property should ask the bot to return the `ModLog` cog.""" - self.bot.get_cog.return_value = 'lemon' - self.assertEqual(self.cog.mod_log, self.bot.get_cog.return_value) - self.bot.get_cog.assert_called_once_with('ModLog') - - def test_ignores_bot_messages(self): - """When the message event handler is called with a bot message, nothing is done.""" - self.msg.author.bot = True - coroutine = self.cog.on_message(self.msg) - self.assertIsNone(asyncio.run(coroutine)) - - def test_ignores_messages_without_tokens(self): - """Messages without anything looking like a token are ignored.""" - for content in ('', 'lemon wins'): - with self.subTest(content=content): - self.msg.content = content - coroutine = self.cog.on_message(self.msg) - self.assertIsNone(asyncio.run(coroutine)) - - def test_ignores_messages_with_invalid_tokens(self): - """Messages with values that are invalid tokens are ignored.""" - for content in ('foo.bar.baz', 'x.y.'): - with self.subTest(content=content): - self.msg.content = content - coroutine = self.cog.on_message(self.msg) - self.assertIsNone(asyncio.run(coroutine)) - - def test_censors_valid_tokens(self): - """Valid tokens are censored.""" - cases = ( - # (content, censored_token) - ('MTIz.DN9R_A.xyz', 'MTIz.DN9R_A.xxx'), - ) - - for content, censored_token in cases: - with self.subTest(content=content, censored_token=censored_token): - self.msg.content = content - coroutine = self.cog.on_message(self.msg) - with self.assertLogs(logger='bot.cogs.token_remover', level=logging.DEBUG) as cm: - self.assertIsNone(asyncio.run(coroutine)) # no return value - - [line] = cm.output - log_message = ( - "Censored a seemingly valid token sent by " - "lemon (`42`) in #lemonade-stand, " - f"token was `{censored_token}`" - ) - self.assertIn(log_message, line) - - self.msg.delete.assert_called_once_with() - self.msg.channel.send.assert_called_once_with( - DELETION_MESSAGE_TEMPLATE.format(mention='@lemon') - ) - self.bot.get_cog.assert_called_with('ModLog') - self.msg.author.avatar_url_as.assert_called_once_with(static_format='png') - - mod_log = self.bot.get_cog.return_value - mod_log.ignore.assert_called_once_with(Event.message_delete, self.msg.id) - 
mod_log.send_log_message.assert_called_once_with( - icon_url=Icons.token_removed, - colour=Colour(Colours.soft_red), - title="Token removed!", - text=log_message, - thumbnail='picture-lemon.png', - channel_id=Channels.mod_alerts - ) - - -class TokenRemoverSetupTests(unittest.TestCase): - """Tests setup of the `TokenRemover` cog.""" - - def test_setup(self): - """Setup of the extension should call add_cog.""" - bot = MockBot() - setup_cog(bot) - bot.add_cog.assert_called_once() diff --git a/tests/bot/exts/__init__.py b/tests/bot/exts/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/tests/bot/exts/__init__.py diff --git a/tests/bot/exts/backend/__init__.py b/tests/bot/exts/backend/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/tests/bot/exts/backend/__init__.py diff --git a/tests/bot/exts/backend/sync/__init__.py b/tests/bot/exts/backend/sync/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/tests/bot/exts/backend/sync/__init__.py diff --git a/tests/bot/exts/backend/sync/test_base.py b/tests/bot/exts/backend/sync/test_base.py new file mode 100644 index 000000000..3ad9db9c3 --- /dev/null +++ b/tests/bot/exts/backend/sync/test_base.py @@ -0,0 +1,66 @@ +import unittest +from unittest import mock + + +from bot.api import ResponseCodeError +from bot.exts.backend.sync._syncers import Syncer +from tests import helpers + + +class TestSyncer(Syncer): + """Syncer subclass with mocks for abstract methods for testing purposes.""" + + name = "test" + _get_diff = mock.AsyncMock() + _sync = mock.AsyncMock() + + +class SyncerSyncTests(unittest.IsolatedAsyncioTestCase): + """Tests for main function orchestrating the sync.""" + + def setUp(self): + patcher = mock.patch("bot.instance", new=helpers.MockBot(user=helpers.MockMember(bot=True))) + self.bot = patcher.start() + self.addCleanup(patcher.stop) + + self.guild = helpers.MockGuild() + + TestSyncer._get_diff.reset_mock(return_value=True, side_effect=True) + TestSyncer._sync.reset_mock(return_value=True, side_effect=True) + + # Make sure `_get_diff` returns a MagicMock, not an AsyncMock + TestSyncer._get_diff.return_value = mock.MagicMock() + + async def test_sync_message_edited(self): + """The message should be edited if one was sent, even if the sync has an API error.""" + subtests = ( + (None, None, False), + (helpers.MockMessage(), None, True), + (helpers.MockMessage(), ResponseCodeError(mock.MagicMock()), True), + ) + + for message, side_effect, should_edit in subtests: + with self.subTest(message=message, side_effect=side_effect, should_edit=should_edit): + TestSyncer._sync.side_effect = side_effect + ctx = helpers.MockContext() + ctx.send.return_value = message + + await TestSyncer.sync(self.guild, ctx) + + if should_edit: + message.edit.assert_called_once() + self.assertIn("content", message.edit.call_args[1]) + + async def test_sync_message_sent(self): + """If ctx is given, a new message should be sent.""" + subtests = ( + (None, None), + (helpers.MockContext(), helpers.MockMessage()), + ) + + for ctx, message in subtests: + with self.subTest(ctx=ctx, message=message): + await TestSyncer.sync(self.guild, ctx) + + if ctx is not None: + ctx.send.assert_called_once() diff --git a/tests/bot/exts/backend/sync/test_cog.py b/tests/bot/exts/backend/sync/test_cog.py new file mode 100644 index 000000000..22a07313e --- /dev/null +++ b/tests/bot/exts/backend/sync/test_cog.py @@ -0,0 +1,414 @@ +import unittest +from unittest import mock + +import discord + +from bot 
import constants +from bot.api import ResponseCodeError +from bot.exts.backend import sync +from bot.exts.backend.sync._cog import Sync +from bot.exts.backend.sync._syncers import Syncer +from tests import helpers +from tests.base import CommandTestCase + + +class SyncExtensionTests(unittest.IsolatedAsyncioTestCase): + """Tests for the sync extension.""" + + @staticmethod + def test_extension_setup(): + """The Sync cog should be added.""" + bot = helpers.MockBot() + sync.setup(bot) + bot.add_cog.assert_called_once() + + +class SyncCogTestCase(unittest.IsolatedAsyncioTestCase): + """Base class for Sync cog tests. Sets up patches for syncers.""" + + def setUp(self): + self.bot = helpers.MockBot() + + role_syncer_patcher = mock.patch( + "bot.exts.backend.sync._syncers.RoleSyncer", + autospec=Syncer, + spec_set=True + ) + user_syncer_patcher = mock.patch( + "bot.exts.backend.sync._syncers.UserSyncer", + autospec=Syncer, + spec_set=True + ) + + self.RoleSyncer = role_syncer_patcher.start() + self.UserSyncer = user_syncer_patcher.start() + + self.addCleanup(role_syncer_patcher.stop) + self.addCleanup(user_syncer_patcher.stop) + + self.cog = Sync(self.bot) + + @staticmethod + def response_error(status: int) -> ResponseCodeError: + """Fixture to return a ResponseCodeError with the given status code.""" + response = mock.MagicMock() + response.status = status + + return ResponseCodeError(response) + + +class SyncCogTests(SyncCogTestCase): + """Tests for the Sync cog.""" + + @mock.patch.object(Sync, "sync_guild", new_callable=mock.MagicMock) + def test_sync_cog_init(self, sync_guild): + """Should instantiate syncers and run a sync for the guild.""" + # Reset because a Sync cog was already instantiated in setUp. + self.RoleSyncer.reset_mock() + self.UserSyncer.reset_mock() + self.bot.loop.create_task = mock.MagicMock() + + mock_sync_guild_coro = mock.MagicMock() + sync_guild.return_value = mock_sync_guild_coro + + Sync(self.bot) + + sync_guild.assert_called_once_with() + self.bot.loop.create_task.assert_called_once_with(mock_sync_guild_coro) + + async def test_sync_cog_sync_guild(self): + """Roles and users should be synced only if a guild is successfully retrieved.""" + for guild in (helpers.MockGuild(), None): + with self.subTest(guild=guild): + self.bot.reset_mock() + self.RoleSyncer.reset_mock() + self.UserSyncer.reset_mock() + + self.bot.get_guild = mock.MagicMock(return_value=guild) + + await self.cog.sync_guild() + + self.bot.wait_until_guild_available.assert_called_once() + self.bot.get_guild.assert_called_once_with(constants.Guild.id) + + if guild is None: + self.RoleSyncer.sync.assert_not_called() + self.UserSyncer.sync.assert_not_called() + else: + self.RoleSyncer.sync.assert_called_once_with(guild) + self.UserSyncer.sync.assert_called_once_with(guild) + + async def patch_user_helper(self, side_effect: BaseException) -> None: + """Helper to set a side effect for bot.api_client.patch and then assert it is called.""" + self.bot.api_client.patch.reset_mock(side_effect=True) + self.bot.api_client.patch.side_effect = side_effect + + user_id, updated_information = 5, {"key": 123} + await self.cog.patch_user(user_id, updated_information) + + self.bot.api_client.patch.assert_called_once_with( + f"bot/users/{user_id}", + json=updated_information, + ) + + async def test_sync_cog_patch_user(self): + """A PATCH request should be sent and 404 errors ignored.""" + for side_effect in (None, self.response_error(404)): + with self.subTest(side_effect=side_effect): + await 
self.patch_user_helper(side_effect) + + async def test_sync_cog_patch_user_non_404(self): + """A PATCH request should be sent and the error raised if it's not a 404.""" + with self.assertRaises(ResponseCodeError): + await self.patch_user_helper(self.response_error(500)) + + +class SyncCogListenerTests(SyncCogTestCase): + """Tests for the listeners of the Sync cog.""" + + def setUp(self): + super().setUp() + self.cog.patch_user = mock.AsyncMock(spec_set=self.cog.patch_user) + + self.guild_id_patcher = mock.patch("bot.exts.backend.sync._cog.constants.Guild.id", 5) + self.guild_id = self.guild_id_patcher.start() + + self.guild = helpers.MockGuild(id=self.guild_id) + self.other_guild = helpers.MockGuild(id=0) + + def tearDown(self): + self.guild_id_patcher.stop() + + async def test_sync_cog_on_guild_role_create(self): + """A POST request should be sent with the new role's data.""" + self.assertTrue(self.cog.on_guild_role_create.__cog_listener__) + + role_data = { + "colour": 49, + "id": 777, + "name": "rolename", + "permissions": 8, + "position": 23, + } + role = helpers.MockRole(**role_data, guild=self.guild) + await self.cog.on_guild_role_create(role) + + self.bot.api_client.post.assert_called_once_with("bot/roles", json=role_data) + + async def test_sync_cog_on_guild_role_create_ignores_guilds(self): + """Events from other guilds should be ignored.""" + role = helpers.MockRole(guild=self.other_guild) + await self.cog.on_guild_role_create(role) + self.bot.api_client.post.assert_not_awaited() + + async def test_sync_cog_on_guild_role_delete(self): + """A DELETE request should be sent.""" + self.assertTrue(self.cog.on_guild_role_delete.__cog_listener__) + + role = helpers.MockRole(id=99, guild=self.guild) + await self.cog.on_guild_role_delete(role) + + self.bot.api_client.delete.assert_called_once_with("bot/roles/99") + + async def test_sync_cog_on_guild_role_delete_ignores_guilds(self): + """Events from other guilds should be ignored.""" + role = helpers.MockRole(guild=self.other_guild) + await self.cog.on_guild_role_delete(role) + self.bot.api_client.delete.assert_not_awaited() + + async def test_sync_cog_on_guild_role_update(self): + """A PUT request should be sent if the colour, name, permissions, or position changes.""" + self.assertTrue(self.cog.on_guild_role_update.__cog_listener__) + + role_data = { + "colour": 49, + "id": 777, + "name": "rolename", + "permissions": 8, + "position": 23, + } + subtests = ( + (True, ("colour", "name", "permissions", "position")), + (False, ("hoist", "mentionable")), + ) + + for should_put, attributes in subtests: + for attribute in attributes: + with self.subTest(should_put=should_put, changed_attribute=attribute): + self.bot.api_client.put.reset_mock() + + after_role_data = role_data.copy() + after_role_data[attribute] = 876 + + before_role = helpers.MockRole(**role_data, guild=self.guild) + after_role = helpers.MockRole(**after_role_data, guild=self.guild) + + await self.cog.on_guild_role_update(before_role, after_role) + + if should_put: + self.bot.api_client.put.assert_called_once_with( + f"bot/roles/{after_role.id}", + json=after_role_data + ) + else: + self.bot.api_client.put.assert_not_called() + + async def test_sync_cog_on_guild_role_update_ignores_guilds(self): + """Events from other guilds should be ignored.""" + role = helpers.MockRole(guild=self.other_guild) + await self.cog.on_guild_role_update(role, role) + self.bot.api_client.put.assert_not_awaited() + + async def test_sync_cog_on_member_remove(self): + """Member should be patched to set 
in_guild as False.""" + self.assertTrue(self.cog.on_member_remove.__cog_listener__) + + member = helpers.MockMember(guild=self.guild) + await self.cog.on_member_remove(member) + + self.cog.patch_user.assert_called_once_with( + member.id, + json={"in_guild": False} + ) + + async def test_sync_cog_on_member_remove_ignores_guilds(self): + """Events from other guilds should be ignored.""" + member = helpers.MockMember(guild=self.other_guild) + await self.cog.on_member_remove(member) + self.cog.patch_user.assert_not_awaited() + + async def test_sync_cog_on_member_update_roles(self): + """Members should be patched if their roles have changed.""" + self.assertTrue(self.cog.on_member_update.__cog_listener__) + + # Roles are intentionally unsorted. + before_roles = [helpers.MockRole(id=12), helpers.MockRole(id=30), helpers.MockRole(id=20)] + before_member = helpers.MockMember(roles=before_roles, guild=self.guild) + after_member = helpers.MockMember(roles=before_roles[1:], guild=self.guild) + + await self.cog.on_member_update(before_member, after_member) + + data = {"roles": sorted(role.id for role in after_member.roles)} + self.cog.patch_user.assert_called_once_with(after_member.id, json=data) + + async def test_sync_cog_on_member_update_other(self): + """Members should not be patched if other attributes have changed.""" + self.assertTrue(self.cog.on_member_update.__cog_listener__) + + subtests = ( + ("activities", discord.Game("Pong"), discord.Game("Frogger")), + ("nick", "old nick", "new nick"), + ("status", discord.Status.online, discord.Status.offline), + ) + + for attribute, old_value, new_value in subtests: + with self.subTest(attribute=attribute): + self.cog.patch_user.reset_mock() + + before_member = helpers.MockMember(**{attribute: old_value}, guild=self.guild) + after_member = helpers.MockMember(**{attribute: new_value}, guild=self.guild) + + await self.cog.on_member_update(before_member, after_member) + + self.cog.patch_user.assert_not_called() + + async def test_sync_cog_on_member_update_ignores_guilds(self): + """Events from other guilds should be ignored.""" + member = helpers.MockMember(guild=self.other_guild) + await self.cog.on_member_update(member, member) + self.cog.patch_user.assert_not_awaited() + + async def test_sync_cog_on_user_update(self): + """A user should be patched only if the name, discriminator, or avatar changes.""" + self.assertTrue(self.cog.on_user_update.__cog_listener__) + + before_data = { + "name": "old name", + "discriminator": "1234", + "bot": False, + } + + subtests = ( + (True, "name", "name", "new name", "new name"), + (True, "discriminator", "discriminator", "8765", 8765), + (False, "bot", "bot", True, True), + ) + + for should_patch, attribute, api_field, value, api_value in subtests: + with self.subTest(attribute=attribute): + self.cog.patch_user.reset_mock() + + after_data = before_data.copy() + after_data[attribute] = value + before_user = helpers.MockUser(**before_data) + after_user = helpers.MockUser(**after_data) + + await self.cog.on_user_update(before_user, after_user) + + if should_patch: + self.cog.patch_user.assert_called_once() + + # Don't care if *all* keys are present; only the changed one is required + call_args = self.cog.patch_user.call_args + self.assertEqual(call_args.args[0], after_user.id) + self.assertIn("json", call_args.kwargs) + + self.assertIn("ignore_404", call_args.kwargs) + self.assertTrue(call_args.kwargs["ignore_404"]) + + json = call_args.kwargs["json"] + self.assertIn(api_field, json) + self.assertEqual(json[api_field], 
api_value) + else: + self.cog.patch_user.assert_not_called() + + async def on_member_join_helper(self, side_effect: Exception) -> dict: + """ + Helper to set `side_effect` for on_member_join and assert a PUT request was sent. + + The request data for the mock member is returned. All exceptions will be re-raised. + """ + member = helpers.MockMember( + discriminator="1234", + roles=[helpers.MockRole(id=22), helpers.MockRole(id=12)], + guild=self.guild, + ) + + data = { + "discriminator": int(member.discriminator), + "id": member.id, + "in_guild": True, + "name": member.name, + "roles": sorted(role.id for role in member.roles) + } + + self.bot.api_client.put.reset_mock(side_effect=True) + self.bot.api_client.put.side_effect = side_effect + + try: + await self.cog.on_member_join(member) + except Exception: + raise + finally: + self.bot.api_client.put.assert_called_once_with( + f"bot/users/{member.id}", + json=data + ) + + return data + + async def test_sync_cog_on_member_join(self): + """Should PUT user's data or POST it if the user doesn't exist.""" + for side_effect in (None, self.response_error(404)): + with self.subTest(side_effect=side_effect): + self.bot.api_client.post.reset_mock() + data = await self.on_member_join_helper(side_effect) + + if side_effect: + self.bot.api_client.post.assert_called_once_with("bot/users", json=data) + else: + self.bot.api_client.post.assert_not_called() + + async def test_sync_cog_on_member_join_non_404(self): + """ResponseCodeError should be re-raised if status code isn't a 404.""" + with self.assertRaises(ResponseCodeError): + await self.on_member_join_helper(self.response_error(500)) + + self.bot.api_client.post.assert_not_called() + + async def test_sync_cog_on_member_join_ignores_guilds(self): + """Events from other guilds should be ignored.""" + member = helpers.MockMember(guild=self.other_guild) + await self.cog.on_member_join(member) + self.bot.api_client.post.assert_not_awaited() + self.bot.api_client.put.assert_not_awaited() + + +class SyncCogCommandTests(SyncCogTestCase, CommandTestCase): + """Tests for the commands in the Sync cog.""" + + async def test_sync_roles_command(self): + """sync() should be called on the RoleSyncer.""" + ctx = helpers.MockContext() + await self.cog.sync_roles_command(self.cog, ctx) + + self.RoleSyncer.sync.assert_called_once_with(ctx.guild, ctx) + + async def test_sync_users_command(self): + """sync() should be called on the UserSyncer.""" + ctx = helpers.MockContext() + await self.cog.sync_users_command(self.cog, ctx) + + self.UserSyncer.sync.assert_called_once_with(ctx.guild, ctx) + + async def test_commands_require_admin(self): + """The sync commands should only run if the author has the administrator permission.""" + cmds = ( + self.cog.sync_group, + self.cog.sync_roles_command, + self.cog.sync_users_command, + ) + + for cmd in cmds: + with self.subTest(cmd=cmd): + await self.assertHasPermissionsCheck(cmd, {"administrator": True}) diff --git a/tests/bot/exts/backend/sync/test_roles.py b/tests/bot/exts/backend/sync/test_roles.py new file mode 100644 index 000000000..541074336 --- /dev/null +++ b/tests/bot/exts/backend/sync/test_roles.py @@ -0,0 +1,159 @@ +import unittest +from unittest import mock + +import discord + +from bot.exts.backend.sync._syncers import RoleSyncer, _Diff, _Role +from tests import helpers + + +def fake_role(**kwargs): + """Fixture to return a dictionary representing a role with default values set.""" + kwargs.setdefault("id", 9) + kwargs.setdefault("name", "fake role") + 
kwargs.setdefault("colour", 7) + kwargs.setdefault("permissions", 0) + kwargs.setdefault("position", 55) + + return kwargs + + +class RoleSyncerDiffTests(unittest.IsolatedAsyncioTestCase): + """Tests for determining differences between roles in the DB and roles in the Guild cache.""" + + def setUp(self): + patcher = mock.patch("bot.instance", new=helpers.MockBot()) + self.bot = patcher.start() + self.addCleanup(patcher.stop) + + @staticmethod + def get_guild(*roles): + """Fixture to return a guild object with the given roles.""" + guild = helpers.MockGuild() + guild.roles = [] + + for role in roles: + mock_role = helpers.MockRole(**role) + mock_role.colour = discord.Colour(role["colour"]) + mock_role.permissions = discord.Permissions(role["permissions"]) + guild.roles.append(mock_role) + + return guild + + async def test_empty_diff_for_identical_roles(self): + """No differences should be found if the roles in the guild and DB are identical.""" + self.bot.api_client.get.return_value = [fake_role()] + guild = self.get_guild(fake_role()) + + actual_diff = await RoleSyncer._get_diff(guild) + expected_diff = (set(), set(), set()) + + self.assertEqual(actual_diff, expected_diff) + + async def test_diff_for_updated_roles(self): + """Only updated roles should be added to the 'updated' set of the diff.""" + updated_role = fake_role(id=41, name="new") + + self.bot.api_client.get.return_value = [fake_role(id=41, name="old"), fake_role()] + guild = self.get_guild(updated_role, fake_role()) + + actual_diff = await RoleSyncer._get_diff(guild) + expected_diff = (set(), {_Role(**updated_role)}, set()) + + self.assertEqual(actual_diff, expected_diff) + + async def test_diff_for_new_roles(self): + """Only new roles should be added to the 'created' set of the diff.""" + new_role = fake_role(id=41, name="new") + + self.bot.api_client.get.return_value = [fake_role()] + guild = self.get_guild(fake_role(), new_role) + + actual_diff = await RoleSyncer._get_diff(guild) + expected_diff = ({_Role(**new_role)}, set(), set()) + + self.assertEqual(actual_diff, expected_diff) + + async def test_diff_for_deleted_roles(self): + """Only deleted roles should be added to the 'deleted' set of the diff.""" + deleted_role = fake_role(id=61, name="deleted") + + self.bot.api_client.get.return_value = [fake_role(), deleted_role] + guild = self.get_guild(fake_role()) + + actual_diff = await RoleSyncer._get_diff(guild) + expected_diff = (set(), set(), {_Role(**deleted_role)}) + + self.assertEqual(actual_diff, expected_diff) + + async def test_diff_for_new_updated_and_deleted_roles(self): + """When roles are added, updated, and removed, all of them are returned properly.""" + new = fake_role(id=41, name="new") + updated = fake_role(id=71, name="updated") + deleted = fake_role(id=61, name="deleted") + + self.bot.api_client.get.return_value = [ + fake_role(), + fake_role(id=71, name="updated name"), + deleted, + ] + guild = self.get_guild(fake_role(), new, updated) + + actual_diff = await RoleSyncer._get_diff(guild) + expected_diff = ({_Role(**new)}, {_Role(**updated)}, {_Role(**deleted)}) + + self.assertEqual(actual_diff, expected_diff) + + +class RoleSyncerSyncTests(unittest.IsolatedAsyncioTestCase): + """Tests for the API requests that sync roles.""" + + def setUp(self): + patcher = mock.patch("bot.instance", new=helpers.MockBot()) + self.bot = patcher.start() + self.addCleanup(patcher.stop) + + async def test_sync_created_roles(self): + """Only POST requests should be made with the correct payload.""" + roles = [fake_role(id=111), 
fake_role(id=222)] + + role_tuples = {_Role(**role) for role in roles} + diff = _Diff(role_tuples, set(), set()) + await RoleSyncer._sync(diff) + + calls = [mock.call("bot/roles", json=role) for role in roles] + self.bot.api_client.post.assert_has_calls(calls, any_order=True) + self.assertEqual(self.bot.api_client.post.call_count, len(roles)) + + self.bot.api_client.put.assert_not_called() + self.bot.api_client.delete.assert_not_called() + + async def test_sync_updated_roles(self): + """Only PUT requests should be made with the correct payload.""" + roles = [fake_role(id=111), fake_role(id=222)] + + role_tuples = {_Role(**role) for role in roles} + diff = _Diff(set(), role_tuples, set()) + await RoleSyncer._sync(diff) + + calls = [mock.call(f"bot/roles/{role['id']}", json=role) for role in roles] + self.bot.api_client.put.assert_has_calls(calls, any_order=True) + self.assertEqual(self.bot.api_client.put.call_count, len(roles)) + + self.bot.api_client.post.assert_not_called() + self.bot.api_client.delete.assert_not_called() + + async def test_sync_deleted_roles(self): + """Only DELETE requests should be made with the correct payload.""" + roles = [fake_role(id=111), fake_role(id=222)] + + role_tuples = {_Role(**role) for role in roles} + diff = _Diff(set(), set(), role_tuples) + await RoleSyncer._sync(diff) + + calls = [mock.call(f"bot/roles/{role['id']}") for role in roles] + self.bot.api_client.delete.assert_has_calls(calls, any_order=True) + self.assertEqual(self.bot.api_client.delete.call_count, len(roles)) + + self.bot.api_client.post.assert_not_called() + self.bot.api_client.put.assert_not_called() diff --git a/tests/bot/exts/backend/sync/test_users.py b/tests/bot/exts/backend/sync/test_users.py new file mode 100644 index 000000000..61673e1bb --- /dev/null +++ b/tests/bot/exts/backend/sync/test_users.py @@ -0,0 +1,217 @@ +import unittest +from unittest import mock + +from bot.exts.backend.sync._syncers import UserSyncer, _Diff +from tests import helpers + + +def fake_user(**kwargs): + """Fixture to return a dictionary representing a user with default values set.""" + kwargs.setdefault("id", 43) + kwargs.setdefault("name", "bob the test man") + kwargs.setdefault("discriminator", 1337) + kwargs.setdefault("roles", [666]) + kwargs.setdefault("in_guild", True) + + return kwargs + + +class UserSyncerDiffTests(unittest.IsolatedAsyncioTestCase): + """Tests for determining differences between users in the DB and users in the Guild cache.""" + + def setUp(self): + patcher = mock.patch("bot.instance", new=helpers.MockBot()) + self.bot = patcher.start() + self.addCleanup(patcher.stop) + + @staticmethod + def get_guild(*members): + """Fixture to return a guild object with the given members.""" + guild = helpers.MockGuild() + guild.members = [] + + for member in members: + member = member.copy() + del member["in_guild"] + + mock_member = helpers.MockMember(**member) + mock_member.roles = [helpers.MockRole(id=role_id) for role_id in member["roles"]] + + guild.members.append(mock_member) + + return guild + + @staticmethod + def get_mock_member(member: dict): + member = member.copy() + del member["in_guild"] + mock_member = helpers.MockMember(**member) + mock_member.roles = [helpers.MockRole(id=role_id) for role_id in member["roles"]] + return mock_member + + async def test_empty_diff_for_no_users(self): + """When no users are given, an empty diff should be returned.""" + self.bot.api_client.get.return_value = { + "count": 3, + "next_page_no": None, + "previous_page_no": None, + "results": [] + } + 
guild = self.get_guild() + + actual_diff = await UserSyncer._get_diff(guild) + expected_diff = ([], [], None) + + self.assertEqual(actual_diff, expected_diff) + + async def test_empty_diff_for_identical_users(self): + """No differences should be found if the users in the guild and DB are identical.""" + self.bot.api_client.get.return_value = { + "count": 3, + "next_page_no": None, + "previous_page_no": None, + "results": [fake_user()] + } + guild = self.get_guild(fake_user()) + + guild.get_member.return_value = self.get_mock_member(fake_user()) + actual_diff = await UserSyncer._get_diff(guild) + expected_diff = ([], [], None) + + self.assertEqual(actual_diff, expected_diff) + + async def test_diff_for_updated_users(self): + """Only updated users should be added to the 'updated' set of the diff.""" + updated_user = fake_user(id=99, name="new") + + self.bot.api_client.get.return_value = { + "count": 3, + "next_page_no": None, + "previous_page_no": None, + "results": [fake_user(id=99, name="old"), fake_user()] + } + guild = self.get_guild(updated_user, fake_user()) + guild.get_member.side_effect = [ + self.get_mock_member(updated_user), + self.get_mock_member(fake_user()) + ] + + actual_diff = await UserSyncer._get_diff(guild) + expected_diff = ([], [{"id": 99, "name": "new"}], None) + + self.assertEqual(actual_diff, expected_diff) + + async def test_diff_for_new_users(self): + """Only new users should be added to the 'created' list of the diff.""" + new_user = fake_user(id=99, name="new") + + self.bot.api_client.get.return_value = { + "count": 3, + "next_page_no": None, + "previous_page_no": None, + "results": [fake_user()] + } + guild = self.get_guild(fake_user(), new_user) + guild.get_member.side_effect = [ + self.get_mock_member(fake_user()), + self.get_mock_member(new_user) + ] + actual_diff = await UserSyncer._get_diff(guild) + expected_diff = ([new_user], [], None) + + self.assertEqual(actual_diff, expected_diff) + + async def test_diff_sets_in_guild_false_for_leaving_users(self): + """When a user leaves the guild, the `in_guild` flag is updated to `False`.""" + self.bot.api_client.get.return_value = { + "count": 3, + "next_page_no": None, + "previous_page_no": None, + "results": [fake_user(), fake_user(id=63)] + } + guild = self.get_guild(fake_user()) + guild.get_member.side_effect = [ + self.get_mock_member(fake_user()), + None + ] + + actual_diff = await UserSyncer._get_diff(guild) + expected_diff = ([], [{"id": 63, "in_guild": False}], None) + + self.assertEqual(actual_diff, expected_diff) + + async def test_diff_for_new_updated_and_leaving_users(self): + """When users are added, updated, and removed, all of them are returned properly.""" + new_user = fake_user(id=99, name="new") + + updated_user = fake_user(id=55, name="updated") + + self.bot.api_client.get.return_value = { + "count": 3, + "next_page_no": None, + "previous_page_no": None, + "results": [fake_user(), fake_user(id=55), fake_user(id=63)] + } + guild = self.get_guild(fake_user(), new_user, updated_user) + guild.get_member.side_effect = [ + self.get_mock_member(fake_user()), + self.get_mock_member(updated_user), + None + ] + + actual_diff = await UserSyncer._get_diff(guild) + expected_diff = ([new_user], [{"id": 55, "name": "updated"}, {"id": 63, "in_guild": False}], None) + + self.assertEqual(actual_diff, expected_diff) + + async def test_empty_diff_for_db_users_not_in_guild(self): + """When the DB knows a user, but the guild doesn't, no difference is found.""" + self.bot.api_client.get.return_value = { + "count": 3, + 
"next_page_no": None, + "previous_page_no": None, + "results": [fake_user(), fake_user(id=63, in_guild=False)] + } + guild = self.get_guild(fake_user()) + guild.get_member.side_effect = [ + self.get_mock_member(fake_user()), + None + ] + + actual_diff = await UserSyncer._get_diff(guild) + expected_diff = ([], [], None) + + self.assertEqual(actual_diff, expected_diff) + + +class UserSyncerSyncTests(unittest.IsolatedAsyncioTestCase): + """Tests for the API requests that sync users.""" + + def setUp(self): + patcher = mock.patch("bot.instance", new=helpers.MockBot()) + self.bot = patcher.start() + self.addCleanup(patcher.stop) + + async def test_sync_created_users(self): + """Only POST requests should be made with the correct payload.""" + users = [fake_user(id=111), fake_user(id=222)] + + diff = _Diff(users, [], None) + await UserSyncer._sync(diff) + + self.bot.api_client.post.assert_called_once_with("bot/users", json=diff.created) + + self.bot.api_client.put.assert_not_called() + self.bot.api_client.delete.assert_not_called() + + async def test_sync_updated_users(self): + """Only PUT requests should be made with the correct payload.""" + users = [fake_user(id=111), fake_user(id=222)] + + diff = _Diff([], users, None) + await UserSyncer._sync(diff) + + self.bot.api_client.patch.assert_called_once_with("bot/users/bulk_patch", json=diff.updated) + + self.bot.api_client.post.assert_not_called() + self.bot.api_client.delete.assert_not_called() diff --git a/tests/bot/exts/backend/test_logging.py b/tests/bot/exts/backend/test_logging.py new file mode 100644 index 000000000..466f207d9 --- /dev/null +++ b/tests/bot/exts/backend/test_logging.py @@ -0,0 +1,32 @@ +import unittest +from unittest.mock import patch + +from bot import constants +from bot.exts.backend.logging import Logging +from tests.helpers import MockBot, MockTextChannel + + +class LoggingTests(unittest.IsolatedAsyncioTestCase): + """Test cases for connected login.""" + + def setUp(self): + self.bot = MockBot() + self.cog = Logging(self.bot) + self.dev_log = MockTextChannel(id=1234, name="dev-log") + + @patch("bot.exts.backend.logging.DEBUG_MODE", False) + async def test_debug_mode_false(self): + """Should send connected message to dev-log.""" + self.bot.get_channel.return_value = self.dev_log + + await self.cog.startup_greeting() + self.bot.wait_until_guild_available.assert_awaited_once_with() + self.bot.get_channel.assert_called_once_with(constants.Channels.dev_log) + self.dev_log.send.assert_awaited_once() + + @patch("bot.exts.backend.logging.DEBUG_MODE", True) + async def test_debug_mode_true(self): + """Should not send anything to dev-log.""" + await self.cog.startup_greeting() + self.bot.wait_until_guild_available.assert_awaited_once_with() + self.bot.get_channel.assert_not_called() diff --git a/tests/bot/exts/filters/__init__.py b/tests/bot/exts/filters/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/tests/bot/exts/filters/__init__.py diff --git a/tests/bot/exts/filters/test_antimalware.py b/tests/bot/exts/filters/test_antimalware.py new file mode 100644 index 000000000..3393c6cdc --- /dev/null +++ b/tests/bot/exts/filters/test_antimalware.py @@ -0,0 +1,187 @@ +import unittest +from unittest.mock import AsyncMock, Mock + +from discord import NotFound + +from bot.constants import Channels, STAFF_ROLES +from bot.exts.filters import antimalware +from tests.helpers import MockAttachment, MockBot, MockMessage, MockRole + + +class AntiMalwareCogTests(unittest.IsolatedAsyncioTestCase): + """Test the 
AntiMalware cog.""" + + def setUp(self): + """Sets up fresh objects for each test.""" + self.bot = MockBot() + self.bot.filter_list_cache = { + "FILE_FORMAT.True": { + ".first": {}, + ".second": {}, + ".third": {}, + } + } + self.cog = antimalware.AntiMalware(self.bot) + self.message = MockMessage() + self.message.webhook_id = None + self.message.author.bot = None + self.whitelist = [".first", ".second", ".third"] + + async def test_message_with_allowed_attachment(self): + """Messages with allowed extensions should not be deleted""" + attachment = MockAttachment(filename="python.first") + self.message.attachments = [attachment] + + await self.cog.on_message(self.message) + self.message.delete.assert_not_called() + + async def test_message_without_attachment(self): + """Messages without attachments should result in no action.""" + await self.cog.on_message(self.message) + self.message.delete.assert_not_called() + + async def test_direct_message_with_attachment(self): + """Direct messages should have no action taken.""" + attachment = MockAttachment(filename="python.disallowed") + self.message.attachments = [attachment] + self.message.guild = None + + await self.cog.on_message(self.message) + + self.message.delete.assert_not_called() + + async def test_webhook_message_with_illegal_extension(self): + """A webhook message containing an illegal extension should be ignored.""" + attachment = MockAttachment(filename="python.disallowed") + self.message.webhook_id = 697140105563078727 + self.message.attachments = [attachment] + + await self.cog.on_message(self.message) + + self.message.delete.assert_not_called() + + async def test_bot_message_with_illegal_extension(self): + """A bot message containing an illegal extension should be ignored.""" + attachment = MockAttachment(filename="python.disallowed") + self.message.author.bot = 409107086526644234 + self.message.attachments = [attachment] + + await self.cog.on_message(self.message) + + self.message.delete.assert_not_called() + + async def test_message_with_illegal_extension_gets_deleted(self): + """A message containing an illegal extension should send an embed.""" + attachment = MockAttachment(filename="python.disallowed") + self.message.attachments = [attachment] + + await self.cog.on_message(self.message) + + self.message.delete.assert_called_once() + + async def test_message_send_by_staff(self): + """A message send by a member of staff should be ignored.""" + staff_role = MockRole(id=STAFF_ROLES[0]) + self.message.author.roles.append(staff_role) + attachment = MockAttachment(filename="python.disallowed") + self.message.attachments = [attachment] + + await self.cog.on_message(self.message) + + self.message.delete.assert_not_called() + + async def test_python_file_redirect_embed_description(self): + """A message containing a .py file should result in an embed redirecting the user to our paste site""" + attachment = MockAttachment(filename="python.py") + self.message.attachments = [attachment] + self.message.channel.send = AsyncMock() + + await self.cog.on_message(self.message) + self.message.channel.send.assert_called_once() + args, kwargs = self.message.channel.send.call_args + embed = kwargs.pop("embed") + + self.assertEqual(embed.description, antimalware.PY_EMBED_DESCRIPTION) + + async def test_txt_file_redirect_embed_description(self): + """A message containing a .txt file should result in the correct embed.""" + attachment = MockAttachment(filename="python.txt") + self.message.attachments = [attachment] + self.message.channel.send = 
AsyncMock() + antimalware.TXT_EMBED_DESCRIPTION = Mock() + antimalware.TXT_EMBED_DESCRIPTION.format.return_value = "test" + + await self.cog.on_message(self.message) + self.message.channel.send.assert_called_once() + args, kwargs = self.message.channel.send.call_args + embed = kwargs.pop("embed") + cmd_channel = self.bot.get_channel(Channels.bot_commands) + + self.assertEqual(embed.description, antimalware.TXT_EMBED_DESCRIPTION.format.return_value) + antimalware.TXT_EMBED_DESCRIPTION.format.assert_called_with(cmd_channel_mention=cmd_channel.mention) + + async def test_other_disallowed_extension_embed_description(self): + """Test the description for a non .py/.txt disallowed extension.""" + attachment = MockAttachment(filename="python.disallowed") + self.message.attachments = [attachment] + self.message.channel.send = AsyncMock() + antimalware.DISALLOWED_EMBED_DESCRIPTION = Mock() + antimalware.DISALLOWED_EMBED_DESCRIPTION.format.return_value = "test" + + await self.cog.on_message(self.message) + self.message.channel.send.assert_called_once() + args, kwargs = self.message.channel.send.call_args + embed = kwargs.pop("embed") + meta_channel = self.bot.get_channel(Channels.meta) + + self.assertEqual(embed.description, antimalware.DISALLOWED_EMBED_DESCRIPTION.format.return_value) + antimalware.DISALLOWED_EMBED_DESCRIPTION.format.assert_called_with( + joined_whitelist=", ".join(self.whitelist), + blocked_extensions_str=".disallowed", + meta_channel_mention=meta_channel.mention + ) + + async def test_removing_deleted_message_logs(self): + """Removing an already deleted message logs the correct message""" + attachment = MockAttachment(filename="python.disallowed") + self.message.attachments = [attachment] + self.message.delete = AsyncMock(side_effect=NotFound(response=Mock(status=""), message="")) + + with self.assertLogs(logger=antimalware.log, level="INFO"): + await self.cog.on_message(self.message) + self.message.delete.assert_called_once() + + async def test_message_with_illegal_attachment_logs(self): + """Deleting a message with an illegal attachment should result in a log.""" + attachment = MockAttachment(filename="python.disallowed") + self.message.attachments = [attachment] + + with self.assertLogs(logger=antimalware.log, level="INFO"): + await self.cog.on_message(self.message) + + async def test_get_disallowed_extensions(self): + """The return value should include all non-whitelisted extensions.""" + test_values = ( + ([], []), + (self.whitelist, []), + ([".first"], []), + ([".first", ".disallowed"], [".disallowed"]), + ([".disallowed"], [".disallowed"]), + ([".disallowed", ".illegal"], [".disallowed", ".illegal"]), + ) + + for extensions, expected_disallowed_extensions in test_values: + with self.subTest(extensions=extensions, expected_disallowed_extensions=expected_disallowed_extensions): + self.message.attachments = [MockAttachment(filename=f"filename{extension}") for extension in extensions] + disallowed_extensions = self.cog._get_disallowed_extensions(self.message) + self.assertCountEqual(disallowed_extensions, expected_disallowed_extensions) + + +class AntiMalwareSetupTests(unittest.TestCase): + """Tests setup of the `AntiMalware` cog.""" + + def test_setup(self): + """Setup of the extension should call add_cog.""" + bot = MockBot() + antimalware.setup(bot) + bot.add_cog.assert_called_once() diff --git a/tests/bot/cogs/test_antispam.py b/tests/bot/exts/filters/test_antispam.py index ce5472c71..6a0e4fded 100644 --- a/tests/bot/cogs/test_antispam.py +++ 
b/tests/bot/exts/filters/test_antispam.py @@ -1,6 +1,6 @@ import unittest -from bot.cogs import antispam +from bot.exts.filters import antispam class AntispamConfigurationValidationTests(unittest.TestCase): diff --git a/tests/bot/cogs/test_security.py b/tests/bot/exts/filters/test_security.py index 9d1a62f7e..c0c3baa42 100644 --- a/tests/bot/cogs/test_security.py +++ b/tests/bot/exts/filters/test_security.py @@ -3,7 +3,7 @@ from unittest.mock import MagicMock from discord.ext.commands import NoPrivateMessage -from bot.cogs import security +from bot.exts.filters import security from tests.helpers import MockBot, MockContext diff --git a/tests/bot/exts/filters/test_token_remover.py b/tests/bot/exts/filters/test_token_remover.py new file mode 100644 index 000000000..f99cc3370 --- /dev/null +++ b/tests/bot/exts/filters/test_token_remover.py @@ -0,0 +1,408 @@ +import unittest +from re import Match +from unittest import mock +from unittest.mock import MagicMock + +from discord import Colour, NotFound + +from bot import constants +from bot.exts.filters import token_remover +from bot.exts.filters.token_remover import Token, TokenRemover +from bot.exts.moderation.modlog import ModLog +from bot.utils.messages import format_user +from tests.helpers import MockBot, MockMessage, autospec + + +class TokenRemoverTests(unittest.IsolatedAsyncioTestCase): + """Tests the `TokenRemover` cog.""" + + def setUp(self): + """Adds the cog, a bot, and a message to the instance for usage in tests.""" + self.bot = MockBot() + self.cog = TokenRemover(bot=self.bot) + + self.msg = MockMessage(id=555, content="hello world") + self.msg.channel.mention = "#lemonade-stand" + self.msg.guild.get_member.return_value.bot = False + self.msg.guild.get_member.return_value.__str__.return_value = "Woody" + self.msg.author.__str__ = MagicMock(return_value=self.msg.author.name) + self.msg.author.avatar_url_as.return_value = "picture-lemon.png" + + def test_extract_user_id_valid(self): + """Should consider user IDs valid if they decode into an integer ID.""" + id_pairs = ( + ("NDcyMjY1OTQzMDYyNDEzMzMy", 472265943062413332), + ("NDc1MDczNjI5Mzk5NTQ3OTA0", 475073629399547904), + ("NDY3MjIzMjMwNjUwNzc3NjQx", 467223230650777641), + ) + + for token_id, user_id in id_pairs: + with self.subTest(token_id=token_id): + result = TokenRemover.extract_user_id(token_id) + self.assertEqual(result, user_id) + + def test_extract_user_id_invalid(self): + """Should consider non-digit and non-ASCII IDs invalid.""" + ids = ( + ("SGVsbG8gd29ybGQ", "non-digit ASCII"), + ("0J_RgNC40LLQtdGCINC80LjRgA", "cyrillic text"), + ("4pO14p6L4p6C4pG34p264pGl8J-EiOKSj-KCieKBsA", "Unicode digits"), + ("4oaA4oaB4oWh4oWi4Lyz4Lyq4Lyr4LG9", "Unicode numerals"), + ("8J2fjvCdn5nwnZ-k8J2fr_Cdn7rgravvvJngr6c", "Unicode decimals"), + ("{hello}[world]&(bye!)", "ASCII invalid Base64"), + ("Þíß-ï§-ňøẗ-våłìÐ", "Unicode invalid Base64"), + ) + + for user_id, msg in ids: + with self.subTest(msg=msg): + result = TokenRemover.extract_user_id(user_id) + self.assertIsNone(result) + + def test_is_valid_timestamp_valid(self): + """Should consider timestamps valid if they're greater than the Discord epoch.""" + timestamps = ( + "XsyRkw", + "Xrim9Q", + "XsyR-w", + "XsySD_", + "Dn9r_A", + ) + + for timestamp in timestamps: + with self.subTest(timestamp=timestamp): + result = TokenRemover.is_valid_timestamp(timestamp) + self.assertTrue(result) + + def test_is_valid_timestamp_invalid(self): + """Should consider timestamps invalid if they're before Discord epoch or can't be parsed.""" + timestamps = 
( + ("B4Yffw", "DISCORD_EPOCH - TOKEN_EPOCH - 1"), + ("ew", "123"), + ("AoIKgA", "42076800"), + ("{hello}[world]&(bye!)", "ASCII invalid Base64"), + ("Þíß-ï§-ňøẗ-våłìÐ", "Unicode invalid Base64"), + ) + + for timestamp, msg in timestamps: + with self.subTest(msg=msg): + result = TokenRemover.is_valid_timestamp(timestamp) + self.assertFalse(result) + + def test_is_valid_hmac_valid(self): + """Should consider an HMAC valid if it has at least 3 unique characters.""" + valid_hmacs = ( + "VXmErH7j511turNpfURmb0rVNm8", + "Ysnu2wacjaKs7qnoo46S8Dm2us8", + "sJf6omBPORBPju3WJEIAcwW9Zds", + "s45jqDV_Iisn-symw0yDRrk_jf4", + ) + + for hmac in valid_hmacs: + with self.subTest(msg=hmac): + result = TokenRemover.is_maybe_valid_hmac(hmac) + self.assertTrue(result) + + def test_is_invalid_hmac_invalid(self): + """Should consider an HMAC invalid if has fewer than 3 unique characters.""" + invalid_hmacs = ( + ("xxxxxxxxxxxxxxxxxx", "Single character"), + ("XxXxXxXxXxXxXxXxXx", "Single character alternating case"), + ("ASFasfASFasfASFASsf", "Three characters alternating-case"), + ("asdasdasdasdasdasdasd", "Three characters one case"), + ) + + for hmac, msg in invalid_hmacs: + with self.subTest(msg=msg): + result = TokenRemover.is_maybe_valid_hmac(hmac) + self.assertFalse(result) + + def test_mod_log_property(self): + """The `mod_log` property should ask the bot to return the `ModLog` cog.""" + self.bot.get_cog.return_value = 'lemon' + self.assertEqual(self.cog.mod_log, self.bot.get_cog.return_value) + self.bot.get_cog.assert_called_once_with('ModLog') + + async def test_on_message_edit_uses_on_message(self): + """The edit listener should delegate handling of the message to the normal listener.""" + self.cog.on_message = mock.create_autospec(self.cog.on_message, spec_set=True) + + await self.cog.on_message_edit(MockMessage(), self.msg) + self.cog.on_message.assert_awaited_once_with(self.msg) + + @autospec(TokenRemover, "find_token_in_message", "take_action") + async def test_on_message_takes_action(self, find_token_in_message, take_action): + """Should take action if a valid token is found when a message is sent.""" + cog = TokenRemover(self.bot) + found_token = "foobar" + find_token_in_message.return_value = found_token + + await cog.on_message(self.msg) + + find_token_in_message.assert_called_once_with(self.msg) + take_action.assert_awaited_once_with(cog, self.msg, found_token) + + @autospec(TokenRemover, "find_token_in_message", "take_action") + async def test_on_message_skips_missing_token(self, find_token_in_message, take_action): + """Shouldn't take action if a valid token isn't found when a message is sent.""" + cog = TokenRemover(self.bot) + find_token_in_message.return_value = False + + await cog.on_message(self.msg) + + find_token_in_message.assert_called_once_with(self.msg) + take_action.assert_not_awaited() + + @autospec(TokenRemover, "find_token_in_message") + async def test_on_message_ignores_dms_bots(self, find_token_in_message): + """Shouldn't parse a message if it is a DM or authored by a bot.""" + cog = TokenRemover(self.bot) + dm_msg = MockMessage(guild=None) + bot_msg = MockMessage(author=MagicMock(bot=True)) + + for msg in (dm_msg, bot_msg): + await cog.on_message(msg) + find_token_in_message.assert_not_called() + + @autospec("bot.exts.filters.token_remover", "TOKEN_RE") + def test_find_token_no_matches(self, token_re): + """None should be returned if the regex matches no tokens in a message.""" + token_re.finditer.return_value = () + + return_value = 
TokenRemover.find_token_in_message(self.msg) + + self.assertIsNone(return_value) + token_re.finditer.assert_called_once_with(self.msg.content) + + @autospec(TokenRemover, "extract_user_id", "is_valid_timestamp", "is_maybe_valid_hmac") + @autospec("bot.exts.filters.token_remover", "Token") + @autospec("bot.exts.filters.token_remover", "TOKEN_RE") + def test_find_token_valid_match( + self, + token_re, + token_cls, + extract_user_id, + is_valid_timestamp, + is_maybe_valid_hmac, + ): + """The first match with a valid user ID, timestamp, and HMAC should be returned as a `Token`.""" + matches = [ + mock.create_autospec(Match, spec_set=True, instance=True), + mock.create_autospec(Match, spec_set=True, instance=True), + ] + tokens = [ + mock.create_autospec(Token, spec_set=True, instance=True), + mock.create_autospec(Token, spec_set=True, instance=True), + ] + + token_re.finditer.return_value = matches + token_cls.side_effect = tokens + extract_user_id.side_effect = (None, True) # The 1st match will be invalid, 2nd one valid. + is_valid_timestamp.return_value = True + is_maybe_valid_hmac.return_value = True + + return_value = TokenRemover.find_token_in_message(self.msg) + + self.assertEqual(tokens[1], return_value) + token_re.finditer.assert_called_once_with(self.msg.content) + + @autospec(TokenRemover, "extract_user_id", "is_valid_timestamp", "is_maybe_valid_hmac") + @autospec("bot.exts.filters.token_remover", "Token") + @autospec("bot.exts.filters.token_remover", "TOKEN_RE") + def test_find_token_invalid_matches( + self, + token_re, + token_cls, + extract_user_id, + is_valid_timestamp, + is_maybe_valid_hmac, + ): + """None should be returned if no matches have valid user IDs, HMACs, and timestamps.""" + token_re.finditer.return_value = [mock.create_autospec(Match, spec_set=True, instance=True)] + token_cls.return_value = mock.create_autospec(Token, spec_set=True, instance=True) + extract_user_id.return_value = None + is_valid_timestamp.return_value = False + is_maybe_valid_hmac.return_value = False + + return_value = TokenRemover.find_token_in_message(self.msg) + + self.assertIsNone(return_value) + token_re.finditer.assert_called_once_with(self.msg.content) + + def test_regex_invalid_tokens(self): + """Messages without anything looking like a token are not matched.""" + tokens = ( + "", + "lemon wins", + "..", + "x.y", + "x.y.", + ".y.z", + ".y.", + "..z", + "x..z", + " . . ", + "\n.\n.\n", + "hellö.world.bye", + "base64.nötbåse64.morebase64", + "19jd3J.dfkm3d.€víł§tüff", + ) + + for token in tokens: + with self.subTest(token=token): + results = token_remover.TOKEN_RE.findall(token) + self.assertEqual(len(results), 0) + + def test_regex_valid_tokens(self): + """Messages that look like tokens should be matched.""" + # Don't worry, these tokens have been invalidated. 
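+ # Each sample below has the three dot-separated segments validated earlier in this file:
+ # a base64-encoded user ID, a timestamp segment, and an HMAC segment.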
+ tokens = ( + "NDcyMjY1OTQzMDYy_DEzMz-y.XsyRkw.VXmErH7j511turNpfURmb0rVNm8", + "NDcyMjY1OTQzMDYyNDEzMzMy.Xrim9Q.Ysnu2wacjaKs7qnoo46S8Dm2us8", + "NDc1MDczNjI5Mzk5NTQ3OTA0.XsyR-w.sJf6omBPORBPju3WJEIAcwW9Zds", + "NDY3MjIzMjMwNjUwNzc3NjQx.XsySD_.s45jqDV_Iisn-symw0yDRrk_jf4", + ) + + for token in tokens: + with self.subTest(token=token): + results = token_remover.TOKEN_RE.fullmatch(token) + self.assertIsNotNone(results, f"{token} was not matched by the regex") + + def test_regex_matches_multiple_valid(self): + """Should support multiple matches in the middle of a string.""" + token_1 = "NDY3MjIzMjMwNjUwNzc3NjQx.XsyWGg.uFNEQPCc4ePwGh7egG8UicQssz8" + token_2 = "NDcyMjY1OTQzMDYyNDEzMzMy.XsyWMw.l8XPnDqb0lp-EiQ2g_0xVFT1pyc" + message = f"garbage {token_1} hello {token_2} world" + + results = token_remover.TOKEN_RE.finditer(message) + results = [match[0] for match in results] + self.assertCountEqual((token_1, token_2), results) + + @autospec("bot.exts.filters.token_remover", "LOG_MESSAGE") + def test_format_log_message(self, log_message): + """Should correctly format the log message with info from the message and token.""" + token = Token("NDcyMjY1OTQzMDYyNDEzMzMy", "XsySD_", "s45jqDV_Iisn-symw0yDRrk_jf4") + log_message.format.return_value = "Howdy" + + return_value = TokenRemover.format_log_message(self.msg, token) + + self.assertEqual(return_value, log_message.format.return_value) + log_message.format.assert_called_once_with( + author=format_user(self.msg.author), + channel=self.msg.channel.mention, + user_id=token.user_id, + timestamp=token.timestamp, + hmac="x" * len(token.hmac), + ) + + @autospec("bot.exts.filters.token_remover", "UNKNOWN_USER_LOG_MESSAGE") + def test_format_userid_log_message_unknown(self, unknown_user_log_message): + """Should correctly format the user ID portion when the actual user it belongs to is unknown.""" + token = Token("NDcyMjY1OTQzMDYyNDEzMzMy", "XsySD_", "s45jqDV_Iisn-symw0yDRrk_jf4") + unknown_user_log_message.format.return_value = " Partner" + msg = MockMessage(id=555, content="hello world") + msg.guild.get_member.return_value = None + + return_value = TokenRemover.format_userid_log_message(msg, token) + + self.assertEqual(return_value, (unknown_user_log_message.format.return_value, False)) + unknown_user_log_message.format.assert_called_once_with(user_id=472265943062413332) + + @autospec("bot.exts.filters.token_remover", "KNOWN_USER_LOG_MESSAGE") + def test_format_userid_log_message_bot(self, known_user_log_message): + """Should correctly format the user ID portion when the ID belongs to a known bot.""" + token = Token("NDcyMjY1OTQzMDYyNDEzMzMy", "XsySD_", "s45jqDV_Iisn-symw0yDRrk_jf4") + known_user_log_message.format.return_value = " Partner" + msg = MockMessage(id=555, content="hello world") + msg.guild.get_member.return_value.__str__.return_value = "Sam" + msg.guild.get_member.return_value.bot = True + + return_value = TokenRemover.format_userid_log_message(msg, token) + + self.assertEqual(return_value, (known_user_log_message.format.return_value, False)) + + known_user_log_message.format.assert_called_once_with( + user_id=472265943062413332, + user_name="Sam", + kind="BOT", + ) + + @autospec("bot.exts.filters.token_remover", "KNOWN_USER_LOG_MESSAGE") + def test_format_log_message_user_token_user(self, user_token_message): + """Should correctly format the user ID portion when the ID belongs to a known user.""" + token = Token("NDY3MjIzMjMwNjUwNzc3NjQx", "XsySD_", "s45jqDV_Iisn-symw0yDRrk_jf4") + user_token_message.format.return_value = "Partner" + + 
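+ # setUp points msg.guild.get_member at a non-bot mock that stringifies to "Woody",
+ # so this call exercises the known-user branch.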
return_value = TokenRemover.format_userid_log_message(self.msg, token) + + self.assertEqual(return_value, (user_token_message.format.return_value, True)) + user_token_message.format.assert_called_once_with( + user_id=467223230650777641, + user_name="Woody", + kind="USER", + ) + + @mock.patch.object(TokenRemover, "mod_log", new_callable=mock.PropertyMock) + @autospec("bot.exts.filters.token_remover", "log") + @autospec(TokenRemover, "format_log_message", "format_userid_log_message") + async def test_take_action(self, format_log_message, format_userid_log_message, logger, mod_log_property): + """Should delete the message and send a mod log.""" + cog = TokenRemover(self.bot) + mod_log = mock.create_autospec(ModLog, spec_set=True, instance=True) + token = mock.create_autospec(Token, spec_set=True, instance=True) + token.user_id = "no-id" + log_msg = "testing123" + userid_log_message = "userid-log-message" + + mod_log_property.return_value = mod_log + format_log_message.return_value = log_msg + format_userid_log_message.return_value = (userid_log_message, True) + + await cog.take_action(self.msg, token) + + self.msg.delete.assert_called_once_with() + self.msg.channel.send.assert_called_once_with( + token_remover.DELETION_MESSAGE_TEMPLATE.format(mention=self.msg.author.mention) + ) + + format_log_message.assert_called_once_with(self.msg, token) + format_userid_log_message.assert_called_once_with(self.msg, token) + logger.debug.assert_called_with(log_msg) + self.bot.stats.incr.assert_called_once_with("tokens.removed_tokens") + + mod_log.ignore.assert_called_once_with(constants.Event.message_delete, self.msg.id) + mod_log.send_log_message.assert_called_once_with( + icon_url=constants.Icons.token_removed, + colour=Colour(constants.Colours.soft_red), + title="Token removed!", + text=log_msg + "\n" + userid_log_message, + thumbnail=self.msg.author.avatar_url_as.return_value, + channel_id=constants.Channels.mod_alerts, + ping_everyone=True, + ) + + @mock.patch.object(TokenRemover, "mod_log", new_callable=mock.PropertyMock) + async def test_take_action_delete_failure(self, mod_log_property): + """Shouldn't send any messages if the token message can't be deleted.""" + cog = TokenRemover(self.bot) + mod_log_property.return_value = mock.create_autospec(ModLog, spec_set=True, instance=True) + self.msg.delete.side_effect = NotFound(MagicMock(), MagicMock()) + + token = mock.create_autospec(Token, spec_set=True, instance=True) + await cog.take_action(self.msg, token) + + self.msg.delete.assert_called_once_with() + self.msg.channel.send.assert_not_awaited() + + +class TokenRemoverExtensionTests(unittest.TestCase): + """Tests for the token_remover extension.""" + + @autospec("bot.exts.filters.token_remover", "TokenRemover") + def test_extension_setup(self, cog): + """The TokenRemover cog should be added.""" + bot = MockBot() + token_remover.setup(bot) + + cog.assert_called_once_with(bot) + bot.add_cog.assert_called_once() + self.assertTrue(isinstance(bot.add_cog.call_args.args[0], TokenRemover)) diff --git a/tests/bot/exts/info/__init__.py b/tests/bot/exts/info/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/tests/bot/exts/info/__init__.py diff --git a/tests/bot/cogs/test_information.py b/tests/bot/exts/info/test_information.py index 4496a2ae0..d077be960 100644 --- a/tests/bot/cogs/test_information.py +++ b/tests/bot/exts/info/test_information.py @@ -1,4 +1,3 @@ -import asyncio import textwrap import unittest import unittest.mock @@ -6,20 +5,19 @@ import unittest.mock import 
discord from bot import constants -from bot.cogs import information -from bot.decorators import InChannelCheckFailure +from bot.exts.info import information +from bot.utils.checks import InWhitelistCheckFailure from tests import helpers +COG_PATH = "bot.exts.info.information.Information" -COG_PATH = "bot.cogs.information.Information" - -class InformationCogTests(unittest.TestCase): +class InformationCogTests(unittest.IsolatedAsyncioTestCase): """Tests the Information cog.""" @classmethod def setUpClass(cls): - cls.moderator_role = helpers.MockRole(name="Moderator", id=constants.Roles.moderator) + cls.moderator_role = helpers.MockRole(name="Moderator", id=constants.Roles.moderators) def setUp(self): """Sets up fresh objects for each test.""" @@ -30,27 +28,24 @@ class InformationCogTests(unittest.TestCase): self.ctx = helpers.MockContext() self.ctx.author.roles.append(self.moderator_role) - def test_roles_command_command(self): + async def test_roles_command_command(self): """Test if the `role_info` command correctly returns the `moderator_role`.""" self.ctx.guild.roles.append(self.moderator_role) - self.cog.roles_info.can_run = helpers.AsyncMock() + self.cog.roles_info.can_run = unittest.mock.AsyncMock() self.cog.roles_info.can_run.return_value = True - coroutine = self.cog.roles_info.callback(self.cog, self.ctx) - - self.assertIsNone(asyncio.run(coroutine)) + self.assertIsNone(await self.cog.roles_info(self.cog, self.ctx)) self.ctx.send.assert_called_once() _, kwargs = self.ctx.send.call_args embed = kwargs.pop('embed') - self.assertEqual(embed.title, "Role information") + self.assertEqual(embed.title, "Role information (Total 1 role)") self.assertEqual(embed.colour, discord.Colour.blurple()) - self.assertEqual(embed.description, f"`{self.moderator_role.id}` - {self.moderator_role.mention}\n") - self.assertEqual(embed.footer.text, "Total roles: 1") + self.assertEqual(embed.description, f"\n`{self.moderator_role.id}` - {self.moderator_role.mention}\n") - def test_role_info_command(self): + async def test_role_info_command(self): """Tests the `role info` command.""" dummy_role = helpers.MockRole( name="Dummy", @@ -72,12 +67,10 @@ class InformationCogTests(unittest.TestCase): self.ctx.guild.roles.append([dummy_role, admin_role]) - self.cog.role_info.can_run = helpers.AsyncMock() + self.cog.role_info.can_run = unittest.mock.AsyncMock() self.cog.role_info.can_run.return_value = True - coroutine = self.cog.role_info.callback(self.cog, self.ctx, dummy_role, admin_role) - - self.assertIsNone(asyncio.run(coroutine)) + self.assertIsNone(await self.cog.role_info(self.cog, self.ctx, dummy_role, admin_role)) self.assertEqual(self.ctx.send.call_count, 2) @@ -99,86 +92,18 @@ class InformationCogTests(unittest.TestCase): self.assertEqual(admin_embed.title, "Admins info") self.assertEqual(admin_embed.colour, discord.Colour.red()) - @unittest.mock.patch('bot.cogs.information.time_since') - def test_server_info_command(self, time_since_patch): - time_since_patch.return_value = '2 days ago' - - self.ctx.guild = helpers.MockGuild( - features=('lemons', 'apples'), - region="The Moon", - roles=[self.moderator_role], - channels=[ - discord.TextChannel( - state={}, - guild=self.ctx.guild, - data={'id': 42, 'name': 'lemons-offering', 'position': 22, 'type': 'text'} - ), - discord.CategoryChannel( - state={}, - guild=self.ctx.guild, - data={'id': 5125, 'name': 'the-lemon-collection', 'position': 22, 'type': 'category'} - ), - discord.VoiceChannel( - state={}, - guild=self.ctx.guild, - data={'id': 15290, 'name': 
'listen-to-lemon', 'position': 22, 'type': 'voice'} - ) - ], - members=[ - *(helpers.MockMember(status='online') for _ in range(2)), - *(helpers.MockMember(status='idle') for _ in range(1)), - *(helpers.MockMember(status='dnd') for _ in range(4)), - *(helpers.MockMember(status='offline') for _ in range(3)), - ], - member_count=1_234, - icon_url='a-lemon.jpg', - ) - - coroutine = self.cog.server_info.callback(self.cog, self.ctx) - self.assertIsNone(asyncio.run(coroutine)) - time_since_patch.assert_called_once_with(self.ctx.guild.created_at, precision='days') - _, kwargs = self.ctx.send.call_args - embed = kwargs.pop('embed') - self.assertEqual(embed.colour, discord.Colour.blurple()) - self.assertEqual( - embed.description, - textwrap.dedent( - f""" - **Server information** - Created: {time_since_patch.return_value} - Voice region: {self.ctx.guild.region} - Features: {', '.join(self.ctx.guild.features)} - - **Counts** - Members: {self.ctx.guild.member_count:,} - Roles: {len(self.ctx.guild.roles)} - Text: 1 - Voice: 1 - Channel categories: 1 - - **Members** - {constants.Emojis.status_online} 2 - {constants.Emojis.status_idle} 1 - {constants.Emojis.status_dnd} 4 - {constants.Emojis.status_offline} 3 - """ - ) - ) - self.assertEqual(embed.thumbnail.url, 'a-lemon.jpg') - - -class UserInfractionHelperMethodTests(unittest.TestCase): +class UserInfractionHelperMethodTests(unittest.IsolatedAsyncioTestCase): """Tests for the helper methods of the `!user` command.""" def setUp(self): """Common set-up steps done before for each test.""" self.bot = helpers.MockBot() - self.bot.api_client.get = helpers.AsyncMock() + self.bot.api_client.get = unittest.mock.AsyncMock() self.cog = information.Information(self.bot) self.member = helpers.MockMember(id=1234) - def test_user_command_helper_method_get_requests(self): + async def test_user_command_helper_method_get_requests(self): """The helper methods should form the correct get requests.""" test_values = ( { @@ -200,11 +125,11 @@ class UserInfractionHelperMethodTests(unittest.TestCase): endpoint, params = test_value["expected_args"] with self.subTest(method=helper_method, endpoint=endpoint, params=params): - asyncio.run(helper_method(self.member)) + await helper_method(self.member) self.bot.api_client.get.assert_called_once_with(endpoint, params=params) self.bot.api_client.get.reset_mock() - def _method_subtests(self, method, test_values, default_header): + async def _method_subtests(self, method, test_values, default_header): """Helper method that runs the subtests for the different helper methods.""" for test_value in test_values: api_response = test_value["api response"] @@ -213,12 +138,12 @@ class UserInfractionHelperMethodTests(unittest.TestCase): with self.subTest(method=method, api_response=api_response, expected_lines=expected_lines): self.bot.api_client.get.return_value = api_response - expected_output = "\n".join(default_header + expected_lines) - actual_output = asyncio.run(method(self.member)) + expected_output = "\n".join(expected_lines) + actual_output = await method(self.member) - self.assertEqual(expected_output, actual_output) + self.assertEqual((default_header, expected_output), actual_output) - def test_basic_user_infraction_counts_returns_correct_strings(self): + async def test_basic_user_infraction_counts_returns_correct_strings(self): """The method should correctly list both the total and active number of non-hidden infractions.""" test_values = ( # No infractions means zero counts @@ -247,16 +172,16 @@ class 
UserInfractionHelperMethodTests(unittest.TestCase): }, ) - header = ["**Infractions**"] + header = "Infractions" - self._method_subtests(self.cog.basic_user_infraction_counts, test_values, header) + await self._method_subtests(self.cog.basic_user_infraction_counts, test_values, header) - def test_expanded_user_infraction_counts_returns_correct_strings(self): + async def test_expanded_user_infraction_counts_returns_correct_strings(self): """The method should correctly list the total and active number of all infractions split by infraction type.""" test_values = ( { "api response": [], - "expected_lines": ["This user has never received an infraction."], + "expected_lines": ["No infractions"], }, # Shows non-hidden inactive infraction as expected { @@ -302,24 +227,24 @@ class UserInfractionHelperMethodTests(unittest.TestCase): }, ) - header = ["**Infractions**"] + header = "Infractions" - self._method_subtests(self.cog.expanded_user_infraction_counts, test_values, header) + await self._method_subtests(self.cog.expanded_user_infraction_counts, test_values, header) - def test_user_nomination_counts_returns_correct_strings(self): + async def test_user_nomination_counts_returns_correct_strings(self): """The method should list the number of active and historical nominations for the user.""" test_values = ( { "api response": [], - "expected_lines": ["This user has never been nominated."], + "expected_lines": ["No nominations"], }, { "api response": [{'active': True}], - "expected_lines": ["This user is **currently** nominated (1 nomination in total)."], + "expected_lines": ["This user is **currently** nominated", "(1 nomination in total)"], }, { "api response": [{'active': True}, {'active': False}], - "expected_lines": ["This user is **currently** nominated (2 nominations in total)."], + "expected_lines": ["This user is **currently** nominated", "(2 nominations in total)"], }, { "api response": [{'active': False}], @@ -332,48 +257,57 @@ class UserInfractionHelperMethodTests(unittest.TestCase): ) - header = ["**Nominations**"] + header = "Nominations" - self._method_subtests(self.cog.user_nomination_counts, test_values, header) + await self._method_subtests(self.cog.user_nomination_counts, test_values, header) [email protected]("bot.cogs.information.time_since", new=unittest.mock.MagicMock(return_value="1 year ago")) [email protected]("bot.cogs.information.constants.MODERATION_CHANNELS", new=[50]) -class UserEmbedTests(unittest.TestCase): [email protected]("bot.exts.info.information.time_since", new=unittest.mock.MagicMock(return_value="1 year ago")) [email protected]("bot.exts.info.information.constants.MODERATION_CHANNELS", new=[50]) +class UserEmbedTests(unittest.IsolatedAsyncioTestCase): """Tests for the creation of the `!user` embed.""" def setUp(self): """Common set-up steps done before for each test.""" self.bot = helpers.MockBot() - self.bot.api_client.get = helpers.AsyncMock() + self.bot.api_client.get = unittest.mock.AsyncMock() self.cog = information.Information(self.bot) - @unittest.mock.patch(f"{COG_PATH}.basic_user_infraction_counts", new=helpers.AsyncMock(return_value="")) - def test_create_user_embed_uses_string_representation_of_user_in_title_if_nick_is_not_available(self): + @unittest.mock.patch( + f"{COG_PATH}.basic_user_infraction_counts", + new=unittest.mock.AsyncMock(return_value=("Infractions", "basic infractions")) + ) + async def test_create_user_embed_uses_string_representation_of_user_in_title_if_nick_is_not_available(self): """The embed should use the string 
representation of the user if they don't have a nick.""" ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=1)) user = helpers.MockMember() user.nick = None user.__str__ = unittest.mock.Mock(return_value="Mr. Hemlock") - embed = asyncio.run(self.cog.create_user_embed(ctx, user)) + embed = await self.cog.create_user_embed(ctx, user) self.assertEqual(embed.title, "Mr. Hemlock") - @unittest.mock.patch(f"{COG_PATH}.basic_user_infraction_counts", new=helpers.AsyncMock(return_value="")) - def test_create_user_embed_uses_nick_in_title_if_available(self): + @unittest.mock.patch( + f"{COG_PATH}.basic_user_infraction_counts", + new=unittest.mock.AsyncMock(return_value=("Infractions", "basic infractions")) + ) + async def test_create_user_embed_uses_nick_in_title_if_available(self): """The embed should use the nick if it's available.""" ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=1)) user = helpers.MockMember() user.nick = "Cat lover" user.__str__ = unittest.mock.Mock(return_value="Mr. Hemlock") - embed = asyncio.run(self.cog.create_user_embed(ctx, user)) + embed = await self.cog.create_user_embed(ctx, user) self.assertEqual(embed.title, "Cat lover (Mr. Hemlock)") - @unittest.mock.patch(f"{COG_PATH}.basic_user_infraction_counts", new=helpers.AsyncMock(return_value="")) - def test_create_user_embed_ignores_everyone_role(self): + @unittest.mock.patch( + f"{COG_PATH}.basic_user_infraction_counts", + new=unittest.mock.AsyncMock(return_value=("Infractions", "basic infractions")) + ) + async def test_create_user_embed_ignores_everyone_role(self): """Created `!user` embeds should not contain mention of the @everyone-role.""" ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=1)) admins_role = helpers.MockRole(name='Admins') @@ -382,80 +316,93 @@ class UserEmbedTests(unittest.TestCase): # A `MockMember` has the @Everyone role by default; we add the Admins to that. 
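+ # The reworked embed reports roles in its second field, so the assertions below
+ # check embed.fields[1] rather than embed.description.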
user = helpers.MockMember(roles=[admins_role], top_role=admins_role) - embed = asyncio.run(self.cog.create_user_embed(ctx, user)) + embed = await self.cog.create_user_embed(ctx, user) - self.assertIn("&Admins", embed.description) - self.assertNotIn("&Everyone", embed.description) + self.assertIn("&Admins", embed.fields[1].value) + self.assertNotIn("&Everyone", embed.fields[1].value) - @unittest.mock.patch(f"{COG_PATH}.expanded_user_infraction_counts", new_callable=helpers.AsyncMock) - @unittest.mock.patch(f"{COG_PATH}.user_nomination_counts", new_callable=helpers.AsyncMock) - def test_create_user_embed_expanded_information_in_moderation_channels(self, nomination_counts, infraction_counts): + @unittest.mock.patch(f"{COG_PATH}.expanded_user_infraction_counts", new_callable=unittest.mock.AsyncMock) + @unittest.mock.patch(f"{COG_PATH}.user_nomination_counts", new_callable=unittest.mock.AsyncMock) + async def test_create_user_embed_expanded_information_in_moderation_channels( + self, + nomination_counts, + infraction_counts + ): """The embed should contain expanded infractions and nomination info in mod channels.""" ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=50)) moderators_role = helpers.MockRole(name='Moderators') moderators_role.colour = 100 - infraction_counts.return_value = "expanded infractions info" - nomination_counts.return_value = "nomination info" + infraction_counts.return_value = ("Infractions", "expanded infractions info") + nomination_counts.return_value = ("Nominations", "nomination info") user = helpers.MockMember(id=314, roles=[moderators_role], top_role=moderators_role) - embed = asyncio.run(self.cog.create_user_embed(ctx, user)) + embed = await self.cog.create_user_embed(ctx, user) infraction_counts.assert_called_once_with(user) nomination_counts.assert_called_once_with(user) self.assertEqual( textwrap.dedent(f""" - **User Information** Created: {"1 year ago"} Profile: {user.mention} ID: {user.id} + """).strip(), + embed.fields[0].value + ) - **Member Information** + self.assertEqual( + textwrap.dedent(f""" Joined: {"1 year ago"} + Verified: {"True"} Roles: &Moderators - - expanded infractions info - - nomination info """).strip(), - embed.description + embed.fields[1].value ) - @unittest.mock.patch(f"{COG_PATH}.basic_user_infraction_counts", new_callable=helpers.AsyncMock) - def test_create_user_embed_basic_information_outside_of_moderation_channels(self, infraction_counts): + @unittest.mock.patch(f"{COG_PATH}.basic_user_infraction_counts", new_callable=unittest.mock.AsyncMock) + async def test_create_user_embed_basic_information_outside_of_moderation_channels(self, infraction_counts): """The embed should contain only basic infraction data outside of mod channels.""" ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=100)) moderators_role = helpers.MockRole(name='Moderators') moderators_role.colour = 100 - infraction_counts.return_value = "basic infractions info" + infraction_counts.return_value = ("Infractions", "basic infractions info") user = helpers.MockMember(id=314, roles=[moderators_role], top_role=moderators_role) - embed = asyncio.run(self.cog.create_user_embed(ctx, user)) + embed = await self.cog.create_user_embed(ctx, user) infraction_counts.assert_called_once_with(user) self.assertEqual( textwrap.dedent(f""" - **User Information** Created: {"1 year ago"} Profile: {user.mention} ID: {user.id} + """).strip(), + embed.fields[0].value + ) - **Member Information** + self.assertEqual( + textwrap.dedent(f""" Joined: {"1 year ago"} Roles: 
&Moderators - - basic infractions info """).strip(), - embed.description + embed.fields[1].value + ) + + self.assertEqual( + "basic infractions info", + embed.fields[2].value ) - @unittest.mock.patch(f"{COG_PATH}.basic_user_infraction_counts", new=helpers.AsyncMock(return_value="")) - def test_create_user_embed_uses_top_role_colour_when_user_has_roles(self): + @unittest.mock.patch( + f"{COG_PATH}.basic_user_infraction_counts", + new=unittest.mock.AsyncMock(return_value=("Infractions", "basic infractions")) + ) + async def test_create_user_embed_uses_top_role_colour_when_user_has_roles(self): """The embed should be created with the colour of the top role, if a top role is available.""" ctx = helpers.MockContext() @@ -463,35 +410,41 @@ class UserEmbedTests(unittest.TestCase): moderators_role.colour = 100 user = helpers.MockMember(id=314, roles=[moderators_role], top_role=moderators_role) - embed = asyncio.run(self.cog.create_user_embed(ctx, user)) + embed = await self.cog.create_user_embed(ctx, user) self.assertEqual(embed.colour, discord.Colour(moderators_role.colour)) - @unittest.mock.patch(f"{COG_PATH}.basic_user_infraction_counts", new=helpers.AsyncMock(return_value="")) - def test_create_user_embed_uses_blurple_colour_when_user_has_no_roles(self): + @unittest.mock.patch( + f"{COG_PATH}.basic_user_infraction_counts", + new=unittest.mock.AsyncMock(return_value=("Infractions", "basic infractions")) + ) + async def test_create_user_embed_uses_blurple_colour_when_user_has_no_roles(self): """The embed should be created with a blurple colour if the user has no assigned roles.""" ctx = helpers.MockContext() user = helpers.MockMember(id=217) - embed = asyncio.run(self.cog.create_user_embed(ctx, user)) + embed = await self.cog.create_user_embed(ctx, user) self.assertEqual(embed.colour, discord.Colour.blurple()) - @unittest.mock.patch(f"{COG_PATH}.basic_user_infraction_counts", new=helpers.AsyncMock(return_value="")) - def test_create_user_embed_uses_png_format_of_user_avatar_as_thumbnail(self): + @unittest.mock.patch( + f"{COG_PATH}.basic_user_infraction_counts", + new=unittest.mock.AsyncMock(return_value=("Infractions", "basic infractions")) + ) + async def test_create_user_embed_uses_png_format_of_user_avatar_as_thumbnail(self): """The embed thumbnail should be set to the user's avatar in `png` format.""" ctx = helpers.MockContext() user = helpers.MockMember(id=217) user.avatar_url_as.return_value = "avatar url" - embed = asyncio.run(self.cog.create_user_embed(ctx, user)) + embed = await self.cog.create_user_embed(ctx, user) - user.avatar_url_as.assert_called_once_with(format="png") + user.avatar_url_as.assert_called_once_with(static_format="png") self.assertEqual(embed.thumbnail.url, "avatar url") [email protected]("bot.cogs.information.constants") -class UserCommandTests(unittest.TestCase): [email protected]("bot.exts.info.information.constants") +class UserCommandTests(unittest.IsolatedAsyncioTestCase): """Tests for the `!user` command.""" def setUp(self): @@ -507,76 +460,70 @@ class UserCommandTests(unittest.TestCase): self.moderator = helpers.MockMember(id=2, name="riffautae", roles=[self.moderator_role]) self.target = helpers.MockMember(id=3, name="__fluzz__") - def test_regular_member_cannot_target_another_member(self, constants): + # There's no way to mock the channel constant without deferring imports. The constant is + # used as a default value for a parameter, which gets defined upon import. 
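+ # Give the mock channel the real bot_commands ID so the channel whitelist check
+ # passes for the tests that run in that channel.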
+ self.bot_command_channel = helpers.MockTextChannel(id=constants.Channels.bot_commands) + + async def test_regular_member_cannot_target_another_member(self, constants): """A regular user should not be able to use `!user` targeting another user.""" constants.MODERATION_ROLES = [self.moderator_role.id] - ctx = helpers.MockContext(author=self.author) - asyncio.run(self.cog.user_info.callback(self.cog, ctx, self.target)) + await self.cog.user_info(self.cog, ctx, self.target) ctx.send.assert_called_once_with("You may not use this command on users other than yourself.") - def test_regular_member_cannot_use_command_outside_of_bot_commands(self, constants): + async def test_regular_member_cannot_use_command_outside_of_bot_commands(self, constants): """A regular user should not be able to use this command outside of bot-commands.""" constants.MODERATION_ROLES = [self.moderator_role.id] constants.STAFF_ROLES = [self.moderator_role.id] - constants.Channels.bot = 50 - ctx = helpers.MockContext(author=self.author, channel=helpers.MockTextChannel(id=100)) msg = "Sorry, but you may only use this command within <#50>." - with self.assertRaises(InChannelCheckFailure, msg=msg): - asyncio.run(self.cog.user_info.callback(self.cog, ctx)) + with self.assertRaises(InWhitelistCheckFailure, msg=msg): + await self.cog.user_info(self.cog, ctx) - @unittest.mock.patch("bot.cogs.information.Information.create_user_embed", new_callable=helpers.AsyncMock) - def test_regular_user_may_use_command_in_bot_commands_channel(self, create_embed, constants): + @unittest.mock.patch("bot.exts.info.information.Information.create_user_embed") + async def test_regular_user_may_use_command_in_bot_commands_channel(self, create_embed, constants): """A regular user should be allowed to use `!user` targeting themselves in bot-commands.""" constants.STAFF_ROLES = [self.moderator_role.id] - constants.Channels.bot = 50 + ctx = helpers.MockContext(author=self.author, channel=self.bot_command_channel) - ctx = helpers.MockContext(author=self.author, channel=helpers.MockTextChannel(id=50)) - - asyncio.run(self.cog.user_info.callback(self.cog, ctx)) + await self.cog.user_info(self.cog, ctx) create_embed.assert_called_once_with(ctx, self.author) ctx.send.assert_called_once() - @unittest.mock.patch("bot.cogs.information.Information.create_user_embed", new_callable=helpers.AsyncMock) - def test_regular_user_can_explicitly_target_themselves(self, create_embed, constants): + @unittest.mock.patch("bot.exts.info.information.Information.create_user_embed") + async def test_regular_user_can_explicitly_target_themselves(self, create_embed, _): """A user should target itself with `!user` when a `user` argument was not provided.""" constants.STAFF_ROLES = [self.moderator_role.id] - constants.Channels.bot = 50 - - ctx = helpers.MockContext(author=self.author, channel=helpers.MockTextChannel(id=50)) + ctx = helpers.MockContext(author=self.author, channel=self.bot_command_channel) - asyncio.run(self.cog.user_info.callback(self.cog, ctx, self.author)) + await self.cog.user_info(self.cog, ctx, self.author) create_embed.assert_called_once_with(ctx, self.author) ctx.send.assert_called_once() - @unittest.mock.patch("bot.cogs.information.Information.create_user_embed", new_callable=helpers.AsyncMock) - def test_staff_members_can_bypass_channel_restriction(self, create_embed, constants): + @unittest.mock.patch("bot.exts.info.information.Information.create_user_embed") + async def test_staff_members_can_bypass_channel_restriction(self, create_embed, constants): 
"""Staff members should be able to bypass the bot-commands channel restriction.""" constants.STAFF_ROLES = [self.moderator_role.id] - constants.Channels.bot = 50 - ctx = helpers.MockContext(author=self.moderator, channel=helpers.MockTextChannel(id=200)) - asyncio.run(self.cog.user_info.callback(self.cog, ctx)) + await self.cog.user_info(self.cog, ctx) create_embed.assert_called_once_with(ctx, self.moderator) ctx.send.assert_called_once() - @unittest.mock.patch("bot.cogs.information.Information.create_user_embed", new_callable=helpers.AsyncMock) - def test_moderators_can_target_another_member(self, create_embed, constants): + @unittest.mock.patch("bot.exts.info.information.Information.create_user_embed") + async def test_moderators_can_target_another_member(self, create_embed, constants): """A moderator should be able to use `!user` targeting another user.""" constants.MODERATION_ROLES = [self.moderator_role.id] constants.STAFF_ROLES = [self.moderator_role.id] - ctx = helpers.MockContext(author=self.moderator, channel=helpers.MockTextChannel(id=50)) - asyncio.run(self.cog.user_info.callback(self.cog, ctx, self.target)) + await self.cog.user_info(self.cog, ctx, self.target) create_embed.assert_called_once_with(ctx, self.target) ctx.send.assert_called_once() diff --git a/tests/bot/exts/moderation/__init__.py b/tests/bot/exts/moderation/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/tests/bot/exts/moderation/__init__.py diff --git a/tests/bot/exts/moderation/infraction/__init__.py b/tests/bot/exts/moderation/infraction/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/tests/bot/exts/moderation/infraction/__init__.py diff --git a/tests/bot/exts/moderation/infraction/test_infractions.py b/tests/bot/exts/moderation/infraction/test_infractions.py new file mode 100644 index 000000000..bf557a484 --- /dev/null +++ b/tests/bot/exts/moderation/infraction/test_infractions.py @@ -0,0 +1,201 @@ +import textwrap +import unittest +from unittest.mock import AsyncMock, MagicMock, Mock, patch + +from bot.constants import Event +from bot.exts.moderation.infraction.infractions import Infractions +from tests.helpers import MockBot, MockContext, MockGuild, MockMember, MockRole + + +class TruncationTests(unittest.IsolatedAsyncioTestCase): + """Tests for ban and kick command reason truncation.""" + + def setUp(self): + self.bot = MockBot() + self.cog = Infractions(self.bot) + self.user = MockMember(id=1234, top_role=MockRole(id=3577, position=10)) + self.target = MockMember(id=1265, top_role=MockRole(id=9876, position=0)) + self.guild = MockGuild(id=4567) + self.ctx = MockContext(bot=self.bot, author=self.user, guild=self.guild) + + @patch("bot.exts.moderation.infraction._utils.get_active_infraction") + @patch("bot.exts.moderation.infraction._utils.post_infraction") + async def test_apply_ban_reason_truncation(self, post_infraction_mock, get_active_mock): + """Should truncate reason for `ctx.guild.ban`.""" + get_active_mock.return_value = None + post_infraction_mock.return_value = {"foo": "bar"} + + self.cog.apply_infraction = AsyncMock() + self.bot.get_cog.return_value = AsyncMock() + self.cog.mod_log.ignore = Mock() + self.ctx.guild.ban = Mock() + + await self.cog.apply_ban(self.ctx, self.target, "foo bar" * 3000) + self.ctx.guild.ban.assert_called_once_with( + self.target, + reason=textwrap.shorten("foo bar" * 3000, 512, placeholder="..."), + delete_message_days=0 + ) + self.cog.apply_infraction.assert_awaited_once_with( + self.ctx, {"foo": "bar"}, 
self.target, self.ctx.guild.ban.return_value + ) + + @patch("bot.exts.moderation.infraction._utils.post_infraction") + async def test_apply_kick_reason_truncation(self, post_infraction_mock): + """Should truncate reason for `Member.kick`.""" + post_infraction_mock.return_value = {"foo": "bar"} + + self.cog.apply_infraction = AsyncMock() + self.cog.mod_log.ignore = Mock() + self.target.kick = Mock() + + await self.cog.apply_kick(self.ctx, self.target, "foo bar" * 3000) + self.target.kick.assert_called_once_with(reason=textwrap.shorten("foo bar" * 3000, 512, placeholder="...")) + self.cog.apply_infraction.assert_awaited_once_with( + self.ctx, {"foo": "bar"}, self.target, self.target.kick.return_value + ) + + +@patch("bot.exts.moderation.infraction.infractions.constants.Roles.voice_verified", new=123456) +class VoiceBanTests(unittest.IsolatedAsyncioTestCase): + """Tests for voice ban related functions and commands.""" + + def setUp(self): + self.bot = MockBot() + self.mod = MockMember(top_role=10) + self.user = MockMember(top_role=1, roles=[MockRole(id=123456)]) + self.guild = MockGuild() + self.ctx = MockContext(bot=self.bot, author=self.mod) + self.cog = Infractions(self.bot) + + async def test_permanent_voice_ban(self): + """Should call voice ban applying function without expiry.""" + self.cog.apply_voice_ban = AsyncMock() + self.assertIsNone(await self.cog.voiceban(self.cog, self.ctx, self.user, reason="foobar")) + self.cog.apply_voice_ban.assert_awaited_once_with(self.ctx, self.user, "foobar") + + async def test_temporary_voice_ban(self): + """Should call voice ban applying function with expiry.""" + self.cog.apply_voice_ban = AsyncMock() + self.assertIsNone(await self.cog.tempvoiceban(self.cog, self.ctx, self.user, "baz", reason="foobar")) + self.cog.apply_voice_ban.assert_awaited_once_with(self.ctx, self.user, "foobar", expires_at="baz") + + async def test_voice_unban(self): + """Should call infraction pardoning function.""" + self.cog.pardon_infraction = AsyncMock() + self.assertIsNone(await self.cog.unvoiceban(self.cog, self.ctx, self.user)) + self.cog.pardon_infraction.assert_awaited_once_with(self.ctx, "voice_ban", self.user) + + @patch("bot.exts.moderation.infraction.infractions._utils.post_infraction") + @patch("bot.exts.moderation.infraction.infractions._utils.get_active_infraction") + async def test_voice_ban_user_have_active_infraction(self, get_active_infraction, post_infraction_mock): + """Should return early when user already have Voice Ban infraction.""" + get_active_infraction.return_value = {"foo": "bar"} + self.assertIsNone(await self.cog.apply_voice_ban(self.ctx, self.user, "foobar")) + get_active_infraction.assert_awaited_once_with(self.ctx, self.user, "voice_ban") + post_infraction_mock.assert_not_awaited() + + @patch("bot.exts.moderation.infraction.infractions._utils.post_infraction") + @patch("bot.exts.moderation.infraction.infractions._utils.get_active_infraction") + async def test_voice_ban_infraction_post_failed(self, get_active_infraction, post_infraction_mock): + """Should return early when posting infraction fails.""" + self.cog.mod_log.ignore = MagicMock() + get_active_infraction.return_value = None + post_infraction_mock.return_value = None + self.assertIsNone(await self.cog.apply_voice_ban(self.ctx, self.user, "foobar")) + post_infraction_mock.assert_awaited_once() + self.cog.mod_log.ignore.assert_not_called() + + @patch("bot.exts.moderation.infraction.infractions._utils.post_infraction") + 
@patch("bot.exts.moderation.infraction.infractions._utils.get_active_infraction") + async def test_voice_ban_infraction_post_add_kwargs(self, get_active_infraction, post_infraction_mock): + """Should pass all kwargs passed to apply_voice_ban to post_infraction.""" + get_active_infraction.return_value = None + # We don't want that this continue yet + post_infraction_mock.return_value = None + self.assertIsNone(await self.cog.apply_voice_ban(self.ctx, self.user, "foobar", my_kwarg=23)) + post_infraction_mock.assert_awaited_once_with( + self.ctx, self.user, "voice_ban", "foobar", active=True, my_kwarg=23 + ) + + @patch("bot.exts.moderation.infraction.infractions._utils.post_infraction") + @patch("bot.exts.moderation.infraction.infractions._utils.get_active_infraction") + async def test_voice_ban_mod_log_ignore(self, get_active_infraction, post_infraction_mock): + """Should ignore Voice Verified role removing.""" + self.cog.mod_log.ignore = MagicMock() + self.cog.apply_infraction = AsyncMock() + self.user.remove_roles = MagicMock(return_value="my_return_value") + + get_active_infraction.return_value = None + post_infraction_mock.return_value = {"foo": "bar"} + + self.assertIsNone(await self.cog.apply_voice_ban(self.ctx, self.user, "foobar")) + self.cog.mod_log.ignore.assert_called_once_with(Event.member_update, self.user.id) + + @patch("bot.exts.moderation.infraction.infractions._utils.post_infraction") + @patch("bot.exts.moderation.infraction.infractions._utils.get_active_infraction") + async def test_voice_ban_apply_infraction(self, get_active_infraction, post_infraction_mock): + """Should ignore Voice Verified role removing.""" + self.cog.mod_log.ignore = MagicMock() + self.cog.apply_infraction = AsyncMock() + self.user.remove_roles = MagicMock(return_value="my_return_value") + + get_active_infraction.return_value = None + post_infraction_mock.return_value = {"foo": "bar"} + + self.assertIsNone(await self.cog.apply_voice_ban(self.ctx, self.user, "foobar")) + self.user.remove_roles.assert_called_once_with(self.cog._voice_verified_role, reason="foobar") + self.cog.apply_infraction.assert_awaited_once_with(self.ctx, {"foo": "bar"}, self.user, "my_return_value") + + @patch("bot.exts.moderation.infraction.infractions._utils.post_infraction") + @patch("bot.exts.moderation.infraction.infractions._utils.get_active_infraction") + async def test_voice_ban_truncate_reason(self, get_active_infraction, post_infraction_mock): + """Should truncate reason for voice ban.""" + self.cog.mod_log.ignore = MagicMock() + self.cog.apply_infraction = AsyncMock() + self.user.remove_roles = MagicMock(return_value="my_return_value") + + get_active_infraction.return_value = None + post_infraction_mock.return_value = {"foo": "bar"} + + self.assertIsNone(await self.cog.apply_voice_ban(self.ctx, self.user, "foobar" * 3000)) + self.user.remove_roles.assert_called_once_with( + self.cog._voice_verified_role, reason=textwrap.shorten("foobar" * 3000, 512, placeholder="...") + ) + self.cog.apply_infraction.assert_awaited_once_with(self.ctx, {"foo": "bar"}, self.user, "my_return_value") + + async def test_voice_unban_user_not_found(self): + """Should include info to return dict when user was not found from guild.""" + self.guild.get_member.return_value = None + result = await self.cog.pardon_voice_ban(self.user.id, self.guild, "foobar") + self.assertEqual(result, {"Info": "User was not found in the guild."}) + + @patch("bot.exts.moderation.infraction.infractions._utils.notify_pardon") + 
@patch("bot.exts.moderation.infraction.infractions.format_user") + async def test_voice_unban_user_found(self, format_user_mock, notify_pardon_mock): + """Should add role back with ignoring, notify user and return log dictionary..""" + self.guild.get_member.return_value = self.user + notify_pardon_mock.return_value = True + format_user_mock.return_value = "my-user" + + result = await self.cog.pardon_voice_ban(self.user.id, self.guild, "foobar") + self.assertEqual(result, { + "Member": "my-user", + "DM": "Sent" + }) + notify_pardon_mock.assert_awaited_once() + + @patch("bot.exts.moderation.infraction.infractions._utils.notify_pardon") + @patch("bot.exts.moderation.infraction.infractions.format_user") + async def test_voice_unban_dm_fail(self, format_user_mock, notify_pardon_mock): + """Should add role back with ignoring, notify user and return log dictionary..""" + self.guild.get_member.return_value = self.user + notify_pardon_mock.return_value = False + format_user_mock.return_value = "my-user" + + result = await self.cog.pardon_voice_ban(self.user.id, self.guild, "foobar") + self.assertEqual(result, { + "Member": "my-user", + "DM": "**Failed**" + }) + notify_pardon_mock.assert_awaited_once() diff --git a/tests/bot/exts/moderation/infraction/test_utils.py b/tests/bot/exts/moderation/infraction/test_utils.py new file mode 100644 index 000000000..5b62463e0 --- /dev/null +++ b/tests/bot/exts/moderation/infraction/test_utils.py @@ -0,0 +1,359 @@ +import unittest +from collections import namedtuple +from datetime import datetime +from unittest.mock import AsyncMock, MagicMock, call, patch + +from discord import Embed, Forbidden, HTTPException, NotFound + +from bot.api import ResponseCodeError +from bot.constants import Colours, Icons +from bot.exts.moderation.infraction import _utils as utils +from tests.helpers import MockBot, MockContext, MockMember, MockUser + + +class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase): + """Tests Moderation utils.""" + + def setUp(self): + self.bot = MockBot() + self.member = MockMember(id=1234) + self.user = MockUser(id=1234) + self.ctx = MockContext(bot=self.bot, author=self.member) + + async def test_post_user(self): + """Should POST a new user and return the response if successful or otherwise send an error message.""" + user = MockUser(discriminator=5678, id=1234, name="Test user") + not_user = MagicMock(discriminator=3333, id=5678, name="Wrong user") + test_cases = [ + { + "user": user, + "post_result": "bar", + "raise_error": None, + "payload": { + "discriminator": 5678, + "id": self.user.id, + "in_guild": False, + "name": "Test user", + "roles": [] + } + }, + { + "user": self.member, + "post_result": "foo", + "raise_error": ResponseCodeError(MagicMock(status=400), "foo"), + "payload": { + "discriminator": 0, + "id": self.member.id, + "in_guild": False, + "name": "Name unknown", + "roles": [] + } + }, + { + "user": not_user, + "post_result": "bar", + "raise_error": None, + "payload": { + "discriminator": not_user.discriminator, + "id": not_user.id, + "in_guild": False, + "name": not_user.name, + "roles": [] + } + } + ] + + for case in test_cases: + user = case["user"] + post_result = case["post_result"] + raise_error = case["raise_error"] + payload = case["payload"] + + with self.subTest(user=user, post_result=post_result, raise_error=raise_error, payload=payload): + self.bot.api_client.post.reset_mock(side_effect=True) + self.ctx.bot.api_client.post.return_value = post_result + + self.ctx.bot.api_client.post.side_effect = raise_error + + result 
= await utils.post_user(self.ctx, user) + + if raise_error: + self.assertIsNone(result) + self.ctx.send.assert_awaited_once() + self.assertIn(str(raise_error.status), self.ctx.send.call_args[0][0]) + else: + self.assertEqual(result, post_result) + self.bot.api_client.post.assert_awaited_once_with("bot/users", json=payload) + + async def test_get_active_infraction(self): + """ + Should request the API for active infractions and return infraction if the user has one or `None` otherwise. + + A message should be sent to the context indicating a user already has an infraction, if that's the case. + """ + test_case = namedtuple("test_case", ["get_return_value", "expected_output", "infraction_nr", "send_msg"]) + test_cases = [ + test_case([], None, None, True), + test_case([{"id": 123987}], {"id": 123987}, "123987", False), + test_case([{"id": 123987}], {"id": 123987}, "123987", True) + ] + + for case in test_cases: + with self.subTest(return_value=case.get_return_value, expected=case.expected_output): + self.bot.api_client.get.reset_mock() + self.ctx.send.reset_mock() + + params = { + "active": "true", + "type": "ban", + "user__id": str(self.member.id) + } + + self.bot.api_client.get.return_value = case.get_return_value + + result = await utils.get_active_infraction(self.ctx, self.member, "ban", send_msg=case.send_msg) + self.assertEqual(result, case.expected_output) + self.bot.api_client.get.assert_awaited_once_with("bot/infractions", params=params) + + if case.send_msg and case.get_return_value: + self.ctx.send.assert_awaited_once() + sent_message = self.ctx.send.call_args[0][0] + self.assertIn(case.infraction_nr, sent_message) + self.assertIn("ban", sent_message) + else: + self.ctx.send.assert_not_awaited() + + @patch("bot.exts.moderation.infraction._utils.send_private_embed") + async def test_notify_infraction(self, send_private_embed_mock): + """ + Should send an embed of a certain format as a DM and return `True` if DM successful. + + Appealable infractions should have the appeal message in the embed's footer. + """ + test_cases = [ + { + "args": (self.user, "ban", "2020-02-26 09:20 (23 hours and 59 minutes)"), + "expected_output": Embed( + title=utils.INFRACTION_TITLE, + description=utils.INFRACTION_DESCRIPTION_TEMPLATE.format( + type="Ban", + expires="2020-02-26 09:20 (23 hours and 59 minutes)", + reason="No reason provided." + ), + colour=Colours.soft_red, + url=utils.RULES_URL + ).set_author( + name=utils.INFRACTION_AUTHOR_NAME, + url=utils.RULES_URL, + icon_url=Icons.token_removed + ).set_footer(text=utils.INFRACTION_APPEAL_FOOTER), + "send_result": True + }, + { + "args": (self.user, "warning", None, "Test reason."), + "expected_output": Embed( + title=utils.INFRACTION_TITLE, + description=utils.INFRACTION_DESCRIPTION_TEMPLATE.format( + type="Warning", + expires="N/A", + reason="Test reason." + ), + colour=Colours.soft_red, + url=utils.RULES_URL + ).set_author( + name=utils.INFRACTION_AUTHOR_NAME, + url=utils.RULES_URL, + icon_url=Icons.token_removed + ), + "send_result": False + }, + { + "args": (self.user, "note", None, None, Icons.defcon_denied), + "expected_output": Embed( + title=utils.INFRACTION_TITLE, + description=utils.INFRACTION_DESCRIPTION_TEMPLATE.format( + type="Note", + expires="N/A", + reason="No reason provided." 
+ ), + colour=Colours.soft_red, + url=utils.RULES_URL + ).set_author( + name=utils.INFRACTION_AUTHOR_NAME, + url=utils.RULES_URL, + icon_url=Icons.defcon_denied + ), + "send_result": False + }, + { + "args": (self.user, "mute", "2020-02-26 09:20 (23 hours and 59 minutes)", "Test", Icons.defcon_denied), + "expected_output": Embed( + title=utils.INFRACTION_TITLE, + description=utils.INFRACTION_DESCRIPTION_TEMPLATE.format( + type="Mute", + expires="2020-02-26 09:20 (23 hours and 59 minutes)", + reason="Test" + ), + colour=Colours.soft_red, + url=utils.RULES_URL + ).set_author( + name=utils.INFRACTION_AUTHOR_NAME, + url=utils.RULES_URL, + icon_url=Icons.defcon_denied + ).set_footer(text=utils.INFRACTION_APPEAL_FOOTER), + "send_result": False + }, + { + "args": (self.user, "mute", None, "foo bar" * 4000, Icons.defcon_denied), + "expected_output": Embed( + title=utils.INFRACTION_TITLE, + description=utils.INFRACTION_DESCRIPTION_TEMPLATE.format( + type="Mute", + expires="N/A", + reason="foo bar" * 4000 + )[:2045] + "...", + colour=Colours.soft_red, + url=utils.RULES_URL + ).set_author( + name=utils.INFRACTION_AUTHOR_NAME, + url=utils.RULES_URL, + icon_url=Icons.defcon_denied + ).set_footer(text=utils.INFRACTION_APPEAL_FOOTER), + "send_result": True + } + ] + + for case in test_cases: + with self.subTest(args=case["args"], expected=case["expected_output"], send=case["send_result"]): + send_private_embed_mock.reset_mock() + + send_private_embed_mock.return_value = case["send_result"] + result = await utils.notify_infraction(*case["args"]) + + self.assertEqual(case["send_result"], result) + + embed = send_private_embed_mock.call_args[0][1] + + self.assertEqual(embed.to_dict(), case["expected_output"].to_dict()) + + send_private_embed_mock.assert_awaited_once_with(case["args"][0], embed) + + @patch("bot.exts.moderation.infraction._utils.send_private_embed") + async def test_notify_pardon(self, send_private_embed_mock): + """Should send an embed of a certain format as a DM and return `True` if DM successful.""" + test_case = namedtuple("test_case", ["args", "icon", "send_result"]) + test_cases = [ + test_case((self.user, "Test title", "Example content"), Icons.user_verified, True), + test_case((self.user, "Test title", "Example content", Icons.user_update), Icons.user_update, False) + ] + + for case in test_cases: + expected = Embed( + description="Example content", + colour=Colours.soft_green + ).set_author( + name="Test title", + icon_url=case.icon + ) + + with self.subTest(args=case.args, expected=expected): + send_private_embed_mock.reset_mock() + + send_private_embed_mock.return_value = case.send_result + + result = await utils.notify_pardon(*case.args) + self.assertEqual(case.send_result, result) + + embed = send_private_embed_mock.call_args[0][1] + self.assertEqual(embed.to_dict(), expected.to_dict()) + + send_private_embed_mock.assert_awaited_once_with(case.args[0], embed) + + async def test_send_private_embed(self): + """Should DM the user and return `True` on success or `False` on failure.""" + embed = Embed(title="Test", description="Test val") + + test_case = namedtuple("test_case", ["expected_output", "raised_exception"]) + test_cases = [ + test_case(True, None), + test_case(False, HTTPException(AsyncMock(), AsyncMock())), + test_case(False, Forbidden(AsyncMock(), AsyncMock())), + test_case(False, NotFound(AsyncMock(), AsyncMock())) + ] + + for case in test_cases: + with self.subTest(expected=case.expected_output, raised=case.raised_exception): + 
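+ # Reset both the recorded calls and the side effect so an exception raised for a
+ # previous case does not leak into this one.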
self.user.send.reset_mock(side_effect=True) + self.user.send.side_effect = case.raised_exception + + result = await utils.send_private_embed(self.user, embed) + + self.assertEqual(result, case.expected_output) + self.user.send.assert_awaited_once_with(embed=embed) + + +class TestPostInfraction(unittest.IsolatedAsyncioTestCase): + """Tests for the `post_infraction` function.""" + + def setUp(self): + self.bot = MockBot() + self.member = MockMember(id=1234) + self.user = MockUser(id=1234) + self.ctx = MockContext(bot=self.bot, author=self.member) + + async def test_normal_post_infraction(self): + """Should return response from POST request if there are no errors.""" + now = datetime.now() + payload = { + "actor": self.ctx.author.id, + "hidden": True, + "reason": "Test reason", + "type": "ban", + "user": self.member.id, + "active": False, + "expires_at": now.isoformat() + } + + self.ctx.bot.api_client.post.return_value = "foo" + actual = await utils.post_infraction(self.ctx, self.member, "ban", "Test reason", now, True, False) + + self.assertEqual(actual, "foo") + self.ctx.bot.api_client.post.assert_awaited_once_with("bot/infractions", json=payload) + + async def test_unknown_error_post_infraction(self): + """Should send an error message to chat when a non-400 error occurs.""" + self.ctx.bot.api_client.post.side_effect = ResponseCodeError(AsyncMock(), AsyncMock()) + self.ctx.bot.api_client.post.side_effect.status = 500 + + actual = await utils.post_infraction(self.ctx, self.user, "ban", "Test reason") + self.assertIsNone(actual) + + self.assertTrue("500" in self.ctx.send.call_args[0][0]) + + @patch("bot.exts.moderation.infraction._utils.post_user", return_value=None) + async def test_user_not_found_none_post_infraction(self, post_user_mock): + """Should abort and return `None` when a new user fails to be posted.""" + self.bot.api_client.post.side_effect = ResponseCodeError(MagicMock(status=400), {"user": "foo"}) + + actual = await utils.post_infraction(self.ctx, self.user, "mute", "Test reason") + self.assertIsNone(actual) + post_user_mock.assert_awaited_once_with(self.ctx, self.user) + + @patch("bot.exts.moderation.infraction._utils.post_user", return_value="bar") + async def test_first_fail_second_success_user_post_infraction(self, post_user_mock): + """Should post the user if they don't exist, POST infraction again, and return the response if successful.""" + payload = { + "actor": self.ctx.author.id, + "hidden": False, + "reason": "Test reason", + "type": "mute", + "user": self.user.id, + "active": True + } + + self.bot.api_client.post.side_effect = [ResponseCodeError(MagicMock(status=400), {"user": "foo"}), "foo"] + + actual = await utils.post_infraction(self.ctx, self.user, "mute", "Test reason") + self.assertEqual(actual, "foo") + self.bot.api_client.post.assert_has_awaits([call("bot/infractions", json=payload)] * 2) + post_user_mock.assert_awaited_once_with(self.ctx, self.user) diff --git a/tests/bot/exts/moderation/test_incidents.py b/tests/bot/exts/moderation/test_incidents.py new file mode 100644 index 000000000..cbf7f7bcf --- /dev/null +++ b/tests/bot/exts/moderation/test_incidents.py @@ -0,0 +1,770 @@ +import asyncio +import enum +import logging +import typing as t +import unittest +from unittest.mock import AsyncMock, MagicMock, call, patch + +import aiohttp +import discord + +from bot.constants import Colours +from bot.exts.moderation import incidents +from tests.helpers import ( + MockAsyncWebhook, + MockAttachment, + MockBot, + MockMember, + MockMessage, + MockReaction, + 
MockRole, + MockTextChannel, + MockUser, +) + + +class MockAsyncIterable: + """ + Helper for mocking asynchronous for loops. + + It does not appear that the `unittest` library currently provides anything that would + allow us to simply mock an async iterator, such as `discord.TextChannel.history`. + + We therefore write our own helper to wrap a regular synchronous iterable, and feed + its values via `__anext__` rather than `__next__`. + + This class was written for the purposes of testing the `Incidents` cog - it may not + be generic enough to be placed in the `tests.helpers` module. + """ + + def __init__(self, messages: t.Iterable): + """Take a sync iterable to be wrapped.""" + self.iter_messages = iter(messages) + + def __aiter__(self): + """Return `self` as we provide the `__anext__` method.""" + return self + + async def __anext__(self): + """ + Feed the next item, or raise `StopAsyncIteration`. + + Since we're wrapping a sync iterator, it will communicate that it has been depleted + by raising a `StopIteration`. The `async for` construct does not expect it, and we + therefore need to substitute it for the appropriate exception type. + """ + try: + return next(self.iter_messages) + except StopIteration: + raise StopAsyncIteration + + +class MockSignal(enum.Enum): + A = "A" + B = "B" + + +mock_404 = discord.NotFound( + response=MagicMock(aiohttp.ClientResponse), # Mock the erroneous response + message="Not found", +) + + +class TestDownloadFile(unittest.IsolatedAsyncioTestCase): + """Collection of tests for the `download_file` helper function.""" + + async def test_download_file_success(self): + """If `to_file` succeeds, function returns the acquired `discord.File`.""" + file = MagicMock(discord.File, filename="bigbadlemon.jpg") + attachment = MockAttachment(to_file=AsyncMock(return_value=file)) + + acquired_file = await incidents.download_file(attachment) + self.assertIs(file, acquired_file) + + async def test_download_file_404(self): + """If `to_file` encounters a 404, function handles the exception & returns None.""" + attachment = MockAttachment(to_file=AsyncMock(side_effect=mock_404)) + + acquired_file = await incidents.download_file(attachment) + self.assertIsNone(acquired_file) + + async def test_download_file_fail(self): + """If `to_file` fails on a non-404 error, function logs the exception & returns None.""" + arbitrary_error = discord.HTTPException(MagicMock(aiohttp.ClientResponse), "Arbitrary API error") + attachment = MockAttachment(to_file=AsyncMock(side_effect=arbitrary_error)) + + with self.assertLogs(logger=incidents.log, level=logging.ERROR): + acquired_file = await incidents.download_file(attachment) + + self.assertIsNone(acquired_file) + + +class TestMakeEmbed(unittest.IsolatedAsyncioTestCase): + """Collection of tests for the `make_embed` helper function.""" + + async def test_make_embed_actioned(self): + """Embed is coloured green and footer contains 'Actioned' when `outcome=Signal.ACTIONED`.""" + embed, file = await incidents.make_embed(MockMessage(), incidents.Signal.ACTIONED, MockMember()) + + self.assertEqual(embed.colour.value, Colours.soft_green) + self.assertIn("Actioned", embed.footer.text) + + async def test_make_embed_not_actioned(self): + """Embed is coloured red and footer contains 'Rejected' when `outcome=Signal.NOT_ACTIONED`.""" + embed, file = await incidents.make_embed(MockMessage(), incidents.Signal.NOT_ACTIONED, MockMember()) + + self.assertEqual(embed.colour.value, Colours.soft_red) + self.assertIn("Rejected", embed.footer.text) + + async def 
test_make_embed_content(self): + """Incident content appears as embed description.""" + incident = MockMessage(content="this is an incident") + embed, file = await incidents.make_embed(incident, incidents.Signal.ACTIONED, MockMember()) + + self.assertEqual(incident.content, embed.description) + + async def test_make_embed_with_attachment_succeeds(self): + """Incident's attachment is downloaded and displayed in the embed's image field.""" + file = MagicMock(discord.File, filename="bigbadjoe.jpg") + attachment = MockAttachment(filename="bigbadjoe.jpg") + incident = MockMessage(content="this is an incident", attachments=[attachment]) + + # Patch `download_file` to return our `file` + with patch("bot.exts.moderation.incidents.download_file", AsyncMock(return_value=file)): + embed, returned_file = await incidents.make_embed(incident, incidents.Signal.ACTIONED, MockMember()) + + self.assertIs(file, returned_file) + self.assertEqual("attachment://bigbadjoe.jpg", embed.image.url) + + async def test_make_embed_with_attachment_fails(self): + """Incident's attachment fails to download, proxy url is linked instead.""" + attachment = MockAttachment(proxy_url="discord.com/bigbadjoe.jpg") + incident = MockMessage(content="this is an incident", attachments=[attachment]) + + # Patch `download_file` to return None as if the download failed + with patch("bot.exts.moderation.incidents.download_file", AsyncMock(return_value=None)): + embed, returned_file = await incidents.make_embed(incident, incidents.Signal.ACTIONED, MockMember()) + + self.assertIsNone(returned_file) + + # The author name field is simply expected to have something in it, we do not assert the message + self.assertGreater(len(embed.author.name), 0) + self.assertEqual(embed.author.url, "discord.com/bigbadjoe.jpg") # However, it should link the exact url + + +@patch("bot.constants.Channels.incidents", 123) +class TestIsIncident(unittest.TestCase): + """ + Collection of tests for the `is_incident` helper function. + + In `setUp`, we will create a mock message which should qualify as an incident. Each + test case will then mutate this instance to make it **not** qualify, in various ways. + + Notice that we patch the #incidents channel id globally for this class. 
+ """ + + def setUp(self) -> None: + """Prepare a mock message which should qualify as an incident.""" + self.incident = MockMessage( + channel=MockTextChannel(id=123), + content="this is an incident", + author=MockUser(bot=False), + pinned=False, + ) + + def test_is_incident_true(self): + """Message qualifies as an incident if unchanged.""" + self.assertTrue(incidents.is_incident(self.incident)) + + def check_false(self): + """Assert that `self.incident` does **not** qualify as an incident.""" + self.assertFalse(incidents.is_incident(self.incident)) + + def test_is_incident_false_channel(self): + """Message doesn't qualify if sent outside of #incidents.""" + self.incident.channel = MockTextChannel(id=456) + self.check_false() + + def test_is_incident_false_content(self): + """Message doesn't qualify if content begins with hash symbol.""" + self.incident.content = "# this is a comment message" + self.check_false() + + def test_is_incident_false_author(self): + """Message doesn't qualify if author is a bot.""" + self.incident.author = MockUser(bot=True) + self.check_false() + + def test_is_incident_false_pinned(self): + """Message doesn't qualify if it is pinned.""" + self.incident.pinned = True + self.check_false() + + +class TestOwnReactions(unittest.TestCase): + """Assertions for the `own_reactions` function.""" + + def test_own_reactions(self): + """Only bot's own emoji are extracted from the input incident.""" + reactions = ( + MockReaction(emoji="A", me=True), + MockReaction(emoji="B", me=True), + MockReaction(emoji="C", me=False), + ) + message = MockMessage(reactions=reactions) + self.assertSetEqual(incidents.own_reactions(message), {"A", "B"}) + + +@patch("bot.exts.moderation.incidents.ALL_SIGNALS", {"A", "B"}) +class TestHasSignals(unittest.TestCase): + """ + Assertions for the `has_signals` function. + + We patch `ALL_SIGNALS` globally. Each test function then patches `own_reactions` + as appropriate. + """ + + def test_has_signals_true(self): + """True when `own_reactions` returns all emoji in `ALL_SIGNALS`.""" + message = MockMessage() + own_reactions = MagicMock(return_value={"A", "B"}) + + with patch("bot.exts.moderation.incidents.own_reactions", own_reactions): + self.assertTrue(incidents.has_signals(message)) + + def test_has_signals_false(self): + """False when `own_reactions` does not return all emoji in `ALL_SIGNALS`.""" + message = MockMessage() + own_reactions = MagicMock(return_value={"A", "C"}) + + with patch("bot.exts.moderation.incidents.own_reactions", own_reactions): + self.assertFalse(incidents.has_signals(message)) + + +@patch("bot.exts.moderation.incidents.Signal", MockSignal) +class TestAddSignals(unittest.IsolatedAsyncioTestCase): + """ + Assertions for the `add_signals` coroutine. + + These are all fairly similar and could go into a single test function, but I found the + patching & sub-testing fairly awkward in that case and decided to split them up + to avoid unnecessary syntax noise. 
+ """ + + def setUp(self): + """Prepare a mock incident message for tests to use.""" + self.incident = MockMessage() + + @patch("bot.exts.moderation.incidents.own_reactions", MagicMock(return_value=set())) + async def test_add_signals_missing(self): + """All emoji are added when none are present.""" + await incidents.add_signals(self.incident) + self.incident.add_reaction.assert_has_calls([call("A"), call("B")]) + + @patch("bot.exts.moderation.incidents.own_reactions", MagicMock(return_value={"A"})) + async def test_add_signals_partial(self): + """Only missing emoji are added when some are present.""" + await incidents.add_signals(self.incident) + self.incident.add_reaction.assert_has_calls([call("B")]) + + @patch("bot.exts.moderation.incidents.own_reactions", MagicMock(return_value={"A", "B"})) + async def test_add_signals_present(self): + """No emoji are added when all are present.""" + await incidents.add_signals(self.incident) + self.incident.add_reaction.assert_not_called() + + +class TestIncidents(unittest.IsolatedAsyncioTestCase): + """ + Tests for bound methods of the `Incidents` cog. + + Use this as a base class for `Incidents` tests - it will prepare a fresh instance + for each test function, but not make any assertions on its own. Tests can mutate + the instance as they wish. + """ + + def setUp(self): + """ + Prepare a fresh `Incidents` instance for each test. + + Note that this will not schedule `crawl_incidents` in the background, as everything + is being mocked. The `crawl_task` attribute will end up being None. + """ + self.cog_instance = incidents.Incidents(MockBot()) + + +@patch("asyncio.sleep", AsyncMock()) # Prevent the coro from sleeping to speed up the test +class TestCrawlIncidents(TestIncidents): + """ + Tests for the `Incidents.crawl_incidents` coroutine. + + Apart from `test_crawl_incidents_waits_until_cache_ready`, all tests in this class + will patch the return values of `is_incident` and `has_signal` and then observe + whether the `AsyncMock` for `add_signals` was awaited or not. + + The `add_signals` mock is added by each test separately to ensure it is clean (has not + been awaited by another test yet). The mock can be reset, but this appears to be the + cleaner way. + + For each test, we inject a mock channel with a history of 1 message only (see: `setUp`). + """ + + def setUp(self): + """For each test, ensure `bot.get_channel` returns a channel with 1 arbitrary message.""" + super().setUp() # First ensure we get `cog_instance` from parent + + incidents_history = MagicMock(return_value=MockAsyncIterable([MockMessage()])) + self.cog_instance.bot.get_channel = MagicMock(return_value=MockTextChannel(history=incidents_history)) + + async def test_crawl_incidents_waits_until_cache_ready(self): + """ + The coroutine will await the `wait_until_guild_available` event. + + Since this task is schedule in the `__init__`, it is critical that it waits for the + cache to be ready, so that it can safely get the #incidents channel. 
+ """ + await self.cog_instance.crawl_incidents() + self.cog_instance.bot.wait_until_guild_available.assert_awaited() + + @patch("bot.exts.moderation.incidents.add_signals", AsyncMock()) + @patch("bot.exts.moderation.incidents.is_incident", MagicMock(return_value=False)) # Message doesn't qualify + @patch("bot.exts.moderation.incidents.has_signals", MagicMock(return_value=False)) + async def test_crawl_incidents_noop_if_is_not_incident(self): + """Signals are not added for a non-incident message.""" + await self.cog_instance.crawl_incidents() + incidents.add_signals.assert_not_awaited() + + @patch("bot.exts.moderation.incidents.add_signals", AsyncMock()) + @patch("bot.exts.moderation.incidents.is_incident", MagicMock(return_value=True)) # Message qualifies + @patch("bot.exts.moderation.incidents.has_signals", MagicMock(return_value=True)) # But already has signals + async def test_crawl_incidents_noop_if_message_already_has_signals(self): + """Signals are not added for messages which already have them.""" + await self.cog_instance.crawl_incidents() + incidents.add_signals.assert_not_awaited() + + @patch("bot.exts.moderation.incidents.add_signals", AsyncMock()) + @patch("bot.exts.moderation.incidents.is_incident", MagicMock(return_value=True)) # Message qualifies + @patch("bot.exts.moderation.incidents.has_signals", MagicMock(return_value=False)) # And doesn't have signals + async def test_crawl_incidents_add_signals_called(self): + """Message has signals added as it does not have them yet and qualifies as an incident.""" + await self.cog_instance.crawl_incidents() + incidents.add_signals.assert_awaited_once() + + +class TestArchive(TestIncidents): + """Tests for the `Incidents.archive` coroutine.""" + + async def test_archive_webhook_not_found(self): + """ + Method recovers and returns False when the webhook is not found. + + Implicitly, this also tests that the error is handled internally and doesn't + propagate out of the method, which is just as important. + """ + self.cog_instance.bot.fetch_webhook = AsyncMock(side_effect=mock_404) + self.assertFalse( + await self.cog_instance.archive(incident=MockMessage(), outcome=MagicMock(), actioned_by=MockMember()) + ) + + async def test_archive_relays_incident(self): + """ + If webhook is found, method relays `incident` properly. + + This test will assert that the fetched webhook's `send` method is fed the correct arguments, + and that the `archive` method returns True. + """ + webhook = MockAsyncWebhook() + self.cog_instance.bot.fetch_webhook = AsyncMock(return_value=webhook) # Patch in our webhook + + # Define our own `incident` to be archived + incident = MockMessage( + content="this is an incident", + author=MockUser(name="author_name", avatar_url="author_avatar"), + id=123, + ) + built_embed = MagicMock(discord.Embed, id=123) # We patch `make_embed` to return this + + with patch("bot.exts.moderation.incidents.make_embed", AsyncMock(return_value=(built_embed, None))): + archive_return = await self.cog_instance.archive(incident, MagicMock(value="A"), MockMember()) + + # Now we check that the webhook was given the correct args, and that `archive` returned True + webhook.send.assert_called_once_with( + embed=built_embed, + username="author_name", + avatar_url="author_avatar", + file=None, + ) + self.assertTrue(archive_return) + + async def test_archive_clyde_username(self): + """ + The archive webhook username is cleansed using `sub_clyde`. 
+ + Discord will reject any webhook with "clyde" in the username field, as it impersonates + the official Clyde bot. Since we do not control what the username will be (the incident + author name is used), we must ensure the name is cleansed, otherwise the relay may fail. + + This test assumes the username is passed as a kwarg. If this test fails, please review + whether the passed argument is being retrieved correctly. + """ + webhook = MockAsyncWebhook() + self.cog_instance.bot.fetch_webhook = AsyncMock(return_value=webhook) + + message_from_clyde = MockMessage(author=MockUser(name="clyde the great")) + await self.cog_instance.archive(message_from_clyde, MagicMock(incidents.Signal), MockMember()) + + self.assertNotIn("clyde", webhook.send.call_args.kwargs["username"]) + + +class TestMakeConfirmationTask(TestIncidents): + """ + Tests for the `Incidents.make_confirmation_task` method. + + Writing tests for this method is difficult, as it mostly just delegates the provided + information elsewhere. There is very little internal logic. Whether our approach + works conceptually is difficult to prove using unit tests. + """ + + def test_make_confirmation_task_check(self): + """ + The internal check will recognize the passed incident. + + This is a little tricky - we first pass a message with a specific `id` in, and then + retrieve the built check from the `call_args` of the `wait_for` method. This relies + on the check being passed as a kwarg. + + Once the check is retrieved, we assert that it gives True for our incident's `id`, + and False for any other. + + If this function begins to fail, first check that `created_check` is being retrieved + correctly. It should be the function that is built locally in the tested method. + """ + self.cog_instance.make_confirmation_task(MockMessage(id=123)) + + self.cog_instance.bot.wait_for.assert_called_once() + created_check = self.cog_instance.bot.wait_for.call_args.kwargs["check"] + + # The `message_id` matches the `id` of our incident + self.assertTrue(created_check(payload=MagicMock(message_id=123))) + + # This `message_id` does not match + self.assertFalse(created_check(payload=MagicMock(message_id=0))) + + +@patch("bot.exts.moderation.incidents.ALLOWED_ROLES", {1, 2}) +@patch("bot.exts.moderation.incidents.Incidents.make_confirmation_task", AsyncMock()) # Generic awaitable +class TestProcessEvent(TestIncidents): + """Tests for the `Incidents.process_event` coroutine.""" + + async def test_process_event_bad_role(self): + """The reaction is removed when the author lacks all allowed roles.""" + incident = MockMessage() + member = MockMember(roles=[MockRole(id=0)]) # Must have role 1 or 2 + + await self.cog_instance.process_event("reaction", incident, member) + incident.remove_reaction.assert_called_once_with("reaction", member) + + async def test_process_event_bad_emoji(self): + """ + The reaction is removed when an invalid emoji is used. + + This requires that we pass in a `member` with valid roles, as we need the role check + to succeed. 
+ """ + incident = MockMessage() + member = MockMember(roles=[MockRole(id=1)]) # Member has allowed role + + await self.cog_instance.process_event("invalid_signal", incident, member) + incident.remove_reaction.assert_called_once_with("invalid_signal", member) + + async def test_process_event_no_archive_on_investigating(self): + """Message is not archived on `Signal.INVESTIGATING`.""" + with patch("bot.exts.moderation.incidents.Incidents.archive", AsyncMock()) as mocked_archive: + await self.cog_instance.process_event( + reaction=incidents.Signal.INVESTIGATING.value, + incident=MockMessage(), + member=MockMember(roles=[MockRole(id=1)]), + ) + + mocked_archive.assert_not_called() + + async def test_process_event_no_delete_if_archive_fails(self): + """ + Original message is not deleted when `Incidents.archive` returns False. + + This is the way of signaling that the relay failed, and we should not remove the original, + as that would result in losing the incident record. + """ + incident = MockMessage() + + with patch("bot.exts.moderation.incidents.Incidents.archive", AsyncMock(return_value=False)): + await self.cog_instance.process_event( + reaction=incidents.Signal.ACTIONED.value, + incident=incident, + member=MockMember(roles=[MockRole(id=1)]) + ) + + incident.delete.assert_not_called() + + async def test_process_event_confirmation_task_is_awaited(self): + """Task given by `Incidents.make_confirmation_task` is awaited before method exits.""" + mock_task = AsyncMock() + + with patch("bot.exts.moderation.incidents.Incidents.make_confirmation_task", mock_task): + await self.cog_instance.process_event( + reaction=incidents.Signal.ACTIONED.value, + incident=MockMessage(), + member=MockMember(roles=[MockRole(id=1)]) + ) + + mock_task.assert_awaited() + + async def test_process_event_confirmation_task_timeout_is_handled(self): + """ + Confirmation task `asyncio.TimeoutError` is handled gracefully. + + We have `make_confirmation_task` return a mock with a side effect, and then catch the + exception should it propagate out of `process_event`. This is so that we can then manually + fail the test with a more informative message than just the plain traceback. + """ + mock_task = AsyncMock(side_effect=asyncio.TimeoutError()) + + try: + with patch("bot.exts.moderation.incidents.Incidents.make_confirmation_task", mock_task): + await self.cog_instance.process_event( + reaction=incidents.Signal.ACTIONED.value, + incident=MockMessage(), + member=MockMember(roles=[MockRole(id=1)]) + ) + except asyncio.TimeoutError: + self.fail("TimeoutError was not handled gracefully, and propagated out of `process_event`!") + + +class TestResolveMessage(TestIncidents): + """Tests for the `Incidents.resolve_message` coroutine.""" + + async def test_resolve_message_pass_message_id(self): + """Method will call `_get_message` with the passed `message_id`.""" + await self.cog_instance.resolve_message(123) + self.cog_instance.bot._connection._get_message.assert_called_once_with(123) + + async def test_resolve_message_in_cache(self): + """ + No API call is made if the queried message exists in the cache. + + We mock the `_get_message` return value regardless of input. Whether it finds the message + internally is considered d.py's responsibility, not ours. 
+ """ + cached_message = MockMessage(id=123) + self.cog_instance.bot._connection._get_message = MagicMock(return_value=cached_message) + + return_value = await self.cog_instance.resolve_message(123) + + self.assertIs(return_value, cached_message) + self.cog_instance.bot.get_channel.assert_not_called() # The `fetch_message` line was never hit + + async def test_resolve_message_not_in_cache(self): + """ + The message is retrieved from the API if it isn't cached. + + This is desired behaviour for messages which exist, but were sent before the bot's + current session. + """ + self.cog_instance.bot._connection._get_message = MagicMock(return_value=None) # Cache returns None + + # API returns our message + uncached_message = MockMessage() + fetch_message = AsyncMock(return_value=uncached_message) + self.cog_instance.bot.get_channel = MagicMock(return_value=MockTextChannel(fetch_message=fetch_message)) + + retrieved_message = await self.cog_instance.resolve_message(123) + self.assertIs(retrieved_message, uncached_message) + + async def test_resolve_message_doesnt_exist(self): + """ + If the API returns a 404, the function handles it gracefully and returns None. + + This is an edge-case happening with racing events - event A will relay the message + to the archive and delete the original. Once event B acquires the `event_lock`, + it will not find the message in the cache, and will ask the API. + """ + self.cog_instance.bot._connection._get_message = MagicMock(return_value=None) # Cache returns None + + fetch_message = AsyncMock(side_effect=mock_404) + self.cog_instance.bot.get_channel = MagicMock(return_value=MockTextChannel(fetch_message=fetch_message)) + + self.assertIsNone(await self.cog_instance.resolve_message(123)) + + async def test_resolve_message_fetch_fails(self): + """ + Non-404 errors are handled, logged & None is returned. + + In contrast with a 404, this should make an error-level log. We assert that at least + one such log was made - we do not make any assertions about the log's message. + """ + self.cog_instance.bot._connection._get_message = MagicMock(return_value=None) # Cache returns None + + arbitrary_error = discord.HTTPException( + response=MagicMock(aiohttp.ClientResponse), + message="Arbitrary error", + ) + fetch_message = AsyncMock(side_effect=arbitrary_error) + self.cog_instance.bot.get_channel = MagicMock(return_value=MockTextChannel(fetch_message=fetch_message)) + + with self.assertLogs(logger=incidents.log, level=logging.ERROR): + self.assertIsNone(await self.cog_instance.resolve_message(123)) + + +@patch("bot.constants.Channels.incidents", 123) +class TestOnRawReactionAdd(TestIncidents): + """ + Tests for the `Incidents.on_raw_reaction_add` listener. + + Writing tests for this listener comes with additional complexity due to the listener + awaiting the `crawl_task` task. See `asyncSetUp` for further details, which attempts + to make unit testing this function possible. + """ + + def setUp(self): + """ + Prepare & assign `payload` attribute. + + This attribute represents an *ideal* payload which will not be rejected by the + listener. As each test will receive a fresh instance, it can be mutated to + observe how the listener's behaviour changes with different attributes on + the passed payload. 
+ """ + super().setUp() # Ensure `cog_instance` is assigned + + self.payload = MagicMock( + discord.RawReactionActionEvent, + channel_id=123, # Patched at class level + message_id=456, + member=MockMember(bot=False), + emoji="reaction", + ) + + async def asyncSetUp(self): # noqa: N802 + """ + Prepare an empty task and assign it as `crawl_task`. + + It appears that the `unittest` framework does not provide anything for mocking + asyncio tasks. An `AsyncMock` instance can be called and then awaited, however, + it does not provide the `done` method or any other parts of the `asyncio.Task` + interface. + + Although we do not need to make any assertions about the task itself while + testing the listener, the code will still await it and call the `done` method, + and so we must inject something that will not fail on either action. + + Note that this is done in an `asyncSetUp`, which runs after `setUp`. + The justification is that creating an actual task requires the event + loop to be ready, which is not the case in the `setUp`. + """ + mock_task = asyncio.create_task(AsyncMock()()) # Mock async func, then a coro + self.cog_instance.crawl_task = mock_task + + async def test_on_raw_reaction_add_wrong_channel(self): + """ + Events outside of #incidents will be ignored. + + We check this by asserting that `resolve_message` was never queried. + """ + self.payload.channel_id = 0 + self.cog_instance.resolve_message = AsyncMock() + + await self.cog_instance.on_raw_reaction_add(self.payload) + self.cog_instance.resolve_message.assert_not_called() + + async def test_on_raw_reaction_add_user_is_bot(self): + """ + Events dispatched by bot accounts will be ignored. + + We check this by asserting that `resolve_message` was never queried. + """ + self.payload.member = MockMember(bot=True) + self.cog_instance.resolve_message = AsyncMock() + + await self.cog_instance.on_raw_reaction_add(self.payload) + self.cog_instance.resolve_message.assert_not_called() + + async def test_on_raw_reaction_add_message_doesnt_exist(self): + """ + Listener gracefully handles the case where `resolve_message` gives None. + + We check this by asserting that `process_event` was never called. + """ + self.cog_instance.process_event = AsyncMock() + self.cog_instance.resolve_message = AsyncMock(return_value=None) + + await self.cog_instance.on_raw_reaction_add(self.payload) + self.cog_instance.process_event.assert_not_called() + + async def test_on_raw_reaction_add_message_is_not_an_incident(self): + """ + The event won't be processed if the related message is not an incident. + + This is an edge-case that can happen if someone manually leaves a reaction + on a pinned message, or a comment. + + We check this by asserting that `process_event` was never called. + """ + self.cog_instance.process_event = AsyncMock() + self.cog_instance.resolve_message = AsyncMock(return_value=MockMessage()) + + with patch("bot.exts.moderation.incidents.is_incident", MagicMock(return_value=False)): + await self.cog_instance.on_raw_reaction_add(self.payload) + + self.cog_instance.process_event.assert_not_called() + + async def test_on_raw_reaction_add_valid_event_is_processed(self): + """ + If the reaction event is valid, it is passed to `process_event`. + + This is the case when everything goes right: + * The reaction was placed in #incidents, and not by a bot + * The message was found successfully + * The message qualifies as an incident + + Additionally, we check that all arguments were passed as expected. 
+ """ + incident = MockMessage(id=1) + + self.cog_instance.process_event = AsyncMock() + self.cog_instance.resolve_message = AsyncMock(return_value=incident) + + with patch("bot.exts.moderation.incidents.is_incident", MagicMock(return_value=True)): + await self.cog_instance.on_raw_reaction_add(self.payload) + + self.cog_instance.process_event.assert_called_with( + "reaction", # Defined in `self.payload` + incident, + self.payload.member, + ) + + +class TestOnMessage(TestIncidents): + """ + Tests for the `Incidents.on_message` listener. + + Notice the decorators mocking the `is_incident` return value. The `is_incidents` + function is tested in `TestIsIncident` - here we do not worry about it. + """ + + @patch("bot.exts.moderation.incidents.is_incident", MagicMock(return_value=True)) + async def test_on_message_incident(self): + """Messages qualifying as incidents are passed to `add_signals`.""" + incident = MockMessage() + + with patch("bot.exts.moderation.incidents.add_signals", AsyncMock()) as mock_add_signals: + await self.cog_instance.on_message(incident) + + mock_add_signals.assert_called_once_with(incident) + + @patch("bot.exts.moderation.incidents.is_incident", MagicMock(return_value=False)) + async def test_on_message_non_incident(self): + """Messages not qualifying as incidents are ignored.""" + with patch("bot.exts.moderation.incidents.add_signals", AsyncMock()) as mock_add_signals: + await self.cog_instance.on_message(MockMessage()) + + mock_add_signals.assert_not_called() diff --git a/tests/bot/exts/moderation/test_modlog.py b/tests/bot/exts/moderation/test_modlog.py new file mode 100644 index 000000000..f8f142484 --- /dev/null +++ b/tests/bot/exts/moderation/test_modlog.py @@ -0,0 +1,29 @@ +import unittest + +import discord + +from bot.exts.moderation.modlog import ModLog +from tests.helpers import MockBot, MockTextChannel + + +class ModLogTests(unittest.IsolatedAsyncioTestCase): + """Tests for moderation logs.""" + + def setUp(self): + self.bot = MockBot() + self.cog = ModLog(self.bot) + self.channel = MockTextChannel() + + async def test_log_entry_description_truncation(self): + """Test that embed description for ModLog entry is truncated.""" + self.bot.get_channel.return_value = self.channel + await self.cog.send_log_message( + icon_url="foo", + colour=discord.Colour.blue(), + title="bar", + text="foo bar" * 3000 + ) + embed = self.channel.send.call_args[1]["embed"] + self.assertEqual( + embed.description, ("foo bar" * 3000)[:2045] + "..." 
+ ) diff --git a/tests/bot/exts/moderation/test_silence.py b/tests/bot/exts/moderation/test_silence.py new file mode 100644 index 000000000..fa5fc9e81 --- /dev/null +++ b/tests/bot/exts/moderation/test_silence.py @@ -0,0 +1,493 @@ +import asyncio +import unittest +from datetime import datetime, timezone +from unittest import mock +from unittest.mock import Mock + +from async_rediscache import RedisSession +from discord import PermissionOverwrite + +from bot.constants import Channels, Guild, Roles +from bot.exts.moderation import silence +from tests.helpers import MockBot, MockContext, MockTextChannel, autospec + +redis_session = None +redis_loop = asyncio.get_event_loop() + + +def setUpModule(): # noqa: N802 + """Create and connect to the fakeredis session.""" + global redis_session + redis_session = RedisSession(use_fakeredis=True) + redis_loop.run_until_complete(redis_session.connect()) + + +def tearDownModule(): # noqa: N802 + """Close the fakeredis session.""" + if redis_session: + redis_loop.run_until_complete(redis_session.close()) + + +# Have to subclass it because builtins can't be patched. +class PatchedDatetime(datetime): + """A datetime object with a mocked now() function.""" + + now = mock.create_autospec(datetime, "now") + + +class SilenceNotifierTests(unittest.IsolatedAsyncioTestCase): + def setUp(self) -> None: + self.alert_channel = MockTextChannel() + self.notifier = silence.SilenceNotifier(self.alert_channel) + self.notifier.stop = self.notifier_stop_mock = Mock() + self.notifier.start = self.notifier_start_mock = Mock() + + def test_add_channel_adds_channel(self): + """Channel is added to `_silenced_channels` with the current loop.""" + channel = Mock() + with mock.patch.object(self.notifier, "_silenced_channels") as silenced_channels: + self.notifier.add_channel(channel) + silenced_channels.__setitem__.assert_called_with(channel, self.notifier._current_loop) + + def test_add_channel_starts_loop(self): + """Loop is started if `_silenced_channels` was empty.""" + self.notifier.add_channel(Mock()) + self.notifier_start_mock.assert_called_once() + + def test_add_channel_skips_start_with_channels(self): + """Loop start is not called when `_silenced_channels` is not empty.""" + with mock.patch.object(self.notifier, "_silenced_channels"): + self.notifier.add_channel(Mock()) + self.notifier_start_mock.assert_not_called() + + def test_remove_channel_removes_channel(self): + """Channel is removed from `_silenced_channels`.""" + channel = Mock() + with mock.patch.object(self.notifier, "_silenced_channels") as silenced_channels: + self.notifier.remove_channel(channel) + silenced_channels.__delitem__.assert_called_with(channel) + + def test_remove_channel_stops_loop(self): + """Notifier loop is stopped if `_silenced_channels` is empty after remove.""" + with mock.patch.object(self.notifier, "_silenced_channels", __bool__=lambda _: False): + self.notifier.remove_channel(Mock()) + self.notifier_stop_mock.assert_called_once() + + def test_remove_channel_skips_stop_with_channels(self): + """Notifier loop is not stopped if `_silenced_channels` is not empty after remove.""" + self.notifier.remove_channel(Mock()) + self.notifier_stop_mock.assert_not_called() + + async def test_notifier_private_sends_alert(self): + """Alert is sent on 15 min intervals.""" + test_cases = (900, 1800, 2700) + for current_loop in test_cases: + with self.subTest(current_loop=current_loop): + with mock.patch.object(self.notifier, "_current_loop", new=current_loop): + await self.notifier._notifier() + 
self.alert_channel.send.assert_called_once_with( + f"<@&{Roles.moderators}> currently silenced channels: " + ) + self.alert_channel.send.reset_mock() + + async def test_notifier_skips_alert(self): + """Alert is skipped on first loop or not an increment of 900.""" + test_cases = (0, 15, 5000) + for current_loop in test_cases: + with self.subTest(current_loop=current_loop): + with mock.patch.object(self.notifier, "_current_loop", new=current_loop): + await self.notifier._notifier() + self.alert_channel.send.assert_not_called() + + +@autospec(silence.Silence, "previous_overwrites", "unsilence_timestamps", pass_mocks=False) +class SilenceCogTests(unittest.IsolatedAsyncioTestCase): + """Tests for the general functionality of the Silence cog.""" + + @autospec(silence, "Scheduler", pass_mocks=False) + def setUp(self) -> None: + self.bot = MockBot() + self.cog = silence.Silence(self.bot) + + @autospec(silence, "SilenceNotifier", pass_mocks=False) + async def test_async_init_got_guild(self): + """Bot got guild after it became available.""" + await self.cog._async_init() + self.bot.wait_until_guild_available.assert_awaited_once() + self.bot.get_guild.assert_called_once_with(Guild.id) + + @autospec(silence, "SilenceNotifier", pass_mocks=False) + async def test_async_init_got_channels(self): + """Got channels from bot.""" + self.bot.get_channel.side_effect = lambda id_: MockTextChannel(id=id_) + + await self.cog._async_init() + self.assertEqual(self.cog._mod_alerts_channel.id, Channels.mod_alerts) + + @autospec(silence, "SilenceNotifier") + async def test_async_init_got_notifier(self, notifier): + """Notifier was started with channel.""" + self.bot.get_channel.side_effect = lambda id_: MockTextChannel(id=id_) + + await self.cog._async_init() + notifier.assert_called_once_with(MockTextChannel(id=Channels.mod_log)) + self.assertEqual(self.cog.notifier, notifier.return_value) + + @autospec(silence, "SilenceNotifier", pass_mocks=False) + async def test_async_init_rescheduled(self): + """`_reschedule_` coroutine was awaited.""" + self.cog._reschedule = mock.create_autospec(self.cog._reschedule) + await self.cog._async_init() + self.cog._reschedule.assert_awaited_once_with() + + def test_cog_unload_cancelled_tasks(self): + """The init task was cancelled.""" + self.cog._init_task = asyncio.Future() + self.cog.cog_unload() + + # It's too annoying to test cancel_all since it's a done callback and wrapped in a lambda. + self.assertTrue(self.cog._init_task.cancelled()) + + @autospec("discord.ext.commands", "has_any_role") + @mock.patch.object(silence, "MODERATION_ROLES", new=(1, 2, 3)) + async def test_cog_check(self, role_check): + """Role check was called with `MODERATION_ROLES`""" + ctx = MockContext() + role_check.return_value.predicate = mock.AsyncMock() + + await self.cog.cog_check(ctx) + role_check.assert_called_once_with(*(1, 2, 3)) + role_check.return_value.predicate.assert_awaited_once_with(ctx) + + +@autospec(silence.Silence, "previous_overwrites", "unsilence_timestamps", pass_mocks=False) +class RescheduleTests(unittest.IsolatedAsyncioTestCase): + """Tests for the rescheduling of cached unsilences.""" + + @autospec(silence, "Scheduler", "SilenceNotifier", pass_mocks=False) + def setUp(self): + self.bot = MockBot() + self.cog = silence.Silence(self.bot) + self.cog._unsilence_wrapper = mock.create_autospec(self.cog._unsilence_wrapper) + + with mock.patch.object(self.cog, "_reschedule", autospec=True): + asyncio.run(self.cog._async_init()) # Populate instance attributes. 
+ + async def test_skipped_missing_channel(self): + """Did nothing because the channel couldn't be retrieved.""" + self.cog.unsilence_timestamps.items.return_value = [(123, -1), (123, 1), (123, 10000000000)] + self.bot.get_channel.return_value = None + + await self.cog._reschedule() + + self.cog.notifier.add_channel.assert_not_called() + self.cog._unsilence_wrapper.assert_not_called() + self.cog.scheduler.schedule_later.assert_not_called() + + async def test_added_permanent_to_notifier(self): + """Permanently silenced channels were added to the notifier.""" + channels = [MockTextChannel(id=123), MockTextChannel(id=456)] + self.bot.get_channel.side_effect = channels + self.cog.unsilence_timestamps.items.return_value = [(123, -1), (456, -1)] + + await self.cog._reschedule() + + self.cog.notifier.add_channel.assert_any_call(channels[0]) + self.cog.notifier.add_channel.assert_any_call(channels[1]) + + self.cog._unsilence_wrapper.assert_not_called() + self.cog.scheduler.schedule_later.assert_not_called() + + async def test_unsilenced_expired(self): + """Unsilenced expired silences.""" + channels = [MockTextChannel(id=123), MockTextChannel(id=456)] + self.bot.get_channel.side_effect = channels + self.cog.unsilence_timestamps.items.return_value = [(123, 100), (456, 200)] + + await self.cog._reschedule() + + self.cog._unsilence_wrapper.assert_any_call(channels[0]) + self.cog._unsilence_wrapper.assert_any_call(channels[1]) + + self.cog.notifier.add_channel.assert_not_called() + self.cog.scheduler.schedule_later.assert_not_called() + + @mock.patch.object(silence, "datetime", new=PatchedDatetime) + async def test_rescheduled_active(self): + """Rescheduled active silences.""" + channels = [MockTextChannel(id=123), MockTextChannel(id=456)] + self.bot.get_channel.side_effect = channels + self.cog.unsilence_timestamps.items.return_value = [(123, 2000), (456, 3000)] + silence.datetime.now.return_value = datetime.fromtimestamp(1000, tz=timezone.utc) + + self.cog._unsilence_wrapper = mock.MagicMock() + unsilence_return = self.cog._unsilence_wrapper.return_value + + await self.cog._reschedule() + + # Yuck. + calls = [mock.call(1000, 123, unsilence_return), mock.call(2000, 456, unsilence_return)] + self.cog.scheduler.schedule_later.assert_has_calls(calls) + + unsilence_calls = [mock.call(channel) for channel in channels] + self.cog._unsilence_wrapper.assert_has_calls(unsilence_calls) + + self.cog.notifier.add_channel.assert_not_called() + + +@autospec(silence.Silence, "previous_overwrites", "unsilence_timestamps", pass_mocks=False) +class SilenceTests(unittest.IsolatedAsyncioTestCase): + """Tests for the silence command and its related helper methods.""" + + @autospec(silence.Silence, "_reschedule", pass_mocks=False) + @autospec(silence, "Scheduler", "SilenceNotifier", pass_mocks=False) + def setUp(self) -> None: + self.bot = MockBot() + self.cog = silence.Silence(self.bot) + self.cog._init_task = asyncio.Future() + self.cog._init_task.set_result(None) + + # Avoid unawaited coroutine warnings. + self.cog.scheduler.schedule_later.side_effect = lambda delay, task_id, coro: coro.close() + + asyncio.run(self.cog._async_init()) # Populate instance attributes. 
+ + self.channel = MockTextChannel() + self.overwrite = PermissionOverwrite(stream=True, send_messages=True, add_reactions=False) + self.channel.overwrites_for.return_value = self.overwrite + + async def test_sent_correct_message(self): + """Appropriate failure/success message was sent by the command.""" + test_cases = ( + (0.0001, silence.MSG_SILENCE_SUCCESS.format(duration=0.0001), True,), + (None, silence.MSG_SILENCE_PERMANENT, True,), + (5, silence.MSG_SILENCE_FAIL, False,), + ) + for duration, message, was_silenced in test_cases: + ctx = MockContext() + with mock.patch.object(self.cog, "_set_silence_overwrites", return_value=was_silenced): + with self.subTest(was_silenced=was_silenced, message=message, duration=duration): + await self.cog.silence.callback(self.cog, ctx, duration) + ctx.send.assert_called_once_with(message) + + async def test_skipped_already_silenced(self): + """Permissions were not set and `False` was returned for an already silenced channel.""" + subtests = ( + (False, PermissionOverwrite(send_messages=False, add_reactions=False)), + (True, PermissionOverwrite(send_messages=True, add_reactions=True)), + (True, PermissionOverwrite(send_messages=False, add_reactions=False)), + ) + + for contains, overwrite in subtests: + with self.subTest(contains=contains, overwrite=overwrite): + self.cog.scheduler.__contains__.return_value = contains + channel = MockTextChannel() + channel.overwrites_for.return_value = overwrite + + self.assertFalse(await self.cog._set_silence_overwrites(channel)) + channel.set_permissions.assert_not_called() + + async def test_silenced_channel(self): + """Channel had `send_message` and `add_reactions` permissions revoked for verified role.""" + self.assertTrue(await self.cog._set_silence_overwrites(self.channel)) + self.assertFalse(self.overwrite.send_messages) + self.assertFalse(self.overwrite.add_reactions) + self.channel.set_permissions.assert_awaited_once_with( + self.cog._everyone_role, + overwrite=self.overwrite + ) + + async def test_preserved_other_overwrites(self): + """Channel's other unrelated overwrites were not changed.""" + prev_overwrite_dict = dict(self.overwrite) + await self.cog._set_silence_overwrites(self.channel) + new_overwrite_dict = dict(self.overwrite) + + # Remove 'send_messages' & 'add_reactions' keys because they were changed by the method. 
+ del prev_overwrite_dict['send_messages'] + del prev_overwrite_dict['add_reactions'] + del new_overwrite_dict['send_messages'] + del new_overwrite_dict['add_reactions'] + + self.assertDictEqual(prev_overwrite_dict, new_overwrite_dict) + + async def test_temp_not_added_to_notifier(self): + """Channel was not added to notifier if a duration was set for the silence.""" + with mock.patch.object(self.cog, "_set_silence_overwrites", return_value=True): + await self.cog.silence.callback(self.cog, MockContext(), 15) + self.cog.notifier.add_channel.assert_not_called() + + async def test_indefinite_added_to_notifier(self): + """Channel was added to notifier if a duration was not set for the silence.""" + with mock.patch.object(self.cog, "_set_silence_overwrites", return_value=True): + await self.cog.silence.callback(self.cog, MockContext(), None) + self.cog.notifier.add_channel.assert_called_once() + + async def test_silenced_not_added_to_notifier(self): + """Channel was not added to the notifier if it was already silenced.""" + with mock.patch.object(self.cog, "_set_silence_overwrites", return_value=False): + await self.cog.silence.callback(self.cog, MockContext(), 15) + self.cog.notifier.add_channel.assert_not_called() + + async def test_cached_previous_overwrites(self): + """Channel's previous overwrites were cached.""" + overwrite_json = '{"send_messages": true, "add_reactions": false}' + await self.cog._set_silence_overwrites(self.channel) + self.cog.previous_overwrites.set.assert_called_once_with(self.channel.id, overwrite_json) + + @autospec(silence, "datetime") + async def test_cached_unsilence_time(self, datetime_mock): + """The UTC POSIX timestamp for the unsilence was cached.""" + now_timestamp = 100 + duration = 15 + timestamp = now_timestamp + duration * 60 + datetime_mock.now.return_value = datetime.fromtimestamp(now_timestamp, tz=timezone.utc) + + ctx = MockContext(channel=self.channel) + await self.cog.silence.callback(self.cog, ctx, duration) + + self.cog.unsilence_timestamps.set.assert_awaited_once_with(ctx.channel.id, timestamp) + datetime_mock.now.assert_called_once_with(tz=timezone.utc) # Ensure it's using an aware dt. 
+ + async def test_cached_indefinite_time(self): + """A value of -1 was cached for a permanent silence.""" + ctx = MockContext(channel=self.channel) + await self.cog.silence.callback(self.cog, ctx, None) + self.cog.unsilence_timestamps.set.assert_awaited_once_with(ctx.channel.id, -1) + + async def test_scheduled_task(self): + """An unsilence task was scheduled.""" + ctx = MockContext(channel=self.channel, invoke=mock.MagicMock()) + + await self.cog.silence.callback(self.cog, ctx, 5) + + args = (300, ctx.channel.id, ctx.invoke.return_value) + self.cog.scheduler.schedule_later.assert_called_once_with(*args) + ctx.invoke.assert_called_once_with(self.cog.unsilence) + + async def test_permanent_not_scheduled(self): + """A task was not scheduled for a permanent silence.""" + ctx = MockContext(channel=self.channel) + await self.cog.silence.callback(self.cog, ctx, None) + self.cog.scheduler.schedule_later.assert_not_called() + + +@autospec(silence.Silence, "unsilence_timestamps", pass_mocks=False) +class UnsilenceTests(unittest.IsolatedAsyncioTestCase): + """Tests for the unsilence command and its related helper methods.""" + + @autospec(silence.Silence, "_reschedule", pass_mocks=False) + @autospec(silence, "Scheduler", "SilenceNotifier", pass_mocks=False) + def setUp(self) -> None: + self.bot = MockBot(get_channel=lambda _: MockTextChannel()) + self.cog = silence.Silence(self.bot) + self.cog._init_task = asyncio.Future() + self.cog._init_task.set_result(None) + + overwrites_cache = mock.create_autospec(self.cog.previous_overwrites, spec_set=True) + self.cog.previous_overwrites = overwrites_cache + + asyncio.run(self.cog._async_init()) # Populate instance attributes. + + self.cog.scheduler.__contains__.return_value = True + overwrites_cache.get.return_value = '{"send_messages": true, "add_reactions": false}' + self.channel = MockTextChannel() + self.overwrite = PermissionOverwrite(stream=True, send_messages=False, add_reactions=False) + self.channel.overwrites_for.return_value = self.overwrite + + async def test_sent_correct_message(self): + """Appropriate failure/success message was sent by the command.""" + unsilenced_overwrite = PermissionOverwrite(send_messages=True, add_reactions=True) + test_cases = ( + (True, silence.MSG_UNSILENCE_SUCCESS, unsilenced_overwrite), + (False, silence.MSG_UNSILENCE_FAIL, unsilenced_overwrite), + (False, silence.MSG_UNSILENCE_MANUAL, self.overwrite), + (False, silence.MSG_UNSILENCE_MANUAL, PermissionOverwrite(send_messages=False)), + (False, silence.MSG_UNSILENCE_MANUAL, PermissionOverwrite(add_reactions=False)), + ) + for was_unsilenced, message, overwrite in test_cases: + ctx = MockContext() + with self.subTest(was_unsilenced=was_unsilenced, message=message, overwrite=overwrite): + with mock.patch.object(self.cog, "_unsilence", return_value=was_unsilenced): + ctx.channel.overwrites_for.return_value = overwrite + await self.cog.unsilence.callback(self.cog, ctx) + ctx.channel.send.assert_called_once_with(message) + + async def test_skipped_already_unsilenced(self): + """Permissions were not set and `False` was returned for an already unsilenced channel.""" + self.cog.scheduler.__contains__.return_value = False + self.cog.previous_overwrites.get.return_value = None + channel = MockTextChannel() + + self.assertFalse(await self.cog._unsilence(channel)) + channel.set_permissions.assert_not_called() + + async def test_restored_overwrites(self): + """Channel's `send_message` and `add_reactions` overwrites were restored.""" + await self.cog._unsilence(self.channel) + 
self.channel.set_permissions.assert_awaited_once_with( + self.cog._everyone_role, + overwrite=self.overwrite, + ) + + # Recall that these values are determined by the fixture. + self.assertTrue(self.overwrite.send_messages) + self.assertFalse(self.overwrite.add_reactions) + + async def test_cache_miss_used_default_overwrites(self): + """Both overwrites were set to None due to previous values not being found in the cache.""" + self.cog.previous_overwrites.get.return_value = None + + await self.cog._unsilence(self.channel) + self.channel.set_permissions.assert_awaited_once_with( + self.cog._everyone_role, + overwrite=self.overwrite, + ) + + self.assertIsNone(self.overwrite.send_messages) + self.assertIsNone(self.overwrite.add_reactions) + + async def test_cache_miss_sent_mod_alert(self): + """A message was sent to the mod alerts channel.""" + self.cog.previous_overwrites.get.return_value = None + + await self.cog._unsilence(self.channel) + self.cog._mod_alerts_channel.send.assert_awaited_once() + + async def test_removed_notifier(self): + """Channel was removed from `notifier`.""" + await self.cog._unsilence(self.channel) + self.cog.notifier.remove_channel.assert_called_once_with(self.channel) + + async def test_deleted_cached_overwrite(self): + """Channel was deleted from the overwrites cache.""" + await self.cog._unsilence(self.channel) + self.cog.previous_overwrites.delete.assert_awaited_once_with(self.channel.id) + + async def test_deleted_cached_time(self): + """Channel was deleted from the timestamp cache.""" + await self.cog._unsilence(self.channel) + self.cog.unsilence_timestamps.delete.assert_awaited_once_with(self.channel.id) + + async def test_cancelled_task(self): + """The scheduled unsilence task should be cancelled.""" + await self.cog._unsilence(self.channel) + self.cog.scheduler.cancel.assert_called_once_with(self.channel.id) + + async def test_preserved_other_overwrites(self): + """Channel's other unrelated overwrites were not changed, including cache misses.""" + for overwrite_json in ('{"send_messages": true, "add_reactions": null}', None): + with self.subTest(overwrite_json=overwrite_json): + self.cog.previous_overwrites.get.return_value = overwrite_json + + prev_overwrite_dict = dict(self.overwrite) + await self.cog._unsilence(self.channel) + new_overwrite_dict = dict(self.overwrite) + + # Remove these keys because they were modified by the unsilence. 
+ del prev_overwrite_dict['send_messages'] + del prev_overwrite_dict['add_reactions'] + del new_overwrite_dict['send_messages'] + del new_overwrite_dict['add_reactions'] + + self.assertDictEqual(prev_overwrite_dict, new_overwrite_dict) diff --git a/tests/bot/exts/moderation/test_slowmode.py b/tests/bot/exts/moderation/test_slowmode.py new file mode 100644 index 000000000..dad751e0d --- /dev/null +++ b/tests/bot/exts/moderation/test_slowmode.py @@ -0,0 +1,113 @@ +import unittest +from unittest import mock + +from dateutil.relativedelta import relativedelta + +from bot.constants import Emojis +from bot.exts.moderation.slowmode import Slowmode +from tests.helpers import MockBot, MockContext, MockTextChannel + + +class SlowmodeTests(unittest.IsolatedAsyncioTestCase): + + def setUp(self) -> None: + self.bot = MockBot() + self.cog = Slowmode(self.bot) + self.ctx = MockContext() + + async def test_get_slowmode_no_channel(self) -> None: + """Get slowmode without a given channel.""" + self.ctx.channel = MockTextChannel(name='python-general', slowmode_delay=5) + + await self.cog.get_slowmode(self.cog, self.ctx, None) + self.ctx.send.assert_called_once_with("The slowmode delay for #python-general is 5 seconds.") + + async def test_get_slowmode_with_channel(self) -> None: + """Get slowmode with a given channel.""" + text_channel = MockTextChannel(name='python-language', slowmode_delay=2) + + await self.cog.get_slowmode(self.cog, self.ctx, text_channel) + self.ctx.send.assert_called_once_with('The slowmode delay for #python-language is 2 seconds.') + + async def test_set_slowmode_no_channel(self) -> None: + """Set slowmode without a given channel.""" + test_cases = ( + ('helpers', 23, True, f'{Emojis.check_mark} The slowmode delay for #helpers is now 23 seconds.'), + ('mods', 76526, False, f'{Emojis.cross_mark} The slowmode delay must be between 0 and 6 hours.'), + ('admins', 97, True, f'{Emojis.check_mark} The slowmode delay for #admins is now 1 minute and 37 seconds.') + ) + + for channel_name, seconds, edited, result_msg in test_cases: + with self.subTest( + channel_mention=channel_name, + seconds=seconds, + edited=edited, + result_msg=result_msg + ): + self.ctx.channel = MockTextChannel(name=channel_name) + + await self.cog.set_slowmode(self.cog, self.ctx, None, relativedelta(seconds=seconds)) + + if edited: + self.ctx.channel.edit.assert_awaited_once_with(slowmode_delay=float(seconds)) + else: + self.ctx.channel.edit.assert_not_called() + + self.ctx.send.assert_called_once_with(result_msg) + + self.ctx.reset_mock() + + async def test_set_slowmode_with_channel(self) -> None: + """Set slowmode with a given channel.""" + test_cases = ( + ('bot-commands', 12, True, f'{Emojis.check_mark} The slowmode delay for #bot-commands is now 12 seconds.'), + ('mod-spam', 21, True, f'{Emojis.check_mark} The slowmode delay for #mod-spam is now 21 seconds.'), + ('admin-spam', 4323598, False, f'{Emojis.cross_mark} The slowmode delay must be between 0 and 6 hours.') + ) + + for channel_name, seconds, edited, result_msg in test_cases: + with self.subTest( + channel_mention=channel_name, + seconds=seconds, + edited=edited, + result_msg=result_msg + ): + text_channel = MockTextChannel(name=channel_name) + + await self.cog.set_slowmode(self.cog, self.ctx, text_channel, relativedelta(seconds=seconds)) + + if edited: + text_channel.edit.assert_awaited_once_with(slowmode_delay=float(seconds)) + else: + text_channel.edit.assert_not_called() + + self.ctx.send.assert_called_once_with(result_msg) + + self.ctx.reset_mock() + + 
async def test_reset_slowmode_no_channel(self) -> None: + """Reset slowmode without a given channel.""" + self.ctx.channel = MockTextChannel(name='careers', slowmode_delay=6) + + await self.cog.reset_slowmode(self.cog, self.ctx, None) + self.ctx.send.assert_called_once_with( + f'{Emojis.check_mark} The slowmode delay for #careers has been reset to 0 seconds.' + ) + + async def test_reset_slowmode_with_channel(self) -> None: + """Reset slowmode with a given channel.""" + text_channel = MockTextChannel(name='meta', slowmode_delay=1) + + await self.cog.reset_slowmode(self.cog, self.ctx, text_channel) + self.ctx.send.assert_called_once_with( + f'{Emojis.check_mark} The slowmode delay for #meta has been reset to 0 seconds.' + ) + + @mock.patch("bot.exts.moderation.slowmode.has_any_role") + @mock.patch("bot.exts.moderation.slowmode.MODERATION_ROLES", new=(1, 2, 3)) + async def test_cog_check(self, role_check): + """Role check is called with `MODERATION_ROLES`""" + role_check.return_value.predicate = mock.AsyncMock() + await self.cog.cog_check(self.ctx) + role_check.assert_called_once_with(*(1, 2, 3)) + role_check.return_value.predicate.assert_awaited_once_with(self.ctx) diff --git a/tests/bot/exts/test_cogs.py b/tests/bot/exts/test_cogs.py new file mode 100644 index 000000000..f8e120262 --- /dev/null +++ b/tests/bot/exts/test_cogs.py @@ -0,0 +1,82 @@ +"""Test suite for general tests which apply to all cogs.""" + +import importlib +import pkgutil +import typing as t +import unittest +from collections import defaultdict +from types import ModuleType +from unittest import mock + +from discord.ext import commands + +from bot import exts + + +class CommandNameTests(unittest.TestCase): + """Tests for shadowing command names and aliases.""" + + @staticmethod + def walk_commands(cog: commands.Cog) -> t.Iterator[commands.Command]: + """An iterator that recursively walks through `cog`'s commands and subcommands.""" + # Can't use Bot.walk_commands() or Cog.get_commands() cause those are instance methods. + for command in cog.__cog_commands__: + if command.parent is None: + yield command + if isinstance(command, commands.GroupMixin): + # Annoyingly it returns duplicates for each alias so use a set to fix that + yield from set(command.walk_commands()) + + @staticmethod + def walk_modules() -> t.Iterator[ModuleType]: + """Yield imported modules from the bot.exts subpackage.""" + def on_error(name: str) -> t.NoReturn: + raise ImportError(name=name) # pragma: no cover + + # The mock prevents asyncio.get_event_loop() from being called. + with mock.patch("discord.ext.tasks.loop"): + prefix = f"{exts.__name__}." + for module in pkgutil.walk_packages(exts.__path__, prefix, onerror=on_error): + if not module.ispkg: + yield importlib.import_module(module.name) + + @staticmethod + def walk_cogs(module: ModuleType) -> t.Iterator[commands.Cog]: + """Yield all cogs defined in an extension.""" + for obj in module.__dict__.values(): + # Check if it's a class type cause otherwise issubclass() may raise a TypeError. 
+ is_cog = isinstance(obj, type) and issubclass(obj, commands.Cog) + if is_cog and obj.__module__ == module.__name__: + yield obj + + @staticmethod + def get_qualified_names(command: commands.Command) -> t.List[str]: + """Return a list of all qualified names, including aliases, for the `command`.""" + names = [f"{command.full_parent_name} {alias}".strip() for alias in command.aliases] + names.append(command.qualified_name) + names += getattr(command, "root_aliases", []) + + return names + + def get_all_commands(self) -> t.Iterator[commands.Command]: + """Yield all commands for all cogs in all extensions.""" + for module in self.walk_modules(): + for cog in self.walk_cogs(module): + for cmd in self.walk_commands(cog): + yield cmd + + def test_names_dont_shadow(self): + """Names and aliases of commands should be unique.""" + all_names = defaultdict(list) + for cmd in self.get_all_commands(): + func_name = f"{cmd.module}.{cmd.callback.__qualname__}" + + for name in self.get_qualified_names(cmd): + with self.subTest(cmd=func_name, name=name): + if name in all_names: # pragma: no cover + conflicts = ", ".join(all_names.get(name, "")) + self.fail( + f"Name '{name}' of the command {func_name} conflicts with {conflicts}." + ) + + all_names[name].append(func_name) diff --git a/tests/bot/exts/utils/__init__.py b/tests/bot/exts/utils/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/tests/bot/exts/utils/__init__.py diff --git a/tests/bot/exts/utils/test_jams.py b/tests/bot/exts/utils/test_jams.py new file mode 100644 index 000000000..45e7b5b51 --- /dev/null +++ b/tests/bot/exts/utils/test_jams.py @@ -0,0 +1,173 @@ +import unittest +from unittest.mock import AsyncMock, MagicMock, create_autospec + +from discord import CategoryChannel + +from bot.constants import Roles +from bot.exts.utils import jams +from tests.helpers import MockBot, MockContext, MockGuild, MockMember, MockRole, MockTextChannel + + +def get_mock_category(channel_count: int, name: str) -> CategoryChannel: + """Return a mocked code jam category.""" + category = create_autospec(CategoryChannel, spec_set=True, instance=True) + category.name = name + category.channels = [MockTextChannel() for _ in range(channel_count)] + + return category + + +class JamCreateTeamTests(unittest.IsolatedAsyncioTestCase): + """Tests for `createteam` command.""" + + def setUp(self): + self.bot = MockBot() + self.admin_role = MockRole(name="Admins", id=Roles.admins) + self.command_user = MockMember([self.admin_role]) + self.guild = MockGuild([self.admin_role]) + self.ctx = MockContext(bot=self.bot, author=self.command_user, guild=self.guild) + self.cog = jams.CodeJams(self.bot) + + async def test_too_small_amount_of_team_members_passed(self): + """Should `ctx.send` and exit early when too small amount of members.""" + for case in (1, 2): + with self.subTest(amount_of_members=case): + self.cog.create_channels = AsyncMock() + self.cog.add_roles = AsyncMock() + + self.ctx.reset_mock() + members = (MockMember() for _ in range(case)) + await self.cog.createteam(self.cog, self.ctx, "foo", members) + + self.ctx.send.assert_awaited_once() + self.cog.create_channels.assert_not_awaited() + self.cog.add_roles.assert_not_awaited() + + async def test_duplicate_members_provided(self): + """Should `ctx.send` and exit early because duplicate members provided and total there is only 1 member.""" + self.cog.create_channels = AsyncMock() + self.cog.add_roles = AsyncMock() + + member = MockMember() + await self.cog.createteam(self.cog, self.ctx, 
"foo", (member for _ in range(5))) + + self.ctx.send.assert_awaited_once() + self.cog.create_channels.assert_not_awaited() + self.cog.add_roles.assert_not_awaited() + + async def test_result_sending(self): + """Should call `ctx.send` when everything goes right.""" + self.cog.create_channels = AsyncMock() + self.cog.add_roles = AsyncMock() + + members = [MockMember() for _ in range(5)] + await self.cog.createteam(self.cog, self.ctx, "foo", members) + + self.cog.create_channels.assert_awaited_once() + self.cog.add_roles.assert_awaited_once() + self.ctx.send.assert_awaited_once() + + async def test_category_doesnt_exist(self): + """Should create a new code jam category.""" + subtests = ( + [], + [get_mock_category(jams.MAX_CHANNELS - 1, jams.CATEGORY_NAME)], + [get_mock_category(jams.MAX_CHANNELS - 2, "other")], + ) + + for categories in subtests: + self.guild.reset_mock() + self.guild.categories = categories + + with self.subTest(categories=categories): + actual_category = await self.cog.get_category(self.guild) + + self.guild.create_category_channel.assert_awaited_once() + category_overwrites = self.guild.create_category_channel.call_args[1]["overwrites"] + + self.assertFalse(category_overwrites[self.guild.default_role].read_messages) + self.assertTrue(category_overwrites[self.guild.me].read_messages) + self.assertEqual(self.guild.create_category_channel.return_value, actual_category) + + async def test_category_channel_exist(self): + """Should not try to create category channel.""" + expected_category = get_mock_category(jams.MAX_CHANNELS - 2, jams.CATEGORY_NAME) + self.guild.categories = [ + get_mock_category(jams.MAX_CHANNELS - 2, "other"), + expected_category, + get_mock_category(0, jams.CATEGORY_NAME), + ] + + actual_category = await self.cog.get_category(self.guild) + self.assertEqual(expected_category, actual_category) + + async def test_channel_overwrites(self): + """Should have correct permission overwrites for users and roles.""" + leader = MockMember() + members = [leader] + [MockMember() for _ in range(4)] + overwrites = self.cog.get_overwrites(members, self.guild) + + # Leader permission overwrites + self.assertTrue(overwrites[leader].manage_messages) + self.assertTrue(overwrites[leader].read_messages) + self.assertTrue(overwrites[leader].manage_webhooks) + self.assertTrue(overwrites[leader].connect) + + # Other members permission overwrites + for member in members[1:]: + self.assertTrue(overwrites[member].read_messages) + self.assertTrue(overwrites[member].connect) + + # Everyone and verified role overwrite + self.assertFalse(overwrites[self.guild.default_role].read_messages) + self.assertFalse(overwrites[self.guild.default_role].connect) + self.assertFalse(overwrites[self.guild.get_role(Roles.verified)].read_messages) + self.assertFalse(overwrites[self.guild.get_role(Roles.verified)].connect) + + async def test_team_channels_creation(self): + """Should create new voice and text channel for team.""" + members = [MockMember() for _ in range(5)] + + self.cog.get_overwrites = MagicMock() + self.cog.get_category = AsyncMock() + self.ctx.guild.create_text_channel.return_value = MockTextChannel(mention="foobar-channel") + actual = await self.cog.create_channels(self.guild, "my-team", members) + + self.assertEqual("foobar-channel", actual) + self.cog.get_overwrites.assert_called_once_with(members, self.guild) + self.cog.get_category.assert_awaited_once_with(self.guild) + + self.guild.create_text_channel.assert_awaited_once_with( + "my-team", + 
overwrites=self.cog.get_overwrites.return_value, + category=self.cog.get_category.return_value + ) + self.guild.create_voice_channel.assert_awaited_once_with( + "My Team", + overwrites=self.cog.get_overwrites.return_value, + category=self.cog.get_category.return_value + ) + + async def test_jam_roles_adding(self): + """Should add team leader role to leader and jam role to every team member.""" + leader_role = MockRole(name="Team Leader") + jam_role = MockRole(name="Jammer") + self.guild.get_role.side_effect = [leader_role, jam_role] + + leader = MockMember() + members = [leader] + [MockMember() for _ in range(4)] + await self.cog.add_roles(self.guild, members) + + leader.add_roles.assert_any_await(leader_role) + for member in members: + member.add_roles.assert_any_await(jam_role) + + +class CodeJamSetup(unittest.TestCase): + """Test for `setup` function of `CodeJam` cog.""" + + def test_setup(self): + """Should call `bot.add_cog`.""" + bot = MockBot() + jams.setup(bot) + bot.add_cog.assert_called_once() diff --git a/tests/bot/exts/utils/test_snekbox.py b/tests/bot/exts/utils/test_snekbox.py new file mode 100644 index 000000000..321a92445 --- /dev/null +++ b/tests/bot/exts/utils/test_snekbox.py @@ -0,0 +1,384 @@ +import asyncio +import unittest +from unittest.mock import AsyncMock, MagicMock, Mock, call, create_autospec, patch + +from discord.ext import commands + +from bot import constants +from bot.exts.utils import snekbox +from bot.exts.utils.snekbox import Snekbox +from tests.helpers import MockBot, MockContext, MockMessage, MockReaction, MockUser + + +class SnekboxTests(unittest.IsolatedAsyncioTestCase): + def setUp(self): + """Add mocked bot and cog to the instance.""" + self.bot = MockBot() + self.cog = Snekbox(bot=self.bot) + + async def test_post_eval(self): + """Post the eval code to the URLs.snekbox_eval_api endpoint.""" + resp = MagicMock() + resp.json = AsyncMock(return_value="return") + + context_manager = MagicMock() + context_manager.__aenter__.return_value = resp + self.bot.http_session.post.return_value = context_manager + + self.assertEqual(await self.cog.post_eval("import random"), "return") + self.bot.http_session.post.assert_called_with( + constants.URLs.snekbox_eval_api, + json={"input": "import random"}, + raise_for_status=True + ) + resp.json.assert_awaited_once() + + async def test_upload_output_reject_too_long(self): + """Reject output longer than MAX_PASTE_LEN.""" + result = await self.cog.upload_output("-" * (snekbox.MAX_PASTE_LEN + 1)) + self.assertEqual(result, "too long to upload") + + @patch("bot.exts.utils.snekbox.send_to_paste_service") + async def test_upload_output(self, mock_paste_util): + """Upload the eval output to the URLs.paste_service.format(key="documents") endpoint.""" + await self.cog.upload_output("Test output.") + mock_paste_util.assert_called_once_with("Test output.", extension="txt") + + def test_prepare_input(self): + cases = ( + ('print("Hello world!")', 'print("Hello world!")', 'non-formatted'), + ('`print("Hello world!")`', 'print("Hello world!")', 'one line code block'), + ('```\nprint("Hello world!")```', 'print("Hello world!")', 'multiline code block'), + ('```py\nprint("Hello world!")```', 'print("Hello world!")', 'multiline python code block'), + ('text```print("Hello world!")```text', 'print("Hello world!")', 'code block surrounded by text'), + ('```print("Hello world!")```\ntext\n```py\nprint("Hello world!")```', + 'print("Hello world!")\nprint("Hello world!")', 'two code blocks with text in-between'), + ('`print("Hello 
world!")`\ntext\n```print("How\'s it going?")```', + 'print("How\'s it going?")', 'code block preceded by inline code'), + ('`print("Hello world!")`\ntext\n`print("Hello world!")`', + 'print("Hello world!")', 'one inline code block of two') + ) + for case, expected, testname in cases: + with self.subTest(msg=f'Extract code from {testname}.'): + self.assertEqual(self.cog.prepare_input(case), expected) + + def test_get_results_message(self): + """Return error and message according to the eval result.""" + cases = ( + ('ERROR', None, ('Your eval job has failed', 'ERROR')), + ('', 128 + snekbox.SIGKILL, ('Your eval job timed out or ran out of memory', '')), + ('', 255, ('Your eval job has failed', 'A fatal NsJail error occurred')) + ) + for stdout, returncode, expected in cases: + with self.subTest(stdout=stdout, returncode=returncode, expected=expected): + actual = self.cog.get_results_message({'stdout': stdout, 'returncode': returncode}) + self.assertEqual(actual, expected) + + @patch('bot.exts.utils.snekbox.Signals', side_effect=ValueError) + def test_get_results_message_invalid_signal(self, mock_signals: Mock): + self.assertEqual( + self.cog.get_results_message({'stdout': '', 'returncode': 127}), + ('Your eval job has completed with return code 127', '') + ) + + @patch('bot.exts.utils.snekbox.Signals') + def test_get_results_message_valid_signal(self, mock_signals: Mock): + mock_signals.return_value.name = 'SIGTEST' + self.assertEqual( + self.cog.get_results_message({'stdout': '', 'returncode': 127}), + ('Your eval job has completed with return code 127 (SIGTEST)', '') + ) + + def test_get_status_emoji(self): + """Return emoji according to the eval result.""" + cases = ( + (' ', -1, ':warning:'), + ('Hello world!', 0, ':white_check_mark:'), + ('Invalid beard size', -1, ':x:') + ) + for stdout, returncode, expected in cases: + with self.subTest(stdout=stdout, returncode=returncode, expected=expected): + actual = self.cog.get_status_emoji({'stdout': stdout, 'returncode': returncode}) + self.assertEqual(actual, expected) + + async def test_format_output(self): + """Test output formatting.""" + self.cog.upload_output = AsyncMock(return_value='https://testificate.com/') + + too_many_lines = ( + '001 | v\n002 | e\n003 | r\n004 | y\n005 | l\n006 | o\n' + '007 | n\n008 | g\n009 | b\n010 | e\n011 | a\n... (truncated - too many lines)' + ) + too_long_too_many_lines = ( + "\n".join( + f"{i:03d} | {line}" for i, line in enumerate(['verylongbeard' * 10] * 15, 1) + )[:1000] + "\n... (truncated - too long, too many lines)" + ) + + cases = ( + ('', ('[No output]', None), 'No output'), + ('My awesome output', ('My awesome output', None), 'One line output'), + ('<@', ("<@\u200B", None), r'Convert <@ to <@\u200B'), + ('<!@', ("<!@\u200B", None), r'Convert <!@ to <!@\u200B'), + ( + '\u202E\u202E\u202E', + ('Code block escape attempt detected; will not output result', 'https://testificate.com/'), + 'Detect RIGHT-TO-LEFT OVERRIDE' + ), + ( + '\u200B\u200B\u200B', + ('Code block escape attempt detected; will not output result', 'https://testificate.com/'), + 'Detect ZERO WIDTH SPACE' + ), + ('long\nbeard', ('001 | long\n002 | beard', None), 'Two line output'), + ( + 'v\ne\nr\ny\nl\no\nn\ng\nb\ne\na\nr\nd', + (too_many_lines, 'https://testificate.com/'), + '12 lines output' + ), + ( + 'verylongbeard' * 100, + ('verylongbeard' * 76 + 'verylongbear\n... 
(truncated - too long)', 'https://testificate.com/'), + '1300 characters output' + ), + ( + ('verylongbeard' * 10 + '\n') * 15, + (too_long_too_many_lines, 'https://testificate.com/'), + '15 lines, 1965 characters output' + ), + ) + for case, expected, testname in cases: + with self.subTest(msg=testname, case=case, expected=expected): + self.assertEqual(await self.cog.format_output(case), expected) + + async def test_eval_command_evaluate_once(self): + """Test the eval command procedure.""" + ctx = MockContext() + response = MockMessage() + self.cog.prepare_input = MagicMock(return_value='MyAwesomeFormattedCode') + self.cog.send_eval = AsyncMock(return_value=response) + self.cog.continue_eval = AsyncMock(return_value=None) + + await self.cog.eval_command(self.cog, ctx=ctx, code='MyAwesomeCode') + self.cog.prepare_input.assert_called_once_with('MyAwesomeCode') + self.cog.send_eval.assert_called_once_with(ctx, 'MyAwesomeFormattedCode') + self.cog.continue_eval.assert_called_once_with(ctx, response) + + async def test_eval_command_evaluate_twice(self): + """Test the eval and re-eval command procedure.""" + ctx = MockContext() + response = MockMessage() + self.cog.prepare_input = MagicMock(return_value='MyAwesomeFormattedCode') + self.cog.send_eval = AsyncMock(return_value=response) + self.cog.continue_eval = AsyncMock() + self.cog.continue_eval.side_effect = ('MyAwesomeCode-2', None) + + await self.cog.eval_command(self.cog, ctx=ctx, code='MyAwesomeCode') + self.cog.prepare_input.has_calls(call('MyAwesomeCode'), call('MyAwesomeCode-2')) + self.cog.send_eval.assert_called_with(ctx, 'MyAwesomeFormattedCode') + self.cog.continue_eval.assert_called_with(ctx, response) + + async def test_eval_command_reject_two_eval_at_the_same_time(self): + """Test if the eval command rejects an eval if the author already have a running eval.""" + ctx = MockContext() + ctx.author.id = 42 + ctx.author.mention = '@LemonLemonishBeard#0042' + ctx.send = AsyncMock() + self.cog.jobs = (42,) + await self.cog.eval_command(self.cog, ctx=ctx, code='MyAwesomeCode') + ctx.send.assert_called_once_with( + "@LemonLemonishBeard#0042 You've already got a job running - please wait for it to finish!" 
+ ) + + async def test_eval_command_call_help(self): + """Test if the eval command call the help command if no code is provided.""" + ctx = MockContext(command="sentinel") + await self.cog.eval_command(self.cog, ctx=ctx, code='') + ctx.send_help.assert_called_once_with(ctx.command) + + async def test_send_eval(self): + """Test the send_eval function.""" + ctx = MockContext() + ctx.message = MockMessage() + ctx.send = AsyncMock() + ctx.author.mention = '@LemonLemonishBeard#0042' + + self.cog.post_eval = AsyncMock(return_value={'stdout': '', 'returncode': 0}) + self.cog.get_results_message = MagicMock(return_value=('Return code 0', '')) + self.cog.get_status_emoji = MagicMock(return_value=':yay!:') + self.cog.format_output = AsyncMock(return_value=('[No output]', None)) + + mocked_filter_cog = MagicMock() + mocked_filter_cog.filter_eval = AsyncMock(return_value=False) + self.bot.get_cog.return_value = mocked_filter_cog + + await self.cog.send_eval(ctx, 'MyAwesomeCode') + ctx.send.assert_called_once_with( + '@LemonLemonishBeard#0042 :yay!: Return code 0.\n\n```\n[No output]\n```' + ) + self.cog.post_eval.assert_called_once_with('MyAwesomeCode') + self.cog.get_status_emoji.assert_called_once_with({'stdout': '', 'returncode': 0}) + self.cog.get_results_message.assert_called_once_with({'stdout': '', 'returncode': 0}) + self.cog.format_output.assert_called_once_with('') + + async def test_send_eval_with_paste_link(self): + """Test the send_eval function with a too long output that generate a paste link.""" + ctx = MockContext() + ctx.message = MockMessage() + ctx.send = AsyncMock() + ctx.author.mention = '@LemonLemonishBeard#0042' + + self.cog.post_eval = AsyncMock(return_value={'stdout': 'Way too long beard', 'returncode': 0}) + self.cog.get_results_message = MagicMock(return_value=('Return code 0', '')) + self.cog.get_status_emoji = MagicMock(return_value=':yay!:') + self.cog.format_output = AsyncMock(return_value=('Way too long beard', 'lookatmybeard.com')) + + mocked_filter_cog = MagicMock() + mocked_filter_cog.filter_eval = AsyncMock(return_value=False) + self.bot.get_cog.return_value = mocked_filter_cog + + await self.cog.send_eval(ctx, 'MyAwesomeCode') + ctx.send.assert_called_once_with( + '@LemonLemonishBeard#0042 :yay!: Return code 0.' 
+ '\n\n```\nWay too long beard\n```\nFull output: lookatmybeard.com' + ) + self.cog.post_eval.assert_called_once_with('MyAwesomeCode') + self.cog.get_status_emoji.assert_called_once_with({'stdout': 'Way too long beard', 'returncode': 0}) + self.cog.get_results_message.assert_called_once_with({'stdout': 'Way too long beard', 'returncode': 0}) + self.cog.format_output.assert_called_once_with('Way too long beard') + + async def test_send_eval_with_non_zero_eval(self): + """Test the send_eval function with a code returning a non-zero code.""" + ctx = MockContext() + ctx.message = MockMessage() + ctx.send = AsyncMock() + ctx.author.mention = '@LemonLemonishBeard#0042' + self.cog.post_eval = AsyncMock(return_value={'stdout': 'ERROR', 'returncode': 127}) + self.cog.get_results_message = MagicMock(return_value=('Return code 127', 'Beard got stuck in the eval')) + self.cog.get_status_emoji = MagicMock(return_value=':nope!:') + self.cog.format_output = AsyncMock() # This function isn't called + + mocked_filter_cog = MagicMock() + mocked_filter_cog.filter_eval = AsyncMock(return_value=False) + self.bot.get_cog.return_value = mocked_filter_cog + + await self.cog.send_eval(ctx, 'MyAwesomeCode') + ctx.send.assert_called_once_with( + '@LemonLemonishBeard#0042 :nope!: Return code 127.\n\n```\nBeard got stuck in the eval\n```' + ) + self.cog.post_eval.assert_called_once_with('MyAwesomeCode') + self.cog.get_status_emoji.assert_called_once_with({'stdout': 'ERROR', 'returncode': 127}) + self.cog.get_results_message.assert_called_once_with({'stdout': 'ERROR', 'returncode': 127}) + self.cog.format_output.assert_not_called() + + @patch("bot.exts.utils.snekbox.partial") + async def test_continue_eval_does_continue(self, partial_mock): + """Test that the continue_eval function does continue if required conditions are met.""" + ctx = MockContext(message=MockMessage(add_reaction=AsyncMock(), clear_reactions=AsyncMock())) + response = MockMessage(delete=AsyncMock()) + new_msg = MockMessage() + self.bot.wait_for.side_effect = ((None, new_msg), None) + expected = "NewCode" + self.cog.get_code = create_autospec(self.cog.get_code, spec_set=True, return_value=expected) + + actual = await self.cog.continue_eval(ctx, response) + self.cog.get_code.assert_awaited_once_with(new_msg) + self.assertEqual(actual, expected) + self.bot.wait_for.assert_has_awaits( + ( + call( + 'message_edit', + check=partial_mock(snekbox.predicate_eval_message_edit, ctx), + timeout=snekbox.REEVAL_TIMEOUT, + ), + call('reaction_add', check=partial_mock(snekbox.predicate_eval_emoji_reaction, ctx), timeout=10) + ) + ) + ctx.message.add_reaction.assert_called_once_with(snekbox.REEVAL_EMOJI) + ctx.message.clear_reaction.assert_called_once_with(snekbox.REEVAL_EMOJI) + response.delete.assert_called_once() + + async def test_continue_eval_does_not_continue(self): + ctx = MockContext(message=MockMessage(clear_reactions=AsyncMock())) + self.bot.wait_for.side_effect = asyncio.TimeoutError + + actual = await self.cog.continue_eval(ctx, MockMessage()) + self.assertEqual(actual, None) + ctx.message.clear_reaction.assert_called_once_with(snekbox.REEVAL_EMOJI) + + async def test_get_code(self): + """Should return 1st arg (or None) if eval cmd in message, otherwise return full content.""" + prefix = constants.Bot.prefix + subtests = ( + (self.cog.eval_command, f"{prefix}{self.cog.eval_command.name} print(1)", "print(1)"), + (self.cog.eval_command, f"{prefix}{self.cog.eval_command.name}", None), + (MagicMock(spec=commands.Command), f"{prefix}tags get foo"), + (None, 
"print(123)") + ) + + for command, content, *expected_code in subtests: + if not expected_code: + expected_code = content + else: + [expected_code] = expected_code + + with self.subTest(content=content, expected_code=expected_code): + self.bot.get_context.reset_mock() + self.bot.get_context.return_value = MockContext(command=command) + message = MockMessage(content=content) + + actual_code = await self.cog.get_code(message) + + self.bot.get_context.assert_awaited_once_with(message) + self.assertEqual(actual_code, expected_code) + + def test_predicate_eval_message_edit(self): + """Test the predicate_eval_message_edit function.""" + msg0 = MockMessage(id=1, content='abc') + msg1 = MockMessage(id=2, content='abcdef') + msg2 = MockMessage(id=1, content='abcdef') + + cases = ( + (msg0, msg0, False, 'same ID, same content'), + (msg0, msg1, False, 'different ID, different content'), + (msg0, msg2, True, 'same ID, different content') + ) + for ctx_msg, new_msg, expected, testname in cases: + with self.subTest(msg=f'Messages with {testname} return {expected}'): + ctx = MockContext(message=ctx_msg) + actual = snekbox.predicate_eval_message_edit(ctx, ctx_msg, new_msg) + self.assertEqual(actual, expected) + + def test_predicate_eval_emoji_reaction(self): + """Test the predicate_eval_emoji_reaction function.""" + valid_reaction = MockReaction(message=MockMessage(id=1)) + valid_reaction.__str__.return_value = snekbox.REEVAL_EMOJI + valid_ctx = MockContext(message=MockMessage(id=1), author=MockUser(id=2)) + valid_user = MockUser(id=2) + + invalid_reaction_id = MockReaction(message=MockMessage(id=42)) + invalid_reaction_id.__str__.return_value = snekbox.REEVAL_EMOJI + invalid_user_id = MockUser(id=42) + invalid_reaction_str = MockReaction(message=MockMessage(id=1)) + invalid_reaction_str.__str__.return_value = ':longbeard:' + + cases = ( + (invalid_reaction_id, valid_user, False, 'invalid reaction ID'), + (valid_reaction, invalid_user_id, False, 'invalid user ID'), + (invalid_reaction_str, valid_user, False, 'invalid reaction __str__'), + (valid_reaction, valid_user, True, 'matching attributes') + ) + for reaction, user, expected, testname in cases: + with self.subTest(msg=f'Test with {testname} and expected return {expected}'): + actual = snekbox.predicate_eval_emoji_reaction(valid_ctx, reaction, user) + self.assertEqual(actual, expected) + + +class SnekboxSetupTests(unittest.TestCase): + """Tests setup of the `Snekbox` cog.""" + + def test_setup(self): + """Setup of the extension should call add_cog.""" + bot = MockBot() + snekbox.setup(bot) + bot.add_cog.assert_called_once() diff --git a/tests/bot/rules/__init__.py b/tests/bot/rules/__init__.py index 36c986fe1..0d570f5a3 100644 --- a/tests/bot/rules/__init__.py +++ b/tests/bot/rules/__init__.py @@ -12,7 +12,7 @@ class DisallowedCase(NamedTuple): n_violations: int -class RuleTest(unittest.TestCase, metaclass=ABCMeta): +class RuleTest(unittest.IsolatedAsyncioTestCase, metaclass=ABCMeta): """ Abstract class for antispam rule test cases. 
@@ -68,9 +68,9 @@ class RuleTest(unittest.TestCase, metaclass=ABCMeta): @abstractmethod def relevant_messages(self, case: DisallowedCase) -> Iterable[MockMessage]: """Give expected relevant messages for `case`.""" - raise NotImplementedError + raise NotImplementedError # pragma: no cover @abstractmethod def get_report(self, case: DisallowedCase) -> str: """Give expected error report for `case`.""" - raise NotImplementedError + raise NotImplementedError # pragma: no cover diff --git a/tests/bot/rules/test_attachments.py b/tests/bot/rules/test_attachments.py index e54b4b5b8..d7e779221 100644 --- a/tests/bot/rules/test_attachments.py +++ b/tests/bot/rules/test_attachments.py @@ -2,7 +2,7 @@ from typing import Iterable from bot.rules import attachments from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage, async_test +from tests.helpers import MockMessage def make_msg(author: str, total_attachments: int) -> MockMessage: @@ -17,7 +17,6 @@ class AttachmentRuleTests(RuleTest): self.apply = attachments.apply self.config = {"max": 5, "interval": 10} - @async_test async def test_allows_messages_without_too_many_attachments(self): """Messages without too many attachments are allowed as-is.""" cases = ( @@ -28,7 +27,6 @@ class AttachmentRuleTests(RuleTest): await self.run_allowed(cases) - @async_test async def test_disallows_messages_with_too_many_attachments(self): """Messages with too many attachments trigger the rule.""" cases = ( diff --git a/tests/bot/rules/test_burst.py b/tests/bot/rules/test_burst.py index 72f0be0c7..03682966b 100644 --- a/tests/bot/rules/test_burst.py +++ b/tests/bot/rules/test_burst.py @@ -2,7 +2,7 @@ from typing import Iterable from bot.rules import burst from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage, async_test +from tests.helpers import MockMessage def make_msg(author: str) -> MockMessage: @@ -21,7 +21,6 @@ class BurstRuleTests(RuleTest): self.apply = burst.apply self.config = {"max": 2, "interval": 10} - @async_test async def test_allows_messages_within_limit(self): """Cases which do not violate the rule.""" cases = ( @@ -31,7 +30,6 @@ class BurstRuleTests(RuleTest): await self.run_allowed(cases) - @async_test async def test_disallows_messages_beyond_limit(self): """Cases where the amount of messages exceeds the limit, triggering the rule.""" cases = ( diff --git a/tests/bot/rules/test_burst_shared.py b/tests/bot/rules/test_burst_shared.py index 47367a5f8..3275143d5 100644 --- a/tests/bot/rules/test_burst_shared.py +++ b/tests/bot/rules/test_burst_shared.py @@ -2,7 +2,7 @@ from typing import Iterable from bot.rules import burst_shared from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage, async_test +from tests.helpers import MockMessage def make_msg(author: str) -> MockMessage: @@ -21,7 +21,6 @@ class BurstSharedRuleTests(RuleTest): self.apply = burst_shared.apply self.config = {"max": 2, "interval": 10} - @async_test async def test_allows_messages_within_limit(self): """ Cases that do not violate the rule. 
@@ -34,7 +33,6 @@ class BurstSharedRuleTests(RuleTest): await self.run_allowed(cases) - @async_test async def test_disallows_messages_beyond_limit(self): """Cases where the amount of messages exceeds the limit, triggering the rule.""" cases = ( diff --git a/tests/bot/rules/test_chars.py b/tests/bot/rules/test_chars.py index 7cc36f49e..f1e3c76a7 100644 --- a/tests/bot/rules/test_chars.py +++ b/tests/bot/rules/test_chars.py @@ -2,7 +2,7 @@ from typing import Iterable from bot.rules import chars from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage, async_test +from tests.helpers import MockMessage def make_msg(author: str, n_chars: int) -> MockMessage: @@ -20,7 +20,6 @@ class CharsRuleTests(RuleTest): "interval": 10, } - @async_test async def test_allows_messages_within_limit(self): """Cases with a total amount of chars within limit.""" cases = ( @@ -31,7 +30,6 @@ class CharsRuleTests(RuleTest): await self.run_allowed(cases) - @async_test async def test_disallows_messages_beyond_limit(self): """Cases where the total amount of chars exceeds the limit, triggering the rule.""" cases = ( diff --git a/tests/bot/rules/test_discord_emojis.py b/tests/bot/rules/test_discord_emojis.py index 0239b0b00..66c2d9f92 100644 --- a/tests/bot/rules/test_discord_emojis.py +++ b/tests/bot/rules/test_discord_emojis.py @@ -2,14 +2,15 @@ from typing import Iterable from bot.rules import discord_emojis from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage, async_test +from tests.helpers import MockMessage discord_emoji = "<:abcd:1234>" # Discord emojis follow the format <:name:id> +unicode_emoji = "🧪" -def make_msg(author: str, n_emojis: int) -> MockMessage: +def make_msg(author: str, n_emojis: int, emoji: str = discord_emoji) -> MockMessage: """Build a MockMessage instance with content containing `n_emojis` arbitrary emojis.""" - return MockMessage(author=author, content=discord_emoji * n_emojis) + return MockMessage(author=author, content=emoji * n_emojis) class DiscordEmojisRuleTests(RuleTest): @@ -19,19 +20,23 @@ class DiscordEmojisRuleTests(RuleTest): self.apply = discord_emojis.apply self.config = {"max": 2, "interval": 10} - @async_test async def test_allows_messages_within_limit(self): - """Cases with a total amount of discord emojis within limit.""" + """Cases with a total amount of discord and unicode emojis within limit.""" cases = ( [make_msg("bob", 2)], [make_msg("alice", 1), make_msg("bob", 2), make_msg("alice", 1)], + [make_msg("bob", 2, unicode_emoji)], + [ + make_msg("alice", 1, unicode_emoji), + make_msg("bob", 2, unicode_emoji), + make_msg("alice", 1, unicode_emoji) + ], ) await self.run_allowed(cases) - @async_test async def test_disallows_messages_beyond_limit(self): - """Cases with more than the allowed amount of discord emojis.""" + """Cases with more than the allowed amount of discord and unicode emojis.""" cases = ( DisallowedCase( [make_msg("bob", 3)], @@ -43,6 +48,20 @@ class DiscordEmojisRuleTests(RuleTest): ("alice",), 4, ), + DisallowedCase( + [make_msg("bob", 3, unicode_emoji)], + ("bob",), + 3, + ), + DisallowedCase( + [ + make_msg("alice", 2, unicode_emoji), + make_msg("bob", 2, unicode_emoji), + make_msg("alice", 2, unicode_emoji) + ], + ("alice",), + 4 + ) ) await self.run_disallowed(cases) diff --git a/tests/bot/rules/test_duplicates.py b/tests/bot/rules/test_duplicates.py index 59e0fb6ef..9bd886a77 100644 --- a/tests/bot/rules/test_duplicates.py +++ b/tests/bot/rules/test_duplicates.py @@ -2,7 +2,7 @@ 
from typing import Iterable from bot.rules import duplicates from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage, async_test +from tests.helpers import MockMessage def make_msg(author: str, content: str) -> MockMessage: @@ -17,7 +17,6 @@ class DuplicatesRuleTests(RuleTest): self.apply = duplicates.apply self.config = {"max": 2, "interval": 10} - @async_test async def test_allows_messages_within_limit(self): """Cases which do not violate the rule.""" cases = ( @@ -28,7 +27,6 @@ class DuplicatesRuleTests(RuleTest): await self.run_allowed(cases) - @async_test async def test_disallows_messages_beyond_limit(self): """Cases with too many duplicate messages from the same author.""" cases = ( diff --git a/tests/bot/rules/test_links.py b/tests/bot/rules/test_links.py index 3c3f90e5f..b091bd9d7 100644 --- a/tests/bot/rules/test_links.py +++ b/tests/bot/rules/test_links.py @@ -2,7 +2,7 @@ from typing import Iterable from bot.rules import links from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage, async_test +from tests.helpers import MockMessage def make_msg(author: str, total_links: int) -> MockMessage: @@ -21,7 +21,6 @@ class LinksTests(RuleTest): "interval": 10 } - @async_test async def test_links_within_limit(self): """Messages with an allowed amount of links.""" cases = ( @@ -34,7 +33,6 @@ class LinksTests(RuleTest): await self.run_allowed(cases) - @async_test async def test_links_exceeding_limit(self): """Messages with a a higher than allowed amount of links.""" cases = ( diff --git a/tests/bot/rules/test_mentions.py b/tests/bot/rules/test_mentions.py index ebcdabac6..6444532f2 100644 --- a/tests/bot/rules/test_mentions.py +++ b/tests/bot/rules/test_mentions.py @@ -2,7 +2,7 @@ from typing import Iterable from bot.rules import mentions from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage, async_test +from tests.helpers import MockMessage def make_msg(author: str, total_mentions: int) -> MockMessage: @@ -20,7 +20,6 @@ class TestMentions(RuleTest): "interval": 10, } - @async_test async def test_mentions_within_limit(self): """Messages with an allowed amount of mentions.""" cases = ( @@ -32,7 +31,6 @@ class TestMentions(RuleTest): await self.run_allowed(cases) - @async_test async def test_mentions_exceeding_limit(self): """Messages with a higher than allowed amount of mentions.""" cases = ( diff --git a/tests/bot/rules/test_newlines.py b/tests/bot/rules/test_newlines.py index d61c4609d..e35377773 100644 --- a/tests/bot/rules/test_newlines.py +++ b/tests/bot/rules/test_newlines.py @@ -2,7 +2,7 @@ from typing import Iterable, List from bot.rules import newlines from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage, async_test +from tests.helpers import MockMessage def make_msg(author: str, newline_groups: List[int]) -> MockMessage: @@ -29,7 +29,6 @@ class TotalNewlinesRuleTests(RuleTest): "interval": 10, } - @async_test async def test_allows_messages_within_limit(self): """Cases which do not violate the rule.""" cases = ( @@ -41,7 +40,6 @@ class TotalNewlinesRuleTests(RuleTest): await self.run_allowed(cases) - @async_test async def test_disallows_messages_total(self): """Cases which violate the rule by having too many newlines in total.""" cases = ( @@ -79,7 +77,6 @@ class GroupNewlinesRuleTests(RuleTest): self.apply = newlines.apply self.config = {"max": 5, "max_consecutive": 3, "interval": 10} - @async_test async def 
test_disallows_messages_consecutive(self): """Cases which violate the rule due to having too many consecutive newlines.""" cases = ( diff --git a/tests/bot/rules/test_role_mentions.py b/tests/bot/rules/test_role_mentions.py index b339cccf7..26c05d527 100644 --- a/tests/bot/rules/test_role_mentions.py +++ b/tests/bot/rules/test_role_mentions.py @@ -2,7 +2,7 @@ from typing import Iterable from bot.rules import role_mentions from tests.bot.rules import DisallowedCase, RuleTest -from tests.helpers import MockMessage, async_test +from tests.helpers import MockMessage def make_msg(author: str, n_mentions: int) -> MockMessage: @@ -17,7 +17,6 @@ class RoleMentionsRuleTests(RuleTest): self.apply = role_mentions.apply self.config = {"max": 2, "interval": 10} - @async_test async def test_allows_messages_within_limit(self): """Cases with a total amount of role mentions within limit.""" cases = ( @@ -27,7 +26,6 @@ class RoleMentionsRuleTests(RuleTest): await self.run_allowed(cases) - @async_test async def test_disallows_messages_beyond_limit(self): """Cases with more than the allowed amount of role mentions.""" cases = ( diff --git a/tests/bot/test_api.py b/tests/bot/test_api.py index bdfcc73e4..76bcb481d 100644 --- a/tests/bot/test_api.py +++ b/tests/bot/test_api.py @@ -2,10 +2,9 @@ import unittest from unittest.mock import MagicMock from bot import api -from tests.helpers import async_test -class APIClientTests(unittest.TestCase): +class APIClientTests(unittest.IsolatedAsyncioTestCase): """Tests for the bot's API client.""" @classmethod @@ -14,15 +13,6 @@ class APIClientTests(unittest.TestCase): cls.error_api_response = MagicMock() cls.error_api_response.status = 999 - def test_loop_is_not_running_by_default(self): - """The event loop should not be running by default.""" - self.assertFalse(api.loop_is_running()) - - @async_test - async def test_loop_is_running_in_async_context(self): - """The event loop should be running in an async context.""" - self.assertTrue(api.loop_is_running()) - def test_response_code_error_default_initialization(self): """Test the default initialization of `ResponseCodeError` without `text` or `json`""" error = api.ResponseCodeError(response=self.error_api_response) diff --git a/tests/bot/test_constants.py b/tests/bot/test_constants.py index dae7c066c..f10d6fbe8 100644 --- a/tests/bot/test_constants.py +++ b/tests/bot/test_constants.py @@ -1,14 +1,40 @@ import inspect +import typing import unittest from bot import constants +def is_annotation_instance(value: typing.Any, annotation: typing.Any) -> bool: + """ + Return True if `value` is an instance of the type represented by `annotation`. + + This doesn't account for things like Unions or checking for homogenous types in collections. + """ + origin = typing.get_origin(annotation) + + # This is done in case a bare e.g. `typing.List` is used. + # In such case, for the assertion to pass, the type needs to be normalised to e.g. `list`. + # `get_origin()` does this normalisation for us. 
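    # Illustrative values (not part of the original file; behaviour per the `typing`
    # docs on Python 3.8+), showing the normalisation described above:
    #   typing.get_origin(typing.List) is list          -> bare alias normalised
    #   typing.get_origin(typing.List[int]) is list     -> parameterised alias normalised
    #   typing.get_origin(int) is None                  -> plain runtime type used as-is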
+ type_ = annotation if origin is None else origin + + return isinstance(value, type_) + + +def is_any_instance(value: typing.Any, types: typing.Collection) -> bool: + """Return True if `value` is an instance of any type in `types`.""" + for type_ in types: + if is_annotation_instance(value, type_): + return True + + return False + + class ConstantsTests(unittest.TestCase): """Tests for our constants.""" def test_section_configuration_matches_type_specification(self): - """The section annotations should match the actual types of the sections.""" + """"The section annotations should match the actual types of the sections.""" sections = ( cls @@ -17,10 +43,15 @@ class ConstantsTests(unittest.TestCase): ) for section in sections: for name, annotation in section.__annotations__.items(): - with self.subTest(section=section, name=name, annotation=annotation): + with self.subTest(section=section.__name__, name=name, annotation=annotation): value = getattr(section, name) + origin = typing.get_origin(annotation) + annotation_args = typing.get_args(annotation) + failure_msg = f"{value} is not an instance of {annotation}" - if getattr(annotation, '_name', None) in ('Dict', 'List'): - self.skipTest("Cannot validate containers yet.") - - self.assertIsInstance(value, annotation) + if origin is typing.Union: + is_instance = is_any_instance(value, annotation_args) + self.assertTrue(is_instance, failure_msg) + else: + is_instance = is_annotation_instance(value, annotation) + self.assertTrue(is_instance, failure_msg) diff --git a/tests/bot/test_converters.py b/tests/bot/test_converters.py index b2b78d9dd..c42111f3f 100644 --- a/tests/bot/test_converters.py +++ b/tests/bot/test_converters.py @@ -1,5 +1,5 @@ -import asyncio import datetime +import re import unittest from unittest.mock import MagicMock, patch @@ -8,6 +8,7 @@ from discord.ext.commands import BadArgument from bot.converters import ( Duration, + HushDurationConverter, ISODateTime, TagContentConverter, TagNameConverter, @@ -15,7 +16,7 @@ from bot.converters import ( ) -class ConverterTests(unittest.TestCase): +class ConverterTests(unittest.IsolatedAsyncioTestCase): """Tests our custom argument converters.""" @classmethod @@ -25,7 +26,7 @@ class ConverterTests(unittest.TestCase): cls.fixed_utc_now = datetime.datetime.fromisoformat('2019-01-01T00:00:00') - def test_tag_content_converter_for_valid(self): + async def test_tag_content_converter_for_valid(self): """TagContentConverter should return correct values for valid input.""" test_values = ( ('hello', 'hello'), @@ -34,10 +35,10 @@ class ConverterTests(unittest.TestCase): for content, expected_conversion in test_values: with self.subTest(content=content, expected_conversion=expected_conversion): - conversion = asyncio.run(TagContentConverter.convert(self.context, content)) + conversion = await TagContentConverter.convert(self.context, content) self.assertEqual(conversion, expected_conversion) - def test_tag_content_converter_for_invalid(self): + async def test_tag_content_converter_for_invalid(self): """TagContentConverter should raise the proper exception for invalid input.""" test_values = ( ('', "Tag contents should not be empty, or filled with whitespace."), @@ -46,10 +47,10 @@ class ConverterTests(unittest.TestCase): for value, exception_message in test_values: with self.subTest(tag_content=value, exception_message=exception_message): - with self.assertRaises(BadArgument, msg=exception_message): - asyncio.run(TagContentConverter.convert(self.context, value)) + with 
self.assertRaisesRegex(BadArgument, re.escape(exception_message)): + await TagContentConverter.convert(self.context, value) - def test_tag_name_converter_for_valid(self): + async def test_tag_name_converter_for_valid(self): """TagNameConverter should return the correct values for valid tag names.""" test_values = ( ('tracebacks', 'tracebacks'), @@ -59,44 +60,44 @@ class ConverterTests(unittest.TestCase): for name, expected_conversion in test_values: with self.subTest(name=name, expected_conversion=expected_conversion): - conversion = asyncio.run(TagNameConverter.convert(self.context, name)) + conversion = await TagNameConverter.convert(self.context, name) self.assertEqual(conversion, expected_conversion) - def test_tag_name_converter_for_invalid(self): + async def test_tag_name_converter_for_invalid(self): """TagNameConverter should raise the correct exception for invalid tag names.""" test_values = ( ('👋', "Don't be ridiculous, you can't use that character!"), ('', "Tag names should not be empty, or filled with whitespace."), (' ', "Tag names should not be empty, or filled with whitespace."), - ('42', "Tag names can't be numbers."), + ('42', "Tag names must contain at least one letter."), ('x' * 128, "Are you insane? That's way too long!"), ) for invalid_name, exception_message in test_values: with self.subTest(invalid_name=invalid_name, exception_message=exception_message): - with self.assertRaises(BadArgument, msg=exception_message): - asyncio.run(TagNameConverter.convert(self.context, invalid_name)) + with self.assertRaisesRegex(BadArgument, re.escape(exception_message)): + await TagNameConverter.convert(self.context, invalid_name) - def test_valid_python_identifier_for_valid(self): + async def test_valid_python_identifier_for_valid(self): """ValidPythonIdentifier returns valid identifiers unchanged.""" test_values = ('foo', 'lemon') for name in test_values: with self.subTest(identifier=name): - conversion = asyncio.run(ValidPythonIdentifier.convert(self.context, name)) + conversion = await ValidPythonIdentifier.convert(self.context, name) self.assertEqual(name, conversion) - def test_valid_python_identifier_for_invalid(self): + async def test_valid_python_identifier_for_invalid(self): """ValidPythonIdentifier raises the proper exception for invalid identifiers.""" test_values = ('nested.stuff', '#####') for name in test_values: with self.subTest(identifier=name): exception_message = f'`{name}` is not a valid Python identifier' - with self.assertRaises(BadArgument, msg=exception_message): - asyncio.run(ValidPythonIdentifier.convert(self.context, name)) + with self.assertRaisesRegex(BadArgument, re.escape(exception_message)): + await ValidPythonIdentifier.convert(self.context, name) - def test_duration_converter_for_valid(self): + async def test_duration_converter_for_valid(self): """Duration returns the correct `datetime` for valid duration strings.""" test_values = ( # Simple duration strings @@ -158,35 +159,35 @@ class ConverterTests(unittest.TestCase): mock_datetime.utcnow.return_value = self.fixed_utc_now with self.subTest(duration=duration, duration_dict=duration_dict): - converted_datetime = asyncio.run(converter.convert(self.context, duration)) + converted_datetime = await converter.convert(self.context, duration) self.assertEqual(converted_datetime, expected_datetime) - def test_duration_converter_for_invalid(self): + async def test_duration_converter_for_invalid(self): """Duration raises the right exception for invalid duration strings.""" test_values = ( # Units in wrong 
order - ('1d1w'), - ('1s1y'), + '1d1w', + '1s1y', # Duplicated units - ('1 year 2 years'), - ('1 M 10 minutes'), + '1 year 2 years', + '1 M 10 minutes', # Unknown substrings - ('1MVes'), - ('1y3breads'), + '1MVes', + '1y3breads', # Missing amount - ('ym'), + 'ym', # Incorrect whitespace - (" 1y"), - ("1S "), - ("1y 1m"), + " 1y", + "1S ", + "1y 1m", # Garbage - ('Guido van Rossum'), - ('lemon lemon lemon lemon lemon lemon lemon'), + 'Guido van Rossum', + 'lemon lemon lemon lemon lemon lemon lemon', ) converter = Duration() @@ -194,10 +195,21 @@ class ConverterTests(unittest.TestCase): for invalid_duration in test_values: with self.subTest(invalid_duration=invalid_duration): exception_message = f'`{invalid_duration}` is not a valid duration string.' - with self.assertRaises(BadArgument, msg=exception_message): - asyncio.run(converter.convert(self.context, invalid_duration)) + with self.assertRaisesRegex(BadArgument, re.escape(exception_message)): + await converter.convert(self.context, invalid_duration) - def test_isodatetime_converter_for_valid(self): + @patch("bot.converters.datetime") + async def test_duration_converter_out_of_range(self, mock_datetime): + """Duration converter should raise BadArgument if datetime raises a ValueError.""" + mock_datetime.__add__.side_effect = ValueError + mock_datetime.utcnow.return_value = mock_datetime + + duration = f"{datetime.MAXYEAR}y" + exception_message = f"`{duration}` results in a datetime outside the supported range." + with self.assertRaisesRegex(BadArgument, re.escape(exception_message)): + await Duration().convert(self.context, duration) + + async def test_isodatetime_converter_for_valid(self): """ISODateTime converter returns correct datetime for valid datetime string.""" test_values = ( # `YYYY-mm-ddTHH:MM:SSZ` | `YYYY-mm-dd HH:MM:SSZ` @@ -242,32 +254,61 @@ class ConverterTests(unittest.TestCase): for datetime_string, expected_dt in test_values: with self.subTest(datetime_string=datetime_string, expected_dt=expected_dt): - converted_dt = asyncio.run(converter.convert(self.context, datetime_string)) + converted_dt = await converter.convert(self.context, datetime_string) self.assertIsNone(converted_dt.tzinfo) self.assertEqual(converted_dt, expected_dt) - def test_isodatetime_converter_for_invalid(self): + async def test_isodatetime_converter_for_invalid(self): """ISODateTime converter raises the correct exception for invalid datetime strings.""" test_values = ( # Make sure it doesn't interfere with the Duration converter - ('1Y'), - ('1d'), - ('1H'), + '1Y', + '1d', + '1H', # Check if it fails when only providing the optional time part - ('10:10:10'), - ('10:00'), + '10:10:10', + '10:00', # Invalid date format - ('19-01-01'), + '19-01-01', # Other non-valid strings - ('fisk the tag master'), + 'fisk the tag master', ) converter = ISODateTime() for datetime_string in test_values: with self.subTest(datetime_string=datetime_string): exception_message = f"`{datetime_string}` is not a valid ISO-8601 datetime string" - with self.assertRaises(BadArgument, msg=exception_message): - asyncio.run(converter.convert(self.context, datetime_string)) + with self.assertRaisesRegex(BadArgument, re.escape(exception_message)): + await converter.convert(self.context, datetime_string) + + async def test_hush_duration_converter_for_valid(self): + """HushDurationConverter returns correct value for minutes duration or `"forever"` strings.""" + test_values = ( + ("0", 0), + ("15", 15), + ("10", 10), + ("5m", 5), + ("5M", 5), + ("forever", None), + ) + converter = 
HushDurationConverter() + for minutes_string, expected_minutes in test_values: + with self.subTest(minutes_string=minutes_string, expected_minutes=expected_minutes): + converted = await converter.convert(self.context, minutes_string) + self.assertEqual(expected_minutes, converted) + + async def test_hush_duration_converter_for_invalid(self): + """HushDurationConverter raises correct exception for invalid minutes duration strings.""" + test_values = ( + ("16", "Duration must be at most 15 minutes."), + ("10d", "10d is not a valid minutes duration."), + ("-1", "-1 is not a valid minutes duration."), + ) + converter = HushDurationConverter() + for invalid_minutes_string, exception_message in test_values: + with self.subTest(invalid_minutes_string=invalid_minutes_string, exception_message=exception_message): + with self.assertRaisesRegex(BadArgument, re.escape(exception_message)): + await converter.convert(self.context, invalid_minutes_string) diff --git a/tests/bot/test_decorators.py b/tests/bot/test_decorators.py new file mode 100644 index 000000000..3d450caa0 --- /dev/null +++ b/tests/bot/test_decorators.py @@ -0,0 +1,147 @@ +import collections +import unittest +import unittest.mock + +from bot import constants +from bot.decorators import in_whitelist +from bot.utils.checks import InWhitelistCheckFailure +from tests import helpers + +InWhitelistTestCase = collections.namedtuple("WhitelistedContextTestCase", ("kwargs", "ctx", "description")) + + +class InWhitelistTests(unittest.TestCase): + """Tests for the `in_whitelist` check.""" + + @classmethod + def setUpClass(cls): + """Set up helpers that only need to be defined once.""" + cls.bot_commands = helpers.MockTextChannel(id=123456789, category_id=123456) + cls.help_channel = helpers.MockTextChannel(id=987654321, category_id=987654) + cls.non_whitelisted_channel = helpers.MockTextChannel(id=666666) + cls.dm_channel = helpers.MockDMChannel() + + cls.non_staff_member = helpers.MockMember() + cls.staff_role = helpers.MockRole(id=121212) + cls.staff_member = helpers.MockMember(roles=(cls.staff_role,)) + + cls.channels = (cls.bot_commands.id,) + cls.categories = (cls.help_channel.category_id,) + cls.roles = (cls.staff_role.id,) + + def test_predicate_returns_true_for_whitelisted_context(self): + """The predicate should return `True` if a whitelisted context was passed to it.""" + test_cases = ( + InWhitelistTestCase( + kwargs={"channels": self.channels}, + ctx=helpers.MockContext(channel=self.bot_commands, author=self.non_staff_member), + description="In whitelisted channels by members without whitelisted roles", + ), + InWhitelistTestCase( + kwargs={"redirect": self.bot_commands.id}, + ctx=helpers.MockContext(channel=self.bot_commands, author=self.non_staff_member), + description="`redirect` should be implicitly added to `channels`", + ), + InWhitelistTestCase( + kwargs={"categories": self.categories}, + ctx=helpers.MockContext(channel=self.help_channel, author=self.non_staff_member), + description="Whitelisted category without whitelisted role", + ), + InWhitelistTestCase( + kwargs={"roles": self.roles}, + ctx=helpers.MockContext(channel=self.non_whitelisted_channel, author=self.staff_member), + description="Whitelisted role outside of whitelisted channel/category" + ), + InWhitelistTestCase( + kwargs={ + "channels": self.channels, + "categories": self.categories, + "roles": self.roles, + "redirect": self.bot_commands, + }, + ctx=helpers.MockContext(channel=self.help_channel, author=self.staff_member), + description="Case with all whitelist 
kwargs used", + ), + ) + + for test_case in test_cases: + # patch `commands.check` with a no-op lambda that just returns the predicate passed to it + # so we can test the predicate that was generated from the specified kwargs. + with unittest.mock.patch("bot.decorators.commands.check", new=lambda predicate: predicate): + predicate = in_whitelist(**test_case.kwargs) + + with self.subTest(test_description=test_case.description): + self.assertTrue(predicate(test_case.ctx)) + + def test_predicate_raises_exception_for_non_whitelisted_context(self): + """The predicate should raise `InWhitelistCheckFailure` for a non-whitelisted context.""" + test_cases = ( + # Failing check with explicit `redirect` + InWhitelistTestCase( + kwargs={ + "categories": self.categories, + "channels": self.channels, + "roles": self.roles, + "redirect": self.bot_commands.id, + }, + ctx=helpers.MockContext(channel=self.non_whitelisted_channel, author=self.non_staff_member), + description="Failing check with an explicit redirect channel", + ), + + # Failing check with implicit `redirect` + InWhitelistTestCase( + kwargs={ + "categories": self.categories, + "channels": self.channels, + "roles": self.roles, + }, + ctx=helpers.MockContext(channel=self.non_whitelisted_channel, author=self.non_staff_member), + description="Failing check with an implicit redirect channel", + ), + + # Failing check without `redirect` + InWhitelistTestCase( + kwargs={ + "categories": self.categories, + "channels": self.channels, + "roles": self.roles, + "redirect": None, + }, + ctx=helpers.MockContext(channel=self.non_whitelisted_channel, author=self.non_staff_member), + description="Failing check without a redirect channel", + ), + + # Command issued in DM channel + InWhitelistTestCase( + kwargs={ + "categories": self.categories, + "channels": self.channels, + "roles": self.roles, + "redirect": None, + }, + ctx=helpers.MockContext(channel=self.dm_channel, author=self.dm_channel.me), + description="Commands issued in DM channel should be rejected", + ), + ) + + for test_case in test_cases: + if "redirect" not in test_case.kwargs or test_case.kwargs["redirect"] is not None: + # There are two cases in which we have a redirect channel: + # 1. No redirect channel was passed; the default value of `bot_commands` is used + # 2. An explicit `redirect` is set that is "not None" + redirect_channel = test_case.kwargs.get("redirect", constants.Channels.bot_commands) + redirect_message = f" here. Please use the <#{redirect_channel}> channel instead" + else: + # If an explicit `None` was passed for `redirect`, there is no redirect channel + redirect_message = "" + + exception_message = f"You are not allowed to use that command{redirect_message}." + + # patch `commands.check` with a no-op lambda that just returns the predicate passed to it + # so we can test the predicate that was generated from the specified kwargs. 
+ with unittest.mock.patch("bot.decorators.commands.check", new=lambda predicate: predicate): + predicate = in_whitelist(**test_case.kwargs) + + with self.subTest(test_description=test_case.description): + with self.assertRaisesRegex(InWhitelistCheckFailure, exception_message): + predicate(test_case.ctx) diff --git a/tests/bot/test_pagination.py b/tests/bot/test_pagination.py index 0a734b505..630f2516d 100644 --- a/tests/bot/test_pagination.py +++ b/tests/bot/test_pagination.py @@ -8,29 +8,39 @@ class LinePaginatorTests(TestCase): def setUp(self): """Create a paginator for the test method.""" - self.paginator = pagination.LinePaginator(prefix='', suffix='', max_size=30) - - def test_add_line_raises_on_too_long_lines(self): - """`add_line` should raise a `RuntimeError` for too long lines.""" - message = f"Line exceeds maximum page size {self.paginator.max_size - 2}" - with self.assertRaises(RuntimeError, msg=message): - self.paginator.add_line('x' * self.paginator.max_size) + self.paginator = pagination.LinePaginator(prefix='', suffix='', max_size=30, + scale_to_size=50) def test_add_line_works_on_small_lines(self): """`add_line` should allow small lines to be added.""" self.paginator.add_line('x' * (self.paginator.max_size - 3)) - - -class ImagePaginatorTests(TestCase): - """Tests functionality of the `ImagePaginator`.""" - - def setUp(self): - """Create a paginator for the test method.""" - self.paginator = pagination.ImagePaginator() - - def test_add_image_appends_image(self): - """`add_image` appends the image to the image list.""" - image = 'lemon' - self.paginator.add_image(image) - - assert self.paginator.images == [image] + # Note that the page isn't added to _pages until it's full. + self.assertEqual(len(self.paginator._pages), 0) + + def test_add_line_works_on_long_lines(self): + """Additional lines added after `max_size` is exceeded should go on the next page.""" + self.paginator.add_line('x' * self.paginator.max_size) + self.assertEqual(len(self.paginator._pages), 0) + + # Any additional lines should start a new page after `max_size` is exceeded. + self.paginator.add_line('x') + self.assertEqual(len(self.paginator._pages), 1) + + def test_add_line_continuation(self): + """When `scale_to_size` is exceeded, remaining words should be split onto the next page.""" + self.paginator.add_line('zyz ' * (self.paginator.scale_to_size//4 + 1)) + self.assertEqual(len(self.paginator._pages), 1) + + def test_add_line_no_continuation(self): + """If adding a new line to an existing page would exceed `max_size`, it should start a new + page rather than using continuation.
+ """ + self.paginator.add_line('z' * (self.paginator.max_size - 3)) + self.paginator.add_line('z') + self.assertEqual(len(self.paginator._pages), 1) + + def test_add_line_truncates_very_long_words(self): + """`add_line` should truncate if a single long word exceeds `scale_to_size`.""" + self.paginator.add_line('x' * (self.paginator.scale_to_size + 1)) + # Note: item at index 1 is the truncated line, index 0 is prefix + self.assertEqual(self.paginator._current_page[1], 'x' * self.paginator.scale_to_size) diff --git a/tests/bot/test_utils.py b/tests/bot/test_utils.py deleted file mode 100644 index 58ae2a81a..000000000 --- a/tests/bot/test_utils.py +++ /dev/null @@ -1,52 +0,0 @@ -import unittest - -from bot import utils - - -class CaseInsensitiveDictTests(unittest.TestCase): - """Tests for the `CaseInsensitiveDict` container.""" - - def test_case_insensitive_key_access(self): - """Tests case insensitive key access and storage.""" - instance = utils.CaseInsensitiveDict() - - key = 'LEMON' - value = 'trees' - - instance[key] = value - self.assertIn(key, instance) - self.assertEqual(instance.get(key), value) - self.assertEqual(instance.get(key.casefold()), value) - self.assertEqual(instance.pop(key.casefold()), value) - self.assertNotIn(key, instance) - self.assertNotIn(key.casefold(), instance) - - instance.setdefault(key, value) - del instance[key] - self.assertNotIn(key, instance) - - def test_initialization_from_kwargs(self): - """Tests creating the dictionary from keyword arguments.""" - instance = utils.CaseInsensitiveDict({'FOO': 'bar'}) - self.assertEqual(instance['foo'], 'bar') - - def test_update_from_other_mapping(self): - """Tests updating the dictionary from another mapping.""" - instance = utils.CaseInsensitiveDict() - instance.update({'FOO': 'bar'}) - self.assertEqual(instance['foo'], 'bar') - - -class ChunkTests(unittest.TestCase): - """Tests the `chunk` method.""" - - def test_empty_chunking(self): - """Tests chunking on an empty iterable.""" - generator = utils.chunks(iterable=[], size=5) - self.assertEqual(list(generator), []) - - def test_list_chunking(self): - """Tests chunking a non-empty list.""" - iterable = [1, 2, 3, 4, 5] - generator = utils.chunks(iterable=iterable, size=2) - self.assertEqual(list(generator), [[1, 2], [3, 4], [5]]) diff --git a/tests/bot/utils/test_checks.py b/tests/bot/utils/test_checks.py index 9610771e5..883465e0b 100644 --- a/tests/bot/utils/test_checks.py +++ b/tests/bot/utils/test_checks.py @@ -1,51 +1,93 @@ import unittest +from unittest.mock import MagicMock + +from discord import DMChannel from bot.utils import checks +from bot.utils.checks import InWhitelistCheckFailure from tests.helpers import MockContext, MockRole -class ChecksTests(unittest.TestCase): +class ChecksTests(unittest.IsolatedAsyncioTestCase): """Tests the check functions defined in `bot.checks`.""" def setUp(self): self.ctx = MockContext() - def test_with_role_check_without_guild(self): - """`with_role_check` returns `False` if `Context.guild` is None.""" - self.ctx.guild = None - self.assertFalse(checks.with_role_check(self.ctx)) + async def test_has_any_role_check_without_guild(self): + """`has_any_role_check` returns `False` for non-guild channels.""" + self.ctx.channel = MagicMock(DMChannel) + self.assertFalse(await checks.has_any_role_check(self.ctx)) - def test_with_role_check_without_required_roles(self): - """`with_role_check` returns `False` if `Context.author` lacks the required role.""" + async def test_has_any_role_check_without_required_roles(self): + 
"""`has_any_role_check` returns `False` if `Context.author` lacks the required role.""" self.ctx.author.roles = [] - self.assertFalse(checks.with_role_check(self.ctx)) + self.assertFalse(await checks.has_any_role_check(self.ctx)) - def test_with_role_check_with_guild_and_required_role(self): - """`with_role_check` returns `True` if `Context.author` has the required role.""" + async def test_has_any_role_check_with_guild_and_required_role(self): + """`has_any_role_check` returns `True` if `Context.author` has the required role.""" self.ctx.author.roles.append(MockRole(id=10)) - self.assertTrue(checks.with_role_check(self.ctx, 10)) + self.assertTrue(await checks.has_any_role_check(self.ctx, 10)) - def test_without_role_check_without_guild(self): - """`without_role_check` should return `False` when `Context.guild` is None.""" - self.ctx.guild = None - self.assertFalse(checks.without_role_check(self.ctx)) + async def test_has_no_roles_check_without_guild(self): + """`has_no_roles_check` should return `False` when `Context.guild` is None.""" + self.ctx.channel = MagicMock(DMChannel) + self.assertFalse(await checks.has_no_roles_check(self.ctx)) - def test_without_role_check_returns_false_with_unwanted_role(self): - """`without_role_check` returns `False` if `Context.author` has unwanted role.""" + async def test_has_no_roles_check_returns_false_with_unwanted_role(self): + """`has_no_roles_check` returns `False` if `Context.author` has unwanted role.""" role_id = 42 self.ctx.author.roles.append(MockRole(id=role_id)) - self.assertFalse(checks.without_role_check(self.ctx, role_id)) + self.assertFalse(await checks.has_no_roles_check(self.ctx, role_id)) - def test_without_role_check_returns_true_without_unwanted_role(self): - """`without_role_check` returns `True` if `Context.author` does not have unwanted role.""" + async def test_has_no_roles_check_returns_true_without_unwanted_role(self): + """`has_no_roles_check` returns `True` if `Context.author` does not have unwanted role.""" role_id = 42 self.ctx.author.roles.append(MockRole(id=role_id)) - self.assertTrue(checks.without_role_check(self.ctx, role_id + 10)) + self.assertTrue(await checks.has_no_roles_check(self.ctx, role_id + 10)) + + def test_in_whitelist_check_correct_channel(self): + """`in_whitelist_check` returns `True` if `Context.channel.id` is in the channel list.""" + channel_id = 3 + self.ctx.channel.id = channel_id + self.assertTrue(checks.in_whitelist_check(self.ctx, [channel_id])) + + def test_in_whitelist_check_incorrect_channel(self): + """`in_whitelist_check` raises InWhitelistCheckFailure if there's no channel match.""" + self.ctx.channel.id = 3 + with self.assertRaises(InWhitelistCheckFailure): + checks.in_whitelist_check(self.ctx, [4]) + + def test_in_whitelist_check_correct_category(self): + """`in_whitelist_check` returns `True` if `Context.channel.category_id` is in the category list.""" + category_id = 3 + self.ctx.channel.category_id = category_id + self.assertTrue(checks.in_whitelist_check(self.ctx, categories=[category_id])) + + def test_in_whitelist_check_incorrect_category(self): + """`in_whitelist_check` raises InWhitelistCheckFailure if there's no category match.""" + self.ctx.channel.category_id = 3 + with self.assertRaises(InWhitelistCheckFailure): + checks.in_whitelist_check(self.ctx, categories=[4]) + + def test_in_whitelist_check_correct_role(self): + """`in_whitelist_check` returns `True` if any of the `Context.author.roles` are in the roles list.""" + self.ctx.author.roles = (MagicMock(id=1), 
MagicMock(id=2)) + self.assertTrue(checks.in_whitelist_check(self.ctx, roles=[2, 6])) + + def test_in_whitelist_check_incorrect_role(self): + """`in_whitelist_check` raises InWhitelistCheckFailure if there's no role match.""" + self.ctx.author.roles = (MagicMock(id=1), MagicMock(id=2)) + with self.assertRaises(InWhitelistCheckFailure): + checks.in_whitelist_check(self.ctx, roles=[4]) - def test_in_channel_check_for_correct_channel(self): - self.ctx.channel.id = 42 - self.assertTrue(checks.in_channel_check(self.ctx, *[42])) + def test_in_whitelist_check_fail_silently(self): + """`in_whitelist_check` test no exception raised if `fail_silently` is `True`""" + self.assertFalse(checks.in_whitelist_check(self.ctx, roles=[2, 6], fail_silently=True)) - def test_in_channel_check_for_incorrect_channel(self): - self.ctx.channel.id = 42 + 10 - self.assertFalse(checks.in_channel_check(self.ctx, *[42])) + def test_in_whitelist_check_complex(self): + """`in_whitelist_check` test with multiple parameters""" + self.ctx.author.roles = (MagicMock(id=1), MagicMock(id=2)) + self.ctx.channel.category_id = 3 + self.ctx.channel.id = 5 + self.assertTrue(checks.in_whitelist_check(self.ctx, channels=[1], categories=[8], roles=[2])) diff --git a/tests/bot/utils/test_messages.py b/tests/bot/utils/test_messages.py new file mode 100644 index 000000000..9c22c9751 --- /dev/null +++ b/tests/bot/utils/test_messages.py @@ -0,0 +1,27 @@ +import unittest + +from bot.utils import messages + + +class TestMessages(unittest.TestCase): + """Tests for functions in the `bot.utils.messages` module.""" + + def test_sub_clyde(self): + """Uppercase E's and lowercase e's are substituted with their cyrillic counterparts.""" + sub_e = "\u0435" + sub_E = "\u0415" # noqa: N806: Uppercase E in variable name + + test_cases = ( + (None, None), + ("", ""), + ("clyde", f"clyd{sub_e}"), + ("CLYDE", f"CLYD{sub_E}"), + ("cLyDe", f"cLyD{sub_e}"), + ("BIGclyde", f"BIGclyd{sub_e}"), + ("small clydeus the unholy", f"small clyd{sub_e}us the unholy"), + ("BIGCLYDE, babyclyde", f"BIGCLYD{sub_E}, babyclyd{sub_e}"), + ) + + for username_in, username_out in test_cases: + with self.subTest(input=username_in, expected_output=username_out): + self.assertEqual(messages.sub_clyde(username_in), username_out) diff --git a/tests/bot/utils/test_services.py b/tests/bot/utils/test_services.py new file mode 100644 index 000000000..1b48f6560 --- /dev/null +++ b/tests/bot/utils/test_services.py @@ -0,0 +1,77 @@ +import logging +import unittest +from unittest.mock import AsyncMock, MagicMock, Mock, patch + +from aiohttp import ClientConnectorError + +from bot.utils.services import FAILED_REQUEST_ATTEMPTS, send_to_paste_service +from tests.helpers import MockBot + + +class PasteTests(unittest.IsolatedAsyncioTestCase): + def setUp(self) -> None: + patcher = patch("bot.instance", new=MockBot()) + self.bot = patcher.start() + self.addCleanup(patcher.stop) + + @patch("bot.utils.services.URLs.paste_service", "https://paste_service.com/{key}") + async def test_url_and_sent_contents(self): + """Correct url was used and post was called with expected data.""" + response = MagicMock( + json=AsyncMock(return_value={"key": ""}) + ) + self.bot.http_session.post.return_value.__aenter__.return_value = response + self.bot.http_session.post.reset_mock() + await send_to_paste_service("Content") + self.bot.http_session.post.assert_called_once_with("https://paste_service.com/documents", data="Content") + + @patch("bot.utils.services.URLs.paste_service", "https://paste_service.com/{key}") + async 
def test_paste_returns_correct_url_on_success(self): + """Url with specified extension is returned on successful requests.""" + key = "paste_key" + test_cases = ( + (f"https://paste_service.com/{key}.txt", "txt"), + (f"https://paste_service.com/{key}.py", "py"), + (f"https://paste_service.com/{key}", ""), + ) + response = MagicMock( + json=AsyncMock(return_value={"key": key}) + ) + self.bot.http_session.post.return_value.__aenter__.return_value = response + + for expected_output, extension in test_cases: + with self.subTest(msg=f"Send contents with extension {repr(extension)}"): + self.assertEqual( + await send_to_paste_service("", extension=extension), + expected_output + ) + + async def test_request_repeated_on_json_errors(self): + """Json with error message and invalid json are handled as errors and requests repeated.""" + test_cases = ({"message": "error"}, {"unexpected_key": None}, {}) + self.bot.http_session.post.return_value.__aenter__.return_value = response = MagicMock() + self.bot.http_session.post.reset_mock() + + for error_json in test_cases: + with self.subTest(error_json=error_json): + response.json = AsyncMock(return_value=error_json) + result = await send_to_paste_service("") + self.assertEqual(self.bot.http_session.post.call_count, FAILED_REQUEST_ATTEMPTS) + self.assertIsNone(result) + + self.bot.http_session.post.reset_mock() + + async def test_request_repeated_on_connection_errors(self): + """Requests are repeated in the case of connection errors.""" + self.bot.http_session.post = MagicMock(side_effect=ClientConnectorError(Mock(), Mock())) + result = await send_to_paste_service("") + self.assertEqual(self.bot.http_session.post.call_count, FAILED_REQUEST_ATTEMPTS) + self.assertIsNone(result) + + async def test_general_error_handled_and_request_repeated(self): + """All `Exception`s are handled, logged and request repeated.""" + self.bot.http_session.post = MagicMock(side_effect=Exception) + result = await send_to_paste_service("") + self.assertEqual(self.bot.http_session.post.call_count, FAILED_REQUEST_ATTEMPTS) + self.assertLogs("bot.utils", logging.ERROR) + self.assertIsNone(result) diff --git a/tests/bot/utils/test_time.py b/tests/bot/utils/test_time.py index 69f35f2f5..694d3a40f 100644 --- a/tests/bot/utils/test_time.py +++ b/tests/bot/utils/test_time.py @@ -1,12 +1,11 @@ import asyncio import unittest from datetime import datetime, timezone -from unittest.mock import patch +from unittest.mock import AsyncMock, patch from dateutil.relativedelta import relativedelta from bot.utils import time -from tests.helpers import AsyncMock class TimeTests(unittest.TestCase): @@ -44,7 +43,7 @@ class TimeTests(unittest.TestCase): for max_units in test_cases: with self.subTest(max_units=max_units), self.assertRaises(ValueError) as error: time.humanize_delta(relativedelta(days=2, hours=2), 'hours', max_units) - self.assertEqual(str(error), 'max_units must be positive') + self.assertEqual(str(error.exception), 'max_units must be positive') def test_parse_rfc1123(self): """Testing parse_rfc1123.""" diff --git a/tests/helpers.py b/tests/helpers.py index 5df796c23..496363ae3 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -1,18 +1,20 @@ from __future__ import annotations -import asyncio import collections -import functools -import inspect import itertools import logging import unittest.mock -from typing import Any, Iterable, Optional +from asyncio import AbstractEventLoop +from typing import Iterable, Optional import discord +from aiohttp import ClientSession from 
discord.ext.commands import Context +from bot.api import APIClient +from bot.async_stats import AsyncStatsClient from bot.bot import Bot +from tests._autospec import autospec # noqa: F401 other modules import it via this module for logger in logging.Logger.manager.loggerDict.values(): @@ -25,21 +27,6 @@ for logger in logging.Logger.manager.loggerDict.values(): logger.setLevel(logging.CRITICAL) -def async_test(wrapped): - """ - Run a test case via asyncio. - Example: - >>> @async_test - ... async def lemon_wins(): - ... assert True - """ - - @functools.wraps(wrapped) - def wrapper(*args, **kwargs): - return asyncio.run(wrapped(*args, **kwargs)) - return wrapper - - class HashableMixin(discord.mixins.EqualityComparable): """ Mixin that provides similar hashing and equality functionality as discord.py's `Hashable` mixin. @@ -68,24 +55,31 @@ class CustomMockMixin: """ Provides common functionality for our custom Mock types. - The cooperative `__init__` automatically creates `AsyncMock` attributes for every coroutine - function `inspect` detects in the `spec` instance we provide. In addition, this mixin takes care - of making sure child mocks are instantiated with the correct class. By default, the mock of the - children will be `unittest.mock.MagicMock`, but this can be overwritten by setting the attribute - `child_mock_type` on the custom mock inheriting from this mixin. + The `_get_child_mock` method automatically returns an AsyncMock for coroutine methods of the mock + object. As discord.py also uses synchronous methods that nonetheless return coroutine objects, the + class attribute `additional_spec_asyncs` can be overwritten with an iterable containing additional + attribute names that should also be mocked with an AsyncMock instead of a regular MagicMock/Mock. The + class attribute `spec_set` can be overwritten with the object that should be used as the specification + for the mock. + + Mock/MagicMock subclasses that use this mixin only need to define an `__init__` method if they need to + implement custom behavior. """ child_mock_type = unittest.mock.MagicMock discord_id = itertools.count(0) + spec_set = None + additional_spec_asyncs = None - def __init__(self, spec_set: Any = None, **kwargs): + def __init__(self, **kwargs): name = kwargs.pop('name', None) # `name` has special meaning for Mock classes, so we need to set it manually. - super().__init__(spec_set=spec_set, **kwargs) + super().__init__(spec_set=self.spec_set, **kwargs) + + if self.additional_spec_asyncs: + self._spec_asyncs.extend(self.additional_spec_asyncs) if name: self.name = name - if spec_set: - self._extract_coroutine_methods_from_spec_instance(spec_set) def _get_child_mock(self, **kw): """ @@ -99,7 +93,16 @@ class CustomMockMixin: This override will look for an attribute called `child_mock_type` and use that as the type of the child mock. """ - klass = self.child_mock_type + _new_name = kw.get("_new_name") + if _new_name in self.__dict__['_spec_asyncs']: + return unittest.mock.AsyncMock(**kw) + + _type = type(self) + if issubclass(_type, unittest.mock.MagicMock) and _new_name in unittest.mock._async_method_magics: + # Any asynchronous magic becomes an AsyncMock + klass = unittest.mock.AsyncMock + else: + klass = self.child_mock_type if self._mock_sealed: attribute = "."
+ kw["name"] if "name" in kw else "()" @@ -108,95 +111,6 @@ class CustomMockMixin: return klass(**kw) - def _extract_coroutine_methods_from_spec_instance(self, source: Any) -> None: - """Automatically detect coroutine functions in `source` and set them as AsyncMock attributes.""" - for name, _method in inspect.getmembers(source, inspect.iscoroutinefunction): - setattr(self, name, AsyncMock()) - - -# TODO: Remove me in Python 3.8 -class AsyncMock(CustomMockMixin, unittest.mock.MagicMock): - """ - A MagicMock subclass to mock async callables. - - Python 3.8 will introduce an AsyncMock class in the standard library that will have some more - features; this stand-in only overwrites the `__call__` method to an async version. - """ - - async def __call__(self, *args, **kwargs): - return super().__call__(*args, **kwargs) - - -class AsyncIteratorMock: - """ - A class to mock asynchronous iterators. - - This allows async for, which is used in certain Discord.py objects. For example, - an async iterator is returned by the Reaction.users() method. - """ - - def __init__(self, iterable: Iterable = None): - if iterable is None: - iterable = [] - - self.iter = iter(iterable) - self.iterable = iterable - - self.call_count = 0 - - def __aiter__(self): - return self - - async def __anext__(self): - try: - return next(self.iter) - except StopIteration: - raise StopAsyncIteration - - def __call__(self): - """ - Keeps track of the number of times an instance has been called. - - This is useful, since it typically shows that the iterator has actually been used somewhere after we have - instantiated the mock for an attribute that normally returns an iterator when called. - """ - self.call_count += 1 - return self - - @property - def return_value(self): - """Makes `self.iterable` accessible as self.return_value.""" - return self.iterable - - @return_value.setter - def return_value(self, iterable): - """Stores the `return_value` as `self.iterable` and its iterator as `self.iter`.""" - self.iter = iter(iterable) - self.iterable = iterable - - def assert_called(self): - """Asserts if the AsyncIteratorMock instance has been called at least once.""" - if self.call_count == 0: - raise AssertionError("Expected AsyncIteratorMock to have been called.") - - def assert_called_once(self): - """Asserts if the AsyncIteratorMock instance has been called exactly once.""" - if self.call_count != 1: - raise AssertionError( - f"Expected AsyncIteratorMock to have been called once. Called {self.call_count} times." - ) - - def assert_not_called(self): - """Asserts if the AsyncIteratorMock instance has not been called.""" - if self.call_count != 0: - raise AssertionError( - f"Expected AsyncIteratorMock to not have been called once. Called {self.call_count} times." - ) - - def reset_mock(self): - """Resets the call count, but not the return value or iterator.""" - self.call_count = 0 - # Create a guild instance to get a realistic Mock of `discord.Guild` guild_data = { @@ -247,9 +161,11 @@ class MockGuild(CustomMockMixin, unittest.mock.Mock, HashableMixin): For more info, see the `Mocking` section in `tests/README.md`. 
""" + spec_set = guild_instance + def __init__(self, roles: Optional[Iterable[MockRole]] = None, **kwargs) -> None: default_kwargs = {'id': next(self.discord_id), 'members': []} - super().__init__(spec_set=guild_instance, **collections.ChainMap(kwargs, default_kwargs)) + super().__init__(**collections.ChainMap(kwargs, default_kwargs)) self.roles = [MockRole(name="@everyone", position=1, id=0)] if roles: @@ -268,9 +184,23 @@ class MockRole(CustomMockMixin, unittest.mock.Mock, ColourMixin, HashableMixin): Instances of this class will follow the specifications of `discord.Role` instances. For more information, see the `MockGuild` docstring. """ + spec_set = role_instance + def __init__(self, **kwargs) -> None: - default_kwargs = {'id': next(self.discord_id), 'name': 'role', 'position': 1} - super().__init__(spec_set=role_instance, **collections.ChainMap(kwargs, default_kwargs)) + default_kwargs = { + 'id': next(self.discord_id), + 'name': 'role', + 'position': 1, + 'colour': discord.Colour(0xdeadbf), + 'permissions': discord.Permissions(), + } + super().__init__(**collections.ChainMap(kwargs, default_kwargs)) + + if isinstance(self.colour, int): + self.colour = discord.Colour(self.colour) + + if isinstance(self.permissions, int): + self.permissions = discord.Permissions(self.permissions) if 'mention' not in kwargs: self.mention = f'&{self.name}' @@ -279,6 +209,10 @@ class MockRole(CustomMockMixin, unittest.mock.Mock, ColourMixin, HashableMixin): """Simplified position-based comparisons similar to those of `discord.Role`.""" return self.position < other.position + def __ge__(self, other): + """Simplified position-based comparisons similar to those of `discord.Role`.""" + return self.position >= other.position + # Create a Member instance to get a realistic Mock of `discord.Member` member_data = {'user': 'lemon', 'roles': [1]} @@ -293,9 +227,11 @@ class MockMember(CustomMockMixin, unittest.mock.Mock, ColourMixin, HashableMixin Instances of this class will follow the specifications of `discord.Member` instances. For more information, see the `MockGuild` docstring. """ + spec_set = member_instance + def __init__(self, roles: Optional[Iterable[MockRole]] = None, **kwargs) -> None: - default_kwargs = {'name': 'member', 'id': next(self.discord_id), 'bot': False} - super().__init__(spec_set=member_instance, **collections.ChainMap(kwargs, default_kwargs)) + default_kwargs = {'name': 'member', 'id': next(self.discord_id), 'bot': False, "pending": False} + super().__init__(**collections.ChainMap(kwargs, default_kwargs)) self.roles = [MockRole(name="@everyone", position=1, id=0)] if roles: @@ -316,18 +252,36 @@ class MockUser(CustomMockMixin, unittest.mock.Mock, ColourMixin, HashableMixin): Instances of this class will follow the specifications of `discord.User` instances. For more information, see the `MockGuild` docstring. 
""" + spec_set = user_instance + def __init__(self, **kwargs) -> None: default_kwargs = {'name': 'user', 'id': next(self.discord_id), 'bot': False} - super().__init__(spec_set=user_instance, **collections.ChainMap(kwargs, default_kwargs)) + super().__init__(**collections.ChainMap(kwargs, default_kwargs)) if 'mention' not in kwargs: self.mention = f"@{self.name}" -# Create a Bot instance to get a realistic MagicMock of `discord.ext.commands.Bot` -bot_instance = Bot(command_prefix=unittest.mock.MagicMock()) -bot_instance.http_session = None -bot_instance.api_client = None +class MockAPIClient(CustomMockMixin, unittest.mock.MagicMock): + """ + A MagicMock subclass to mock APIClient objects. + + Instances of this class will follow the specifications of `bot.api.APIClient` instances. + For more information, see the `MockGuild` docstring. + """ + spec_set = APIClient + + +def _get_mock_loop() -> unittest.mock.Mock: + """Return a mocked asyncio.AbstractEventLoop.""" + loop = unittest.mock.create_autospec(spec=AbstractEventLoop, spec_set=True) + + # Since calling `create_task` on our MockBot does not actually schedule the coroutine object + # as a task in the asyncio loop, this `side_effect` calls `close()` on the coroutine object + # to prevent "has not been awaited"-warnings. + loop.create_task.side_effect = lambda coroutine: coroutine.close() + + return loop class MockBot(CustomMockMixin, unittest.mock.MagicMock): @@ -337,19 +291,20 @@ class MockBot(CustomMockMixin, unittest.mock.MagicMock): Instances of this class will follow the specifications of `discord.ext.commands.Bot` instances. For more information, see the `MockGuild` docstring. """ + spec_set = Bot( + command_prefix=unittest.mock.MagicMock(), + loop=_get_mock_loop(), + redis_session=unittest.mock.MagicMock(), + ) + additional_spec_asyncs = ("wait_for", "redis_ready") def __init__(self, **kwargs) -> None: - super().__init__(spec_set=bot_instance, **kwargs) - - # self.wait_for is *not* a coroutine function, but returns a coroutine nonetheless and - # and should therefore be awaited. (The documentation calls it a coroutine as well, which - # is technically incorrect, since it's a regular def.) - self.wait_for = AsyncMock() + super().__init__(**kwargs) - # Since calling `create_task` on our MockBot does not actually schedule the coroutine object - # as a task in the asyncio loop, this `side_effect` calls `close()` on the coroutine object - # to prevent "has not been awaited"-warnings. - self.loop.create_task.side_effect = lambda coroutine: coroutine.close() + self.loop = _get_mock_loop() + self.api_client = MockAPIClient(loop=self.loop) + self.http_session = unittest.mock.create_autospec(spec=ClientSession, spec_set=True) + self.stats = unittest.mock.create_autospec(spec=AsyncStatsClient, spec_set=True) # Create a TextChannel instance to get a realistic MagicMock of `discord.TextChannel` @@ -375,15 +330,37 @@ class MockTextChannel(CustomMockMixin, unittest.mock.Mock, HashableMixin): Instances of this class will follow the specifications of `discord.TextChannel` instances. For more information, see the `MockGuild` docstring. 
""" + spec_set = channel_instance - def __init__(self, name: str = 'channel', channel_id: int = 1, **kwargs) -> None: + def __init__(self, **kwargs) -> None: default_kwargs = {'id': next(self.discord_id), 'name': 'channel', 'guild': MockGuild()} - super().__init__(spec_set=channel_instance, **collections.ChainMap(kwargs, default_kwargs)) + super().__init__(**collections.ChainMap(kwargs, default_kwargs)) if 'mention' not in kwargs: self.mention = f"#{self.name}" +# Create data for the DMChannel instance +state = unittest.mock.MagicMock() +me = unittest.mock.MagicMock() +dm_channel_data = {"id": 1, "recipients": [unittest.mock.MagicMock()]} +dm_channel_instance = discord.DMChannel(me=me, state=state, data=dm_channel_data) + + +class MockDMChannel(CustomMockMixin, unittest.mock.Mock, HashableMixin): + """ + A MagicMock subclass to mock TextChannel objects. + + Instances of this class will follow the specifications of `discord.TextChannel` instances. For + more information, see the `MockGuild` docstring. + """ + spec_set = dm_channel_instance + + def __init__(self, **kwargs) -> None: + default_kwargs = {'id': next(self.discord_id), 'recipient': MockUser(), "me": MockUser()} + super().__init__(**collections.ChainMap(kwargs, default_kwargs)) + + # Create a Message instance to get a realistic MagicMock of `discord.Message` message_data = { 'id': 1, @@ -417,9 +394,10 @@ class MockContext(CustomMockMixin, unittest.mock.MagicMock): Instances of this class will follow the specifications of `discord.ext.commands.Context` instances. For more information, see the `MockGuild` docstring. """ + spec_set = context_instance def __init__(self, **kwargs) -> None: - super().__init__(spec_set=context_instance, **kwargs) + super().__init__(**kwargs) self.bot = kwargs.get('bot', MockBot()) self.guild = kwargs.get('guild', MockGuild()) self.author = kwargs.get('author', MockMember()) @@ -436,8 +414,7 @@ class MockAttachment(CustomMockMixin, unittest.mock.MagicMock): Instances of this class will follow the specifications of `discord.Attachment` instances. For more information, see the `MockGuild` docstring. """ - def __init__(self, **kwargs) -> None: - super().__init__(spec_set=attachment_instance, **kwargs) + spec_set = attachment_instance class MockMessage(CustomMockMixin, unittest.mock.MagicMock): @@ -447,10 +424,11 @@ class MockMessage(CustomMockMixin, unittest.mock.MagicMock): Instances of this class will follow the specifications of `discord.Message` instances. For more information, see the `MockGuild` docstring. """ + spec_set = message_instance def __init__(self, **kwargs) -> None: default_kwargs = {'attachments': []} - super().__init__(spec_set=message_instance, **collections.ChainMap(kwargs, default_kwargs)) + super().__init__(**collections.ChainMap(kwargs, default_kwargs)) self.author = kwargs.get('author', MockMember()) self.channel = kwargs.get('channel', MockTextChannel()) @@ -466,9 +444,10 @@ class MockEmoji(CustomMockMixin, unittest.mock.MagicMock): Instances of this class will follow the specifications of `discord.Emoji` instances. For more information, see the `MockGuild` docstring. """ + spec_set = emoji_instance def __init__(self, **kwargs) -> None: - super().__init__(spec_set=emoji_instance, **kwargs) + super().__init__(**kwargs) self.guild = kwargs.get('guild', MockGuild()) @@ -482,9 +461,7 @@ class MockPartialEmoji(CustomMockMixin, unittest.mock.MagicMock): Instances of this class will follow the specifications of `discord.PartialEmoji` instances. 
For more information, see the `MockGuild` docstring. """ - - def __init__(self, **kwargs) -> None: - super().__init__(spec_set=partial_emoji_instance, **kwargs) + spec_set = partial_emoji_instance reaction_instance = discord.Reaction(message=MockMessage(), data={'me': True}, emoji=MockEmoji()) @@ -497,12 +474,19 @@ class MockReaction(CustomMockMixin, unittest.mock.MagicMock): Instances of this class will follow the specifications of `discord.Reaction` instances. For more information, see the `MockGuild` docstring. """ + spec_set = reaction_instance def __init__(self, **kwargs) -> None: - super().__init__(spec_set=reaction_instance, **kwargs) + _users = kwargs.pop("users", []) + super().__init__(**kwargs) self.emoji = kwargs.get('emoji', MockEmoji()) self.message = kwargs.get('message', MockMessage()) - self.users = AsyncIteratorMock(kwargs.get('users', [])) + + user_iterator = unittest.mock.AsyncMock() + user_iterator.__aiter__.return_value = _users + self.users.return_value = user_iterator + + self.__str__.return_value = str(self.emoji) webhook_instance = discord.Webhook(data=unittest.mock.MagicMock(), adapter=unittest.mock.MagicMock()) @@ -515,13 +499,5 @@ class MockAsyncWebhook(CustomMockMixin, unittest.mock.MagicMock): Instances of this class will follow the specifications of `discord.Webhook` instances. For more information, see the `MockGuild` docstring. """ - - def __init__(self, **kwargs) -> None: - super().__init__(spec_set=webhook_instance, **kwargs) - - # Because Webhooks can also use a synchronous "WebhookAdapter", the methods are not defined - # as coroutines. That's why we need to set the methods manually. - self.send = AsyncMock() - self.edit = AsyncMock() - self.delete = AsyncMock() - self.execute = AsyncMock() + spec_set = webhook_instance + additional_spec_asyncs = ("send", "edit", "delete", "execute") diff --git a/tests/test_base.py b/tests/test_base.py index a16e2af8f..a7db4bf3e 100644 --- a/tests/test_base.py +++ b/tests/test_base.py @@ -3,7 +3,11 @@ import unittest import unittest.mock -from tests.base import LoggingTestCase, _CaptureLogHandler +from tests.base import LoggingTestsMixin, _CaptureLogHandler + + +class LoggingTestCase(LoggingTestsMixin, unittest.TestCase): + pass class LoggingTestCaseTests(unittest.TestCase): @@ -18,24 +22,14 @@ class LoggingTestCaseTests(unittest.TestCase): try: with LoggingTestCase.assertNotLogs(self, level=logging.DEBUG): pass - except AssertionError: + except AssertionError: # pragma: no cover self.fail("`self.assertNotLogs` raised an AssertionError when it should not!") - @unittest.mock.patch("tests.base.LoggingTestCase.assertNotLogs") - def test_the_test_function_assert_not_logs_does_not_raise_with_no_logs(self, assertNotLogs): - """Test if test_assert_not_logs_does_not_raise_with_no_logs captures exception correctly.""" - assertNotLogs.return_value = iter([None]) - assertNotLogs.side_effect = AssertionError - - message = "`self.assertNotLogs` raised an AssertionError when it should not!" 
- with self.assertRaises(AssertionError, msg=message): - self.test_assert_not_logs_does_not_raise_with_no_logs() - def test_assert_not_logs_raises_correct_assertion_error_when_logs_are_emitted(self): """Test if LoggingTestCase.assertNotLogs raises AssertionError when logs were emitted.""" msg_regex = ( r"1 logs of DEBUG or higher were triggered on root:\n" - r'<LogRecord: tests\.test_base, [\d]+, .+/tests/test_base\.py, [\d]+, "Log!">' + r'<LogRecord: tests\.test_base, [\d]+, .+[/\\]tests[/\\]test_base\.py, [\d]+, "Log!">' ) with self.assertRaisesRegex(AssertionError, msg_regex): with LoggingTestCase.assertNotLogs(self, level=logging.DEBUG): diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 7894e104a..81285e009 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -1,5 +1,4 @@ import asyncio -import inspect import unittest import unittest.mock @@ -214,6 +213,11 @@ class DiscordMocksTests(unittest.TestCase): with self.assertRaises(RuntimeError, msg="cannot reuse already awaited coroutine"): asyncio.run(coroutine_object) + def test_user_mock_uses_explicitly_passed_mention_attribute(self): + """MockUser should use an explicitly passed value for user.mention.""" + user = helpers.MockUser(mention="hello") + self.assertEqual(user.mention, "hello") + class MockObjectTests(unittest.TestCase): """Tests the mock objects and mixins we've defined.""" @@ -341,65 +345,10 @@ class MockObjectTests(unittest.TestCase): attribute = getattr(mock, valid_attribute) self.assertTrue(isinstance(attribute, mock_type.child_mock_type)) - def test_extract_coroutine_methods_from_spec_instance_should_extract_all_and_only_coroutines(self): - """Test if all coroutine functions are extracted, but not regular methods or attributes.""" - class CoroutineDonor: - def __init__(self): - self.some_attribute = 'alpha' - - async def first_coroutine(): - """This coroutine function should be extracted.""" - - async def second_coroutine(): - """This coroutine function should be extracted.""" - - def regular_method(): - """This regular function should not be extracted.""" - - class Receiver: + def test_custom_mock_mixin_mocks_async_magic_methods_with_async_mock(self): + """The CustomMockMixin should mock async magic methods with an AsyncMock.""" + class MyMock(helpers.CustomMockMixin, unittest.mock.MagicMock): pass - donor = CoroutineDonor() - receiver = Receiver() - - helpers.CustomMockMixin._extract_coroutine_methods_from_spec_instance(receiver, donor) - - self.assertIsInstance(receiver.first_coroutine, helpers.AsyncMock) - self.assertIsInstance(receiver.second_coroutine, helpers.AsyncMock) - self.assertFalse(hasattr(receiver, 'regular_method')) - self.assertFalse(hasattr(receiver, 'some_attribute')) - - @unittest.mock.patch("builtins.super", new=unittest.mock.MagicMock()) - @unittest.mock.patch("tests.helpers.CustomMockMixin._extract_coroutine_methods_from_spec_instance") - def test_custom_mock_mixin_init_with_spec(self, extract_method_mock): - """Test if CustomMockMixin correctly passes on spec/kwargs and calls the extraction method.""" - spec_set = "pydis" - - helpers.CustomMockMixin(spec_set=spec_set) - - extract_method_mock.assert_called_once_with(spec_set) - - @unittest.mock.patch("builtins.super", new=unittest.mock.MagicMock()) - @unittest.mock.patch("tests.helpers.CustomMockMixin._extract_coroutine_methods_from_spec_instance") - def test_custom_mock_mixin_init_without_spec(self, extract_method_mock): - """Test if CustomMockMixin correctly passes on spec/kwargs and calls the extraction method.""" - 
helpers.CustomMockMixin() - - extract_method_mock.assert_not_called() - - def test_async_mock_provides_coroutine_for_dunder_call(self): - """Test if AsyncMock objects have a coroutine for their __call__ method.""" - async_mock = helpers.AsyncMock() - self.assertTrue(inspect.iscoroutinefunction(async_mock.__call__)) - - coroutine = async_mock() - self.assertTrue(inspect.iscoroutine(coroutine)) - self.assertIsNotNone(asyncio.run(coroutine)) - - def test_async_test_decorator_allows_synchronous_call_to_async_def(self): - """Test if the `async_test` decorator allows an `async def` to be called synchronously.""" - @helpers.async_test - async def kosayoda(): - return "return value" - - self.assertEqual(kosayoda(), "return value") + mock = MyMock() + self.assertIsInstance(mock.__aenter__, unittest.mock.AsyncMock) diff --git a/tests/utils/test_time.py b/tests/utils/test_time.py deleted file mode 100644 index 4baa6395c..000000000 --- a/tests/utils/test_time.py +++ /dev/null @@ -1,62 +0,0 @@ -import asyncio -from datetime import datetime, timezone -from unittest.mock import patch - -import pytest -from dateutil.relativedelta import relativedelta - -from bot.utils import time -from tests.helpers import AsyncMock - - [email protected]( - ('delta', 'precision', 'max_units', 'expected'), - ( - (relativedelta(days=2), 'seconds', 1, '2 days'), - (relativedelta(days=2, hours=2), 'seconds', 2, '2 days and 2 hours'), - (relativedelta(days=2, hours=2), 'seconds', 1, '2 days'), - (relativedelta(days=2, hours=2), 'days', 2, '2 days'), - - # Does not abort for unknown units, as the unit name is checked - # against the attribute of the relativedelta instance. - (relativedelta(days=2, hours=2), 'elephants', 2, '2 days and 2 hours'), - - # Very high maximum units, but it only ever iterates over - # each value the relativedelta might have. - (relativedelta(days=2, hours=2), 'hours', 20, '2 days and 2 hours'), - ) -) -def test_humanize_delta( - delta: relativedelta, - precision: str, - max_units: int, - expected: str -): - assert time.humanize_delta(delta, precision, max_units) == expected - - [email protected]('max_units', (-1, 0)) -def test_humanize_delta_raises_for_invalid_max_units(max_units: int): - with pytest.raises(ValueError, match='max_units must be positive'): - time.humanize_delta(relativedelta(days=2, hours=2), 'hours', max_units) - - [email protected]( - ('stamp', 'expected'), - ( - ('Sun, 15 Sep 2019 12:00:00 GMT', datetime(2019, 9, 15, 12, 0, 0, tzinfo=timezone.utc)), - ) -) -def test_parse_rfc1123(stamp: str, expected: str): - assert time.parse_rfc1123(stamp) == expected - - -@patch('asyncio.sleep', new_callable=AsyncMock) -def test_wait_until(sleep_patch): - start = datetime(2019, 1, 1, 0, 0) - then = datetime(2019, 1, 1, 0, 10) - - # No return value - assert asyncio.run(time.wait_until(then, start)) is None - - sleep_patch.assert_called_once_with(10 * 60)
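The tests/helpers.py rework in this diff drops the hand-rolled AsyncMock and AsyncIteratorMock shims in favour of the standard library's unittest.mock.AsyncMock, with child mocks driven by a class-level spec_set and an additional_spec_asyncs list. Below is a minimal, self-contained sketch of that pattern for reference, not the repository's actual helper: it assumes Python 3.8+ and its unittest.mock (including the private _spec_asyncs list and _get_child_mock hook that the diff itself uses), and the SpeccedMock, Example, ExampleMock and SketchTests names are invented purely for illustration.

import unittest
import unittest.mock


class SpeccedMock(unittest.mock.MagicMock):
    """Children follow `spec_set`; names listed in `additional_spec_asyncs` become AsyncMocks."""

    spec_set = None                # object used as the specification for the mock
    additional_spec_asyncs = ()    # e.g. ("wait_for",): sync methods that return coroutines

    def __init__(self, **kwargs):
        super().__init__(spec_set=self.spec_set, **kwargs)
        if self.additional_spec_asyncs:
            # `_spec_asyncs` is the (private) list mock consults when creating child mocks;
            # extending it forces an AsyncMock for the named attributes, as the diff above does.
            self._spec_asyncs.extend(self.additional_spec_asyncs)

    def _get_child_mock(self, **kw):
        # Mirror the override above: names registered as async get an AsyncMock child.
        if kw.get("_new_name") in self.__dict__['_spec_asyncs']:
            return unittest.mock.AsyncMock(**kw)
        return super()._get_child_mock(**kw)


class Example:
    """Hypothetical spec object standing in for a discord.py class."""

    def wait_for(self):
        """Synchronous signature, but callers are expected to await the result."""


class ExampleMock(SpeccedMock):
    spec_set = Example()
    additional_spec_asyncs = ("wait_for",)


class SketchTests(unittest.IsolatedAsyncioTestCase):
    async def test_registered_name_is_awaitable_and_spec_is_enforced(self):
        mock = ExampleMock()
        await mock.wait_for()                  # AsyncMock child, so awaiting works
        mock.wait_for.assert_awaited_once()
        with self.assertRaises(AttributeError):
            getattr(mock, "not_on_the_spec")   # spec_set rejects attributes missing from the spec


if __name__ == "__main__":
    unittest.main()

Run with `python -m unittest`, this sketch should show the two behaviours the MockBot and MockAsyncWebhook changes above rely on: attributes named in additional_spec_asyncs are awaitable AsyncMocks, while anything not present on the spec_set object raises AttributeError.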