| author | 2021-06-04 06:30:02 -0700 |
|---|---|
| committer | 2021-06-04 09:35:44 -0700 |
| commit | 3c4cbd3d76bb6f5545920c66ba23126f71637766 (patch) |
| tree | 2ab4857f6ecda7c0336e34c725caa4ff11dfe4ea |
| parent | Remove talentpool channel constants (diff) |
| parent | Merge pull request #1619 from python-discord/vcokltfre/chore/star-imports-sma... (diff) |
Merge branch 'main' into ks123/goodbye-talentpool-channel
98 files changed, 6634 insertions, 3427 deletions
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 634bb4bca..6dfe7e859 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -4,14 +4,14 @@
 **/bot/exts/moderation/*silence.py @MarkKoz
 bot/exts/info/codeblock/** @MarkKoz
 bot/exts/utils/extensions.py @MarkKoz
-bot/exts/utils/snekbox.py @MarkKoz @Akarys42
+bot/exts/utils/snekbox.py @MarkKoz @Akarys42 @jb3
 bot/exts/help_channels/** @MarkKoz @Akarys42
-bot/exts/moderation/** @Akarys42 @mbaruh @Den4200 @ks129
-bot/exts/info/** @Akarys42 @Den4200
-bot/exts/info/information.py @mbaruh
-bot/exts/filters/** @mbaruh
+bot/exts/moderation/** @Akarys42 @mbaruh @Den4200 @ks129 @jb3
+bot/exts/info/** @Akarys42 @Den4200 @jb3
+bot/exts/info/information.py @mbaruh @jb3
+bot/exts/filters/** @mbaruh @jb3
 bot/exts/fun/** @ks129
-bot/exts/utils/** @ks129
+bot/exts/utils/** @ks129 @jb3
 bot/exts/recruitment/** @wookie184
 
 # Rules
@@ -30,12 +30,13 @@ tests/bot/exts/test_cogs.py @MarkKoz
 tests/** @Akarys42
 
 # CI & Docker
-.github/workflows/** @MarkKoz @Akarys42 @SebastiaanZ @Den4200
-Dockerfile @MarkKoz @Akarys42 @Den4200
-docker-compose.yml @MarkKoz @Akarys42 @Den4200
+.github/workflows/** @MarkKoz @Akarys42 @SebastiaanZ @Den4200 @jb3
+Dockerfile @MarkKoz @Akarys42 @Den4200 @jb3
+docker-compose.yml @MarkKoz @Akarys42 @Den4200 @jb3
 
 # Tools
-Pipfile* @Akarys42
+poetry.lock @Akarys42
+pyproject.toml @Akarys42
 
 # Statistics
 bot/async_stats.py @jb3
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index e6826e09b..84a671917 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -39,7 +39,7 @@ jobs:
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
-          password: ${{ secrets.GHCR_TOKEN }}
+          password: ${{ secrets.GITHUB_TOKEN }}
 
       # Build and push the container to the GitHub Container
       # Repository. The container will be tagged as "latest"
diff --git a/.github/workflows/lint-test.yml b/.github/workflows/lint-test.yml
index 95bed2e14..d96f324ec 100644
--- a/.github/workflows/lint-test.yml
+++ b/.github/workflows/lint-test.yml
@@ -23,15 +23,12 @@ jobs:
       PIP_NO_CACHE_DIR: false
       PIP_USER: 1
 
-      # Hide the graphical elements from pipenv's output
-      PIPENV_HIDE_EMOJIS: 1
-      PIPENV_NOSPIN: 1
-
-      # Make sure pipenv does not try reuse an environment it's running in
-      PIPENV_IGNORE_VIRTUALENVS: 1
+      # Make sure package manager does not use virtualenv
+      POETRY_VIRTUALENVS_CREATE: false
 
       # Specify explicit paths for python dependencies and the pre-commit
       # environment so we know which directories to cache
+      POETRY_CACHE_DIR: ${{ github.workspace }}/.cache/py-user-base
       PYTHONUSERBASE: ${{ github.workspace }}/.cache/py-user-base
       PRE_COMMIT_HOME: ${{ github.workspace }}/.cache/pre-commit-cache
 
@@ -46,7 +43,7 @@ jobs:
         id: python
         uses: actions/setup-python@v2
         with:
-          python-version: '3.8'
+          python-version: '3.9'
 
       # This step caches our Python dependencies. To make sure we
      # only restore a cache when the dependencies, the python version,
@@ -61,14 +58,14 @@ jobs:
           path: ${{ env.PYTHONUSERBASE }}
           key: "python-0-${{ runner.os }}-${{ env.PYTHONUSERBASE }}-\
             ${{ steps.python.outputs.python-version }}-\
-            ${{ hashFiles('./Pipfile', './Pipfile.lock') }}"
+            ${{ hashFiles('./pyproject.toml', './poetry.lock') }}"
 
       # Install our dependencies if we did not restore a dependency cache
-      - name: Install dependencies using pipenv
+      - name: Install dependencies using poetry
         if: steps.python_cache.outputs.cache-hit != 'true'
         run: |
-          pip install pipenv
-          pipenv install --dev --deploy --system
+          pip install poetry
+          poetry install
 
       # This step caches our pre-commit environment. To make sure we
       # do create a new environment when our pre-commit setup changes,
diff --git a/.gitignore b/.gitignore
index 9186dbe06..f74a142f3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -115,6 +115,7 @@ log.*
 
 # Custom user configuration
 config.yml
+docker-compose.override.yml
 
 # xmlrunner unittest XML reports
 TEST-**.xml
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 52500a282..a9412f07d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -17,8 +17,8 @@ repos:
     hooks:
       - id: flake8
         name: Flake8
-        description: This hook runs flake8 within our project's pipenv environment.
-        entry: pipenv run flake8
+        description: This hook runs flake8 within our project's environment.
+        entry: poetry run flake8
         language: system
         types: [python]
         require_serial: true
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
new file mode 100644
index 000000000..57ccd80e7
--- /dev/null
+++ b/CODE_OF_CONDUCT.md
@@ -0,0 +1,3 @@
+# Code of Conduct
+
+The Python Discord Code of Conduct can be found [on our website](https://pydis.com/coc).
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index addab32ff..f20b53162 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,123 +1,3 @@
-# Contributing to one of Our Projects
+# Contributing Guidelines
-
-Our projects are open-source and are automatically deployed whenever commits are pushed to the `main` branch on each repository, so we've created a set of guidelines in order to keep everything clean and in working order.
-
-Note that contributions may be rejected on the basis of a contributor failing to follow these guidelines.
-
-## Rules
-
-1. **No force-pushes** or modifying the Git history in any way.
-2. If you have direct access to the repository, **create a branch for your changes** and create a pull request for that branch. If not, create a branch on a fork of the repository and create a pull request from there.
-    * It's common practice for a repository to reject direct pushes to `main`, so make branching a habit!
-    * If PRing from your own fork, **ensure that "Allow edits from maintainers" is checked**. This gives permission for maintainers to commit changes directly to your fork, speeding up the review process.
-3. **Adhere to the prevailing code style**, which we enforce using [`flake8`](http://flake8.pycqa.org/en/latest/index.html) and [`pre-commit`](https://pre-commit.com/).
-    * Run `flake8` and `pre-commit` against your code [**before** you push it](https://soundcloud.com/lemonsaurusrex/lint-before-you-push). Your commit will be rejected by the build server if it fails to lint.
-    * [Git Hooks](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks) are a powerful git feature for executing custom scripts when certain important git actions occur. The pre-commit hook is the first hook executed during the commit process and can be used to check the code being committed & abort the commit if issues, such as linting failures, are detected. While git hooks can seem daunting to configure, the `pre-commit` framework abstracts this process away from you and is provided as a dev dependency for this project. Run `pipenv run precommit` when setting up the project and you'll never have to worry about committing code that fails linting.
-4. **Make great commits**. A well structured git log is key to a project's maintainability; it efficiently provides insight into when and *why* things were done for future maintainers of the project.
-    * Commits should be as narrow in scope as possible. Commits that span hundreds of lines across multiple unrelated functions and/or files are very hard for maintainers to follow. After about a week they'll probably be hard for you to follow too.
-    * Avoid making minor commits for fixing typos or linting errors. Since you've already set up a `pre-commit` hook to run the linting pipeline before a commit, you shouldn't be committing linting issues anyway.
-    * A more in-depth guide to writing great commit messages can be found in Chris Beam's [*How to Write a Git Commit Message*](https://chris.beams.io/posts/git-commit/)
-5. **Avoid frequent pushes to the main repository**. This goes for PRs opened against your fork as well. Our test build pipelines are triggered every time a push to the repository (or PR) is made. Try to batch your commits until you've finished working for that session, or you've reached a point where collaborators need your commits to continue their own work. This also provides you the opportunity to amend commits for minor changes rather than having to commit them on their own because you've already pushed.
-    * This includes merging main into your branch. Try to leave merging from main for after your PR passes review; a maintainer will bring your PR up to date before merging. Exceptions to this include: resolving merge conflicts, needing something that was pushed to main for your branch, or something was pushed to main that could potentially affect the functionality of what you're writing.
-6. **Don't fight the framework**. Every framework has its flaws, but the frameworks we've picked out have been carefully chosen for their particular merits. If you can avoid it, please resist reimplementing swathes of framework logic - the work has already been done for you!
-7. If someone is working on an issue or pull request, **do not open your own pull request for the same task**. Instead, collaborate with the author(s) of the existing pull request. Duplicate PRs opened without communicating with the other author(s) and/or PyDis staff will be closed. Communication is key, and there's no point in two separate implementations of the same thing.
-    * One option is to fork the other contributor's repository and submit your changes to their branch with your own pull request. We suggest following these guidelines when interacting with their repository as well.
-    * The author(s) of inactive PRs and claimed issues will be pinged after a week of inactivity for an update. Continued inactivity may result in the issue being released back to the community and/or PR closure.
-8. **Work as a team** and collaborate wherever possible. Keep things friendly and help each other out - these are shared projects and nobody likes to have their feet trodden on.
-9. All static content, such as images or audio, **must be licensed for open public use**.
-    * Static content must be hosted by a service designed to do so. Failing to do so is known as "leeching" and is frowned upon, as it generates extra bandwidth costs to the host without providing benefit. It would be best if appropriately licensed content is added to the repository itself so it can be served by PyDis' infrastructure.
-
-Above all, the needs of our community should come before the wants of an individual. Work together, build solutions to problems and try to do so in a way that people can learn from easily. Abuse of our trust may result in the loss of your Contributor role.
-
-## Changes to this Arrangement
-
-All projects evolve over time, and this contribution guide is no different. This document is open to pull requests or changes by contributors. If you believe you have something valuable to add or change, please don't hesitate to do so in a PR.
-
-## Supplemental Information
-### Developer Environment
-Instructions for setting up the bot developer environment can be found on the [PyDis wiki](https://pythondiscord.com/pages/contributing/bot/)
-
-To provide a standalone development environment for this project, docker compose is utilized to pull the current version of the [site backend](https://github.com/python-discord/site). While appropriate for bot-only contributions, any contributions that necessitate backend changes will require the site repository to be appropriately configured as well. Instructions for setting up the site environment can be found on the [PyDis site](https://pythondiscord.com/pages/contributing/site/).
-
-When pulling down changes from GitHub, remember to sync your environment using `pipenv sync --dev` to ensure you're using the most up-to-date versions of the project's dependencies.
-
-### Type Hinting
-[PEP 484](https://www.python.org/dev/peps/pep-0484/) formally specifies type hints for Python functions, added to the Python Standard Library in version 3.5. Type hints are recognized by most modern code editing tools and provide useful insight into both the input and output types of a function, preventing the user from having to go through the codebase to determine these types.
-
-For example:
-
-```py
-import typing as t
-
-
-def foo(input_1: int, input_2: t.Dict[str, str]) -> bool:
-    ...
-```
-
-Tells us that `foo` accepts an `int` and a `dict`, with `str` keys and values, and returns a `bool`.
-
-All function declarations should be type hinted in code contributed to the PyDis organization.
-
-For more information, see *[PEP 483](https://www.python.org/dev/peps/pep-0483/) - The Theory of Type Hints* and Python's documentation for the [`typing`](https://docs.python.org/3/library/typing.html) module.
-
-### AutoDoc Formatting Directives
-Many documentation packages provide support for automatic documentation generation from the codebase's docstrings. These tools utilize special formatting directives to enable richer formatting in the generated documentation.
-
-For example:
-
-```py
-import typing as t
-
-
-def foo(bar: int, baz: t.Optional[t.Dict[str, str]] = None) -> bool:
-    """
-    Does some things with some stuff.
-
-    :param bar: Some input
-    :param baz: Optional, some dictionary with string keys and values
-
-    :return: Some boolean
-    """
-    ...
-```
-
-Since PyDis does not utilize automatic documentation generation, this syntax should not be used in code contributed to the organization. Should the purpose and type of the input variables not be easily discernable from the variable name and type annotation, a prose explanation can be used. Explicit references to variables, functions, classes, etc. should be wrapped with backticks (`` ` ``).
-
-For example, the above docstring would become:
-
-```py
-import typing as t
-
-
-def foo(bar: int, baz: t.Optional[t.Dict[str, str]] = None) -> bool:
-    """
-    Does some things with some stuff.
-
-    This function takes an index, `bar` and checks for its presence in the database `baz`, passed as a dictionary. Returns `False` if `baz` is not passed.
-    """
-    ...
-```
-
-### Logging Levels
-The project currently defines [`logging`](https://docs.python.org/3/library/logging.html) levels as follows, from lowest to highest severity:
-* **TRACE:** These events should be used to provide a *verbose* trace of every step of a complex process. This is essentially the `logging` equivalent of sprinkling `print` statements throughout the code.
-    * **Note:** This is a PyDis-implemented logging level.
-* **DEBUG:** These events should add context to what's happening in a development setup to make it easier to follow what's going on while working on a project. This is in the same vein as **TRACE** logging but at a much lower level of verbosity.
-* **INFO:** These events are normal and don't need direct attention but are worth keeping track of in production, like checking which cogs were loaded during a start-up.
-* **WARNING:** These events are out of the ordinary and should be fixed, but have not caused a failure.
-    * **NOTE:** Events at this logging level and higher should be reserved for events that require the attention of the DevOps team.
-* **ERROR:** These events have caused a failure in a specific part of the application and require urgent attention.
-* **CRITICAL:** These events have caused the whole application to fail and require immediate intervention.
-
-Ensure that log messages are succinct. Should you want to pass additional useful information that would otherwise make the log message overly verbose, the `logging` module accepts an `extra` kwarg, which can be used to pass a dictionary. This is used to populate the `__dict__` of the `LogRecord` created for the logging event with user-defined attributes that can be accessed by a log handler. Additional information and caveats may be found [in Python's `logging` documentation](https://docs.python.org/3/library/logging.html#logging.Logger.debug).
-
-### Work in Progress (WIP) PRs
-GitHub [provides a PR feature](https://github.blog/2019-02-14-introducing-draft-pull-requests/) that allows the PR author to mark it as a WIP. This provides both a visual and functional indicator that the contents of the PR are in a draft state and not yet ready for formal review.
-
-This feature should be utilized in place of the traditional method of prepending `[WIP]` to the PR title.
-
-As stated earlier, **ensure that "Allow edits from maintainers" is checked**. This gives permission for maintainers to commit changes directly to your fork, speeding up the review process.
-
-## Footnotes
-
-This document was inspired by the [Glowstone contribution guidelines](https://github.com/GlowstoneMC/Glowstone/blob/dev/docs/CONTRIBUTING.md).
+
+The Contributing Guidelines for Python Discord projects can be found [on our website](https://pydis.com/contributing.md).
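The Logging Levels passage removed above describes the PyDis-specific TRACE level and the `extra` kwarg without showing either in use. A minimal sketch of both, assuming a plain stand-alone `logging` setup rather than the bot's actual configuration; the level number 5 and the `cog` attribute are illustrative assumptions:

```py
import logging

# Assumption: a custom TRACE level below DEBUG; 5 is the conventional
# value for such a level, not necessarily the one the bot registers.
TRACE = 5
logging.addLevelName(TRACE, "TRACE")

logging.basicConfig(level=TRACE)
log = logging.getLogger(__name__)

# `extra` merges user-defined attributes into the LogRecord's __dict__,
# so a handler or formatter can read record.cog while the message
# itself stays succinct.
log.info("Cog loaded", extra={"cog": "moderation"})

# Emitting at the custom TRACE level through the generic log() method.
log.log(TRACE, "Entering cleanup step", extra={"cog": "moderation"})
```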
diff --git a/Dockerfile b/Dockerfile
index 1a75e5669..c285898dc 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,25 +1,19 @@
-FROM python:3.8-slim
+FROM python:3.9.5-slim
 
-# Set pip to have cleaner logs and no saved cache
+# Set pip to have no saved cache
 ENV PIP_NO_CACHE_DIR=false \
-    PIPENV_HIDE_EMOJIS=1 \
-    PIPENV_IGNORE_VIRTUALENVS=1 \
-    PIPENV_NOSPIN=1
+    POETRY_VIRTUALENVS_CREATE=false
 
-RUN apt-get -y update \
-    && apt-get install -y \
-        git \
-    && rm -rf /var/lib/apt/lists/*
-
-# Install pipenv
-RUN pip install -U pipenv
+# Install poetry
+RUN pip install -U poetry
 
 # Create the working directory
 WORKDIR /bot
 
 # Install project dependencies
-COPY Pipfile* ./
-RUN pipenv install --system --deploy
+COPY pyproject.toml poetry.lock ./
+RUN poetry install --no-dev
 
 # Define Git SHA build argument
 ARG git_sha="development"
diff --git a/LICENSE-THIRD-PARTY b/LICENSE-THIRD-PARTY
index eacd9b952..ab715630d 100644
--- a/LICENSE-THIRD-PARTY
+++ b/LICENSE-THIRD-PARTY
@@ -35,6 +35,36 @@ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
 USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 ---------------------------------------------------------------------------------------------------
+                                        BSD 2-Clause License
+Applies to:
+    - Copyright (c) 2007-2020 by the Sphinx team (see AUTHORS file). All rights reserved.
+      - bot/cogs/doc/inventory_parser.py: _load_v1, _load_v2 and ZlibStreamReader.__aiter__.
+---------------------------------------------------------------------------------------------------
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+* Redistributions of source code must retain the above copyright
+  notice, this list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright
+  notice, this list of conditions and the following disclaimer in the
+  documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+---------------------------------------------------------------------------------------------------
 PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
 Applies to:
     - Copyright © 2001-2020 Python Software Foundation. All rights reserved.
diff --git a/Pipfile.lock b/Pipfile.lock
deleted file mode 100644
index f8cedb08f..000000000
--- a/Pipfile.lock
+++ /dev/null
@@ -1,1115 +0,0 @@
-{
-    "_meta": {
-        "hash": {
-            "sha256": "228ae55fe5700ac3827ba6b661933b60b1d06f44fea8bcbe8c5a769fa10ab2fd"
-        },
-        "pipfile-spec": 6,
-        "requires": {
-            "python_version": "3.8"
-        },
-        "sources": [
-            {
-                "name": "pypi",
-                "url": "https://pypi.python.org/simple",
-                "verify_ssl": true
-            }
-        ]
-    },
[... remainder of the deleted lock file omitted: the "default" and "develop" sections, pinned package versions with their sha256 hashes, truncated in this capture ...]
"flake8-tidy-imports": { - "hashes": [ - "sha256:52e5f2f987d3d5597538d5941153409ebcab571635835b78f522c7bf03ca23bc", - "sha256:76e36fbbfdc8e3c5017f9a216c2855a298be85bc0631e66777f4e6a07a859dc4" - ], - "index": "pypi", - "version": "==4.2.1" - }, - "flake8-todo": { - "hashes": [ - "sha256:6e4c5491ff838c06fe5a771b0e95ee15fc005ca57196011011280fc834a85915" - ], - "index": "pypi", - "version": "==0.7" - }, - "identify": { - "hashes": [ - "sha256:de7129142a5c86d75a52b96f394d94d96d497881d2aaf8eafe320cdbe8ac4bcc", - "sha256:e0dae57c0397629ce13c289f6ddde0204edf518f557bfdb1e56474aa143e77c3" - ], - "version": "==1.5.14" - }, - "idna": { - "hashes": [ - "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", - "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" - ], - "version": "==2.10" - }, - "mccabe": { - "hashes": [ - "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", - "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f" - ], - "version": "==0.6.1" - }, - "nodeenv": { - "hashes": [ - "sha256:5304d424c529c997bc888453aeaa6362d242b6b4631e90f3d4bf1b290f1c84a9", - "sha256:ab45090ae383b716c4ef89e690c41ff8c2b257b85b309f01f3654df3d084bd7c" - ], - "version": "==1.5.0" - }, - "pep8-naming": { - "hashes": [ - "sha256:a1dd47dd243adfe8a83616e27cf03164960b507530f155db94e10b36a6cd6724", - "sha256:f43bfe3eea7e0d73e8b5d07d6407ab47f2476ccaeff6937c84275cd30b016738" - ], - "index": "pypi", - "version": "==0.11.1" - }, - "pre-commit": { - "hashes": [ - "sha256:6c86d977d00ddc8a60d68eec19f51ef212d9462937acf3ea37c7adec32284ac0", - "sha256:ee784c11953e6d8badb97d19bc46b997a3a9eded849881ec587accd8608d74a4" - ], - "index": "pypi", - "version": "==2.9.3" - }, - "pycodestyle": { - "hashes": [ - "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367", - "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e" - ], - "version": "==2.6.0" - }, - "pydocstyle": { - "hashes": [ - "sha256:19b86fa8617ed916776a11cd8bc0197e5b9856d5433b777f51a3defe13075325", - "sha256:aca749e190a01726a4fb472dd4ef23b5c9da7b9205c0a7857c06533de13fd678" - ], - "version": "==5.1.1" - }, - "pyflakes": { - "hashes": [ - "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92", - "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8" - ], - "version": "==2.2.0" - }, - "pyyaml": { - "hashes": [ - "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", - "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76", - "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", - "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e", - "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648", - "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", - "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f", - "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2", - "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee", - "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a", - "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", - "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", - "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a" - ], - "index": "pypi", - "version": "==5.3.1" - }, - "requests": { - "hashes": [ - 
"sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804", - "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e" - ], - "index": "pypi", - "version": "==2.25.1" - }, - "six": { - "hashes": [ - "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", - "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" - ], - "version": "==1.15.0" - }, - "snowballstemmer": { - "hashes": [ - "sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2", - "sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914" - ], - "version": "==2.1.0" - }, - "toml": { - "hashes": [ - "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", - "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" - ], - "version": "==0.10.2" - }, - "urllib3": { - "hashes": [ - "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80", - "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73" - ], - "version": "==1.26.3" - }, - "virtualenv": { - "hashes": [ - "sha256:147b43894e51dd6bba882cf9c282447f780e2251cd35172403745fc381a0a80d", - "sha256:2be72df684b74df0ea47679a7df93fd0e04e72520022c57b479d8f881485dbe3" - ], - "version": "==20.4.2" - } - } -} diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 000000000..fa5a88a39 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,3 @@ +# Security Notice + +The Security Notice for Python Discord projects can be found [on our website](https://pydis.com/security.md). diff --git a/bot/bot.py b/bot/bot.py index 3a2af472d..914da9c98 100644 --- a/bot/bot.py +++ b/bot/bot.py @@ -111,7 +111,7 @@ class Bot(commands.Bot): loop = asyncio.get_event_loop() allowed_roles = [discord.Object(id_) for id_ in constants.MODERATION_ROLES] - intents = discord.Intents().all() + intents = discord.Intents.all() intents.presences = False intents.dm_typing = False intents.dm_reactions = False diff --git a/bot/constants.py b/bot/constants.py index 840f6fbf6..7616ea4df 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -19,6 +19,12 @@ from typing import Dict, List, Optional import yaml +try: + import dotenv + dotenv.load_dotenv() +except ModuleNotFoundError: + pass + log = logging.getLogger(__name__) @@ -175,13 +181,14 @@ class YAMLGetter(type): if cls.subsection is not None: return _CONFIG_YAML[cls.section][cls.subsection][name] return _CONFIG_YAML[cls.section][name] - except KeyError: + except KeyError as e: dotted_path = '.'.join( (cls.section, cls.subsection, name) if cls.subsection is not None else (cls.section, name) ) - log.critical(f"Tried accessing configuration variable at `{dotted_path}`, but it could not be found.") - raise + # Only an INFO log since this can be caught through `hasattr` or `getattr`. 
+ log.info(f"Tried accessing configuration variable at `{dotted_path}`, but it could not be found.") + raise AttributeError(repr(name)) from e def __getitem__(cls, name): return cls.__getattr__(name) @@ -199,6 +206,7 @@ class Bot(metaclass=YAMLGetter): prefix: str sentry_dsn: Optional[str] token: str + trace_loggers: Optional[str] class Redis(metaclass=YAMLGetter): @@ -279,6 +287,8 @@ class Emojis(metaclass=YAMLGetter): badge_partner: str badge_staff: str badge_verified_bot_developer: str + verified_bot: str + bot: str defcon_shutdown: str # noqa: E704 defcon_unshutdown: str # noqa: E704 @@ -295,6 +305,8 @@ class Emojis(metaclass=YAMLGetter): status_offline: str status_online: str + ducky_dave: str + trashcan: str bullet: str @@ -303,10 +315,6 @@ class Emojis(metaclass=YAMLGetter): new: str pencil: str - comments: str - upvotes: str - user: str - ok_hand: str @@ -388,6 +396,7 @@ class Categories(metaclass=YAMLGetter): help_available: int help_dormant: int help_in_use: int + moderators: int modmail: int voice: int @@ -402,7 +411,6 @@ class Channels(metaclass=YAMLGetter): python_events: int python_news: int reddit: int - user_event_announcements: int dev_contrib: int dev_core: int @@ -412,11 +420,12 @@ class Channels(metaclass=YAMLGetter): python_general: int cooldown: int + how_to_get_help: int attachment_log: int - dm_log: int message_log: int mod_log: int + nomination_archive: int user_log: int voice_log: int @@ -435,9 +444,8 @@ class Channels(metaclass=YAMLGetter): helpers: int incidents: int incidents_archive: int - mods: int mod_alerts: int - mod_spam: int + nominations: int nomination_voting: int organisation: int @@ -465,10 +473,8 @@ class Webhooks(metaclass=YAMLGetter): big_brother: int dev_log: int - dm_log: int duck_pond: int incidents_archive: int - reddit: int class Roles(metaclass=YAMLGetter): @@ -483,13 +489,17 @@ class Roles(metaclass=YAMLGetter): python_community: int sprinters: int voice_verified: int + video: int admins: int core_developers: int devops: int + domain_leads: int helpers: int moderators: int + mod_team: int owners: int + project_leads: int jammers: int team_leaders: int @@ -543,12 +553,13 @@ class URLs(metaclass=YAMLGetter): paste_service: str -class Reddit(metaclass=YAMLGetter): - section = "reddit" +class Metabase(metaclass=YAMLGetter): + section = "metabase" - client_id: Optional[str] - secret: Optional[str] - subreddits: list + username: Optional[str] + password: Optional[str] + url: str + max_session_age: int class AntiSpam(metaclass=YAMLGetter): @@ -589,9 +600,9 @@ class HelpChannels(metaclass=YAMLGetter): section = 'help_channels' enable: bool - claim_minutes: int cmd_whitelist: List[int] - idle_minutes: int + idle_minutes_claimant: int + idle_minutes_others: int deleted_idle_minutes: int max_available: int max_total_channels: int @@ -659,6 +670,12 @@ class Event(Enum): voice_state_update = "voice_state_update" +class VideoPermission(metaclass=YAMLGetter): + section = "video_permission" + + default_permission_duration: int + + # Debug mode DEBUG_MODE = 'local' in os.environ.get("SITE_URL", "local") diff --git a/bot/converters.py b/bot/converters.py index 67525cd4d..2a3943831 100644 --- a/bot/converters.py +++ b/bot/converters.py @@ -15,6 +15,7 @@ from discord.utils import DISCORD_EPOCH, snowflake_time from bot.api import ResponseCodeError from bot.constants import URLs +from bot.exts.info.doc import _inventory_parser from bot.utils.regex import INVITE_RE from bot.utils.time import parse_duration_string @@ -127,22 +128,20 @@ class 
ValidFilterListType(Converter):
        return list_type


-class ValidPythonIdentifier(Converter):
+class PackageName(Converter):
     """
-    A converter that checks whether the given string is a valid Python identifier.
+    A converter that checks whether the given string is a valid package name.
 
-    This is used to have package names that correspond to how you would use the package in your
-    code, e.g. `import package`.
-
-    Raises `BadArgument` if the argument is not a valid Python identifier, and simply passes through
-    the given argument otherwise.
+    Package names are used for stats and are restricted to the characters a-z, 0-9, and _.
     """
 
-    @staticmethod
-    async def convert(ctx: Context, argument: str) -> str:
-        """Checks whether the given string is a valid Python identifier."""
-        if not argument.isidentifier():
-            raise BadArgument(f"`{argument}` is not a valid Python identifier")
+    PACKAGE_NAME_RE = re.compile(r"[^a-z0-9_]")
+
+    @classmethod
+    async def convert(cls, ctx: Context, argument: str) -> str:
+        """Checks whether the given string is a valid package name."""
+        if cls.PACKAGE_NAME_RE.search(argument):
+            raise BadArgument("The provided package name is not valid; please only use the _, 0-9, and a-z characters.")
         return argument
 
 
@@ -178,6 +177,27 @@ class ValidURL(Converter):
         return url
 
 
+class Inventory(Converter):
+    """
+    Represents an Intersphinx inventory URL.
+
+    This converter checks whether intersphinx accepts the given inventory URL, and raises
+    `BadArgument` if that is not the case or if the url is unreachable.
+
+    Otherwise, it returns the url and the fetched inventory dict in a tuple.
+    """
+
+    @staticmethod
+    async def convert(ctx: Context, url: str) -> t.Tuple[str, _inventory_parser.InventoryDict]:
+        """Convert url to Intersphinx inventory URL."""
+        await ctx.trigger_typing()
+        if (inventory := await _inventory_parser.fetch_inventory(url)) is None:
+            raise BadArgument(
+                f"Failed to fetch inventory file after {_inventory_parser.FAILED_REQUEST_ATTEMPTS} attempts."
+            )
+        return url, inventory
+
+
 class Snowflake(IDConverter):
     """
     Converts to an int if the argument is a valid Discord snowflake.
@@ -216,35 +236,6 @@ class Snowflake(IDConverter):
         return snowflake
 
 
-class Subreddit(Converter):
-    """Forces a string to begin with "r/" and checks if it's a valid subreddit."""
-
-    @staticmethod
-    async def convert(ctx: Context, sub: str) -> str:
-        """
-        Force sub to begin with "r/" and check if it's a valid subreddit.
-
-        If sub is a valid subreddit, return it prepended with "r/"
-        """
-        sub = sub.lower()
-
-        if not sub.startswith("r/"):
-            sub = f"r/{sub}"
-
-        resp = await ctx.bot.http_session.get(
-            "https://www.reddit.com/subreddits/search.json",
-            params={"q": sub}
-        )
-
-        json = await resp.json()
-        if not json["data"]["children"]:
-            raise BadArgument(
-                f"The subreddit `{sub}` either doesn't exist, or it has no posts."
-            )
-
-        return sub
-
-
 class TagNameConverter(Converter):
     """
     Ensure that a proposed tag name is valid.
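The validation rule behind the new `PackageName` converter is simple enough to check in isolation. A minimal sketch (the regex is copied from the diff above; the sample names are illustrative only):

```python
import re

# Any character outside a-z, 0-9 and _ invalidates a package name.
PACKAGE_NAME_RE = re.compile(r"[^a-z0-9_]")

for candidate in ("discord_py", "Requests", "flake8-bugbear"):
    valid = not PACKAGE_NAME_RE.search(candidate)
    print(f"{candidate!r}: {'valid' if valid else 'rejected'}")
# 'discord_py' passes; the uppercase 'R' and the '-' cause the other two to be rejected.
```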
diff --git a/bot/decorators.py b/bot/decorators.py
index 063c8f878..f65ec4103 100644
--- a/bot/decorators.py
+++ b/bot/decorators.py
@@ -1,16 +1,18 @@
 import asyncio
+import functools
 import logging
+import types
 import typing as t
 from contextlib import suppress
-from functools import wraps
 
 from discord import Member, NotFound
 from discord.ext import commands
 from discord.ext.commands import Cog, Context
 
-from bot.constants import Channels, RedirectOutput
+from bot.constants import Channels, DEBUG_MODE, RedirectOutput
 from bot.utils import function
-from bot.utils.checks import in_whitelist_check
+from bot.utils.checks import ContextCheckFailure, in_whitelist_check
+from bot.utils.function import command_wraps
 
 log = logging.getLogger(__name__)
 
@@ -43,6 +45,49 @@ def in_whitelist(
     return commands.check(predicate)
 
 
+class NotInBlacklistCheckFailure(ContextCheckFailure):
+    """Raised when the 'not_in_blacklist' check fails."""
+
+
+def not_in_blacklist(
+    *,
+    channels: t.Container[int] = (),
+    categories: t.Container[int] = (),
+    roles: t.Container[int] = (),
+    override_roles: t.Container[int] = (),
+    redirect: t.Optional[int] = Channels.bot_commands,
+    fail_silently: bool = False,
+) -> t.Callable:
+    """
+    Check if a command was not issued in a blacklisted context.
+
+    The blacklists that can be provided are:
+
+    - `channels`: a container with channel ids for blacklisted channels
+    - `categories`: a container with category ids for blacklisted categories
+    - `roles`: a container with role ids for blacklisted roles
+
+    If the command was invoked in a context that was blacklisted, the member is either
+    redirected to the `redirect` channel that was passed (default: #bot-commands) or simply
+    told that they're not allowed to use this particular command (if `None` was passed).
+
+    The blacklist can be overridden through the roles specified in `override_roles`.
+    """
+    def predicate(ctx: Context) -> bool:
+        """Check if command was issued in a blacklisted context."""
+        not_blacklisted = not in_whitelist_check(ctx, channels, categories, roles, fail_silently=True)
+        overridden = in_whitelist_check(ctx, roles=override_roles, fail_silently=True)
+
+        success = not_blacklisted or overridden
+
+        if not success and not fail_silently:
+            raise NotInBlacklistCheckFailure(redirect)
+
+        return success
+
+    return commands.check(predicate)
+
+
 def has_no_roles(*roles: t.Union[str, int]) -> t.Callable:
     """
     Returns True if the user does not have any of the roles specified.
@@ -62,19 +107,27 @@ def has_no_roles(*roles: t.Union[str, int]) -> t.Callable:
     return commands.check(predicate)
 
 
-def redirect_output(destination_channel: int, bypass_roles: t.Container[int] = None) -> t.Callable:
+def redirect_output(
+    destination_channel: int,
+    bypass_roles: t.Optional[t.Container[int]] = None,
+    channels: t.Optional[t.Container[int]] = None,
+    categories: t.Optional[t.Container[int]] = None,
+    ping_user: bool = True
+) -> t.Callable:
     """
     Changes the channel in the context of the command to redirect the output to a certain channel.
 
-    Redirect is bypassed if the author has a role to bypass redirection.
+    Redirect is bypassed if the author has a bypass role, or if the command is invoked in a channel
+    or category that can bypass redirection.
+
+    If `ping_user` is False, the author will not be pinged in the destination channel.
 
     This decorator must go before (below) the `command` decorator.
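+
+    Illustrative usage (a hypothetical command, not part of this diff):
+
+        @redirect_output(destination_channel=Channels.bot_commands, bypass_roles=MODERATION_ROLES)
+        @commands.command(name="paste")
+        async def paste(self, ctx: Context) -> None:
+            ...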
""" - def wrap(func: t.Callable) -> t.Callable: - @wraps(func) + def wrap(func: types.FunctionType) -> types.FunctionType: + @command_wraps(func) async def inner(self: Cog, ctx: Context, *args, **kwargs) -> None: if ctx.channel.id == destination_channel: - log.trace(f"Command {ctx.command.name} was invoked in destination_channel, not redirecting") + log.trace(f"Command {ctx.command} was invoked in destination_channel, not redirecting") await func(self, ctx, *args, **kwargs) return @@ -83,12 +136,24 @@ def redirect_output(destination_channel: int, bypass_roles: t.Container[int] = N await func(self, ctx, *args, **kwargs) return + elif channels and ctx.channel.id not in channels: + log.trace(f"{ctx.author} used {ctx.command} in a channel that can bypass output redirection") + await func(self, ctx, *args, **kwargs) + return + + elif categories and ctx.channel.category.id not in categories: + log.trace(f"{ctx.author} used {ctx.command} in a category that can bypass output redirection") + await func(self, ctx, *args, **kwargs) + return + redirect_channel = ctx.guild.get_channel(destination_channel) old_channel = ctx.channel log.trace(f"Redirecting output of {ctx.author}'s command '{ctx.command.name}' to {redirect_channel.name}") ctx.channel = redirect_channel - await ctx.channel.send(f"Here's the output of your command, {ctx.author.mention}") + + if ping_user: + await ctx.send(f"Here's the output of your command, {ctx.author.mention}") asyncio.create_task(func(self, ctx, *args, **kwargs)) message = await old_channel.send( @@ -105,7 +170,6 @@ def redirect_output(destination_channel: int, bypass_roles: t.Container[int] = N with suppress(NotFound): await ctx.message.delete() log.trace("Redirect output: Deleted invocation message") - return inner return wrap @@ -122,8 +186,8 @@ def respect_role_hierarchy(member_arg: function.Argument) -> t.Callable: This decorator must go before (below) the `command` decorator. """ - def decorator(func: t.Callable) -> t.Callable: - @wraps(func) + def decorator(func: types.FunctionType) -> types.FunctionType: + @command_wraps(func) async def wrapper(*args, **kwargs) -> None: log.trace(f"{func.__name__}: respect role hierarchy decorator called") @@ -153,3 +217,23 @@ def respect_role_hierarchy(member_arg: function.Argument) -> t.Callable: await func(*args, **kwargs) return wrapper return decorator + + +def mock_in_debug(return_value: t.Any) -> t.Callable: + """ + Short-circuit function execution if in debug mode and return `return_value`. + + The original function name, and the incoming args and kwargs are DEBUG level logged + upon each call. This is useful for expensive operations, i.e. media asset uploads + that are prone to rate-limits but need to be tested extensively. 
+    """
+    def decorator(func: t.Callable) -> t.Callable:
+        @functools.wraps(func)
+        async def wrapped(*args, **kwargs) -> t.Any:
+            """Short-circuit and log if in debug mode."""
+            if DEBUG_MODE:
+                log.debug(f"Function {func.__name__} called with args: {args}, kwargs: {kwargs}")
+                return return_value
+            return await func(*args, **kwargs)
+        return wrapped
+    return decorator
diff --git a/bot/errors.py b/bot/errors.py
index ab0adcd42..3544c6320 100644
--- a/bot/errors.py
+++ b/bot/errors.py
@@ -35,3 +35,9 @@ class InvalidInfractedUser(Exception):
         self.reason = reason
 
         super().__init__(reason)
+
+
+class BrandingMisconfiguration(RuntimeError):
+    """Raised by the Branding cog when a misconfigured event is encountered."""
+
+    pass
diff --git a/bot/exts/backend/branding/__init__.py b/bot/exts/backend/branding/__init__.py
index 81ea3bf49..20a747b7f 100644
--- a/bot/exts/backend/branding/__init__.py
+++ b/bot/exts/backend/branding/__init__.py
@@ -1,7 +1,7 @@
 from bot.bot import Bot
-from bot.exts.backend.branding._cog import BrandingManager
+from bot.exts.backend.branding._cog import Branding
 
 
 def setup(bot: Bot) -> None:
-    """Loads BrandingManager cog."""
-    bot.add_cog(BrandingManager(bot))
+    """Load Branding cog."""
+    bot.add_cog(Branding(bot))
diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py
index 20df83a89..47c379a34 100644
--- a/bot/exts/backend/branding/_cog.py
+++ b/bot/exts/backend/branding/_cog.py
@@ -1,566 +1,653 @@
 import asyncio
-import itertools
+import contextlib
 import logging
 import random
 import typing as t
-from datetime import datetime, time, timedelta
+from datetime import timedelta
+from enum import Enum
+from operator import attrgetter
 
-import arrow
 import async_timeout
 import discord
+from arrow import Arrow
 from async_rediscache import RedisCache
-from discord.ext import commands
+from discord.ext import commands, tasks
 
 from bot.bot import Bot
-from bot.constants import Branding, Colours, Emojis, Guild, MODERATION_ROLES
-from bot.exts.backend.branding import _constants, _decorators, _errors, _seasons
+from bot.constants import Branding as BrandingConfig, Channels, Colours, Guild, MODERATION_ROLES
+from bot.decorators import mock_in_debug
+from bot.exts.backend.branding._repository import BrandingRepository, Event, RemoteObject
 
 log = logging.getLogger(__name__)
 
 
-class GitHubFile(t.NamedTuple):
+class AssetType(Enum):
     """
-    Represents a remote file on GitHub.
+    Recognised Discord guild asset types.
 
-    The `sha` hash is kept so that we can determine that a file has changed,
-    despite its filename remaining unchanged.
+    The value of each member corresponds exactly to a kwarg that can be passed to `Guild.edit`.
     """
 
-    download_url: str
-    path: str
-    sha: str
+    BANNER = "banner"
+    ICON = "icon"
 
 
-def pretty_files(files: t.Iterable[GitHubFile]) -> str:
-    """Provide a human-friendly representation of `files`."""
-    return "\n".join(file.path for file in files)
+def compound_hash(objects: t.Iterable[RemoteObject]) -> str:
+    """
+    Join SHA attributes of `objects` into a single string.
+
+    Compound hashes are cached to check for change in any of the member `objects`.
+    """
+    return "-".join(item.sha for item in objects)
+
+
+def make_embed(title: str, description: str, *, success: bool) -> discord.Embed:
+    """
+    Construct simple response embed.
+
+    If `success` is True, use green colour, otherwise red.
+
+    For both `title` and `description`, empty strings are valid values ~ fields will be empty.
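+
+    For illustration: `make_embed("Synchronisation successful", "Assets have been applied.", success=True)`
+    produces the green response embed used by the sync command below.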
+ """ + colour = Colours.soft_green if success else Colours.soft_red + return discord.Embed(title=title[:256], description=description[:2048], colour=colour) -def time_until_midnight() -> timedelta: +def extract_event_duration(event: Event) -> str: """ - Determine amount of time until the next-up UTC midnight. + Extract a human-readable, year-agnostic duration string from `event`. - The exact `midnight` moment is actually delayed to 5 seconds after, in order - to avoid potential problems due to imprecise sleep. + In the case that `event` is a fallback event, resolves to 'Fallback'. + + For 1-day events, only the single date is shown, instead of a period. """ - now = datetime.utcnow() - tomorrow = now + timedelta(days=1) - midnight = datetime.combine(tomorrow, time(second=5)) + if event.meta.is_fallback: + return "Fallback" + + fmt = "%B %d" # Ex: August 23 + start_date = event.meta.start_date.strftime(fmt) + end_date = event.meta.end_date.strftime(fmt) + + if start_date == end_date: + return start_date - return midnight - now + return f"{start_date} - {end_date}" -class BrandingManager(commands.Cog): +def extract_event_name(event: Event) -> str: """ - Manages the guild's branding. - - The purpose of this cog is to help automate the synchronization of the branding - repository with the guild. It is capable of discovering assets in the repository - via GitHub's API, resolving download urls for them, and delegating - to the `bot` instance to upload them to the guild. - - BrandingManager is designed to be entirely autonomous. Its `daemon` background task awakens - once a day (see `time_until_midnight`) to detect new seasons, or to cycle icons within a single - season. The daemon can be turned on and off via the `daemon` cmd group. The value set via - its `start` and `stop` commands is persisted across sessions. If turned on, the daemon will - automatically start on the next bot start-up. Otherwise, it will wait to be started manually. - - All supported operations, e.g. setting seasons, applying the branding, or cycling icons, can - also be invoked manually, via the following API: - - branding list - - Show all available seasons - - branding set <season_name> - - Set the cog's internal state to represent `season_name`, if it exists - - If no `season_name` is given, set chronologically current season - - This will not automatically apply the season's branding to the guild, - the cog's state can be detached from the guild - - Seasons can therefore be 'previewed' using this command - - branding info - - View detailed information about resolved assets for current season - - branding refresh - - Refresh internal state, i.e. synchronize with branding repository - - branding apply - - Apply the current internal state to the guild, i.e. upload the assets - - branding cycle - - If there are multiple available icons for current season, randomly pick - and apply the next one - - The daemon calls these methods autonomously as appropriate. The use of this cog - is locked to moderation roles. As it performs media asset uploads, it is prone to - rate-limits - the `apply` command should be used with caution. The `set` command can, - however, be used freely to 'preview' seasonal branding and check whether paths have been - resolved as appropriate. - - While the bot is in debug mode, it will 'mock' asset uploads by logging the passed - download urls and pretending that the upload was successful. Make use of this - to test this cog's behaviour. + Extract title-cased event name from the path of `event`. 
+ + An event with a path of 'events/black_history_month' will resolve to 'Black History Month'. """ + name = event.path.split("/")[-1] # Inner-most directory name. + words = name.split("_") # Words from snake case. + + return " ".join(word.title() for word in words) + + +class Branding(commands.Cog): + """ + Guild branding management. + + Extension responsible for automatic synchronisation of the guild's branding with the branding repository. + Event definitions and assets are automatically discovered and applied as appropriate. - current_season: t.Type[_seasons.SeasonBase] + All state is stored in Redis. The cog should therefore seamlessly transition across restarts and maintain + a consistent icon rotation schedule for events with multiple icon assets. - banner: t.Optional[GitHubFile] + By caching hashes of banner & icon assets, we discover changes in currently applied assets and always keep + the latest version applied. - available_icons: t.List[GitHubFile] - remaining_icons: t.List[GitHubFile] + The command interface allows moderators+ to control the daemon or request asset synchronisation, while + regular users can see information about the current event and the overall event schedule. + """ - days_since_cycle: t.Iterator + # RedisCache[ + # "daemon_active": bool | If True, daemon starts on start-up. Controlled via commands. + # "event_path": str | Current event's path in the branding repo. + # "event_description": str | Current event's Markdown description. + # "event_duration": str | Current event's human-readable date range. + # "banner_hash": str | SHA of the currently applied banner. + # "icons_hash": str | Compound SHA of all icons in current rotation. + # "last_rotation_timestamp": float | POSIX UTC timestamp. + # ] + cache_information = RedisCache() - daemon: t.Optional[asyncio.Task] + # Icons in current rotation. Keys (str) are download URLs, values (int) track the amount of times each + # icon has been used in the current rotation. + cache_icons = RedisCache() - # Branding configuration - branding_configuration = RedisCache() + # All available event names & durations. Cached by the daemon nightly; read by the calendar command. + cache_events = RedisCache() def __init__(self, bot: Bot) -> None: + """Instantiate repository abstraction & allow daemon to start.""" + self.bot = bot + self.repository = BrandingRepository(bot) + + self.bot.loop.create_task(self.maybe_start_daemon()) # Start depending on cache. + + # region: Internal logic & state management + + @mock_in_debug(return_value=True) # Mocked in development environment to prevent API spam. + async def apply_asset(self, asset_type: AssetType, download_url: str) -> bool: """ - Assign safe default values on init. + Download asset from `download_url` and apply it to PyDis as `asset_type`. - At this point, we don't have information about currently available branding. - Most of these attributes will be overwritten once the daemon connects, or once - the `refresh` command is used. + Return a boolean indicating whether the application was successful. 
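+
+        Note that `asset_type.value` maps directly onto a `Guild.edit` kwarg, i.e. applying
+        `AssetType.ICON` results in `guild.edit(icon=<fetched file>)`.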
""" - self.bot = bot - self.current_season = _seasons.get_current_season() + log.info(f"Applying '{asset_type.value}' asset to the guild.") + + try: + file = await self.repository.fetch_file(download_url) + except Exception: + log.exception(f"Failed to fetch '{asset_type.value}' asset.") + return False - self.banner = None + await self.bot.wait_until_guild_available() + pydis: discord.Guild = self.bot.get_guild(Guild.id) - self.available_icons = [] - self.remaining_icons = [] + timeout = 10 # Seconds. + try: + with async_timeout.timeout(timeout): # Raise after `timeout` seconds. + await pydis.edit(**{asset_type.value: file}) + except discord.HTTPException: + log.exception("Asset upload to Discord failed.") + return False + except asyncio.TimeoutError: + log.error(f"Asset upload to Discord timed out after {timeout} seconds.") + return False + else: + log.trace("Asset uploaded successfully.") + return True - self.days_since_cycle = itertools.cycle([None]) + async def apply_banner(self, banner: RemoteObject) -> bool: + """ + Apply `banner` to the guild and cache its hash if successful. - self.daemon = None - self._startup_task = self.bot.loop.create_task(self._initial_start_daemon()) + Banners should always be applied via this method to ensure that the last hash is cached. - async def _initial_start_daemon(self) -> None: - """Checks is daemon active and when is, start it at cog load.""" - if await self.branding_configuration.get("daemon_active"): - self.daemon = self.bot.loop.create_task(self._daemon_func()) + Return a boolean indicating whether the application was successful. + """ + success = await self.apply_asset(AssetType.BANNER, banner.download_url) - @property - def _daemon_running(self) -> bool: - """True if the daemon is currently active, False otherwise.""" - return self.daemon is not None and not self.daemon.done() + if success: + await self.cache_information.set("banner_hash", banner.sha) - async def _daemon_func(self) -> None: + return success + + async def rotate_icons(self) -> bool: """ - Manage all automated behaviour of the BrandingManager cog. + Choose and apply the next-up icon in rotation. + + We keep track of the amount of times each icon has been used. The values in `cache_icons` can be understood + to be iteration IDs. When an icon is chosen & applied, we bump its count, pushing it into the next iteration. - Once a day, the daemon will perform the following tasks: - - Update `current_season` - - Poll GitHub API to see if the available branding for `current_season` has changed - - Update assets if changes are detected (banner, guild icon, bot avatar, bot nickname) - - Check whether it's time to cycle guild icons + Once the current iteration (lowest count in the cache) depletes, we move onto the next iteration. - The internal loop runs once when activated, then periodically at the time - given by `time_until_midnight`. + In the case that there is only 1 icon in the rotation and has already been applied, do nothing. - All method calls in the internal loop are considered safe, i.e. no errors propagate - to the daemon's loop. The daemon itself does not perform any error handling on its own. + Return a boolean indicating whether a new icon was applied successfully. 
""" - await self.bot.wait_until_guild_available() + log.debug("Rotating icons.") - while True: - self.current_season = _seasons.get_current_season() - branding_changed = await self.refresh() + state = await self.cache_icons.to_dict() + log.trace(f"Total icons in rotation: {len(state)}.") - if branding_changed: - await self.apply() + if not state: # This would only happen if rotation not initiated, but we can handle gracefully. + log.warning("Attempted icon rotation with an empty icon cache. This indicates wrong logic.") + return False - elif next(self.days_since_cycle) == Branding.cycle_frequency: - await self.cycle() + if len(state) == 1 and 1 in state.values(): + log.debug("Aborting icon rotation: only 1 icon is available and has already been applied.") + return False - until_midnight = time_until_midnight() - await asyncio.sleep(until_midnight.total_seconds()) + current_iteration = min(state.values()) # Choose iteration to draw from. + options = [download_url for download_url, times_used in state.items() if times_used == current_iteration] - async def _info_embed(self) -> discord.Embed: - """Make an informative embed representing current season.""" - info_embed = discord.Embed(description=self.current_season.description, colour=self.current_season.colour) + log.trace(f"Choosing from {len(options)} icons in iteration {current_iteration}.") + next_icon = random.choice(options) - # If we're in a non-evergreen season, also show active months - if self.current_season is not _seasons.SeasonBase: - title = f"{self.current_season.season_name} ({', '.join(str(m) for m in self.current_season.months)})" - else: - title = self.current_season.season_name + success = await self.apply_asset(AssetType.ICON, next_icon) + + if success: + await self.cache_icons.increment(next_icon) # Push the icon into the next iteration. + + timestamp = Arrow.utcnow().timestamp() + await self.cache_information.set("last_rotation_timestamp", timestamp) - # Use the author field to show the season's name and avatar if available - info_embed.set_author(name=title) + return success + + async def maybe_rotate_icons(self) -> None: + """ + Call `rotate_icons` if the configured amount of time has passed since last rotation. + + We offset the calculated time difference into the future to avoid off-by-a-little-bit errors. Because there + is work to be done before the timestamp is read and written, the next read will likely commence slightly + under 24 hours after the last write. + """ + log.debug("Checking whether it's time for icons to rotate.") - banner = self.banner.path if self.banner is not None else "Unavailable" - info_embed.add_field(name="Banner", value=banner, inline=False) + last_rotation_timestamp = await self.cache_information.get("last_rotation_timestamp") - icons = pretty_files(self.available_icons) or "Unavailable" - info_embed.add_field(name="Available icons", value=icons, inline=False) + if last_rotation_timestamp is None: # Maiden case ~ never rotated. 
+ await self.rotate_icons() + return - # Only display cycle frequency if we're actually cycling - if len(self.available_icons) > 1 and Branding.cycle_frequency: - info_embed.set_footer(text=f"Icon cycle frequency: {Branding.cycle_frequency}") + last_rotation = Arrow.utcfromtimestamp(last_rotation_timestamp) + difference = (Arrow.utcnow() - last_rotation) + timedelta(minutes=5) - return info_embed + log.trace(f"Icons last rotated at {last_rotation} (difference: {difference}).") - async def _reset_remaining_icons(self) -> None: - """Set `remaining_icons` to a shuffled copy of `available_icons`.""" - self.remaining_icons = random.sample(self.available_icons, k=len(self.available_icons)) + if difference.days >= BrandingConfig.cycle_frequency: + await self.rotate_icons() - async def _reset_days_since_cycle(self) -> None: + async def initiate_icon_rotation(self, available_icons: t.List[RemoteObject]) -> None: """ - Reset the `days_since_cycle` iterator based on configured frequency. + Set up a new icon rotation. - If the current season only has 1 icon, or if `Branding.cycle_frequency` is falsey, - the iterator will always yield None. This signals that the icon shouldn't be cycled. + This function should be called whenever available icons change. This is generally the case when we enter + a new event, but potentially also when the assets of an on-going event change. In such cases, a reset + of `cache_icons` is necessary, because it contains download URLs which may have gotten stale. - Otherwise, it will yield ints in range [1, `Branding.cycle_frequency`] indefinitely. - When the iterator yields a value equal to `Branding.cycle_frequency`, it is time to cycle. + This function does not upload a new icon! """ - if len(self.available_icons) > 1 and Branding.cycle_frequency: - sequence = range(1, Branding.cycle_frequency + 1) - else: - sequence = [None] + log.debug("Initiating new icon rotation.") + + await self.cache_icons.clear() + + new_state = {icon.download_url: 0 for icon in available_icons} + await self.cache_icons.update(new_state) + + log.trace(f"Icon rotation initiated for {len(new_state)} icons.") - self.days_since_cycle = itertools.cycle(sequence) + await self.cache_information.set("icons_hash", compound_hash(available_icons)) - async def _get_files(self, path: str, include_dirs: bool = False) -> t.Dict[str, GitHubFile]: + async def send_info_embed(self, channel_id: int, *, is_notification: bool) -> None: """ - Get files at `path` in the branding repository. + Send the currently cached event description to `channel_id`. - If `include_dirs` is False (default), only returns files at `path`. - Otherwise, will return both files and directories. Never returns symlinks. + When `is_notification` holds, a short contextual message for the #changelog channel is added. - Return dict mapping from filename to corresponding `GitHubFile` instance. - This may return an empty dict if the response status is non-200, - or if the target directory is empty. + We read event information from `cache_information`. The caller is therefore responsible for making + sure that the cache is up-to-date before calling this function. 
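+
+        The embed carries the cached description in its body and the duration string in its
+        footer; if either value is missing, a "No event in cache" embed is sent instead.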
""" - url = f"{_constants.BRANDING_URL}/{path}" - async with self.bot.http_session.get( - url, headers=_constants.HEADERS, params=_constants.PARAMS - ) as resp: - # Short-circuit if we get non-200 response - if resp.status != _constants.STATUS_OK: - log.error(f"GitHub API returned non-200 response: {resp}") - return {} - directory = await resp.json() # Directory at `path` + log.debug(f"Sending event information event to channel: {channel_id} ({is_notification=}).") - allowed_types = {"file", "dir"} if include_dirs else {"file"} - return { - file["name"]: GitHubFile(file["download_url"], file["path"], file["sha"]) - for file in directory - if file["type"] in allowed_types - } + await self.bot.wait_until_guild_available() + channel: t.Optional[discord.TextChannel] = self.bot.get_channel(channel_id) + + if channel is None: + log.warning(f"Cannot send event information: channel {channel_id} not found!") + return + + log.trace(f"Destination channel: #{channel.name}.") + + description = await self.cache_information.get("event_description") + duration = await self.cache_information.get("event_duration") + + if None in (description, duration): + content = None + embed = make_embed("No event in cache", "Is the daemon enabled?", success=False) - async def refresh(self) -> bool: + else: + content = "Python Discord is entering a new event!" if is_notification else None + embed = discord.Embed(description=description[:2048], colour=discord.Colour.blurple()) + embed.set_footer(text=duration[:2048]) + + await channel.send(content=content, embed=embed) + + async def enter_event(self, event: Event) -> t.Tuple[bool, bool]: """ - Synchronize available assets with branding repository. + Apply `event` assets and update information cache. + + We cache `event` information to ensure that we: + * Remember which event we're currently in across restarts + * Provide an on-demand informational embed without re-querying the branding repository - If the current season is not the evergreen, and lacks at least one asset, - we use the evergreen seasonal dir as fallback for missing assets. + An event change should always be handled via this function, as it ensures that the cache is populated. - Finally, if neither the seasonal nor fallback branding directories contain - an asset, it will simply be ignored. + The #changelog notification is omitted when `event` is fallback, or already applied. - Return True if the branding has changed. This will be the case when we enter - a new season, or when something changes in the current seasons's directory - in the branding repository. + Return a 2-tuple indicating whether the banner, and the icon, were applied successfully. """ - old_branding = (self.banner, self.available_icons) - seasonal_dir = await self._get_files(self.current_season.branding_path, include_dirs=True) + log.info(f"Entering event: '{event.path}'.") - # Only make a call to the fallback directory if there is something to be gained - branding_incomplete = any( - asset not in seasonal_dir - for asset in (_constants.FILE_BANNER, _constants.FILE_AVATAR, _constants.SERVER_ICONS) - ) - if branding_incomplete and self.current_season is not _seasons.SeasonBase: - fallback_dir = await self._get_files( - _seasons.SeasonBase.branding_path, include_dirs=True - ) - else: - fallback_dir = {} + banner_success = await self.apply_banner(event.banner) # Only one asset ~ apply directly. 
- # Resolve assets in this directory, None is a safe value - self.banner = ( - seasonal_dir.get(_constants.FILE_BANNER) - or fallback_dir.get(_constants.FILE_BANNER) - ) + await self.initiate_icon_rotation(event.icons) # Prepare a new rotation. + icon_success = await self.rotate_icons() # Apply an icon from the new rotation. + + # This will only be False in the case of a manual same-event re-synchronisation. + event_changed = event.path != await self.cache_information.get("event_path") - # Now resolve server icons by making a call to the proper sub-directory - if _constants.SERVER_ICONS in seasonal_dir: - icons_dir = await self._get_files( - f"{self.current_season.branding_path}/{_constants.SERVER_ICONS}" - ) - self.available_icons = list(icons_dir.values()) + # Cache event identity to avoid re-entry in case of restart. + await self.cache_information.set("event_path", event.path) - elif _constants.SERVER_ICONS in fallback_dir: - icons_dir = await self._get_files( - f"{_seasons.SeasonBase.branding_path}/{_constants.SERVER_ICONS}" - ) - self.available_icons = list(icons_dir.values()) + # Cache information shown in the 'about' embed. + await self.populate_cache_event_description(event) + # Notify guild of new event ~ this reads the information that we cached above. + if event_changed and not event.meta.is_fallback: + await self.send_info_embed(Channels.change_log, is_notification=True) else: - self.available_icons = [] # This should never be the case, but an empty list is a safe value + log.trace("Omitting #changelog notification. Event has not changed, or new event is fallback.") - # GitHubFile instances carry a `sha` attr so this will pick up if a file changes - branding_changed = old_branding != (self.banner, self.available_icons) + return banner_success, icon_success - if branding_changed: - log.info(f"New branding detected (season: {self.current_season.season_name})") - await self._reset_remaining_icons() - await self._reset_days_since_cycle() + async def synchronise(self) -> t.Tuple[bool, bool]: + """ + Fetch the current event and delegate to `enter_event`. - return branding_changed + This is a convenience function to force synchronisation via a command. It should generally only be used + in a recovery scenario. In the usual case, the daemon already has an `Event` instance and can pass it + to `enter_event` directly. - async def cycle(self) -> bool: + Return a 2-tuple indicating whether the banner, and the icon, were applied successfully. """ - Apply the next-up server icon. + log.debug("Synchronise: fetching current event.") - Returns True if an icon is available and successfully gets applied, False otherwise. - """ - if not self.available_icons: - log.info("Cannot cycle: no icons for this season") - return False + current_event, available_events = await self.repository.get_current_event() - if not self.remaining_icons: - log.info("Reset & shuffle remaining icons") - await self._reset_remaining_icons() + await self.populate_cache_events(available_events) - next_up = self.remaining_icons.pop(0) - success = await self.set_icon(next_up.download_url) + if current_event is None: + log.error("Failed to fetch event. Cannot synchronise!") + return False, False - return success + return await self.enter_event(current_event) - async def apply(self) -> t.List[str]: + async def populate_cache_events(self, events: t.List[Event]) -> None: """ - Apply current branding to the guild and bot. + Clear `cache_events` and re-populate with names and durations of `events`. 
- This delegates to the bot instance to do all the work. We only provide download urls - for available assets. Assets unavailable in the branding repo will be ignored. + For each event, we store its name and duration string. This is the information presented to users in the + calendar command. If a format change is needed, it has to be done here. - Returns a list of names of all failed assets. An asset is considered failed - if it isn't found in the branding repo, or if something goes wrong while the - bot is trying to apply it. - - An empty list denotes that all assets have been applied successfully. + The cache does not store the fallback event, as it is not shown in the calendar. """ - report = {asset: False for asset in ("banner", "icon")} + log.debug("Populating events cache.") - if self.banner is not None: - report["banner"] = await self.set_banner(self.banner.download_url) + await self.cache_events.clear() - report["icon"] = await self.cycle() + no_fallback = [event for event in events if not event.meta.is_fallback] + chronological_events = sorted(no_fallback, key=attrgetter("meta.start_date")) - failed_assets = [asset for asset, succeeded in report.items() if not succeeded] - return failed_assets + log.trace(f"Writing {len(chronological_events)} events (fallback omitted).") - @commands.has_any_role(*MODERATION_ROLES) - @commands.group(name="branding") - async def branding_cmds(self, ctx: commands.Context) -> None: - """Manual branding control.""" - if not ctx.invoked_subcommand: - await ctx.send_help(ctx.command) + with contextlib.suppress(ValueError): # Cache raises when updated with an empty dict. + await self.cache_events.update({ + extract_event_name(event): extract_event_duration(event) + for event in chronological_events + }) - @branding_cmds.command(name="list", aliases=["ls"]) - async def branding_list(self, ctx: commands.Context) -> None: - """List all available seasons and branding sources.""" - embed = discord.Embed(title="Available seasons", colour=Colours.soft_green) + async def populate_cache_event_description(self, event: Event) -> None: + """ + Cache `event` description & duration. + + This should be called when entering a new event, and can be called periodically to ensure that the cache + holds fresh information in the case that the event remains the same, but its description changes. - for season in _seasons.get_all_seasons(): - if season is _seasons.SeasonBase: - active_when = "always" - else: - active_when = f"in {', '.join(str(m) for m in season.months)}" + The duration is stored formatted for the frontend. It is not intended to be used programmatically. + """ + log.debug("Caching event description & duration.") - description = ( - f"Active {active_when}\n" - f"Branding: {season.branding_path}" - ) - embed.add_field(name=season.season_name, value=description, inline=False) + await self.cache_information.set("event_description", event.meta.description) + await self.cache_information.set("event_duration", extract_event_duration(event)) - await ctx.send(embed=embed) + # endregion + # region: Daemon - @branding_cmds.command(name="set") - async def branding_set(self, ctx: commands.Context, *, season_name: t.Optional[str] = None) -> None: + async def maybe_start_daemon(self) -> None: """ - Manually set season, or reset to current if none given. + Start the daemon depending on cache state. - Season search is a case-less comparison against both seasonal class name, - and its `season_name` attr. 
+ The daemon will only start if it has been explicitly enabled via a command. + """ + log.debug("Checking whether daemon should start.") - This only pre-loads the cog's internal state to the chosen season, but does not - automatically apply the branding. As that is an expensive operation, the `apply` - command must be called explicitly after this command finishes. + should_begin: t.Optional[bool] = await self.cache_information.get("daemon_active") # None if never set! - This means that this command can be used to 'preview' a season gathering info - about its available assets, without applying them to the guild. + if should_begin: + self.daemon_loop.start() - If the daemon is running, it will automatically reset the season to current when - it wakes up. The season set via this command can therefore remain 'detached' from - what it should be - the daemon will make sure that it's set back properly. + def cog_unload(self) -> None: """ - if season_name is None: - new_season = _seasons.get_current_season() - else: - new_season = _seasons.get_season(season_name) - if new_season is None: - raise _errors.BrandingError("No such season exists") + Cancel the daemon in case of cog unload. - if self.current_season is new_season: - raise _errors.BrandingError(f"Season {self.current_season.season_name} already active") + This is **not** done automatically! The daemon otherwise remains active in the background. + """ + log.debug("Cog unload: cancelling daemon.") - self.current_season = new_season - await self.branding_refresh(ctx) + self.daemon_loop.cancel() - @branding_cmds.command(name="info", aliases=["status"]) - async def branding_info(self, ctx: commands.Context) -> None: + async def daemon_main(self) -> None: """ - Show available assets for current season. + Synchronise guild & caches with branding repository. + + Pull the currently active event from the branding repository and check whether it matches the currently + active event in the cache. If not, apply the new event. - This can be used to confirm that assets have been resolved properly. - When `apply` is used, it attempts to upload exactly the assets listed here. + However, it is also possible that an event's assets change as it's active. To account for such cases, + we check the banner & icons hashes against the currently cached values. If there is a mismatch, each + specific asset is re-applied. """ - await ctx.send(embed=await self._info_embed()) + log.info("Daemon main: checking current event.") - @branding_cmds.command(name="refresh") - async def branding_refresh(self, ctx: commands.Context) -> None: - """Sync currently available assets with branding repository.""" - async with ctx.typing(): - await self.refresh() - await self.branding_info(ctx) + new_event, available_events = await self.repository.get_current_event() + + await self.populate_cache_events(available_events) + + if new_event is None: + log.warning("Daemon main: failed to get current event from branding repository, will do nothing.") + return + + if new_event.path != await self.cache_information.get("event_path"): + log.debug("Daemon main: new event detected!") + await self.enter_event(new_event) + return - @branding_cmds.command(name="apply") - async def branding_apply(self, ctx: commands.Context) -> None: + await self.populate_cache_event_description(new_event) # Cache fresh frontend info in case of change. 
+ + log.trace("Daemon main: event has not changed, checking for change in assets.") + + if new_event.banner.sha != await self.cache_information.get("banner_hash"): + log.debug("Daemon main: detected banner change.") + await self.apply_banner(new_event.banner) + + if compound_hash(new_event.icons) != await self.cache_information.get("icons_hash"): + log.debug("Daemon main: detected icon change.") + await self.initiate_icon_rotation(new_event.icons) + await self.rotate_icons() + else: + await self.maybe_rotate_icons() + + @tasks.loop(hours=24) + async def daemon_loop(self) -> None: """ - Apply current season's branding to the guild. + Call `daemon_main` every 24 hours. - Use `info` to check which assets will be applied. Shows which assets have - failed to be applied, if any. + The scheduler maintains an exact 24-hour frequency even if this coroutine takes time to complete. If the + coroutine is started at 00:01 and completes at 00:05, it will still be started at 00:01 the next day. """ - async with ctx.typing(): - failed_assets = await self.apply() - if failed_assets: - raise _errors.BrandingError( - f"Failed to apply following assets: {', '.join(failed_assets)}" - ) + log.trace("Daemon loop: calling daemon main.") - response = discord.Embed(description=f"All assets applied {Emojis.ok_hand}", colour=Colours.soft_green) - await ctx.send(embed=response) + try: + await self.daemon_main() + except Exception: + log.exception("Daemon loop: failed with an unhandled exception!") - @branding_cmds.command(name="cycle") - async def branding_cycle(self, ctx: commands.Context) -> None: + @daemon_loop.before_loop + async def daemon_before(self) -> None: """ - Apply the next-up guild icon, if multiple are available. + Call `daemon_loop` immediately, then block the loop until the next-up UTC midnight. - The order is random. + The first iteration is invoked directly such that synchronisation happens immediately after daemon start. + We then calculate the time until the next-up midnight and sleep before letting `daemon_loop` begin. """ - async with ctx.typing(): - success = await self.cycle() - if not success: - raise _errors.BrandingError("Failed to cycle icon") + log.trace("Daemon before: performing start-up iteration.") + + await self.daemon_loop() - response = discord.Embed(description=f"Success {Emojis.ok_hand}", colour=Colours.soft_green) - await ctx.send(embed=response) + log.trace("Daemon before: calculating time to sleep before loop begins.") + now = Arrow.utcnow() - @branding_cmds.group(name="daemon", aliases=["d", "task"]) - async def daemon_group(self, ctx: commands.Context) -> None: - """Control the background daemon.""" + # The actual midnight moment is offset into the future to prevent issues with imprecise sleep. 
+ tomorrow = now.shift(days=1) + midnight = tomorrow.replace(hour=0, minute=1, second=0, microsecond=0) + + sleep_secs = (midnight - now).total_seconds() + log.trace(f"Daemon before: sleeping {sleep_secs} seconds before next-up midnight: {midnight}.") + + await asyncio.sleep(sleep_secs) + + # endregion + # region: Command interface (branding) + + @commands.group(name="branding") + async def branding_group(self, ctx: commands.Context) -> None: + """Control the branding cog.""" if not ctx.invoked_subcommand: await ctx.send_help(ctx.command) - @daemon_group.command(name="status") - async def daemon_status(self, ctx: commands.Context) -> None: - """Check whether daemon is currently active.""" - if self._daemon_running: - remaining_time = (arrow.utcnow() + time_until_midnight()).humanize() - response = discord.Embed(description=f"Daemon running {Emojis.ok_hand}", colour=Colours.soft_green) - response.set_footer(text=f"Next refresh {remaining_time}") - else: - response = discord.Embed(description="Daemon not running", colour=Colours.soft_red) + @branding_group.command(name="about", aliases=("current", "event")) + async def branding_about_cmd(self, ctx: commands.Context) -> None: + """Show the current event's description and duration.""" + await self.send_info_embed(ctx.channel.id, is_notification=False) + + @commands.has_any_role(*MODERATION_ROLES) + @branding_group.command(name="sync") + async def branding_sync_cmd(self, ctx: commands.Context) -> None: + """ + Force branding synchronisation. - await ctx.send(embed=response) + Show which assets have failed to synchronise, if any. + """ + async with ctx.typing(): + banner_success, icon_success = await self.synchronise() - @daemon_group.command(name="start") - async def daemon_start(self, ctx: commands.Context) -> None: - """If the daemon isn't running, start it.""" - if self._daemon_running: - raise _errors.BrandingError("Daemon already running!") + failed_assets = ", ".join( + name + for name, status in [("banner", banner_success), ("icon", icon_success)] + if status is False + ) - self.daemon = self.bot.loop.create_task(self._daemon_func()) - await self.branding_configuration.set("daemon_active", True) + if failed_assets: + resp = make_embed("Synchronisation unsuccessful", f"Failed to apply: {failed_assets}.", success=False) + resp.set_footer(text="Check log for details.") + else: + resp = make_embed("Synchronisation successful", "Assets have been applied.", success=True) - response = discord.Embed(description=f"Daemon started {Emojis.ok_hand}", colour=Colours.soft_green) - await ctx.send(embed=response) + await ctx.send(embed=resp) - @daemon_group.command(name="stop") - async def daemon_stop(self, ctx: commands.Context) -> None: - """If the daemon is running, stop it.""" - if not self._daemon_running: - raise _errors.BrandingError("Daemon not running!") + # endregion + # region: Command interface (branding calendar) - self.daemon.cancel() - await self.branding_configuration.set("daemon_active", False) + @branding_group.group(name="calendar", aliases=("schedule", "events")) + async def branding_calendar_group(self, ctx: commands.Context) -> None: + """ + Show the current event calendar. - response = discord.Embed(description=f"Daemon stopped {Emojis.ok_hand}", colour=Colours.soft_green) - await ctx.send(embed=response) + We draw event information from `cache_events` and use each key-value pair to create a field in the response + embed. As such, we do not need to query the API to get event information. 
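The sleep-until-midnight arithmetic in `daemon_before` can be exercised on its own. A sketch using the third-party `arrow` package that the cog imports:

import arrow

now = arrow.utcnow()

# Aim at 00:01 rather than 00:00 so an imprecise sleep cannot wake up just before midnight.
midnight = now.shift(days=1).replace(hour=0, minute=1, second=0, microsecond=0)

sleep_secs = (midnight - now).total_seconds()
print(f"Would sleep {sleep_secs:.0f} seconds, until {midnight}.")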
The cache is automatically + re-populated by the daemon whenever it makes a request. A moderator+ can also explicitly request a cache + refresh using the 'refresh' subcommand. - async def _fetch_image(self, url: str) -> bytes: - """Retrieve and read image from `url`.""" - log.debug(f"Getting image from: {url}") - async with self.bot.http_session.get(url) as resp: - return await resp.read() + Due to Discord limitations, we only show up to 25 events. This is entirely sufficient at the time of writing. + In the case that we find ourselves with more than 25 events, a warning log will alert core devs. - async def _apply_asset(self, target: discord.Guild, asset: _constants.AssetType, url: str) -> bool: + In the future, we may be interested in a field-paginating solution. """ - Internal method for applying media assets to the guild. + if ctx.invoked_subcommand: + # If you're wondering why this works: when the 'refresh' subcommand eventually re-invokes + # this group, the attribute will be automatically set to None by the framework. + return + + available_events = await self.cache_events.to_dict() + log.trace(f"Found {len(available_events)} cached events available for calendar view.") + + if not available_events: + resp = make_embed("No events found!", "Cache may be empty, try `branding calendar refresh`.", success=False) + await ctx.send(embed=resp) + return + + embed = discord.Embed(title="Current event calendar", colour=discord.Colour.blurple()) - This shouldn't be called directly. The purpose of this method is mainly generic - error handling to reduce needless code repetition. + # Because Discord embeds can only contain up to 25 fields, we only show the first 25. + first_25 = list(available_events.items())[:25] - Return True if upload was successful, False otherwise. + if len(first_25) != len(available_events): # Alert core devs that a paginating solution is now necessary. + log.warning(f"There are {len(available_events)} events, but the calendar view can only display 25.") + + for name, duration in first_25: + embed.add_field(name=name[:256], value=duration[:1024]) + + embed.set_footer(text="Otherwise, the fallback season is used.") + + await ctx.send(embed=embed) + + @commands.has_any_role(*MODERATION_ROLES) + @branding_calendar_group.command(name="refresh") + async def branding_calendar_refresh_cmd(self, ctx: commands.Context) -> None: """ - log.info(f"Attempting to set {asset.name}: {url}") + Refresh event cache and show current event calendar. - kwargs = {asset.value: await self._fetch_image(url)} - try: - async with async_timeout.timeout(5): - await target.edit(**kwargs) + Supplementary subcommand allowing force-refreshing the event cache. Implemented as a subcommand because + unlike the supergroup, it requires moderator privileges. 
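Discord allows at most 25 fields per embed (with field names capped at 256 characters and values at 1024), which is what the slicing above guards against. A standalone sketch of the same guard, with a hypothetical oversized event dict:

import logging

log = logging.getLogger(__name__)
MAX_FIELDS = 25  # Discord's per-embed field limit.

available_events = {f"Event {i}": "June 1 - June 30" for i in range(30)}

first_25 = list(available_events.items())[:MAX_FIELDS]
if len(first_25) != len(available_events):
    log.warning(f"There are {len(available_events)} events, but the calendar view can only display 25.")

for name, duration in first_25:
    print(name[:256], duration[:1024])  # The real cog passes these to embed.add_field.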
+ """ + log.info("Performing command-requested event cache refresh.") - except asyncio.TimeoutError: - log.info("Asset upload timed out") - return False + async with ctx.typing(): + available_events = await self.repository.get_events() + await self.populate_cache_events(available_events) - except discord.HTTPException as discord_error: - log.exception("Asset upload failed", exc_info=discord_error) - return False + await ctx.invoke(self.branding_calendar_group) + + # endregion + # region: Command interface (branding daemon) + + @commands.has_any_role(*MODERATION_ROLES) + @branding_group.group(name="daemon", aliases=("d",)) + async def branding_daemon_group(self, ctx: commands.Context) -> None: + """Control the branding cog's daemon.""" + if not ctx.invoked_subcommand: + await ctx.send_help(ctx.command) + + @branding_daemon_group.command(name="enable", aliases=("start", "on")) + async def branding_daemon_enable_cmd(self, ctx: commands.Context) -> None: + """Enable the branding daemon.""" + await self.cache_information.set("daemon_active", True) + if self.daemon_loop.is_running(): + resp = make_embed("Daemon is already enabled!", "", success=False) else: - log.info("Asset successfully applied") - return True + self.daemon_loop.start() + resp = make_embed("Daemon enabled!", "It will now automatically awaken on start-up.", success=True) - @_decorators.mock_in_debug(return_value=True) - async def set_banner(self, url: str) -> bool: - """Set the guild's banner to image at `url`.""" - guild = self.bot.get_guild(Guild.id) - if guild is None: - log.info("Failed to get guild instance, aborting asset upload") - return False + await ctx.send(embed=resp) - return await self._apply_asset(guild, _constants.AssetType.BANNER, url) + @branding_daemon_group.command(name="disable", aliases=("stop", "off")) + async def branding_daemon_disable_cmd(self, ctx: commands.Context) -> None: + """Disable the branding daemon.""" + await self.cache_information.set("daemon_active", False) - @_decorators.mock_in_debug(return_value=True) - async def set_icon(self, url: str) -> bool: - """Sets the guild's icon to image at `url`.""" - guild = self.bot.get_guild(Guild.id) - if guild is None: - log.info("Failed to get guild instance, aborting asset upload") - return False + if self.daemon_loop.is_running(): + self.daemon_loop.cancel() + resp = make_embed("Daemon disabled!", "It will not awaken on start-up.", success=True) + else: + resp = make_embed("Daemon is already disabled!", "", success=False) - return await self._apply_asset(guild, _constants.AssetType.SERVER_ICON, url) + await ctx.send(embed=resp) - def cog_unload(self) -> None: - """Cancels startup and daemon task.""" - self._startup_task.cancel() - if self.daemon is not None: - self.daemon.cancel() + @branding_daemon_group.command(name="status") + async def branding_daemon_status_cmd(self, ctx: commands.Context) -> None: + """Check whether the daemon is currently enabled.""" + if self.daemon_loop.is_running(): + resp = make_embed("Daemon is enabled", "Use `branding daemon disable` to stop.", success=True) + else: + resp = make_embed("Daemon is disabled", "Use `branding daemon enable` to start.", success=False) + + await ctx.send(embed=resp) + + # endregion diff --git a/bot/exts/backend/branding/_constants.py b/bot/exts/backend/branding/_constants.py deleted file mode 100644 index ca8e8c5f5..000000000 --- a/bot/exts/backend/branding/_constants.py +++ /dev/null @@ -1,51 +0,0 @@ -from enum import Enum, IntEnum - -from bot.constants import Keys - - -class Month(IntEnum): - 
"""All month constants for seasons.""" - - JANUARY = 1 - FEBRUARY = 2 - MARCH = 3 - APRIL = 4 - MAY = 5 - JUNE = 6 - JULY = 7 - AUGUST = 8 - SEPTEMBER = 9 - OCTOBER = 10 - NOVEMBER = 11 - DECEMBER = 12 - - def __str__(self) -> str: - return self.name.title() - - -class AssetType(Enum): - """ - Discord media assets. - - The values match exactly the kwarg keys that can be passed to `Guild.edit`. - """ - - BANNER = "banner" - SERVER_ICON = "icon" - - -STATUS_OK = 200 # HTTP status code - -FILE_BANNER = "banner.png" -FILE_AVATAR = "avatar.png" -SERVER_ICONS = "server_icons" - -BRANDING_URL = "https://api.github.com/repos/python-discord/branding/contents" - -PARAMS = {"ref": "main"} # Target branch -HEADERS = {"Accept": "application/vnd.github.v3+json"} # Ensure we use API v3 - -# A GitHub token is not necessary for the cog to operate, -# unauthorized requests are however limited to 60 per hour -if Keys.github: - HEADERS["Authorization"] = f"token {Keys.github}" diff --git a/bot/exts/backend/branding/_decorators.py b/bot/exts/backend/branding/_decorators.py deleted file mode 100644 index 6a1e7e869..000000000 --- a/bot/exts/backend/branding/_decorators.py +++ /dev/null @@ -1,27 +0,0 @@ -import functools -import logging -import typing as t - -from bot.constants import DEBUG_MODE - -log = logging.getLogger(__name__) - - -def mock_in_debug(return_value: t.Any) -> t.Callable: - """ - Short-circuit function execution if in debug mode and return `return_value`. - - The original function name, and the incoming args and kwargs are DEBUG level logged - upon each call. This is useful for expensive operations, i.e. media asset uploads - that are prone to rate-limits but need to be tested extensively. - """ - def decorator(func: t.Callable) -> t.Callable: - @functools.wraps(func) - async def wrapped(*args, **kwargs) -> t.Any: - """Short-circuit and log if in debug mode.""" - if DEBUG_MODE: - log.debug(f"Function {func.__name__} called with args: {args}, kwargs: {kwargs}") - return return_value - return await func(*args, **kwargs) - return wrapped - return decorator diff --git a/bot/exts/backend/branding/_errors.py b/bot/exts/backend/branding/_errors.py deleted file mode 100644 index 7cd271af3..000000000 --- a/bot/exts/backend/branding/_errors.py +++ /dev/null @@ -1,2 +0,0 @@ -class BrandingError(Exception): - """Exception raised by the BrandingManager cog.""" diff --git a/bot/exts/backend/branding/_repository.py b/bot/exts/backend/branding/_repository.py new file mode 100644 index 000000000..7b09d4641 --- /dev/null +++ b/bot/exts/backend/branding/_repository.py @@ -0,0 +1,240 @@ +import logging +import typing as t +from datetime import date, datetime + +import frontmatter + +from bot.bot import Bot +from bot.constants import Keys +from bot.errors import BrandingMisconfiguration + +# Base URL for requests into the branding repository. +BRANDING_URL = "https://api.github.com/repos/python-discord/branding/contents" + +PARAMS = {"ref": "main"} # Target branch. +HEADERS = {"Accept": "application/vnd.github.v3+json"} # Ensure we use API v3. + +# A GitHub token is not necessary. However, unauthorized requests are limited to 60 per hour. +if Keys.github: + HEADERS["Authorization"] = f"token {Keys.github}" + +# Since event periods are year-agnostic, we parse them into `datetime` objects with a manually inserted year. +# Please note that this is intentionally a leap year to allow Feb 29 to be valid. +ARBITRARY_YEAR = 2020 + +# Format used to parse date strings after we inject `ARBITRARY_YEAR` at the end. 
+DATE_FMT = "%B %d %Y"  # Ex: July 10 2020
+
+log = logging.getLogger(__name__)
+
+
+class RemoteObject:
+    """
+    Remote file or directory on GitHub.
+
+    The annotations match keys in the response JSON that we're interested in.
+    """
+
+    sha: str  # Hash helps us detect asset change.
+    name: str  # Filename.
+    path: str  # Path from repo root.
+    type: str  # Either 'file' or 'dir'.
+    download_url: t.Optional[str]  # If type is 'dir', this is None!
+
+    def __init__(self, dictionary: t.Dict[str, t.Any]) -> None:
+        """Initialize by grabbing annotated attributes from `dictionary`."""
+        missing_keys = self.__annotations__.keys() - dictionary.keys()
+        if missing_keys:
+            raise KeyError(f"Fetched object lacks expected keys: {missing_keys}")
+        for annotation in self.__annotations__:
+            setattr(self, annotation, dictionary[annotation])
+
+
+class MetaFile(t.NamedTuple):
+    """Attributes defined in a 'meta.md' file."""
+
+    is_fallback: bool
+    start_date: t.Optional[date]
+    end_date: t.Optional[date]
+    description: str  # Markdown event description.
+
+
+class Event(t.NamedTuple):
+    """Event defined in the branding repository."""
+
+    path: str  # Path from repo root where event lives. This is the event's identity.
+    meta: MetaFile
+    banner: RemoteObject
+    icons: t.List[RemoteObject]
+
+    def __str__(self) -> str:
+        return f"<Event at '{self.path}'>"
+
+
+class BrandingRepository:
+    """
+    Branding repository abstraction.
+
+    This class represents the branding repository's main branch and exposes available events and assets
+    as objects. It performs the necessary amount of validation to ensure that a misconfigured event
+    isn't returned. Such events are simply ignored, and will be substituted with the fallback event,
+    if available. Warning logs will inform core developers if a misconfigured event is encountered.
+
+    Colliding events cause no special behaviour. In such cases, the first found active event is returned.
+    We work with the assumption that the branding repository checks for such conflicts and prevents them
+    from reaching the main branch.
+
+    This class keeps no internal state. All `get_current_event` calls will result in GitHub API requests.
+    The caller is therefore responsible for caching information to prevent API abuse.
+
+    Requests are made using the HTTP session looked up on the bot instance.
+    """
+
+    def __init__(self, bot: Bot) -> None:
+        self.bot = bot
+
+    async def fetch_directory(self, path: str, types: t.Container[str] = ("file", "dir")) -> t.Dict[str, RemoteObject]:
+        """
+        Fetch directory found at `path` in the branding repository.
+
+        Raise an exception if the request fails, or if the response lacks the expected keys.
+
+        Passing custom `types` allows getting only files or directories. By default, both are included.
+        """
+        full_url = f"{BRANDING_URL}/{path}"
+        log.debug(f"Fetching directory from branding repository: '{full_url}'.")
+
+        async with self.bot.http_session.get(full_url, params=PARAMS, headers=HEADERS) as response:
+            if response.status != 200:
+                raise RuntimeError(f"Failed to fetch directory due to status: {response.status}")
+
+            log.debug("Fetch successful, reading JSON response.")
+            json_directory = await response.json()
+
+        return {file["name"]: RemoteObject(file) for file in json_directory if file["type"] in types}
+
+    async def fetch_file(self, download_url: str) -> bytes:
+        """
+        Fetch file as bytes from `download_url`.
+
+        Raise an exception if the request does not succeed.
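`RemoteObject.__init__` drives attribute extraction entirely from the class annotations, so adding an annotated field automatically makes it required in the fetched payload. A trimmed-down illustration (two fields instead of five) of that behaviour:

import typing as t

class RemoteObject:
    sha: str
    name: str

    def __init__(self, dictionary: t.Dict[str, t.Any]) -> None:
        missing_keys = self.__annotations__.keys() - dictionary.keys()
        if missing_keys:
            raise KeyError(f"Fetched object lacks expected keys: {missing_keys}")
        for annotation in self.__annotations__:
            setattr(self, annotation, dictionary[annotation])

banner = RemoteObject({"sha": "3f786850", "name": "banner.png"})
print(banner.name, banner.sha)  # banner.png 3f786850

RemoteObject({"name": "incomplete"})  # KeyError: Fetched object lacks expected keys: {'sha'}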
+ """ + log.debug(f"Fetching file from branding repository: '{download_url}'.") + + async with self.bot.http_session.get(download_url, params=PARAMS, headers=HEADERS) as response: + if response.status != 200: + raise RuntimeError(f"Failed to fetch file due to status: {response.status}") + + log.debug("Fetch successful, reading payload.") + return await response.read() + + def parse_meta_file(self, raw_file: bytes) -> MetaFile: + """ + Parse a 'meta.md' file from raw bytes. + + The caller is responsible for handling errors caused by misconfiguration. + """ + attrs, description = frontmatter.parse(raw_file, encoding="UTF-8") + + if not description: + raise BrandingMisconfiguration("No description found in 'meta.md'!") + + if attrs.get("fallback", False): + return MetaFile(is_fallback=True, start_date=None, end_date=None, description=description) + + start_date_raw = attrs.get("start_date") + end_date_raw = attrs.get("end_date") + + if None in (start_date_raw, end_date_raw): + raise BrandingMisconfiguration("Non-fallback event doesn't have start and end dates defined!") + + # We extend the configured month & day with an arbitrary leap year, allowing a datetime object to exist. + # This may raise errors if misconfigured. We let the caller handle such cases. + start_date = datetime.strptime(f"{start_date_raw} {ARBITRARY_YEAR}", DATE_FMT).date() + end_date = datetime.strptime(f"{end_date_raw} {ARBITRARY_YEAR}", DATE_FMT).date() + + return MetaFile(is_fallback=False, start_date=start_date, end_date=end_date, description=description) + + async def construct_event(self, directory: RemoteObject) -> Event: + """ + Construct an `Event` instance from an event `directory`. + + The caller is responsible for handling errors caused by misconfiguration. + """ + contents = await self.fetch_directory(directory.path) + + missing_assets = {"meta.md", "banner.png", "server_icons"} - contents.keys() + + if missing_assets: + raise BrandingMisconfiguration(f"Directory is missing following assets: {missing_assets}") + + server_icons = await self.fetch_directory(contents["server_icons"].path, types=("file",)) + + if len(server_icons) == 0: + raise BrandingMisconfiguration("Found no server icons!") + + meta_bytes = await self.fetch_file(contents["meta.md"].download_url) + + meta_file = self.parse_meta_file(meta_bytes) + + return Event(directory.path, meta_file, contents["banner.png"], list(server_icons.values())) + + async def get_events(self) -> t.List[Event]: + """ + Discover available events in the branding repository. + + Misconfigured events are skipped. May return an empty list in the catastrophic case. + """ + log.debug("Discovering events in branding repository.") + + try: + event_directories = await self.fetch_directory("events", types=("dir",)) # Skip files. + except Exception: + log.exception("Failed to fetch 'events' directory.") + return [] + + instances: t.List[Event] = [] + + for event_directory in event_directories.values(): + log.trace(f"Attempting to construct event from directory: '{event_directory.path}'.") + try: + instance = await self.construct_event(event_directory) + except Exception as exc: + log.warning(f"Could not construct event '{event_directory.path}'.", exc_info=exc) + else: + instances.append(instance) + + return instances + + async def get_current_event(self) -> t.Tuple[t.Optional[Event], t.List[Event]]: + """ + Get the currently active event, or the fallback event. + + The second return value is a list of all available events. The caller may discard it, if not needed. 
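The year-agnostic date handling is small enough to verify in isolation. A sketch with hard-coded meta values, mirroring the parse-then-compare flow of `parse_meta_file` and `get_current_event`:

from datetime import date, datetime

ARBITRARY_YEAR = 2020  # A leap year, so a configured "February 29" still parses.
DATE_FMT = "%B %d %Y"

start_date = datetime.strptime(f"June 1 {ARBITRARY_YEAR}", DATE_FMT).date()
end_date = datetime.strptime(f"June 30 {ARBITRARY_YEAR}", DATE_FMT).date()

# Project today into the arbitrary year before comparing.
utc_now = datetime.utcnow()
lookup_now = date(year=ARBITRARY_YEAR, month=utc_now.month, day=utc_now.day)

print(start_date <= lookup_now <= end_date)  # True during June of any year.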
+ Returning all events alongside the current one prevents having to query the API twice in some cases. + + The current event may be None in the case that no event is active, and no fallback event is found. + """ + utc_now = datetime.utcnow() + log.debug(f"Finding active event for: {utc_now}.") + + # Construct an object in the arbitrary year for the purpose of comparison. + lookup_now = date(year=ARBITRARY_YEAR, month=utc_now.month, day=utc_now.day) + log.trace(f"Lookup object in arbitrary year: {lookup_now}.") + + available_events = await self.get_events() + log.trace(f"Found {len(available_events)} available events.") + + for event in available_events: + meta = event.meta + if not meta.is_fallback and (meta.start_date <= lookup_now <= meta.end_date): + return event, available_events + + log.trace("No active event found. Looking for fallback event.") + + for event in available_events: + if event.meta.is_fallback: + return event, available_events + + log.warning("No event is currently active and no fallback event was found!") + return None, available_events diff --git a/bot/exts/backend/branding/_seasons.py b/bot/exts/backend/branding/_seasons.py deleted file mode 100644 index 5f6256b30..000000000 --- a/bot/exts/backend/branding/_seasons.py +++ /dev/null @@ -1,175 +0,0 @@ -import logging -import typing as t -from datetime import datetime - -from bot.constants import Colours -from bot.exts.backend.branding._constants import Month -from bot.exts.backend.branding._errors import BrandingError - -log = logging.getLogger(__name__) - - -class SeasonBase: - """ - Base for Seasonal classes. - - This serves as the off-season fallback for when no specific - seasons are active. - - Seasons are 'registered' simply by inheriting from `SeasonBase`. - We discover them by calling `__subclasses__`. - """ - - season_name: str = "Evergreen" - - colour: str = Colours.soft_green - description: str = "The default season!" - - branding_path: str = "seasonal/evergreen" - - months: t.Set[Month] = set(Month) - - -class Christmas(SeasonBase): - """Branding for December.""" - - season_name = "Festive season" - - colour = Colours.soft_red - description = ( - "The time is here to get into the festive spirit! No matter who you are, where you are, " - "or what beliefs you may follow, we hope every one of you enjoy this festive season!" - ) - - branding_path = "seasonal/christmas" - - months = {Month.DECEMBER} - - -class Easter(SeasonBase): - """Branding for April.""" - - season_name = "Easter" - - colour = Colours.bright_green - description = ( - "Bunny here, bunny there, bunny everywhere! Here at Python Discord, we celebrate " - "our version of Easter during the entire month of April." - ) - - branding_path = "seasonal/easter" - - months = {Month.APRIL} - - -class Halloween(SeasonBase): - """Branding for October.""" - - season_name = "Halloween" - - colour = Colours.orange - description = "Trick or treat?!" - - branding_path = "seasonal/halloween" - - months = {Month.OCTOBER} - - -class Pride(SeasonBase): - """Branding for June.""" - - season_name = "Pride" - - colour = Colours.pink - description = ( - "The month of June is a special month for us at Python Discord. It is very important to us " - "that everyone feels welcome here, no matter their origin, identity or sexuality. 
During the " - "month of June, while some of you are participating in Pride festivals across the world, " - "we will be celebrating individuality and commemorating the history and challenges " - "of the LGBTQ+ community with a Pride event of our own!" - ) - - branding_path = "seasonal/pride" - - months = {Month.JUNE} - - -class Valentines(SeasonBase): - """Branding for February.""" - - season_name = "Valentines" - - colour = Colours.pink - description = "Love is in the air!" - - branding_path = "seasonal/valentines" - - months = {Month.FEBRUARY} - - -class Wildcard(SeasonBase): - """Branding for August.""" - - season_name = "Wildcard" - - colour = Colours.purple - description = "A season full of surprises!" - - months = {Month.AUGUST} - - -def get_all_seasons() -> t.List[t.Type[SeasonBase]]: - """Give all available season classes.""" - return [SeasonBase] + SeasonBase.__subclasses__() - - -def get_current_season() -> t.Type[SeasonBase]: - """Give active season, based on current UTC month.""" - current_month = Month(datetime.utcnow().month) - - active_seasons = tuple( - season - for season in SeasonBase.__subclasses__() - if current_month in season.months - ) - - if not active_seasons: - return SeasonBase - - return active_seasons[0] - - -def get_season(name: str) -> t.Optional[t.Type[SeasonBase]]: - """ - Give season such that its class name or its `season_name` attr match `name` (caseless). - - If no such season exists, return None. - """ - name = name.casefold() - - for season in get_all_seasons(): - matches = (season.__name__.casefold(), season.season_name.casefold()) - - if name in matches: - return season - - -def _validate_season_overlap() -> None: - """ - Raise BrandingError if there are any colliding seasons. - - This serves as a local test to ensure that seasons haven't been misconfigured. 
- """ - month_to_season = {} - - for season in SeasonBase.__subclasses__(): - for month in season.months: - colliding_season = month_to_season.get(month) - - if colliding_season: - raise BrandingError(f"Season {season} collides with {colliding_season} in {month.name}") - else: - month_to_season[month] = season - - -_validate_season_overlap() diff --git a/bot/exts/backend/error_handler.py b/bot/exts/backend/error_handler.py index 9cb54cdab..d8de177f5 100644 --- a/bot/exts/backend/error_handler.py +++ b/bot/exts/backend/error_handler.py @@ -1,7 +1,5 @@ -import contextlib import difflib import logging -import random import typing as t from discord import Embed @@ -10,11 +8,10 @@ from sentry_sdk import push_scope from bot.api import ResponseCodeError from bot.bot import Bot -from bot.constants import Colours, ERROR_REPLIES, Icons, MODERATION_ROLES +from bot.constants import Colours, Icons, MODERATION_ROLES from bot.converters import TagNameConverter from bot.errors import InvalidInfractedUser, LockedResourceError -from bot.exts.backend.branding._errors import BrandingError -from bot.utils.checks import InWhitelistCheckFailure +from bot.utils.checks import ContextCheckFailure log = logging.getLogger(__name__) @@ -62,7 +59,7 @@ class ErrorHandler(Cog): log.trace(f"Command {command} had its error already handled locally; ignoring.") return - if isinstance(e, errors.CommandNotFound) and not hasattr(ctx, "invoked_from_error_handler"): + if isinstance(e, errors.CommandNotFound) and not getattr(ctx, "invoked_from_error_handler", False): if await self.try_silence(ctx): return # Try to look for a tag with the command's name @@ -79,9 +76,6 @@ class ErrorHandler(Cog): await self.handle_api_error(ctx, e.original) elif isinstance(e.original, LockedResourceError): await ctx.send(f"{e.original} Please wait for it to finish and try again later.") - elif isinstance(e.original, BrandingError): - await ctx.send(embed=self._get_error_embed(random.choice(ERROR_REPLIES), str(e.original))) - return elif isinstance(e.original, InvalidInfractedUser): await ctx.send(f"Cannot infract that user. {e.original.reason}") else: @@ -167,9 +161,8 @@ class ErrorHandler(Cog): f"and the fallback tag failed validation in TagNameConverter." 
) else: - with contextlib.suppress(ResponseCodeError): - if await ctx.invoke(tags_get_command, tag_name=tag_name): - return + if await ctx.invoke(tags_get_command, tag_name=tag_name): + return if not any(role.id in MODERATION_ROLES for role in ctx.author.roles): await self.send_command_suggestion(ctx, ctx.invoked_with) @@ -219,32 +212,30 @@ class ErrorHandler(Cog): * ArgumentParsingError: send an error message * Other: send an error message and the help command """ - prepared_help_command = self.get_help_command(ctx) - if isinstance(e, errors.MissingRequiredArgument): embed = self._get_error_embed("Missing required argument", e.param.name) await ctx.send(embed=embed) - await prepared_help_command + await self.get_help_command(ctx) self.bot.stats.incr("errors.missing_required_argument") elif isinstance(e, errors.TooManyArguments): embed = self._get_error_embed("Too many arguments", str(e)) await ctx.send(embed=embed) - await prepared_help_command + await self.get_help_command(ctx) self.bot.stats.incr("errors.too_many_arguments") elif isinstance(e, errors.BadArgument): embed = self._get_error_embed("Bad argument", str(e)) await ctx.send(embed=embed) - await prepared_help_command + await self.get_help_command(ctx) self.bot.stats.incr("errors.bad_argument") elif isinstance(e, errors.BadUnionArgument): embed = self._get_error_embed("Bad argument", f"{e}\n{e.errors[-1]}") await ctx.send(embed=embed) - await prepared_help_command + await self.get_help_command(ctx) self.bot.stats.incr("errors.bad_union_argument") elif isinstance(e, errors.ArgumentParsingError): embed = self._get_error_embed("Argument parsing error", str(e)) await ctx.send(embed=embed) - prepared_help_command.close() + self.get_help_command(ctx).close() self.bot.stats.incr("errors.argument_parsing_error") else: embed = self._get_error_embed( @@ -252,7 +243,7 @@ class ErrorHandler(Cog): "Something about your input seems off. Check the arguments and try again." ) await ctx.send(embed=embed) - await prepared_help_command + await self.get_help_command(ctx) self.bot.stats.incr("errors.other_user_input_error") @staticmethod @@ -279,7 +270,7 @@ class ErrorHandler(Cog): await ctx.send( "Sorry, it looks like I don't have the permissions or roles I need to do that." 
) - elif isinstance(e, (InWhitelistCheckFailure, errors.NoPrivateMessage)): + elif isinstance(e, (ContextCheckFailure, errors.NoPrivateMessage)): ctx.bot.stats.incr("errors.wrong_channel_or_dm_error") await ctx.send(e) diff --git a/bot/exts/filters/antispam.py b/bot/exts/filters/antispam.py index af8528a68..7555e25a2 100644 --- a/bot/exts/filters/antispam.py +++ b/bot/exts/filters/antispam.py @@ -3,7 +3,7 @@ import logging from collections.abc import Mapping from dataclasses import dataclass, field from datetime import datetime, timedelta -from operator import itemgetter +from operator import attrgetter, itemgetter from typing import Dict, Iterable, List, Set from discord import Colour, Member, Message, NotFound, Object, TextChannel @@ -18,6 +18,7 @@ from bot.constants import ( ) from bot.converters import Duration from bot.exts.moderation.modlog import ModLog +from bot.utils import lock, scheduling from bot.utils.messages import format_user, send_attachments @@ -114,7 +115,7 @@ class AntiSpam(Cog): self.message_deletion_queue = dict() - self.bot.loop.create_task(self.alert_on_validation_error()) + self.bot.loop.create_task(self.alert_on_validation_error(), name="AntiSpam.alert_on_validation_error") @property def mod_log(self) -> ModLog: @@ -191,7 +192,10 @@ class AntiSpam(Cog): if channel.id not in self.message_deletion_queue: log.trace(f"Creating queue for channel `{channel.id}`") self.message_deletion_queue[message.channel.id] = DeletionContext(channel) - self.bot.loop.create_task(self._process_deletion_context(message.channel.id)) + scheduling.create_task( + self._process_deletion_context(message.channel.id), + name=f"AntiSpam._process_deletion_context({message.channel.id})" + ) # Add the relevant of this trigger to the Deletion Context await self.message_deletion_queue[message.channel.id].add( @@ -201,16 +205,15 @@ class AntiSpam(Cog): ) for member in members: - - # Fire it off as a background task to ensure - # that the sleep doesn't block further tasks - self.bot.loop.create_task( - self.punish(message, member, full_reason) + scheduling.create_task( + self.punish(message, member, full_reason), + name=f"AntiSpam.punish(message={message.id}, member={member.id}, rule={rule_name})" ) await self.maybe_delete_messages(channel, relevant_messages) break + @lock.lock_arg("antispam.punish", "member", attrgetter("id")) async def punish(self, msg: Message, member: Member, reason: str) -> None: """Punishes the given member for triggering an antispam rule.""" if not any(role.id == self.muted_role.id for role in member.roles): diff --git a/bot/exts/filters/filtering.py b/bot/exts/filters/filtering.py index c90b18dcb..464732453 100644 --- a/bot/exts/filters/filtering.py +++ b/bot/exts/filters/filtering.py @@ -6,6 +6,7 @@ from typing import Any, Dict, List, Mapping, NamedTuple, Optional, Tuple, Union import dateutil import discord.errors +import regex from async_rediscache import RedisCache from dateutil.relativedelta import relativedelta from discord import Colour, HTTPException, Member, Message, NotFound, TextChannel @@ -34,7 +35,11 @@ CODE_BLOCK_RE = re.compile( EVERYONE_PING_RE = re.compile(rf"@everyone|<@&{Guild.id}>|@here") SPOILER_RE = re.compile(r"(\|\|.+?\|\|)", re.DOTALL) URL_RE = re.compile(r"(https?://[^\s]+)", flags=re.IGNORECASE) -ZALGO_RE = re.compile(r"[\u0300-\u036F\u0489]") + +# Exclude variation selectors from zalgo because they're actually invisible. 
+VARIATION_SELECTORS = r"\uFE00-\uFE0F\U000E0100-\U000E01EF" +INVISIBLE_RE = regex.compile(rf"[{VARIATION_SELECTORS}\p{{UNASSIGNED}}\p{{FORMAT}}\p{{CONTROL}}--\s]", regex.V1) +ZALGO_RE = regex.compile(rf"[\p{{NONSPACING MARK}}\p{{ENCLOSING MARK}}--[{VARIATION_SELECTORS}]]", regex.V1) # Other constants. DAYS_BETWEEN_ALERTS = 3 @@ -178,6 +183,7 @@ class Filtering(Cog): def get_name_matches(self, name: str) -> List[re.Match]: """Check bad words from passed string (name). Return list of matches.""" + name = self.clean_input(name) matches = [] watchlist_patterns = self._get_filterlist_items('filter_token', allowed=False) for pattern in watchlist_patterns: @@ -444,6 +450,8 @@ class Filtering(Cog): if SPOILER_RE.search(text): text = self._expand_spoilers(text) + text = self.clean_input(text) + # Make sure it's not a URL if URL_RE.search(text): return False, None @@ -462,6 +470,7 @@ class Filtering(Cog): Second return value is a reason of URL blacklisting (can be None). """ + text = self.clean_input(text) if not URL_RE.search(text): return False, None @@ -492,6 +501,8 @@ class Filtering(Cog): Attempts to catch some of common ways to try to cheat the system. """ + text = self.clean_input(text) + # Remove backslashes to prevent escape character aroundfuckery like # discord\.gg/gdudes-pony-farm text = text.replace("\\", "") @@ -628,6 +639,15 @@ class Filtering(Cog): await self.bot.api_client.delete(f'bot/offensive-messages/{msg["id"]}') log.info(f"Deleted the offensive message with id {msg['id']}.") + @staticmethod + def clean_input(string: str) -> str: + """Remove zalgo and invisible characters from `string`.""" + # For future consideration: remove characters in the Mc, Sk, and Lm categories too. + # Can be normalised with form C to merge char + combining char into a single char to avoid + # removing legit diacritics, but this would open up a way to bypass filters. + no_zalgo = ZALGO_RE.sub("", string) + return INVISIBLE_RE.sub("", no_zalgo) + def setup(bot: Bot) -> None: """Load the Filtering cog.""" diff --git a/bot/exts/filters/pixels_token_remover.py b/bot/exts/filters/pixels_token_remover.py new file mode 100644 index 000000000..2356491e5 --- /dev/null +++ b/bot/exts/filters/pixels_token_remover.py @@ -0,0 +1,108 @@ +import logging +import re +import typing as t + +from discord import Colour, Message, NotFound +from discord.ext.commands import Cog + +from bot.bot import Bot +from bot.constants import Channels, Colours, Event, Icons +from bot.exts.moderation.modlog import ModLog +from bot.utils.messages import format_user + +log = logging.getLogger(__name__) + +LOG_MESSAGE = "Censored a valid Pixels token sent by {author} in {channel}, token was `{token}`" +DELETION_MESSAGE_TEMPLATE = ( + "Hey {mention}! I noticed you posted a valid Pixels API " + "token in your message and have removed your message. " + "This means that your token has been **compromised**. " + "I have taken the liberty of invalidating the token for you. " + "You can go to <https://pixels.pythondiscord.com/authorize> to get a new key." 
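The two filtering patterns above come from the third-party `regex` package rather than the stdlib `re`: the `regex.V1` flag enables character-class set operations such as the `--` difference used to carve variation selectors and whitespace out of the classes. A small demonstration of the resulting `clean_input` behaviour:

import regex  # pip install regex

VARIATION_SELECTORS = r"\uFE00-\uFE0F\U000E0100-\U000E01EF"
INVISIBLE_RE = regex.compile(rf"[{VARIATION_SELECTORS}\p{{UNASSIGNED}}\p{{FORMAT}}\p{{CONTROL}}--\s]", regex.V1)
ZALGO_RE = regex.compile(rf"[\p{{NONSPACING MARK}}\p{{ENCLOSING MARK}}--[{VARIATION_SELECTORS}]]", regex.V1)

def clean_input(string: str) -> str:
    no_zalgo = ZALGO_RE.sub("", string)
    return INVISIBLE_RE.sub("", no_zalgo)

# A combining grave accent (zalgo) and a zero-width space (invisible, category Format)
# are both stripped; ordinary whitespace survives thanks to the `--\s` subtraction.
print(clean_input("b\u0300ad w\u200bord"))  # bad word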
+)
+
+PIXELS_TOKEN_RE = re.compile(r"[A-Za-z0-9-_=]{30,}\.[A-Za-z0-9-_=]{50,}\.[A-Za-z0-9-_.+\=]{30,}")
+
+
+class PixelsTokenRemover(Cog):
+    """Scans messages for Pixels API tokens, removes and invalidates them."""
+
+    def __init__(self, bot: Bot):
+        self.bot = bot
+
+    @property
+    def mod_log(self) -> ModLog:
+        """Get currently loaded ModLog cog instance."""
+        return self.bot.get_cog("ModLog")
+
+    @Cog.listener()
+    async def on_message(self, msg: Message) -> None:
+        """Check each message for a string that matches the RS-256 token pattern."""
+        # Ignore DMs; can't delete messages in there anyway.
+        if not msg.guild or msg.author.bot:
+            return
+
+        found_token = await self.find_token_in_message(msg)
+        if found_token:
+            await self.take_action(msg, found_token)
+
+    @Cog.listener()
+    async def on_message_edit(self, before: Message, after: Message) -> None:
+        """Check each edit for a string that matches the RS-256 token pattern."""
+        await self.on_message(after)
+
+    async def take_action(self, msg: Message, found_token: str) -> None:
+        """Remove the `msg` containing the `found_token` and send a mod log message."""
+        self.mod_log.ignore(Event.message_delete, msg.id)
+
+        try:
+            await msg.delete()
+        except NotFound:
+            log.debug(f"Failed to remove token in message {msg.id}: message already deleted.")
+            return
+
+        await msg.channel.send(DELETION_MESSAGE_TEMPLATE.format(mention=msg.author.mention))
+
+        log_message = self.format_log_message(msg, found_token)
+        log.debug(log_message)
+
+        # Send pretty mod log embed to mod-alerts
+        await self.mod_log.send_log_message(
+            icon_url=Icons.token_removed,
+            colour=Colour(Colours.soft_red),
+            title="Token removed!",
+            text=log_message,
+            thumbnail=msg.author.avatar_url_as(static_format="png"),
+            channel_id=Channels.mod_alerts,
+            ping_everyone=False,
+        )
+
+        self.bot.stats.incr("tokens.removed_pixels_tokens")
+
+    @staticmethod
+    def format_log_message(msg: Message, token: str) -> str:
+        """Return the generic portion of the log message to send for `token` being censored in `msg`."""
+        return LOG_MESSAGE.format(
+            author=format_user(msg.author),
+            channel=msg.channel.mention,
+            token=token
+        )
+
+    async def find_token_in_message(self, msg: Message) -> t.Optional[str]:
+        """Return a seemingly valid token found in `msg` or `None` if no token is found."""
+        # Use finditer rather than search so an early false positive (e.g. `message.channel.send`
+        # also matches the token pattern) doesn't prevent later candidates from being checked.
+        for match in PIXELS_TOKEN_RE.finditer(msg.content):
+            auth_header = {"Authorization": f"Bearer {match[0]}"}
+            async with self.bot.http_session.delete("https://pixels.pythondiscord.com/token", headers=auth_header) as r:
+                if r.status == 204:
+                    # Short circuit on first match.
+                    return match[0]
+
+        # No matching substring
+        return
+
+
+def setup(bot: Bot) -> None:
+    """Load the PixelsTokenRemover cog."""
+    bot.add_cog(PixelsTokenRemover(bot))
diff --git a/bot/exts/fun/duck_pond.py b/bot/exts/fun/duck_pond.py
index ee440dec2..c78b9c141 100644
--- a/bot/exts/fun/duck_pond.py
+++ b/bot/exts/fun/duck_pond.py
@@ -9,7 +9,7 @@ from discord.ext.commands import Cog, Context, command
 from bot import constants
 from bot.bot import Bot
 from bot.utils.checks import has_any_role
-from bot.utils.messages import send_attachments
+from bot.utils.messages import count_unique_users_reaction, send_attachments
 from bot.utils.webhooks import send_webhook
 
 log = logging.getLogger(__name__)
@@ -78,18 +78,12 @@ class DuckPond(Cog):
 
         Only counts ducks added by staff members.
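Scanning with `finditer` means every candidate substring gets validated, so an early false positive cannot mask a real token later in the message. A self-contained sketch with a stubbed validity check standing in for the API call:

import re
import typing as t

PIXELS_TOKEN_RE = re.compile(r"[A-Za-z0-9-_=]{30,}\.[A-Za-z0-9-_=]{50,}\.[A-Za-z0-9-_.+\=]{30,}")

def find_token(content: str, is_valid: t.Callable[[str], bool]) -> t.Optional[str]:
    # Validate every candidate; short circuit on the first confirmed token.
    for match in PIXELS_TOKEN_RE.finditer(content):
        if is_valid(match[0]):
            return match[0]
    return None

decoy = "x" * 30 + "." + "y" * 50 + "." + "z" * 30
token = "a" * 30 + "." + "b" * 50 + "." + "c" * 30
message = f"see {decoy} and {token}"

print(find_token(message, lambda candidate: candidate == token) == token)  # True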
""" - duck_reactors = set() - - # iterate over all reactions - for reaction in message.reactions: - # check if the current reaction is a duck - if not self._is_duck_emoji(reaction.emoji): - continue - - # update the set of reactors with all staff reactors - duck_reactors |= {user.id async for user in reaction.users() if self.is_staff(user)} - - return len(duck_reactors) + return await count_unique_users_reaction( + message, + lambda r: self._is_duck_emoji(r.emoji), + self.is_staff, + False + ) async def relay_message(self, message: Message) -> None: """Relays the message's content and attachments to the duck pond channel.""" diff --git a/bot/exts/help_channels/_caches.py b/bot/exts/help_channels/_caches.py index 4cea385b7..c5e4ee917 100644 --- a/bot/exts/help_channels/_caches.py +++ b/bot/exts/help_channels/_caches.py @@ -8,12 +8,19 @@ claim_times = RedisCache(namespace="HelpChannels.claim_times") # RedisCache[discord.TextChannel.id, t.Union[discord.User.id, discord.Member.id]] claimants = RedisCache(namespace="HelpChannels.help_channel_claimants") +# Stores the timestamp of the last message from the claimant of a help channel +# RedisCache[discord.TextChannel.id, UtcPosixTimestamp] +claimant_last_message_times = RedisCache(namespace="HelpChannels.claimant_last_message_times") + +# This cache maps a help channel to the timestamp of the last non-claimant message. +# This cache being empty for a given help channel indicates the question is unanswered. +# RedisCache[discord.TextChannel.id, UtcPosixTimestamp] +non_claimant_last_message_times = RedisCache(namespace="HelpChannels.non_claimant_last_message_times") + # This cache maps a help channel to original question message in same channel. # RedisCache[discord.TextChannel.id, discord.Message.id] question_messages = RedisCache(namespace="HelpChannels.question_messages") -# This cache maps a help channel to whether it has had any -# activity other than the original claimant. True being no other -# activity and False being other activity. -# RedisCache[discord.TextChannel.id, bool] -unanswered = RedisCache(namespace="HelpChannels.unanswered") +# This cache keeps track of the dynamic message ID for +# the continuously updated message in the #How-to-get-help channel. 
+dynamic_message = RedisCache(namespace="HelpChannels.dynamic_message")
diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py
index 224214b00..0846b28c8 100644
--- a/bot/exts/help_channels/_channel.py
+++ b/bot/exts/help_channels/_channel.py
@@ -1,8 +1,11 @@
 import logging
 import typing as t
-from datetime import datetime, timedelta
+from datetime import timedelta
+from enum import Enum
 
+import arrow
 import discord
+from arrow import Arrow
 
 import bot
 from bot import constants
@@ -15,6 +18,17 @@ MAX_CHANNELS_PER_CATEGORY = 50
 EXCLUDED_CHANNELS = (constants.Channels.cooldown,)
 
 
+class ClosingReason(Enum):
+    """All possible closing reasons for help channels."""
+
+    COMMAND = "command"
+    LATEST_MESSAGE = "auto.latest_message"
+    CLAIMANT_TIMEOUT = "auto.claimant_timeout"
+    OTHER_TIMEOUT = "auto.other_timeout"
+    DELETED = "auto.deleted"
+    CLEANUP = "auto.cleanup"
+
+
 def get_category_channels(category: discord.CategoryChannel) -> t.Iterable[discord.TextChannel]:
     """Yield the text channels of the `category` in an unsorted manner."""
     log.trace(f"Getting text channels in the category '{category}' ({category.id}).")
@@ -25,23 +39,69 @@ def get_category_channels(category: discord.CategoryChannel) -> t.Iterable[disco
     yield channel
 
 
-async def get_idle_time(channel: discord.TextChannel) -> t.Optional[int]:
+async def get_closing_time(channel: discord.TextChannel, init_done: bool) -> t.Tuple[Arrow, ClosingReason]:
     """
-    Return the time elapsed, in seconds, since the last message sent in the `channel`.
+    Return the time at which the given help `channel` should be closed along with the reason.
 
-    Return None if the channel has no messages.
-    """
-    log.trace(f"Getting the idle time for #{channel} ({channel.id}).")
-
-    msg = await _message.get_last_message(channel)
-    if not msg:
-        log.debug(f"No idle time available; #{channel} ({channel.id}) has no messages.")
-        return None
+    `init_done` is True if the cog has finished loading and False otherwise.
 
-    idle_time = (datetime.utcnow() - msg.created_at).seconds
+    The time is calculated as follows:
 
-    log.trace(f"#{channel} ({channel.id}) has been idle for {idle_time} seconds.")
-    return idle_time
+    * If `init_done` is False or the cached time for the claimant's last message is unavailable,
+      add the configured `idle_minutes_claimant` to the time the most recent message was sent.
+    * If the help session is empty (see `is_empty`), do the above but with `deleted_idle_minutes`.
+    * If either of the above is attempted but the channel is completely empty, close the channel
+      immediately.
+    * Otherwise, retrieve the times of the claimant's and non-claimant's last messages from the
+      cache. Add the configured `idle_minutes_claimant` and `idle_minutes_others`, respectively, and
+      choose the time which is furthest in the future.
+    """
+    log.trace(f"Getting the closing time for #{channel} ({channel.id}).")
+
+    is_empty = await _message.is_empty(channel)
+    if is_empty:
+        idle_minutes_claimant = constants.HelpChannels.deleted_idle_minutes
+    else:
+        idle_minutes_claimant = constants.HelpChannels.idle_minutes_claimant
+
+    claimant_time = await _caches.claimant_last_message_times.get(channel.id)
+
+    # The current session lacks messages, the cog is still starting, or the cache is empty.
+    if is_empty or not init_done or claimant_time is None:
+        msg = await _message.get_last_message(channel)
+        if not msg:
+            log.debug(f"No idle time available; #{channel} ({channel.id}) has no messages, closing now.")
+            return Arrow.min, ClosingReason.DELETED
+
+        # Use the greatest offset to avoid the possibility of prematurely closing the channel.
+        time = Arrow.fromdatetime(msg.created_at) + timedelta(minutes=idle_minutes_claimant)
+        reason = ClosingReason.DELETED if is_empty else ClosingReason.LATEST_MESSAGE
+        return time, reason
+
+    claimant_time = Arrow.utcfromtimestamp(claimant_time)
+    others_time = await _caches.non_claimant_last_message_times.get(channel.id)
+
+    if others_time:
+        others_time = Arrow.utcfromtimestamp(others_time)
+    else:
+        # The help session hasn't received any answers (messages from non-claimants) yet.
+        # Set to min value so it isn't considered when calculating the closing time.
+        others_time = Arrow.min
+
+    # Offset the cached times by the configured values.
+    others_time += timedelta(minutes=constants.HelpChannels.idle_minutes_others)
+    claimant_time += timedelta(minutes=idle_minutes_claimant)
+
+    # Use the time which is the furthest into the future.
+    if claimant_time >= others_time:
+        closing_time = claimant_time
+        reason = ClosingReason.CLAIMANT_TIMEOUT
+    else:
+        closing_time = others_time
+        reason = ClosingReason.OTHER_TIMEOUT
+
+    log.trace(f"#{channel} ({channel.id}) should be closed at {closing_time} due to {reason}.")
+    return closing_time, reason
 
 
 async def get_in_use_time(channel_id: int) -> t.Optional[timedelta]:
@@ -50,8 +110,8 @@ async def get_in_use_time(channel_id: int) -> t.Optional[timedelta]:
     claimed_timestamp = await _caches.claim_times.get(channel_id)
 
     if claimed_timestamp:
-        claimed = datetime.utcfromtimestamp(claimed_timestamp)
-        return datetime.utcnow() - claimed
+        claimed = Arrow.utcfromtimestamp(claimed_timestamp)
+        return arrow.utcnow() - claimed
 
 
 def is_excluded_channel(channel: discord.abc.GuildChannel) -> bool:
diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py
index 1c730dce9..5c410a0a1 100644
--- a/bot/exts/help_channels/_cog.py
+++ b/bot/exts/help_channels/_cog.py
@@ -2,16 +2,17 @@ import asyncio
 import logging
 import random
 import typing as t
-from datetime import datetime, timezone
+from datetime import timedelta
 from operator import attrgetter
 
+import arrow
 import discord
 import discord.abc
 from discord.ext import commands
 
 from bot import constants
 from bot.bot import Bot
-from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats
+from bot.exts.help_channels import _caches, _channel, _message, _name, _stats
 from bot.utils import channel as channel_utils, lock, scheduling
 
 log = logging.getLogger(__name__)
@@ -20,6 +21,7 @@ NAMESPACE = "help"
 HELP_CHANNEL_TOPIC = """
This is a Python help channel. You can claim your own help channel in the Python Help: Available category.
"""
+AVAILABLE_HELP_CHANNELS = "**Currently available help channel(s):** {available}"
 
 
 class HelpChannels(commands.Cog):
@@ -43,7 +45,9 @@ class HelpChannels(commands.Cog):
     In Use Category
 
     * Contains all channels which are occupied by someone needing help
-    * Channel moves to dormant category after `constants.HelpChannels.idle_minutes` of being idle
+    * Channel moves to dormant category after
+        - `constants.HelpChannels.idle_minutes_others` minutes since the last non-claimant message, or
+        - `constants.HelpChannels.idle_minutes_claimant` minutes since the last claimant message.
* Command can prematurely mark a channel as dormant * Channel claimant is allowed to use the command * Allowed roles for the command are configurable with `constants.HelpChannels.cmd_whitelist` @@ -70,7 +74,10 @@ class HelpChannels(commands.Cog): self.channel_queue: asyncio.Queue[discord.TextChannel] = None self.name_queue: t.Deque[str] = None - self.last_notification: t.Optional[datetime] = None + self.last_notification: t.Optional[arrow.Arrow] = None + + self.dynamic_message: t.Optional[int] = None + self.available_help_channels: t.Set[discord.TextChannel] = set() # Asyncio stuff self.queue_tasks: t.List[asyncio.Task] = [] @@ -87,6 +94,24 @@ class HelpChannels(commands.Cog): self.scheduler.cancel_all() + async def _handle_role_change(self, member: discord.Member, coro: t.Callable[..., t.Coroutine]) -> None: + """ + Change `member`'s cooldown role via awaiting `coro` and handle errors. + + `coro` is intended to be `discord.Member.add_roles` or `discord.Member.remove_roles`. + """ + try: + await coro(self.bot.get_guild(constants.Guild.id).get_role(constants.Roles.help_cooldown)) + except discord.NotFound: + log.debug(f"Failed to change role for {member} ({member.id}): member not found") + except discord.Forbidden: + log.debug( + f"Forbidden to change role for {member} ({member.id}); " + f"possibly due to role hierarchy" + ) + except discord.HTTPException as e: + log.error(f"Failed to change role for {member} ({member.id}): {e.status} {e.code}") + @lock.lock_arg(NAMESPACE, "message", attrgetter("channel.id")) @lock.lock_arg(NAMESPACE, "message", attrgetter("author.id")) @lock.lock_arg(f"{NAMESPACE}.unclaim", "message", attrgetter("author.id"), wait=True) @@ -99,9 +124,10 @@ class HelpChannels(commands.Cog): """ log.info(f"Channel #{message.channel} was claimed by `{message.author.id}`.") await self.move_to_in_use(message.channel) - await _cooldown.revoke_send_permissions(message.author, self.scheduler) + await self._handle_role_change(message.author, message.author.add_roles) await _message.pin(message) + try: await _message.dm_on_open(message) except Exception as e: @@ -112,11 +138,16 @@ class HelpChannels(commands.Cog): self.bot.stats.incr("help.claimed") - # Must use a timezone-aware datetime to ensure a correct POSIX timestamp. - timestamp = datetime.now(timezone.utc).timestamp() + # datetime.timestamp() would assume it's local, despite d.py giving a (naïve) UTC time. + timestamp = arrow.Arrow.fromdatetime(message.created_at).timestamp() + await _caches.claim_times.set(message.channel.id, timestamp) + await _caches.claimant_last_message_times.set(message.channel.id, timestamp) + # Delete to indicate that the help session has yet to receive an answer. + await _caches.non_claimant_last_message_times.delete(message.channel.id) - await _caches.unanswered.set(message.channel.id, True) + # Removing the help channel from the dynamic message, and editing/sending that message. + self.available_help_channels.remove(message.channel) # Not awaited because it may indefinitely hold the lock while waiting for a channel. scheduling.create_task(self.move_to_available(), name=f"help_claim_{message.id}") @@ -187,7 +218,7 @@ class HelpChannels(commands.Cog): # Don't use a discord.py check because the check needs to fail silently. 
if await self.close_check(ctx): log.info(f"Close command invoked by {ctx.author} in #{ctx.channel}.") - await self.unclaim_channel(ctx.channel, is_auto=False) + await self.unclaim_channel(ctx.channel, closed_on=_channel.ClosingReason.COMMAND) async def get_available_candidate(self) -> discord.TextChannel: """ @@ -233,7 +264,11 @@ class HelpChannels(commands.Cog): elif missing < 0: log.trace(f"Moving {abs(missing)} superfluous available channels over to the Dormant category.") for channel in channels[:abs(missing)]: - await self.unclaim_channel(channel) + await self.unclaim_channel(channel, closed_on=_channel.ClosingReason.CLEANUP) + + # Getting channels that need to be included in the dynamic message. + await self.update_available_help_channels() + log.trace("Dynamic available help message updated.") async def init_categories(self) -> None: """Get the help category objects. Remove the cog if retrieval fails.""" @@ -260,7 +295,6 @@ class HelpChannels(commands.Cog): log.trace("Initialising the cog.") await self.init_categories() - await _cooldown.check_cooldowns(self.scheduler) self.channel_queue = self.create_channel_queue() self.name_queue = _name.create_name_queue( @@ -279,6 +313,10 @@ class HelpChannels(commands.Cog): # This may confuse users. So would potentially long delays for the cog to become ready. self.close_command.enabled = True + # Acquiring the dynamic message ID, if it exists within the cache. + log.trace("Attempting to fetch How-to-get-help dynamic message ID.") + self.dynamic_message = await _caches.dynamic_message.get("message_id") + await self.init_available() _stats.report_counts() @@ -293,26 +331,23 @@ class HelpChannels(commands.Cog): """ log.trace(f"Handling in-use channel #{channel} ({channel.id}).") - if not await _message.is_empty(channel): - idle_seconds = constants.HelpChannels.idle_minutes * 60 - else: - idle_seconds = constants.HelpChannels.deleted_idle_minutes * 60 - - time_elapsed = await _channel.get_idle_time(channel) + closing_time, closed_on = await _channel.get_closing_time(channel, self.init_task.done()) - if time_elapsed is None or time_elapsed >= idle_seconds: + # Closing time is in the past. + # Add 1 second due to POSIX timestamps being lower resolution than datetime objects. + if closing_time < (arrow.utcnow() + timedelta(seconds=1)): log.info( - f"#{channel} ({channel.id}) is idle longer than {idle_seconds} seconds " - f"and will be made dormant." + f"#{channel} ({channel.id}) is idle past {closing_time} " + f"and will be made dormant. Reason: {closed_on.value}" ) - await self.unclaim_channel(channel) + await self.unclaim_channel(channel, closed_on=closed_on) else: # Cancel the existing task, if any. if has_task: self.scheduler.cancel(channel.id) - delay = idle_seconds - time_elapsed + delay = (closing_time - arrow.utcnow()).seconds log.info( f"#{channel} ({channel.id}) is still active; " f"scheduling it to be moved after {delay} seconds." @@ -336,6 +371,10 @@ class HelpChannels(commands.Cog): category_id=constants.Categories.help_available, ) + # Adding the help channel to the dynamic message, and editing/sending that message. 
+        self.available_help_channels.add(channel)
+        await self.update_available_help_channels()
+
         _stats.report_counts()
 
     async def move_to_dormant(self, channel: discord.TextChannel) -> None:
@@ -356,7 +395,7 @@ class HelpChannels(commands.Cog):
         _stats.report_counts()
 
     @lock.lock_arg(f"{NAMESPACE}.unclaim", "channel")
-    async def unclaim_channel(self, channel: discord.TextChannel, *, is_auto: bool = True) -> None:
+    async def unclaim_channel(self, channel: discord.TextChannel, *, closed_on: _channel.ClosingReason) -> None:
         """
         Unclaim an in-use help `channel` to make it dormant.
@@ -364,7 +403,7 @@
         Remove the cooldown role from the channel claimant if they have no other channels claimed.
         Cancel the scheduled cooldown role removal task.
 
-        Set `is_auto` to True if the channel was automatically closed or False if manually closed.
+        `closed_on` is the reason that the channel was closed. See _channel.ClosingReason for possible values.
         """
         claimant_id = await _caches.claimants.get(channel.id)
         _unclaim_channel = self._unclaim_channel
@@ -375,30 +414,30 @@
             decorator = lock.lock_arg(f"{NAMESPACE}.unclaim", "claimant_id", wait=True)
             _unclaim_channel = decorator(_unclaim_channel)
 
-        return await _unclaim_channel(channel, claimant_id, is_auto)
+        return await _unclaim_channel(channel, claimant_id, closed_on)
 
-    async def _unclaim_channel(self, channel: discord.TextChannel, claimant_id: int, is_auto: bool) -> None:
+    async def _unclaim_channel(
+        self,
+        channel: discord.TextChannel,
+        claimant_id: int,
+        closed_on: _channel.ClosingReason
+    ) -> None:
         """Actual implementation of `unclaim_channel`. See that for full documentation."""
         await _caches.claimants.delete(channel.id)
 
-        # Ignore missing tasks because a channel may still be dormant after the cooldown expires.
-        if claimant_id in self.scheduler:
-            self.scheduler.cancel(claimant_id)
-
         claimant = self.bot.get_guild(constants.Guild.id).get_member(claimant_id)
         if claimant is None:
             log.info(f"{claimant_id} left the guild during their help session; the cooldown role won't be removed")
-        elif not any(claimant.id == user_id for _, user_id in await _caches.claimants.items()):
-            # Remove the cooldown role if the claimant has no other channels left
-            await _cooldown.remove_cooldown_role(claimant)
+        else:
+            await self._handle_role_change(claimant, claimant.remove_roles)
 
         await _message.unpin(channel)
-        await _stats.report_complete_session(channel.id, is_auto)
+        await _stats.report_complete_session(channel.id, closed_on)
         await self.move_to_dormant(channel)
 
         # Cancel the task that makes the channel dormant only if called by the close command.
         # In other cases, the task is either already done or non-existent.
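The call-time decoration in `unclaim_channel` above is plain function application: a decorator is just a callable that returns a wrapped callable, so it can be applied conditionally at runtime instead of with `@` syntax at definition time. A minimal sketch with a hypothetical `locked` factory (not the project's `bot.utils.lock` API):

```python
import asyncio
import functools


def locked():  # hypothetical decorator factory
    lock = asyncio.Lock()

    def decorator(func):
        @functools.wraps(func)
        async def wrapper(*args, **kwargs):
            async with lock:
                return await func(*args, **kwargs)
        return wrapper

    return decorator


async def unclaim(channel_id: int) -> None:
    print(f"unclaiming {channel_id}")

# Equivalent to stacking @locked() on the def, but decided at call time:
unclaim = locked()(unclaim)
```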
- if not is_auto: + if closed_on == _channel.ClosingReason.COMMAND: self.scheduler.cancel(channel.id) async def move_to_in_use(self, channel: discord.TextChannel) -> None: @@ -410,7 +449,7 @@ class HelpChannels(commands.Cog): category_id=constants.Categories.help_in_use, ) - timeout = constants.HelpChannels.idle_minutes * 60 + timeout = constants.HelpChannels.idle_minutes_claimant * 60 log.trace(f"Scheduling #{channel} ({channel.id}) to become dormant in {timeout} sec.") self.scheduler.schedule_later(timeout, channel.id, self.move_idle_channel(channel)) @@ -428,7 +467,7 @@ class HelpChannels(commands.Cog): if not _channel.is_excluded_channel(message.channel): await self.claim_channel(message) else: - await _message.check_for_answer(message) + await _message.update_message_caches(message) @commands.Cog.listener() async def on_message_delete(self, msg: discord.Message) -> None: @@ -465,3 +504,34 @@ class HelpChannels(commands.Cog): self.queue_tasks.remove(task) return channel + + async def update_available_help_channels(self) -> None: + """Updates the dynamic message within #how-to-get-help for available help channels.""" + if not self.available_help_channels: + self.available_help_channels = set( + c for c in self.available_category.channels if not _channel.is_excluded_channel(c) + ) + + available_channels = AVAILABLE_HELP_CHANNELS.format( + available=", ".join( + c.mention for c in sorted(self.available_help_channels, key=attrgetter("position")) + ) or None + ) + + if self.dynamic_message is not None: + try: + log.trace("Help channels have changed, dynamic message has been edited.") + await self.bot.http.edit_message( + constants.Channels.how_to_get_help, self.dynamic_message, content=available_channels + ) + except discord.NotFound: + pass + else: + return + + log.trace("Dynamic message could not be edited or found. Creating a new one.") + new_dynamic_message = await self.bot.http.send_message( + constants.Channels.how_to_get_help, available_channels + ) + self.dynamic_message = new_dynamic_message["id"] + await _caches.dynamic_message.set("message_id", self.dynamic_message) diff --git a/bot/exts/help_channels/_cooldown.py b/bot/exts/help_channels/_cooldown.py deleted file mode 100644 index c5c39297f..000000000 --- a/bot/exts/help_channels/_cooldown.py +++ /dev/null @@ -1,95 +0,0 @@ -import logging -from typing import Callable, Coroutine - -import discord - -import bot -from bot import constants -from bot.exts.help_channels import _caches, _channel -from bot.utils.scheduling import Scheduler - -log = logging.getLogger(__name__) -CoroutineFunc = Callable[..., Coroutine] - - -async def add_cooldown_role(member: discord.Member) -> None: - """Add the help cooldown role to `member`.""" - log.trace(f"Adding cooldown role for {member} ({member.id}).") - await _change_cooldown_role(member, member.add_roles) - - -async def check_cooldowns(scheduler: Scheduler) -> None: - """Remove expired cooldowns and re-schedule active ones.""" - log.trace("Checking all cooldowns to remove or re-schedule them.") - guild = bot.instance.get_guild(constants.Guild.id) - cooldown = constants.HelpChannels.claim_minutes * 60 - - for channel_id, member_id in await _caches.claimants.items(): - member = guild.get_member(member_id) - if not member: - continue # Member probably left the guild. - - in_use_time = await _channel.get_in_use_time(channel_id) - - if not in_use_time or in_use_time.seconds > cooldown: - # Remove the role if no claim time could be retrieved or if the cooldown expired. 
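The new `update_available_help_channels` method above uses discord.py's low-level `HTTPClient` calls so it can edit a message by ID without fetching a `Message` object first, and falls back to sending a fresh message and caching the new ID when the old one is gone. A condensed sketch of that edit-or-recreate pattern, using the same `bot.http.edit_message`/`bot.http.send_message` calls as the diff (the helper name is invented):

```python
import typing as t

import discord


async def upsert_status_message(bot, channel_id: int, message_id: t.Optional[int], content: str):
    """Edit the tracked message if it still exists, otherwise send a new one; return its ID."""
    if message_id is not None:
        try:
            await bot.http.edit_message(channel_id, message_id, content=content)
            return message_id
        except discord.NotFound:
            pass  # The tracked message was deleted; fall through and recreate it.

    new_message = await bot.http.send_message(channel_id, content)
    return new_message["id"]
```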
- # Since the channel is in the claimants cache, it is definitely strange for a time - # to not exist. However, it isn't a reason to keep the user stuck with a cooldown. - await remove_cooldown_role(member) - else: - # The member is still on a cooldown; re-schedule it for the remaining time. - delay = cooldown - in_use_time.seconds - scheduler.schedule_later(delay, member.id, remove_cooldown_role(member)) - - -async def remove_cooldown_role(member: discord.Member) -> None: - """Remove the help cooldown role from `member`.""" - log.trace(f"Removing cooldown role for {member} ({member.id}).") - await _change_cooldown_role(member, member.remove_roles) - - -async def revoke_send_permissions(member: discord.Member, scheduler: Scheduler) -> None: - """ - Disallow `member` to send messages in the Available category for a certain time. - - The time until permissions are reinstated can be configured with - `HelpChannels.claim_minutes`. - """ - log.trace( - f"Revoking {member}'s ({member.id}) send message permissions in the Available category." - ) - - await add_cooldown_role(member) - - # Cancel the existing task, if any. - # Would mean the user somehow bypassed the lack of permissions (e.g. user is guild owner). - if member.id in scheduler: - scheduler.cancel(member.id) - - delay = constants.HelpChannels.claim_minutes * 60 - scheduler.schedule_later(delay, member.id, remove_cooldown_role(member)) - - -async def _change_cooldown_role(member: discord.Member, coro_func: CoroutineFunc) -> None: - """ - Change `member`'s cooldown role via awaiting `coro_func` and handle errors. - - `coro_func` is intended to be `discord.Member.add_roles` or `discord.Member.remove_roles`. - """ - guild = bot.instance.get_guild(constants.Guild.id) - role = guild.get_role(constants.Roles.help_cooldown) - if role is None: - log.warning(f"Help cooldown role ({constants.Roles.help_cooldown}) could not be found!") - return - - try: - await coro_func(role) - except discord.NotFound: - log.debug(f"Failed to change role for {member} ({member.id}): member not found") - except discord.Forbidden: - log.debug( - f"Forbidden to change role for {member} ({member.id}); " - f"possibly due to role hierarchy" - ) - except discord.HTTPException as e: - log.error(f"Failed to change role for {member} ({member.id}): {e.status} {e.code}") diff --git a/bot/exts/help_channels/_message.py b/bot/exts/help_channels/_message.py index 36388f9bd..afd698ffe 100644 --- a/bot/exts/help_channels/_message.py +++ b/bot/exts/help_channels/_message.py @@ -1,9 +1,10 @@ import logging import textwrap import typing as t -from datetime import datetime +import arrow import discord +from arrow import Arrow import bot from bot import constants @@ -28,7 +29,7 @@ For more tips, check out our guide on **[asking good questions]({ASKING_GUIDE_UR AVAILABLE_TITLE = "Available help channel" -AVAILABLE_FOOTER = f"Closes after {constants.HelpChannels.idle_minutes} minutes of inactivity or when you send !close." +AVAILABLE_FOOTER = "Closes after a period of inactivity, or when you send !close." DORMANT_MSG = f""" This help channel has been marked as **dormant**, and has been moved into the **Help: Dormant** \ @@ -42,25 +43,27 @@ through our guide for **[asking a good question]({ASKING_GUIDE_URL})**. 
""" -async def check_for_answer(message: discord.Message) -> None: - """Checks for whether new content in a help channel comes from non-claimants.""" +async def update_message_caches(message: discord.Message) -> None: + """Checks the source of new content in a help channel and updates the appropriate cache.""" channel = message.channel # Confirm the channel is an in use help channel if is_in_category(channel, constants.Categories.help_in_use): - log.trace(f"Checking if #{channel} ({channel.id}) has been answered.") + log.trace(f"Checking if #{channel} ({channel.id}) has had a reply.") - # Check if there is an entry in unanswered - if await _caches.unanswered.contains(channel.id): - claimant_id = await _caches.claimants.get(channel.id) - if not claimant_id: - # The mapping for this channel doesn't exist, we can't do anything. - return + claimant_id = await _caches.claimants.get(channel.id) + if not claimant_id: + # The mapping for this channel doesn't exist, we can't do anything. + return - # Check the message did not come from the claimant - if claimant_id != message.author.id: - # Mark the channel as answered - await _caches.unanswered.set(channel.id, False) + # datetime.timestamp() would assume it's local, despite d.py giving a (naïve) UTC time. + timestamp = Arrow.fromdatetime(message.created_at).timestamp() + + # Overwrite the appropriate last message cache depending on the author of the message + if message.author.id == claimant_id: + await _caches.claimant_last_message_times.set(channel.id, timestamp) + else: + await _caches.non_claimant_last_message_times.set(channel.id, timestamp) async def get_last_message(channel: discord.TextChannel) -> t.Optional[discord.Message]: @@ -125,12 +128,12 @@ async def dm_on_open(message: discord.Message) -> None: ) -async def notify(channel: discord.TextChannel, last_notification: t.Optional[datetime]) -> t.Optional[datetime]: +async def notify(channel: discord.TextChannel, last_notification: t.Optional[Arrow]) -> t.Optional[Arrow]: """ Send a message in `channel` notifying about a lack of available help channels. - If a notification was sent, return the `datetime` at which the message was sent. Otherwise, - return None. + If a notification was sent, return the time at which the message was sent. + Otherwise, return None. Configuration: @@ -144,7 +147,7 @@ async def notify(channel: discord.TextChannel, last_notification: t.Optional[dat log.trace("Notifying about lack of channels.") if last_notification: - elapsed = (datetime.utcnow() - last_notification).seconds + elapsed = (arrow.utcnow() - last_notification).seconds minimum_interval = constants.HelpChannels.notify_minutes * 60 should_send = elapsed >= minimum_interval else: @@ -167,7 +170,7 @@ async def notify(channel: discord.TextChannel, last_notification: t.Optional[dat allowed_mentions=discord.AllowedMentions(everyone=False, roles=allowed_roles) ) - return message.created_at + return Arrow.fromdatetime(message.created_at) except Exception: # Handle it here cause this feature isn't critical for the functionality of the system. 
log.exception("Failed to send notification about lack of dormant channels!") diff --git a/bot/exts/help_channels/_stats.py b/bot/exts/help_channels/_stats.py index b8778e7d9..eb34e75e1 100644 --- a/bot/exts/help_channels/_stats.py +++ b/bot/exts/help_channels/_stats.py @@ -22,21 +22,20 @@ def report_counts() -> None: log.warning(f"Couldn't find category {name!r} to track channel count stats.") -async def report_complete_session(channel_id: int, is_auto: bool) -> None: +async def report_complete_session(channel_id: int, closed_on: _channel.ClosingReason) -> None: """ Report stats for a completed help session channel `channel_id`. - Set `is_auto` to True if the channel was automatically closed or False if manually closed. + `closed_on` is the reason why the channel was closed. See `_channel.ClosingReason` for possible reasons. """ - caller = "auto" if is_auto else "command" - bot.instance.stats.incr(f"help.dormant_calls.{caller}") + bot.instance.stats.incr(f"help.dormant_calls.{closed_on.value}") in_use_time = await _channel.get_in_use_time(channel_id) if in_use_time: bot.instance.stats.timing("help.in_use_time", in_use_time) - unanswered = await _caches.unanswered.get(channel_id) - if unanswered: + non_claimant_last_message_time = await _caches.non_claimant_last_message_times.get(channel_id) + if non_claimant_last_message_time is None: bot.instance.stats.incr("help.sessions.unanswered") - elif unanswered is not None: + else: bot.instance.stats.incr("help.sessions.answered") diff --git a/bot/exts/info/code_snippets.py b/bot/exts/info/code_snippets.py new file mode 100644 index 000000000..24a9ae28a --- /dev/null +++ b/bot/exts/info/code_snippets.py @@ -0,0 +1,265 @@ +import logging +import re +import textwrap +from typing import Any +from urllib.parse import quote_plus + +from aiohttp import ClientResponseError +from discord import Message +from discord.ext.commands import Cog + +from bot.bot import Bot +from bot.constants import Channels +from bot.utils.messages import wait_for_deletion + +log = logging.getLogger(__name__) + +GITHUB_RE = re.compile( + r'https://github\.com/(?P<repo>[a-zA-Z0-9-]+/[\w.-]+)/blob/' + r'(?P<path>[^#>]+)(\?[^#>]+)?(#L(?P<start_line>\d+)(([-~:]|(\.\.))L(?P<end_line>\d+))?)' +) + +GITHUB_GIST_RE = re.compile( + r'https://gist\.github\.com/([a-zA-Z0-9-]+)/(?P<gist_id>[a-zA-Z0-9]+)/*' + r'(?P<revision>[a-zA-Z0-9]*)/*#file-(?P<file_path>[^#>]+?)(\?[^#>]+)?' + r'(-L(?P<start_line>\d+)([-~:]L(?P<end_line>\d+))?)' +) + +GITHUB_HEADERS = {'Accept': 'application/vnd.github.v3.raw'} + +GITLAB_RE = re.compile( + r'https://gitlab\.com/(?P<repo>[\w.-]+/[\w.-]+)/\-/blob/(?P<path>[^#>]+)' + r'(\?[^#>]+)?(#L(?P<start_line>\d+)(-(?P<end_line>\d+))?)' +) + +BITBUCKET_RE = re.compile( + r'https://bitbucket\.org/(?P<repo>[a-zA-Z0-9-]+/[\w.-]+)/src/(?P<ref>[0-9a-zA-Z]+)' + r'/(?P<file_path>[^#>]+)(\?[^#>]+)?(#lines-(?P<start_line>\d+)(:(?P<end_line>\d+))?)' +) + + +class CodeSnippets(Cog): + """ + Cog that parses and sends code snippets to Discord. + + Matches each message against a regex and prints the contents of all matched snippets. 
+ """ + + async def _fetch_response(self, url: str, response_format: str, **kwargs) -> Any: + """Makes http requests using aiohttp.""" + async with self.bot.http_session.get(url, raise_for_status=True, **kwargs) as response: + if response_format == 'text': + return await response.text() + elif response_format == 'json': + return await response.json() + + def _find_ref(self, path: str, refs: tuple) -> tuple: + """Loops through all branches and tags to find the required ref.""" + # Base case: there is no slash in the branch name + ref, file_path = path.split('/', 1) + # In case there are slashes in the branch name, we loop through all branches and tags + for possible_ref in refs: + if path.startswith(possible_ref['name'] + '/'): + ref = possible_ref['name'] + file_path = path[len(ref) + 1:] + break + return ref, file_path + + async def _fetch_github_snippet( + self, + repo: str, + path: str, + start_line: str, + end_line: str + ) -> str: + """Fetches a snippet from a GitHub repo.""" + # Search the GitHub API for the specified branch + branches = await self._fetch_response( + f'https://api.github.com/repos/{repo}/branches', + 'json', + headers=GITHUB_HEADERS + ) + tags = await self._fetch_response(f'https://api.github.com/repos/{repo}/tags', 'json', headers=GITHUB_HEADERS) + refs = branches + tags + ref, file_path = self._find_ref(path, refs) + + file_contents = await self._fetch_response( + f'https://api.github.com/repos/{repo}/contents/{file_path}?ref={ref}', + 'text', + headers=GITHUB_HEADERS, + ) + return self._snippet_to_codeblock(file_contents, file_path, start_line, end_line) + + async def _fetch_github_gist_snippet( + self, + gist_id: str, + revision: str, + file_path: str, + start_line: str, + end_line: str + ) -> str: + """Fetches a snippet from a GitHub gist.""" + gist_json = await self._fetch_response( + f'https://api.github.com/gists/{gist_id}{f"/{revision}" if len(revision) > 0 else ""}', + 'json', + headers=GITHUB_HEADERS, + ) + + # Check each file in the gist for the specified file + for gist_file in gist_json['files']: + if file_path == gist_file.lower().replace('.', '-'): + file_contents = await self._fetch_response( + gist_json['files'][gist_file]['raw_url'], + 'text', + ) + return self._snippet_to_codeblock(file_contents, gist_file, start_line, end_line) + return '' + + async def _fetch_gitlab_snippet( + self, + repo: str, + path: str, + start_line: str, + end_line: str + ) -> str: + """Fetches a snippet from a GitLab repo.""" + enc_repo = quote_plus(repo) + + # Searches the GitLab API for the specified branch + branches = await self._fetch_response( + f'https://gitlab.com/api/v4/projects/{enc_repo}/repository/branches', + 'json' + ) + tags = await self._fetch_response(f'https://gitlab.com/api/v4/projects/{enc_repo}/repository/tags', 'json') + refs = branches + tags + ref, file_path = self._find_ref(path, refs) + enc_ref = quote_plus(ref) + enc_file_path = quote_plus(file_path) + + file_contents = await self._fetch_response( + f'https://gitlab.com/api/v4/projects/{enc_repo}/repository/files/{enc_file_path}/raw?ref={enc_ref}', + 'text', + ) + return self._snippet_to_codeblock(file_contents, file_path, start_line, end_line) + + async def _fetch_bitbucket_snippet( + self, + repo: str, + ref: str, + file_path: str, + start_line: str, + end_line: str + ) -> str: + """Fetches a snippet from a BitBucket repo.""" + file_contents = await self._fetch_response( + f'https://bitbucket.org/{quote_plus(repo)}/raw/{quote_plus(ref)}/{quote_plus(file_path)}', + 'text', + ) + return 
self._snippet_to_codeblock(file_contents, file_path, start_line, end_line) + + def _snippet_to_codeblock(self, file_contents: str, file_path: str, start_line: str, end_line: str) -> str: + """ + Given the entire file contents and target lines, creates a code block. + + First, we split the file contents into a list of lines and then keep and join only the required + ones together. + + We then dedent the lines to look nice, and replace all ` characters with `\u200b to prevent + markdown injection. + + Finally, we surround the code with ``` characters. + """ + # Parse start_line and end_line into integers + if end_line is None: + start_line = end_line = int(start_line) + else: + start_line = int(start_line) + end_line = int(end_line) + + split_file_contents = file_contents.splitlines() + + # Make sure that the specified lines are in range + if start_line > end_line: + start_line, end_line = end_line, start_line + if start_line > len(split_file_contents) or end_line < 1: + return '' + start_line = max(1, start_line) + end_line = min(len(split_file_contents), end_line) + + # Gets the code lines, dedents them, and inserts zero-width spaces to prevent Markdown injection + required = '\n'.join(split_file_contents[start_line - 1:end_line]) + required = textwrap.dedent(required).rstrip().replace('`', '`\u200b') + + # Extracts the code language and checks whether it's a "valid" language + language = file_path.split('/')[-1].split('.')[-1] + trimmed_language = language.replace('-', '').replace('+', '').replace('_', '') + is_valid_language = trimmed_language.isalnum() + if not is_valid_language: + language = '' + + # Adds a label showing the file path to the snippet + if start_line == end_line: + ret = f'`{file_path}` line {start_line}\n' + else: + ret = f'`{file_path}` lines {start_line} to {end_line}\n' + + if len(required) != 0: + return f'{ret}```{language}\n{required}```' + # Returns an empty codeblock if the snippet is empty + return f'{ret}``` ```' + + def __init__(self, bot: Bot): + """Initializes the cog's bot.""" + self.bot = bot + + self.pattern_handlers = [ + (GITHUB_RE, self._fetch_github_snippet), + (GITHUB_GIST_RE, self._fetch_github_gist_snippet), + (GITLAB_RE, self._fetch_gitlab_snippet), + (BITBUCKET_RE, self._fetch_bitbucket_snippet) + ] + + @Cog.listener() + async def on_message(self, message: Message) -> None: + """Checks if the message has a snippet link, removes the embed, then sends the snippet contents.""" + if not message.author.bot: + all_snippets = [] + + for pattern, handler in self.pattern_handlers: + for match in pattern.finditer(message.content): + try: + snippet = await handler(**match.groupdict()) + all_snippets.append((match.start(), snippet)) + except ClientResponseError as error: + error_message = error.message # noqa: B306 + log.log( + logging.DEBUG if error.status == 404 else logging.ERROR, + f'Failed to fetch code snippet from {match[0]!r}: {error.status} ' + f'{error_message} for GET {error.request_info.real_url.human_repr()}' + ) + + # Sorts the list of snippets by their match index and joins them into a single message + message_to_send = '\n'.join(map(lambda x: x[1], sorted(all_snippets))) + + if 0 < len(message_to_send) <= 2000 and message_to_send.count('\n') <= 15: + await message.edit(suppress=True) + if len(message_to_send) > 1000 and message.channel.id != Channels.bot_commands: + # Redirects to #bot-commands if the snippet contents are too long + await self.bot.wait_until_guild_available() + await message.channel.send(('The snippet you tried to send was 
too long. Please ' + f'see <#{Channels.bot_commands}> for the full snippet.')) + bot_commands_channel = self.bot.get_channel(Channels.bot_commands) + await wait_for_deletion( + await bot_commands_channel.send(message_to_send), + (message.author.id,) + ) + else: + await wait_for_deletion( + await message.channel.send(message_to_send), + (message.author.id,) + ) + + +def setup(bot: Bot) -> None: + """Load the CodeSnippets cog.""" + bot.add_cog(CodeSnippets(bot)) diff --git a/bot/exts/info/doc.py b/bot/exts/info/doc.py deleted file mode 100644 index 9b5bd6504..000000000 --- a/bot/exts/info/doc.py +++ /dev/null @@ -1,485 +0,0 @@ -import asyncio -import functools -import logging -import re -import textwrap -from contextlib import suppress -from types import SimpleNamespace -from typing import Optional, Tuple - -import discord -from bs4 import BeautifulSoup -from bs4.element import PageElement, Tag -from discord.errors import NotFound -from discord.ext import commands -from markdownify import MarkdownConverter -from requests import ConnectTimeout, ConnectionError, HTTPError -from sphinx.ext import intersphinx -from urllib3.exceptions import ProtocolError - -from bot.bot import Bot -from bot.constants import MODERATION_ROLES, RedirectOutput -from bot.converters import ValidPythonIdentifier, ValidURL -from bot.pagination import LinePaginator -from bot.utils.cache import AsyncCache -from bot.utils.messages import wait_for_deletion - - -log = logging.getLogger(__name__) -logging.getLogger('urllib3').setLevel(logging.WARNING) - -# Since Intersphinx is intended to be used with Sphinx, -# we need to mock its configuration. -SPHINX_MOCK_APP = SimpleNamespace( - config=SimpleNamespace( - intersphinx_timeout=3, - tls_verify=True, - user_agent="python3:python-discord/bot:1.0.0" - ) -) - -NO_OVERRIDE_GROUPS = ( - "2to3fixer", - "token", - "label", - "pdbcommand", - "term", -) -NO_OVERRIDE_PACKAGES = ( - "python", -) - -SEARCH_END_TAG_ATTRS = ( - "data", - "function", - "class", - "exception", - "seealso", - "section", - "rubric", - "sphinxsidebar", -) -UNWANTED_SIGNATURE_SYMBOLS_RE = re.compile(r"\[source]|\\\\|¶") -WHITESPACE_AFTER_NEWLINES_RE = re.compile(r"(?<=\n\n)(\s+)") - -FAILED_REQUEST_RETRY_AMOUNT = 3 -NOT_FOUND_DELETE_DELAY = RedirectOutput.delete_delay - -symbol_cache = AsyncCache() - - -class DocMarkdownConverter(MarkdownConverter): - """Subclass markdownify's MarkdownCoverter to provide custom conversion methods.""" - - def convert_code(self, el: PageElement, text: str) -> str: - """Undo `markdownify`s underscore escaping.""" - return f"`{text}`".replace('\\', '') - - def convert_pre(self, el: PageElement, text: str) -> str: - """Wrap any codeblocks in `py` for syntax highlighting.""" - code = ''.join(el.strings) - return f"```py\n{code}```" - - -def markdownify(html: str) -> DocMarkdownConverter: - """Create a DocMarkdownConverter object from the input html.""" - return DocMarkdownConverter(bullets='•').convert(html) - - -class InventoryURL(commands.Converter): - """ - Represents an Intersphinx inventory URL. - - This converter checks whether intersphinx accepts the given inventory URL, and raises - `BadArgument` if that is not the case. - - Otherwise, it simply passes through the given URL. 
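To make the `CodeSnippets` regexes above concrete: each pattern captures exactly the named groups its fetcher needs, and `on_message` feeds `match.groupdict()` into the handler as keyword arguments. A quick, illustrative check of `GITHUB_RE` (the URL is invented for the example):

```python
import re

GITHUB_RE = re.compile(
    r'https://github\.com/(?P<repo>[a-zA-Z0-9-]+/[\w.-]+)/blob/'
    r'(?P<path>[^#>]+)(\?[^#>]+)?(#L(?P<start_line>\d+)(([-~:]|(\.\.))L(?P<end_line>\d+))?)'
)

match = GITHUB_RE.search('https://github.com/python-discord/bot/blob/main/bot/bot.py#L10-L20')
print(match.groupdict())
# {'repo': 'python-discord/bot', 'path': 'main/bot/bot.py',
#  'start_line': '10', 'end_line': '20'}
```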
- """ - - @staticmethod - async def convert(ctx: commands.Context, url: str) -> str: - """Convert url to Intersphinx inventory URL.""" - try: - intersphinx.fetch_inventory(SPHINX_MOCK_APP, '', url) - except AttributeError: - raise commands.BadArgument(f"Failed to fetch Intersphinx inventory from URL `{url}`.") - except ConnectionError: - if url.startswith('https'): - raise commands.BadArgument( - f"Cannot establish a connection to `{url}`. Does it support HTTPS?" - ) - raise commands.BadArgument(f"Cannot connect to host with URL `{url}`.") - except ValueError: - raise commands.BadArgument( - f"Failed to read Intersphinx inventory from URL `{url}`. " - "Are you sure that it's a valid inventory file?" - ) - return url - - -class Doc(commands.Cog): - """A set of commands for querying & displaying documentation.""" - - def __init__(self, bot: Bot): - self.base_urls = {} - self.bot = bot - self.inventories = {} - self.renamed_symbols = set() - - self.bot.loop.create_task(self.init_refresh_inventory()) - - async def init_refresh_inventory(self) -> None: - """Refresh documentation inventory on cog initialization.""" - await self.bot.wait_until_guild_available() - await self.refresh_inventory() - - async def update_single( - self, package_name: str, base_url: str, inventory_url: str - ) -> None: - """ - Rebuild the inventory for a single package. - - Where: - * `package_name` is the package name to use, appears in the log - * `base_url` is the root documentation URL for the specified package, used to build - absolute paths that link to specific symbols - * `inventory_url` is the absolute URL to the intersphinx inventory, fetched by running - `intersphinx.fetch_inventory` in an executor on the bot's event loop - """ - self.base_urls[package_name] = base_url - - package = await self._fetch_inventory(inventory_url) - if not package: - return None - - for group, value in package.items(): - for symbol, (package_name, _version, relative_doc_url, _) in value.items(): - absolute_doc_url = base_url + relative_doc_url - - if symbol in self.inventories: - group_name = group.split(":")[1] - symbol_base_url = self.inventories[symbol].split("/", 3)[2] - if ( - group_name in NO_OVERRIDE_GROUPS - or any(package in symbol_base_url for package in NO_OVERRIDE_PACKAGES) - ): - - symbol = f"{group_name}.{symbol}" - # If renamed `symbol` already exists, add library name in front to differentiate between them. - if symbol in self.renamed_symbols: - # Split `package_name` because of packages like Pillow that have spaces in them. - symbol = f"{package_name.split()[0]}.{symbol}" - - self.inventories[symbol] = absolute_doc_url - self.renamed_symbols.add(symbol) - continue - - self.inventories[symbol] = absolute_doc_url - - log.trace(f"Fetched inventory for {package_name}.") - - async def refresh_inventory(self) -> None: - """Refresh internal documentation inventory.""" - log.debug("Refreshing documentation inventory...") - - # Clear the old base URLS and inventories to ensure - # that we start from a fresh local dataset. - # Also, reset the cache used for fetching documentation. - self.base_urls.clear() - self.inventories.clear() - self.renamed_symbols.clear() - symbol_cache.clear() - - # Run all coroutines concurrently - since each of them performs a HTTP - # request, this speeds up fetching the inventory data heavily. 
- coros = [ - self.update_single( - package["package"], package["base_url"], package["inventory_url"] - ) for package in await self.bot.api_client.get('bot/documentation-links') - ] - await asyncio.gather(*coros) - - async def get_symbol_html(self, symbol: str) -> Optional[Tuple[list, str]]: - """ - Given a Python symbol, return its signature and description. - - The first tuple element is the signature of the given symbol as a markup-free string, and - the second tuple element is the description of the given symbol with HTML markup included. - - If the given symbol is a module, returns a tuple `(None, str)` - else if the symbol could not be found, returns `None`. - """ - url = self.inventories.get(symbol) - if url is None: - return None - - async with self.bot.http_session.get(url) as response: - html = await response.text(encoding='utf-8') - - # Find the signature header and parse the relevant parts. - symbol_id = url.split('#')[-1] - soup = BeautifulSoup(html, 'lxml') - symbol_heading = soup.find(id=symbol_id) - search_html = str(soup) - - if symbol_heading is None: - return None - - if symbol_id == f"module-{symbol}": - # Get page content from the module headerlink to the - # first tag that has its class in `SEARCH_END_TAG_ATTRS` - start_tag = symbol_heading.find("a", attrs={"class": "headerlink"}) - if start_tag is None: - return [], "" - - end_tag = start_tag.find_next(self._match_end_tag) - if end_tag is None: - return [], "" - - description_start_index = search_html.find(str(start_tag.parent)) + len(str(start_tag.parent)) - description_end_index = search_html.find(str(end_tag)) - description = search_html[description_start_index:description_end_index] - signatures = None - - else: - signatures = [] - description = str(symbol_heading.find_next_sibling("dd")) - description_pos = search_html.find(description) - # Get text of up to 3 signatures, remove unwanted symbols - for element in [symbol_heading] + symbol_heading.find_next_siblings("dt", limit=2): - signature = UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element.text) - if signature and search_html.find(str(element)) < description_pos: - signatures.append(signature) - - return signatures, description.replace('¶', '') - - @symbol_cache(arg_offset=1) - async def get_symbol_embed(self, symbol: str) -> Optional[discord.Embed]: - """ - Attempt to scrape and fetch the data for the given `symbol`, and build an embed from its contents. - - If the symbol is known, an Embed with documentation about it is returned. - """ - scraped_html = await self.get_symbol_html(symbol) - if scraped_html is None: - return None - - signatures = scraped_html[0] - permalink = self.inventories[symbol] - description = markdownify(scraped_html[1]) - - # Truncate the description of the embed to the last occurrence - # of a double newline (interpreted as a paragraph) before index 1000. - if len(description) > 1000: - shortened = description[:1000] - description_cutoff = shortened.rfind('\n\n', 100) - if description_cutoff == -1: - # Search the shortened version for cutoff points in decreasing desirability, - # cutoff at 1000 if none are found. - for string in (". ", ", ", ",", " "): - description_cutoff = shortened.rfind(string) - if description_cutoff != -1: - break - else: - description_cutoff = 1000 - description = description[:description_cutoff] - - # If there is an incomplete code block, cut it out - if description.count("```") % 2: - codeblock_start = description.rfind('```py') - description = description[:codeblock_start].rstrip() - description += f"... 
[read more]({permalink})" - - description = WHITESPACE_AFTER_NEWLINES_RE.sub('', description) - if signatures is None: - # If symbol is a module, don't show signature. - embed_description = description - - elif not signatures: - # It's some "meta-page", for example: - # https://docs.djangoproject.com/en/dev/ref/views/#module-django.views - embed_description = "This appears to be a generic page not tied to a specific symbol." - - else: - embed_description = "".join(f"```py\n{textwrap.shorten(signature, 500)}```" for signature in signatures) - embed_description += f"\n{description}" - - embed = discord.Embed( - title=f'`{symbol}`', - url=permalink, - description=embed_description - ) - # Show all symbols with the same name that were renamed in the footer. - embed.set_footer( - text=", ".join(renamed for renamed in self.renamed_symbols - {symbol} if renamed.endswith(f".{symbol}")) - ) - return embed - - @commands.group(name='docs', aliases=('doc', 'd'), invoke_without_command=True) - async def docs_group(self, ctx: commands.Context, symbol: commands.clean_content = None) -> None: - """Lookup documentation for Python symbols.""" - await self.get_command(ctx, symbol) - - @docs_group.command(name='get', aliases=('g',)) - async def get_command(self, ctx: commands.Context, symbol: commands.clean_content = None) -> None: - """ - Return a documentation embed for a given symbol. - - If no symbol is given, return a list of all available inventories. - - Examples: - !docs - !docs aiohttp - !docs aiohttp.ClientSession - !docs get aiohttp.ClientSession - """ - if symbol is None: - inventory_embed = discord.Embed( - title=f"All inventories (`{len(self.base_urls)}` total)", - colour=discord.Colour.blue() - ) - - lines = sorted(f"• [`{name}`]({url})" for name, url in self.base_urls.items()) - if self.base_urls: - await LinePaginator.paginate(lines, ctx, inventory_embed, max_size=400, empty=False) - - else: - inventory_embed.description = "Hmmm, seems like there's nothing here yet." - await ctx.send(embed=inventory_embed) - - else: - # Fetching documentation for a symbol (at least for the first time, since - # caching is used) takes quite some time, so let's send typing to indicate - # that we got the command, but are still working on it. - async with ctx.typing(): - doc_embed = await self.get_symbol_embed(symbol) - - if doc_embed is None: - error_embed = discord.Embed( - description=f"Sorry, I could not find any documentation for `{symbol}`.", - colour=discord.Colour.red() - ) - error_message = await ctx.send(embed=error_embed) - with suppress(NotFound): - await error_message.delete(delay=NOT_FOUND_DELETE_DELAY) - await ctx.message.delete(delay=NOT_FOUND_DELETE_DELAY) - else: - msg = await ctx.send(embed=doc_embed) - await wait_for_deletion(msg, (ctx.author.id,)) - - @docs_group.command(name='set', aliases=('s',)) - @commands.has_any_role(*MODERATION_ROLES) - async def set_command( - self, ctx: commands.Context, package_name: ValidPythonIdentifier, - base_url: ValidURL, inventory_url: InventoryURL - ) -> None: - """ - Adds a new documentation metadata object to the site's database. - - The database will update the object, should an existing item with the specified `package_name` already exist. 
- - Example: - !docs set \ - python \ - https://docs.python.org/3/ \ - https://docs.python.org/3/objects.inv - """ - body = { - 'package': package_name, - 'base_url': base_url, - 'inventory_url': inventory_url - } - await self.bot.api_client.post('bot/documentation-links', json=body) - - log.info( - f"User @{ctx.author} ({ctx.author.id}) added a new documentation package:\n" - f"Package name: {package_name}\n" - f"Base url: {base_url}\n" - f"Inventory URL: {inventory_url}" - ) - - # Rebuilding the inventory can take some time, so lets send out a - # typing event to show that the Bot is still working. - async with ctx.typing(): - await self.refresh_inventory() - await ctx.send(f"Added package `{package_name}` to database and refreshed inventory.") - - @docs_group.command(name='delete', aliases=('remove', 'rm', 'd')) - @commands.has_any_role(*MODERATION_ROLES) - async def delete_command(self, ctx: commands.Context, package_name: ValidPythonIdentifier) -> None: - """ - Removes the specified package from the database. - - Examples: - !docs delete aiohttp - """ - await self.bot.api_client.delete(f'bot/documentation-links/{package_name}') - - async with ctx.typing(): - # Rebuild the inventory to ensure that everything - # that was from this package is properly deleted. - await self.refresh_inventory() - await ctx.send(f"Successfully deleted `{package_name}` and refreshed inventory.") - - @docs_group.command(name="refresh", aliases=("rfsh", "r")) - @commands.has_any_role(*MODERATION_ROLES) - async def refresh_command(self, ctx: commands.Context) -> None: - """Refresh inventories and send differences to channel.""" - old_inventories = set(self.base_urls) - with ctx.typing(): - await self.refresh_inventory() - # Get differences of added and removed inventories - added = ', '.join(inv for inv in self.base_urls if inv not in old_inventories) - if added: - added = f"+ {added}" - - removed = ', '.join(inv for inv in old_inventories if inv not in self.base_urls) - if removed: - removed = f"- {removed}" - - embed = discord.Embed( - title="Inventories refreshed", - description=f"```diff\n{added}\n{removed}```" if added or removed else "" - ) - await ctx.send(embed=embed) - - async def _fetch_inventory(self, inventory_url: str) -> Optional[dict]: - """Get and return inventory from `inventory_url`. If fetching fails, return None.""" - fetch_func = functools.partial(intersphinx.fetch_inventory, SPHINX_MOCK_APP, '', inventory_url) - for retry in range(1, FAILED_REQUEST_RETRY_AMOUNT+1): - try: - package = await self.bot.loop.run_in_executor(None, fetch_func) - except ConnectTimeout: - log.error( - f"Fetching of inventory {inventory_url} timed out," - f" trying again. ({retry}/{FAILED_REQUEST_RETRY_AMOUNT})" - ) - except ProtocolError: - log.error( - f"Connection lost while fetching inventory {inventory_url}," - f" trying again. 
({retry}/{FAILED_REQUEST_RETRY_AMOUNT})" - ) - except HTTPError as e: - log.error(f"Fetching of inventory {inventory_url} failed with status code {e.response.status_code}.") - return None - except ConnectionError: - log.error(f"Couldn't establish connection to inventory {inventory_url}.") - return None - else: - return package - log.error(f"Fetching of inventory {inventory_url} failed.") - return None - - @staticmethod - def _match_end_tag(tag: Tag) -> bool: - """Matches `tag` if its class value is in `SEARCH_END_TAG_ATTRS` or the tag is table.""" - for attr in SEARCH_END_TAG_ATTRS: - if attr in tag.get("class", ()): - return True - - return tag.name == "table" - - -def setup(bot: Bot) -> None: - """Load the Doc cog.""" - bot.add_cog(Doc(bot)) diff --git a/bot/exts/info/doc/__init__.py b/bot/exts/info/doc/__init__.py new file mode 100644 index 000000000..38a8975c0 --- /dev/null +++ b/bot/exts/info/doc/__init__.py @@ -0,0 +1,16 @@ +from bot.bot import Bot +from ._redis_cache import DocRedisCache + +MAX_SIGNATURE_AMOUNT = 3 +PRIORITY_PACKAGES = ( + "python", +) +NAMESPACE = "doc" + +doc_cache = DocRedisCache(namespace=NAMESPACE) + + +def setup(bot: Bot) -> None: + """Load the Doc cog.""" + from ._cog import DocCog + bot.add_cog(DocCog(bot)) diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py new file mode 100644 index 000000000..369bb462c --- /dev/null +++ b/bot/exts/info/doc/_batch_parser.py @@ -0,0 +1,186 @@ +from __future__ import annotations + +import asyncio +import collections +import logging +from collections import defaultdict +from contextlib import suppress +from operator import attrgetter +from typing import Deque, Dict, List, NamedTuple, Optional, Union + +import discord +from bs4 import BeautifulSoup + +import bot +from bot.constants import Channels +from bot.utils import scheduling +from . import _cog, doc_cache +from ._parsing import get_symbol_markdown + +log = logging.getLogger(__name__) + + +class StaleInventoryNotifier: + """Handle sending notifications about stale inventories through `DocItem`s to dev log.""" + + def __init__(self): + self._init_task = bot.instance.loop.create_task( + self._init_channel(), + name="StaleInventoryNotifier channel init" + ) + self._warned_urls = set() + + async def _init_channel(self) -> None: + """Wait for guild and get channel.""" + await bot.instance.wait_until_guild_available() + self._dev_log = bot.instance.get_channel(Channels.dev_log) + + async def send_warning(self, doc_item: _cog.DocItem) -> None: + """Send a warning to dev log if one wasn't already sent for `item`'s url.""" + if doc_item.url not in self._warned_urls: + self._warned_urls.add(doc_item.url) + await self._init_task + embed = discord.Embed( + description=f"Doc item `{doc_item.symbol_id=}` present in loaded documentation inventories " + f"not found on [site]({doc_item.url}), inventories may need to be refreshed." + ) + await self._dev_log.send(embed=embed) + + +class QueueItem(NamedTuple): + """Contains a `DocItem` and the `BeautifulSoup` object needed to parse it.""" + + doc_item: _cog.DocItem + soup: BeautifulSoup + + def __eq__(self, other: Union[QueueItem, _cog.DocItem]): + if isinstance(other, _cog.DocItem): + return self.doc_item == other + return NamedTuple.__eq__(self, other) + + +class ParseResultFuture(asyncio.Future): + """ + Future with metadata for the parser class. 
+
+    `user_requested` is set by the parser when a Future is requested by a user and moved to the front,
+    so that, when clearing, only the futures that were user requested are awaited.
+    """
+
+    def __init__(self):
+        super().__init__()
+        self.user_requested = False
+
+
+class BatchParser:
+    """
+    Get the Markdown of all symbols on a page and send them to redis when a symbol is requested.
+
+    DocItems are added through the `add_item` method which adds them to the `_page_doc_items` dict.
+    `get_markdown` is used to fetch the Markdown; when this is used for the first time on a page,
+    all of the symbols are queued to be parsed to avoid multiple web requests to the same page.
+    """
+
+    def __init__(self):
+        self._queue: Deque[QueueItem] = collections.deque()
+        self._page_doc_items: Dict[str, List[_cog.DocItem]] = defaultdict(list)
+        self._item_futures: Dict[_cog.DocItem, ParseResultFuture] = defaultdict(ParseResultFuture)
+        self._parse_task = None
+
+        self.stale_inventory_notifier = StaleInventoryNotifier()
+
+    async def get_markdown(self, doc_item: _cog.DocItem) -> Optional[str]:
+        """
+        Get the result Markdown of `doc_item`.
+
+        If no symbols were fetched from `doc_item`'s page before,
+        the HTML has to be fetched and then all items from the page are put into the parse queue.
+
+        Not safe to run while `self.clear` is running.
+        """
+        if doc_item not in self._item_futures and doc_item not in self._queue:
+            self._item_futures[doc_item].user_requested = True
+
+            async with bot.instance.http_session.get(doc_item.url) as response:
+                soup = await bot.instance.loop.run_in_executor(
+                    None,
+                    BeautifulSoup,
+                    await response.text(encoding="utf8"),
+                    "lxml",
+                )
+
+            self._queue.extendleft(QueueItem(item, soup) for item in self._page_doc_items[doc_item.url])
+            log.debug(f"Added items from {doc_item.url} to the parse queue.")
+
+            if self._parse_task is None:
+                self._parse_task = scheduling.create_task(self._parse_queue(), name="Queue parse")
+        else:
+            self._item_futures[doc_item].user_requested = True
+            with suppress(ValueError):
+                # If the item is not in the queue then the item is already parsed or is being parsed
+                self._move_to_front(doc_item)
+        return await self._item_futures[doc_item]
+
+    async def _parse_queue(self) -> None:
+        """
+        Parse all items from the queue, setting their result Markdown on the futures and sending them to redis.
+
+        The coroutine will run as long as the queue is not empty, resetting `self._parse_task` to None when finished.
+        """
+        log.trace("Starting queue parsing.")
+        try:
+            while self._queue:
+                item, soup = self._queue.pop()
+                markdown = None
+
+                if (future := self._item_futures[item]).done():
+                    # Some items are present in the inventories multiple times under different symbol names;
+                    # if we already parsed an equal item, we can just skip it.
+                    continue
+
+                try:
+                    markdown = await bot.instance.loop.run_in_executor(None, get_symbol_markdown, soup, item)
+                    if markdown is not None:
+                        await doc_cache.set(item, markdown)
+                    else:
+                        # Don't wait for this coro as the parsing doesn't depend on anything it does.
+ scheduling.create_task( + self.stale_inventory_notifier.send_warning(item), name="Stale inventory warning" + ) + except Exception: + log.exception(f"Unexpected error when handling {item}") + future.set_result(markdown) + del self._item_futures[item] + await asyncio.sleep(0.1) + finally: + self._parse_task = None + log.trace("Finished parsing queue.") + + def _move_to_front(self, item: Union[QueueItem, _cog.DocItem]) -> None: + """Move `item` to the front of the parse queue.""" + # The parse queue stores soups along with the doc symbols in QueueItem objects, + # in case we're moving a DocItem we have to get the associated QueueItem first and then move it. + item_index = self._queue.index(item) + queue_item = self._queue[item_index] + del self._queue[item_index] + + self._queue.append(queue_item) + log.trace(f"Moved {item} to the front of the queue.") + + def add_item(self, doc_item: _cog.DocItem) -> None: + """Map a DocItem to its page so that the symbol will be parsed once the page is requested.""" + self._page_doc_items[doc_item.url].append(doc_item) + + async def clear(self) -> None: + """ + Clear all internal symbol data. + + Wait for all user-requested symbols to be parsed before clearing the parser. + """ + for future in filter(attrgetter("user_requested"), self._item_futures.values()): + await future + if self._parse_task is not None: + self._parse_task.cancel() + self._queue.clear() + self._page_doc_items.clear() + self._item_futures.clear() diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py new file mode 100644 index 000000000..c54a3ee1c --- /dev/null +++ b/bot/exts/info/doc/_cog.py @@ -0,0 +1,447 @@ +from __future__ import annotations + +import asyncio +import logging +import sys +import textwrap +from collections import defaultdict +from contextlib import suppress +from types import SimpleNamespace +from typing import Dict, NamedTuple, Optional, Tuple, Union + +import aiohttp +import discord +from discord.ext import commands + +from bot.bot import Bot +from bot.constants import MODERATION_ROLES, RedirectOutput +from bot.converters import Inventory, PackageName, ValidURL, allowed_strings +from bot.pagination import LinePaginator +from bot.utils.lock import SharedEvent, lock +from bot.utils.messages import send_denial, wait_for_deletion +from bot.utils.scheduling import Scheduler +from . 
import NAMESPACE, PRIORITY_PACKAGES, _batch_parser, doc_cache
+from ._inventory_parser import InventoryDict, fetch_inventory
+
+log = logging.getLogger(__name__)
+
+# symbols with a group contained here will get the group prefixed on duplicates
+FORCE_PREFIX_GROUPS = (
+    "term",
+    "label",
+    "token",
+    "doc",
+    "pdbcommand",
+    "2to3fixer",
+)
+NOT_FOUND_DELETE_DELAY = RedirectOutput.delete_delay
+# Delay to wait before trying to reach a rescheduled inventory again, in minutes
+FETCH_RESCHEDULE_DELAY = SimpleNamespace(first=2, repeated=5)
+
+COMMAND_LOCK_SINGLETON = "inventory refresh"
+
+
+class DocItem(NamedTuple):
+    """Holds inventory symbol information."""
+
+    package: str  # Name of the package the symbol is from
+    group: str  # Intersphinx "role" of the symbol, for example `label` or `method`
+    base_url: str  # Absolute path to which the relative path resolves, same for all items with the same package
+    relative_url_path: str  # Relative path to the page where the symbol is located
+    symbol_id: str  # Fragment id used to locate the symbol on the page
+
+    @property
+    def url(self) -> str:
+        """Return the absolute url to the symbol."""
+        return self.base_url + self.relative_url_path
+
+
+class DocCog(commands.Cog):
+    """A set of commands for querying & displaying documentation."""
+
+    def __init__(self, bot: Bot):
+        # Contains URLs to documentation home pages.
+        # Used to calculate inventory diffs on refreshes and to display all currently stored inventories.
+        self.base_urls = {}
+        self.bot = bot
+        self.doc_symbols: Dict[str, DocItem] = {}  # Maps symbol names to objects containing their metadata.
+        self.item_fetcher = _batch_parser.BatchParser()
+        # Maps a conflicting symbol name to a list of the new, disambiguated names created from conflicts with the name.
+        self.renamed_symbols = defaultdict(list)
+
+        self.inventory_scheduler = Scheduler(self.__class__.__name__)
+
+        self.refresh_event = asyncio.Event()
+        self.refresh_event.set()
+        self.symbol_get_event = SharedEvent()
+
+        self.init_refresh_task = self.bot.loop.create_task(
+            self.init_refresh_inventory(),
+            name="Doc inventory init"
+        )
+
+    @lock(NAMESPACE, COMMAND_LOCK_SINGLETON, raise_error=True)
+    async def init_refresh_inventory(self) -> None:
+        """Refresh documentation inventory on cog initialization."""
+        await self.bot.wait_until_guild_available()
+        await self.refresh_inventories()
+
+    def update_single(self, package_name: str, base_url: str, inventory: InventoryDict) -> None:
+        """
+        Build the inventory for a single package.
+
+        Where:
+            * `package_name` is the package name to use in logs and when qualifying symbols
+            * `base_url` is the root documentation URL for the specified package, used to build
+                absolute paths that link to specific symbols
+            * `inventory` is the content of an intersphinx inventory.
+        """
+        self.base_urls[package_name] = base_url
+
+        for group, items in inventory.items():
+            for symbol_name, relative_doc_url in items:
+
+                # e.g. get 'class' from 'py:class'
+                group_name = group.split(":")[1]
+                symbol_name = self.ensure_unique_symbol_name(
+                    package_name,
+                    group_name,
+                    symbol_name,
+                )
+
+                relative_url_path, _, symbol_id = relative_doc_url.partition("#")
+                # Intern fields that have shared content so we're not storing unique strings for every object
+                doc_item = DocItem(
+                    package_name,
+                    sys.intern(group_name),
+                    base_url,
+                    sys.intern(relative_url_path),
+                    symbol_id,
+                )
+                self.doc_symbols[symbol_name] = doc_item
+                self.item_fetcher.add_item(doc_item)
+
+        log.trace(f"Fetched inventory for {package_name}.")
+
+    async def update_or_reschedule_inventory(
+        self,
+        api_package_name: str,
+        base_url: str,
+        inventory_url: str,
+    ) -> None:
+        """
+        Update the cog's inventories, or reschedule this method to execute again if the remote inventory is unreachable.
+
+        The first attempt is rescheduled to execute in `FETCH_RESCHEDULE_DELAY.first` minutes, the subsequent attempts
+        in `FETCH_RESCHEDULE_DELAY.repeated` minutes.
+        """
+        package = await fetch_inventory(inventory_url)
+
+        if not package:
+            if api_package_name in self.inventory_scheduler:
+                self.inventory_scheduler.cancel(api_package_name)
+                delay = FETCH_RESCHEDULE_DELAY.repeated
+            else:
+                delay = FETCH_RESCHEDULE_DELAY.first
+            log.info(f"Failed to fetch inventory; attempting again in {delay} minutes.")
+            self.inventory_scheduler.schedule_later(
+                delay*60,
+                api_package_name,
+                self.update_or_reschedule_inventory(api_package_name, base_url, inventory_url),
+            )
+        else:
+            self.update_single(api_package_name, base_url, package)
+
+    def ensure_unique_symbol_name(self, package_name: str, group_name: str, symbol_name: str) -> str:
+        """
+        Ensure `symbol_name` doesn't overwrite another symbol in `doc_symbols`.
+
+        For conflicts, rename either the current symbol or the existing symbol with which it conflicts.
+        Store the new name in `renamed_symbols` and return the name to use for the symbol.
+
+        If the existing symbol was renamed or there was no conflict, the returned name is equivalent to `symbol_name`.
+        """
+        if (item := self.doc_symbols.get(symbol_name)) is None:
+            return symbol_name  # There's no conflict so it's fine to simply use the given symbol name.
+
+        def rename(prefix: str, *, rename_extant: bool = False) -> str:
+            new_name = f"{prefix}.{symbol_name}"
+            if new_name in self.doc_symbols:
+                # If there's still a conflict, qualify the name further.
+                if rename_extant:
+                    new_name = f"{item.package}.{item.group}.{symbol_name}"
+                else:
+                    new_name = f"{package_name}.{group_name}.{symbol_name}"
+
+            self.renamed_symbols[symbol_name].append(new_name)
+
+            if rename_extant:
+                # Instead of renaming the current symbol, rename the symbol with which it conflicts.
+                self.doc_symbols[new_name] = self.doc_symbols[symbol_name]
+                return symbol_name
+            else:
+                return new_name
+
+        # When there's a conflict, and the package names of the items differ, use the package name as a prefix.
+        if package_name != item.package:
+            if package_name in PRIORITY_PACKAGES:
+                return rename(item.package, rename_extant=True)
+            else:
+                return rename(package_name)
+
+        # If the symbol's group is a non-priority group from FORCE_PREFIX_GROUPS,
+        # add it as a prefix to disambiguate the symbols.
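To make the renaming rules in `ensure_unique_symbol_name` concrete with invented inputs: if `Session` is already stored for the package `requests` and `aiohttp` later provides its own `Session`, the newcomer is stored as `aiohttp.Session`; if the later provider is instead `python` (a `PRIORITY_PACKAGES` entry), the existing symbol is moved to `requests.Session` and the bare `Session` now resolves to the python item; and when a `term` symbol `faq` collides with an existing `label` symbol of the same name (both groups are in `FORCE_PREFIX_GROUPS`, with `term` listed first), the existing symbol is moved to `label.faq` while the bare `faq` resolves to the term.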
+ elif group_name in FORCE_PREFIX_GROUPS: + if item.group in FORCE_PREFIX_GROUPS: + needs_moving = FORCE_PREFIX_GROUPS.index(group_name) < FORCE_PREFIX_GROUPS.index(item.group) + else: + needs_moving = False + return rename(item.group if needs_moving else group_name, rename_extant=needs_moving) + + # If the above conditions didn't pass, either the existing symbol has its group in FORCE_PREFIX_GROUPS, + # or deciding which item to rename would be arbitrary, so we rename the existing symbol. + else: + return rename(item.group, rename_extant=True) + + async def refresh_inventories(self) -> None: + """Refresh internal documentation inventories.""" + self.refresh_event.clear() + await self.symbol_get_event.wait() + log.debug("Refreshing documentation inventory...") + self.inventory_scheduler.cancel_all() + + self.base_urls.clear() + self.doc_symbols.clear() + self.renamed_symbols.clear() + await self.item_fetcher.clear() + + coros = [ + self.update_or_reschedule_inventory( + package["package"], package["base_url"], package["inventory_url"] + ) for package in await self.bot.api_client.get("bot/documentation-links") + ] + await asyncio.gather(*coros) + log.debug("Finished inventory refresh.") + self.refresh_event.set() + + def get_symbol_item(self, symbol_name: str) -> Tuple[str, Optional[DocItem]]: + """ + Get the `DocItem` and the symbol name used to fetch it from the `doc_symbols` dict. + + If the doc item is not found directly from the passed in name and the name contains a space, + the first word of the name will be attempted to be used to get the item. + """ + doc_item = self.doc_symbols.get(symbol_name) + if doc_item is None and " " in symbol_name: + symbol_name = symbol_name.split(" ", maxsplit=1)[0] + doc_item = self.doc_symbols.get(symbol_name) + + return symbol_name, doc_item + + async def get_symbol_markdown(self, doc_item: DocItem) -> str: + """ + Get the Markdown from the symbol `doc_item` refers to. + + First a redis lookup is attempted, if that fails the `item_fetcher` + is used to fetch the page and parse the HTML from it into Markdown. + """ + markdown = await doc_cache.get(doc_item) + + if markdown is None: + log.debug(f"Redis cache miss with {doc_item}.") + try: + markdown = await self.item_fetcher.get_markdown(doc_item) + + except aiohttp.ClientError as e: + log.warning(f"A network error has occurred when requesting parsing of {doc_item}.", exc_info=e) + return "Unable to parse the requested symbol due to a network error." + + except Exception: + log.exception(f"An unexpected error has occurred when requesting parsing of {doc_item}.") + return "Unable to parse the requested symbol due to an error." + + if markdown is None: + return "Unable to parse the requested symbol." + return markdown + + async def create_symbol_embed(self, symbol_name: str) -> Optional[discord.Embed]: + """ + Attempt to scrape and fetch the data for the given `symbol_name`, and build an embed from its contents. + + If the symbol is known, an Embed with documentation about it is returned. + + First check the DocRedisCache before querying the cog's `BatchParser`. 
+ """ + log.trace(f"Building embed for symbol `{symbol_name}`") + if not self.refresh_event.is_set(): + log.debug("Waiting for inventories to be refreshed before processing item.") + await self.refresh_event.wait() + # Ensure a refresh can't run in case of a context switch until the with block is exited + with self.symbol_get_event: + symbol_name, doc_item = self.get_symbol_item(symbol_name) + if doc_item is None: + log.debug("Symbol does not exist.") + return None + + self.bot.stats.incr(f"doc_fetches.{doc_item.package}") + + # Show all symbols with the same name that were renamed in the footer, + # with a max of 200 chars. + if symbol_name in self.renamed_symbols: + renamed_symbols = ", ".join(self.renamed_symbols[symbol_name]) + footer_text = textwrap.shorten("Similar names: " + renamed_symbols, 200, placeholder=" ...") + else: + footer_text = "" + + embed = discord.Embed( + title=discord.utils.escape_markdown(symbol_name), + url=f"{doc_item.url}#{doc_item.symbol_id}", + description=await self.get_symbol_markdown(doc_item) + ) + embed.set_footer(text=footer_text) + return embed + + @commands.group(name="docs", aliases=("doc", "d"), invoke_without_command=True) + async def docs_group(self, ctx: commands.Context, *, symbol_name: Optional[str]) -> None: + """Look up documentation for Python symbols.""" + await self.get_command(ctx, symbol_name=symbol_name) + + @docs_group.command(name="getdoc", aliases=("g",)) + async def get_command(self, ctx: commands.Context, *, symbol_name: Optional[str]) -> None: + """ + Return a documentation embed for a given symbol. + + If no symbol is given, return a list of all available inventories. + + Examples: + !docs + !docs aiohttp + !docs aiohttp.ClientSession + !docs getdoc aiohttp.ClientSession + """ + if not symbol_name: + inventory_embed = discord.Embed( + title=f"All inventories (`{len(self.base_urls)}` total)", + colour=discord.Colour.blue() + ) + + lines = sorted(f"• [`{name}`]({url})" for name, url in self.base_urls.items()) + if self.base_urls: + await LinePaginator.paginate(lines, ctx, inventory_embed, max_size=400, empty=False) + + else: + inventory_embed.description = "Hmmm, seems like there's nothing here yet." + await ctx.send(embed=inventory_embed) + + else: + symbol = symbol_name.strip("`") + async with ctx.typing(): + doc_embed = await self.create_symbol_embed(symbol) + + if doc_embed is None: + error_message = await send_denial(ctx, "No documentation found for the requested symbol.") + await wait_for_deletion(error_message, (ctx.author.id,), timeout=NOT_FOUND_DELETE_DELAY) + with suppress(discord.NotFound): + await ctx.message.delete() + with suppress(discord.NotFound): + await error_message.delete() + else: + msg = await ctx.send(embed=doc_embed) + await wait_for_deletion(msg, (ctx.author.id,)) + + @docs_group.command(name="setdoc", aliases=("s",)) + @commands.has_any_role(*MODERATION_ROLES) + @lock(NAMESPACE, COMMAND_LOCK_SINGLETON, raise_error=True) + async def set_command( + self, + ctx: commands.Context, + package_name: PackageName, + base_url: ValidURL, + inventory: Inventory, + ) -> None: + """ + Adds a new documentation metadata object to the site's database. + + The database will update the object, should an existing item with the specified `package_name` already exist. 
+ + Example: + !docs setdoc \ + python \ + https://docs.python.org/3/ \ + https://docs.python.org/3/objects.inv + """ + if not base_url.endswith("/"): + raise commands.BadArgument("The base url must end with a slash.") + inventory_url, inventory_dict = inventory + body = { + "package": package_name, + "base_url": base_url, + "inventory_url": inventory_url + } + await self.bot.api_client.post("bot/documentation-links", json=body) + + log.info( + f"User @{ctx.author} ({ctx.author.id}) added a new documentation package:\n" + + "\n".join(f"{key}: {value}" for key, value in body.items()) + ) + + self.update_single(package_name, base_url, inventory_dict) + await ctx.send(f"Added the package `{package_name}` to the database and updated the inventories.") + + @docs_group.command(name="deletedoc", aliases=("removedoc", "rm", "d")) + @commands.has_any_role(*MODERATION_ROLES) + @lock(NAMESPACE, COMMAND_LOCK_SINGLETON, raise_error=True) + async def delete_command(self, ctx: commands.Context, package_name: PackageName) -> None: + """ + Removes the specified package from the database. + + Example: + !docs deletedoc aiohttp + """ + await self.bot.api_client.delete(f"bot/documentation-links/{package_name}") + + async with ctx.typing(): + await self.refresh_inventories() + await doc_cache.delete(package_name) + await ctx.send(f"Successfully deleted `{package_name}` and refreshed the inventories.") + + @docs_group.command(name="refreshdoc", aliases=("rfsh", "r")) + @commands.has_any_role(*MODERATION_ROLES) + @lock(NAMESPACE, COMMAND_LOCK_SINGLETON, raise_error=True) + async def refresh_command(self, ctx: commands.Context) -> None: + """Refresh inventories and show the difference.""" + old_inventories = set(self.base_urls) + with ctx.typing(): + await self.refresh_inventories() + new_inventories = set(self.base_urls) + + if added := ", ".join(new_inventories - old_inventories): + added = "+ " + added + + if removed := ", ".join(old_inventories - new_inventories): + removed = "- " + removed + + embed = discord.Embed( + title="Inventories refreshed", + description=f"```diff\n{added}\n{removed}```" if added or removed else "" + ) + await ctx.send(embed=embed) + + @docs_group.command(name="cleardoccache", aliases=("deletedoccache",)) + @commands.has_any_role(*MODERATION_ROLES) + async def clear_cache_command( + self, + ctx: commands.Context, + package_name: Union[PackageName, allowed_strings("*")] # noqa: F722 + ) -> None: + """Clear the persistent redis cache for `package`.""" + if await doc_cache.delete(package_name): + await ctx.send(f"Successfully cleared the cache for `{package_name}`.") + else: + await ctx.send("No keys matching the package found.") + + def cog_unload(self) -> None: + """Clear scheduled inventories, queued symbols and cleanup task on cog unload.""" + self.inventory_scheduler.cancel_all() + self.init_refresh_task.cancel() + asyncio.create_task(self.item_fetcher.clear(), name="DocCog.item_fetcher unload clear") diff --git a/bot/exts/info/doc/_html.py b/bot/exts/info/doc/_html.py new file mode 100644 index 000000000..94efd81b7 --- /dev/null +++ b/bot/exts/info/doc/_html.py @@ -0,0 +1,136 @@ +import logging +import re +from functools import partial +from typing import Callable, Container, Iterable, List, Union + +from bs4 import BeautifulSoup +from bs4.element import NavigableString, PageElement, SoupStrainer, Tag + +from . 
import MAX_SIGNATURE_AMOUNT
+
+log = logging.getLogger(__name__)
+
+_UNWANTED_SIGNATURE_SYMBOLS_RE = re.compile(r"\[source]|\\\\|¶")
+_SEARCH_END_TAG_ATTRS = (
+    "data",
+    "function",
+    "class",
+    "exception",
+    "seealso",
+    "section",
+    "rubric",
+    "sphinxsidebar",
+)
+
+
+class Strainer(SoupStrainer):
+    """Subclass of SoupStrainer to allow matching of both `Tag`s and `NavigableString`s."""
+
+    def __init__(self, *, include_strings: bool, **kwargs):
+        self.include_strings = include_strings
+        passed_text = kwargs.pop("text", None)
+        if passed_text is not None:
+            log.warning("`text` is not a supported kwarg in the custom strainer.")
+        super().__init__(**kwargs)
+
+    Markup = Union[PageElement, List["Markup"]]
+
+    def search(self, markup: Markup) -> Union[PageElement, str]:
+        """Extend default SoupStrainer behaviour to allow matching both `Tag`s and `NavigableString`s."""
+        if isinstance(markup, str):
+            # Let everything through the text filter if we're including strings and tags.
+            if not self.name and not self.attrs and self.include_strings:
+                return markup
+        else:
+            return super().search(markup)
+
+
+def _find_elements_until_tag(
+    start_element: PageElement,
+    end_tag_filter: Union[Container[str], Callable[[Tag], bool]],
+    *,
+    func: Callable,
+    include_strings: bool = False,
+    limit: int = None,
+) -> List[Union[Tag, NavigableString]]:
+    """
+    Get all elements up to `limit` or until a tag matching `end_tag_filter` is found.
+
+    `end_tag_filter` can be either a container of string names to check against,
+    or a filtering callable that's applied to tags.
+
+    When `include_strings` is True, `NavigableString`s from the document will be included in the result alongside `Tag`s.
+
+    `func` takes in a BeautifulSoup unbound method for finding multiple elements, such as `BeautifulSoup.find_all`.
+    The method's results are then iterated over, and all elements up to the matching tag or the limit are added
+    to the return list.
+    """
+    use_container_filter = not callable(end_tag_filter)
+    elements = []
+
+    for element in func(start_element, name=Strainer(include_strings=include_strings), limit=limit):
+        if isinstance(element, Tag):
+            if use_container_filter:
+                if element.name in end_tag_filter:
+                    break
+            elif end_tag_filter(element):
+                break
+        elements.append(element)
+
+    return elements
+
+
+_find_next_children_until_tag = partial(_find_elements_until_tag, func=partial(BeautifulSoup.find_all, recursive=False))
+_find_recursive_children_until_tag = partial(_find_elements_until_tag, func=BeautifulSoup.find_all)
+_find_next_siblings_until_tag = partial(_find_elements_until_tag, func=BeautifulSoup.find_next_siblings)
+_find_previous_siblings_until_tag = partial(_find_elements_until_tag, func=BeautifulSoup.find_previous_siblings)
+
+
+def _class_filter_factory(class_names: Iterable[str]) -> Callable[[Tag], bool]:
+    """Create a callable that returns True when the passed-in tag's class is in `class_names` or when it's a table."""
+    def match_tag(tag: Tag) -> bool:
+        for attr in class_names:
+            if attr in tag.get("class", ()):
+                return True
+        return tag.name == "table"
+
+    return match_tag
+
+
+def get_general_description(start_element: Tag) -> List[Union[Tag, NavigableString]]:
+    """
+    Get page content up to a table or a tag with its class in `_SEARCH_END_TAG_ATTRS`.
+
+    We attempt to find a headerlink tag and, if found, start the search from it instead of `start_element`,
+    to avoid repeating the symbol information in the description.
+    """
+    child_tags = _find_recursive_children_until_tag(start_element, _class_filter_factory(["section"]), limit=100)
+    header = next(filter(_class_filter_factory(["headerlink"]), child_tags), None)
+    start_tag = header.parent if header is not None else start_element
+    return _find_next_siblings_until_tag(start_tag, _class_filter_factory(_SEARCH_END_TAG_ATTRS), include_strings=True)
+
+
+def get_dd_description(symbol: PageElement) -> List[Union[Tag, NavigableString]]:
+    """Get the contents of the next dd tag, up to a dt or a dl tag."""
+    description_tag = symbol.find_next("dd")
+    return _find_next_children_until_tag(description_tag, ("dt", "dl"), include_strings=True)
+
+
+def get_signatures(start_signature: PageElement) -> List[str]:
+    """
+    Collect up to `MAX_SIGNATURE_AMOUNT` signatures from dt tags around the `start_signature` dt tag.
+
+    First the signatures under the `start_signature` are included;
+    if fewer than 2 are found, tags above the start signature are added to the result if any are present.
+    """
+    signatures = []
+    for element in (
+        *reversed(_find_previous_siblings_until_tag(start_signature, ("dd",), limit=2)),
+        start_signature,
+        *_find_next_siblings_until_tag(start_signature, ("dd",), limit=2),
+    )[-MAX_SIGNATURE_AMOUNT:]:
+        signature = _UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element.text)
+
+        if signature:
+            signatures.append(signature)
+
+    return signatures
diff --git a/bot/exts/info/doc/_inventory_parser.py b/bot/exts/info/doc/_inventory_parser.py
new file mode 100644
index 000000000..80d5841a0
--- /dev/null
+++ b/bot/exts/info/doc/_inventory_parser.py
@@ -0,0 +1,126 @@
+import logging
+import re
+import zlib
+from collections import defaultdict
+from typing import AsyncIterator, DefaultDict, List, Optional, Tuple
+
+import aiohttp
+
+import bot
+
+log = logging.getLogger(__name__)
+
+FAILED_REQUEST_ATTEMPTS = 3
+_V2_LINE_RE = re.compile(r'(?x)(.+?)\s+(\S*:\S*)\s+(-?\d+)\s+?(\S*)\s+(.*)')
+
+InventoryDict = DefaultDict[str, List[Tuple[str, str]]]
+
+
+class ZlibStreamReader:
+    """Class used for decoding zlib data of a stream line by line."""
+
+    READ_CHUNK_SIZE = 16 * 1024
+
+    def __init__(self, stream: aiohttp.StreamReader) -> None:
+        self.stream = stream
+
+    async def _read_compressed_chunks(self) -> AsyncIterator[bytes]:
+        """Read zlib data in `READ_CHUNK_SIZE` sized chunks and decompress."""
+        decompressor = zlib.decompressobj()
+        async for chunk in self.stream.iter_chunked(self.READ_CHUNK_SIZE):
+            yield decompressor.decompress(chunk)
+
+        yield decompressor.flush()
+
+    async def __aiter__(self) -> AsyncIterator[str]:
+        """Yield lines of decompressed text."""
+        buf = b''
+        async for chunk in self._read_compressed_chunks():
+            buf += chunk
+            pos = buf.find(b'\n')
+            while pos != -1:
+                yield buf[:pos].decode()
+                buf = buf[pos + 1:]
+                pos = buf.find(b'\n')
+
+
+async def _load_v1(stream: aiohttp.StreamReader) -> InventoryDict:
+    invdata = defaultdict(list)
+
+    async for line in stream:
+        name, type_, location = line.decode().rstrip().split(maxsplit=2)
+        # version 1 did not add anchors to the location
+        if type_ == "mod":
+            type_ = "py:module"
+            location += "#module-" + name
+        else:
+            type_ = "py:" + type_
+            location += "#" + name
+        invdata[type_].append((name, location))
+    return invdata
+
+
+async def _load_v2(stream: aiohttp.StreamReader) -> InventoryDict:
+    invdata = defaultdict(list)
+
+    async for line in ZlibStreamReader(stream):
+        m = _V2_LINE_RE.match(line.rstrip())
+        name, type_, _prio, location, _dispname = m.groups()  # ignore the parsed items we don't need
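For reference, the version-2 objects.inv payload is line-oriented: `_V2_LINE_RE` splits each entry into name, role, priority, location and display name, and a trailing `$` in the location abbreviates the symbol's own anchor, which the `endswith("$")` branch just below expands. A minimal sketch with a made-up inventory entry:

```python
import re

# Same pattern as _V2_LINE_RE above; the entry itself is made up for illustration.
_V2_LINE_RE = re.compile(r'(?x)(.+?)\s+(\S*:\S*)\s+(-?\d+)\s+?(\S*)\s+(.*)')

line = "asyncio.Queue py:class 1 library/asyncio-queue.html#$ -"
name, type_, _prio, location, _dispname = _V2_LINE_RE.match(line).groups()

if location.endswith("$"):  # "$" is shorthand for the symbol name itself
    location = location[:-1] + name

print(name, type_, location)
# asyncio.Queue py:class library/asyncio-queue.html#asyncio.Queue
```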
+        if location.endswith("$"):
+            location = location[:-1] + name
+
+        invdata[type_].append((name, location))
+    return invdata
+
+
+async def _fetch_inventory(url: str) -> InventoryDict:
+    """Fetch, parse and return an intersphinx inventory file from a URL."""
+    timeout = aiohttp.ClientTimeout(sock_connect=5, sock_read=5)
+    async with bot.instance.http_session.get(url, timeout=timeout, raise_for_status=True) as response:
+        stream = response.content
+
+        inventory_header = (await stream.readline()).decode().rstrip()
+        inventory_version = int(inventory_header[-1:])
+        await stream.readline()  # skip project name
+        await stream.readline()  # skip project version
+
+        if inventory_version == 1:
+            return await _load_v1(stream)
+
+        elif inventory_version == 2:
+            if b"zlib" not in await stream.readline():
+                raise ValueError(f"Invalid inventory file at url {url}.")
+            return await _load_v2(stream)
+
+        raise ValueError(f"Invalid inventory file at url {url}.")
+
+
+async def fetch_inventory(url: str) -> Optional[InventoryDict]:
+    """
+    Get an inventory dict from `url`, retrying `FAILED_REQUEST_ATTEMPTS` times on errors.
+
+    `url` should point at a valid sphinx objects.inv inventory file, which will be parsed into the
+    inventory dict in the format of {"domain:role": [("symbol_name", "relative_url_to_symbol"), ...], ...}
+    """
+    for attempt in range(1, FAILED_REQUEST_ATTEMPTS+1):
+        try:
+            inventory = await _fetch_inventory(url)
+        except aiohttp.ClientConnectorError:
+            log.warning(
+                f"Failed to connect to inventory url at {url}; "
+                f"trying again ({attempt}/{FAILED_REQUEST_ATTEMPTS})."
+            )
+        except aiohttp.ClientError:
+            log.error(
                f"Failed to get inventory from {url}; "
+                f"trying again ({attempt}/{FAILED_REQUEST_ATTEMPTS})."
+            )
+        except Exception:
+            log.exception(
+                f"An unexpected error has occurred during fetching of {url}; "
+                f"trying again ({attempt}/{FAILED_REQUEST_ATTEMPTS})."
+            )
+        else:
+            return inventory
+
+    return None
diff --git a/bot/exts/info/doc/_markdown.py b/bot/exts/info/doc/_markdown.py
new file mode 100644
index 000000000..1b7d8232b
--- /dev/null
+++ b/bot/exts/info/doc/_markdown.py
@@ -0,0 +1,58 @@
+from urllib.parse import urljoin
+
+from bs4.element import PageElement
+from markdownify import MarkdownConverter
+
+
+class DocMarkdownConverter(MarkdownConverter):
+    """Subclass markdownify's MarkdownConverter to provide custom conversion methods."""
+
+    def __init__(self, *, page_url: str, **options):
+        super().__init__(**options)
+        self.page_url = page_url
+
+    def convert_li(self, el: PageElement, text: str, convert_as_inline: bool) -> str:
+        """Fix markdownify's erroneous indexing in ol tags."""
+        parent = el.parent
+        if parent is not None and parent.name == "ol":
+            li_tags = parent.find_all("li")
+            bullet = f"{li_tags.index(el)+1}."
+        else:
+            depth = -1
+            while el:
+                if el.name == "ul":
+                    depth += 1
+                el = el.parent
+            bullets = self.options["bullets"]
+            bullet = bullets[depth % len(bullets)]
+        return f"{bullet} {text}\n"
+
+    def convert_hn(self, _n: int, el: PageElement, text: str, convert_as_inline: bool) -> str:
+        """Convert h tags to bold text with ** instead of adding #."""
+        if convert_as_inline:
+            return text
+        return f"**{text}**\n\n"
+
+    def convert_code(self, el: PageElement, text: str, convert_as_inline: bool) -> str:
+        """Undo `markdownify`'s underscore escaping."""
+        return f"`{text}`".replace("\\", "")
+
+    def convert_pre(self, el: PageElement, text: str, convert_as_inline: bool) -> str:
+        """Wrap any codeblocks in `py` for syntax highlighting."""
+        code = "".join(el.strings)
+        return f"```py\n{code}```"
+
+    def convert_a(self, el: PageElement, text: str, convert_as_inline: bool) -> str:
+        """Resolve relative URLs to `self.page_url`."""
+        el["href"] = urljoin(self.page_url, el["href"])
+        return super().convert_a(el, text, convert_as_inline)
+
+    def convert_p(self, el: PageElement, text: str, convert_as_inline: bool) -> str:
+        """Include only one newline instead of two when the parent is a li tag."""
+        if convert_as_inline:
+            return text
+
+        parent = el.parent
+        if parent is not None and parent.name == "li":
+            return f"{text}\n"
+        return super().convert_p(el, text, convert_as_inline)
diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py
new file mode 100644
index 000000000..bf840b96f
--- /dev/null
+++ b/bot/exts/info/doc/_parsing.py
@@ -0,0 +1,256 @@
+from __future__ import annotations
+
+import logging
+import re
+import string
+import textwrap
+from collections import namedtuple
+from typing import Collection, Iterable, Iterator, List, Optional, TYPE_CHECKING, Union
+
+from bs4 import BeautifulSoup
+from bs4.element import NavigableString, Tag
+
+from bot.utils.helpers import find_nth_occurrence
+from . import MAX_SIGNATURE_AMOUNT
+from ._html import get_dd_description, get_general_description, get_signatures
+from ._markdown import DocMarkdownConverter
+if TYPE_CHECKING:
+    from ._cog import DocItem
+
+log = logging.getLogger(__name__)
+
+_WHITESPACE_AFTER_NEWLINES_RE = re.compile(r"(?<=\n\n)(\s+)")
+_PARAMETERS_RE = re.compile(r"\((.+)\)")
+
+_NO_SIGNATURE_GROUPS = {
+    "attribute",
+    "envvar",
+    "setting",
+    "templatefilter",
+    "templatetag",
+    "term",
+}
+_EMBED_CODE_BLOCK_LINE_LENGTH = 61
+# MAX_SIGNATURE_AMOUNT code block wrapped lines with py syntax highlight
+_MAX_SIGNATURES_LENGTH = (_EMBED_CODE_BLOCK_LINE_LENGTH + 8) * MAX_SIGNATURE_AMOUNT
+# Maximum embed description length - signatures on top
+_MAX_DESCRIPTION_LENGTH = 2048 - _MAX_SIGNATURES_LENGTH
+_TRUNCATE_STRIP_CHARACTERS = "!?:;." + string.whitespace
+
+BracketPair = namedtuple("BracketPair", ["opening_bracket", "closing_bracket"])
+_BRACKET_PAIRS = {
+    "{": BracketPair("{", "}"),
+    "(": BracketPair("(", ")"),
+    "[": BracketPair("[", "]"),
+    "<": BracketPair("<", ">"),
+}
+
+
+def _split_parameters(parameters_string: str) -> Iterator[str]:
+    """
+    Split parameters of a signature into individual parameter strings on commas.
+
+    Long string literals are not accounted for.
+    """
+    last_split = 0
+    depth = 0
+    current_search: Optional[BracketPair] = None
+
+    enumerated_string = enumerate(parameters_string)
+    for index, character in enumerated_string:
+        if character in {"'", '"'}:
+            # Skip everything inside of strings, regardless of the depth.
+            quote_character = character  # The closing quote must equal the opening quote.
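For context, `MarkdownConverter` dispatches each tag to a `convert_<tag>` hook, which is what the overrides above rely on. A toy sketch of the same pattern, assuming a markdownify release whose hooks take the `convert_as_inline` flag (as the code above does); the subclass name and HTML snippet are made up:

```python
from markdownify import MarkdownConverter


class InlineCodeConverter(MarkdownConverter):
    """Hypothetical subclass mirroring the override style used above."""

    def convert_code(self, el, text, convert_as_inline):
        # Same idea as DocMarkdownConverter.convert_code: wrap the text in
        # backticks and strip any escaping backslashes markdownify may insert.
        return f"`{text}`".replace("\\", "")


html = "<p>Call <code>my_func</code> to start.</p>"
print(InlineCodeConverter().convert(html))
```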
+            preceding_backslashes = 0
+            for _, character in enumerated_string:
+                # If an odd number of backslashes precedes the quote, it was escaped.
+                if character == quote_character and not preceding_backslashes % 2:
+                    break
+                if character == "\\":
+                    preceding_backslashes += 1
+                else:
+                    preceding_backslashes = 0
+
+        elif current_search is None:
+            if (current_search := _BRACKET_PAIRS.get(character)) is not None:
+                depth = 1
+            elif character == ",":
+                yield parameters_string[last_split:index]
+                last_split = index + 1
+
+        else:
+            if character == current_search.opening_bracket:
+                depth += 1
+
+            elif character == current_search.closing_bracket:
+                depth -= 1
+                if depth == 0:
+                    current_search = None
+
+    yield parameters_string[last_split:]
+
+
+def _truncate_signatures(signatures: Collection[str]) -> Union[List[str], Collection[str]]:
+    """
+    Truncate passed signatures to not exceed `_MAX_SIGNATURES_LENGTH`.
+
+    If the signatures need to be truncated, parameters are collapsed until they fit within the limit.
+    Individual signatures can consist of max 1, 2, ..., `MAX_SIGNATURE_AMOUNT` lines of text,
+    inversely proportional to the number of signatures.
+    A maximum of `MAX_SIGNATURE_AMOUNT` signatures is assumed to be passed.
+    """
+    if sum(len(signature) for signature in signatures) <= _MAX_SIGNATURES_LENGTH:
+        # Total length of signatures is under the length limit; no truncation needed.
+        return signatures
+
+    max_signature_length = _EMBED_CODE_BLOCK_LINE_LENGTH * (MAX_SIGNATURE_AMOUNT + 1 - len(signatures))
+    formatted_signatures = []
+    for signature in signatures:
+        signature = signature.strip()
+        if len(signature) > max_signature_length:
+            if (parameters_match := _PARAMETERS_RE.search(signature)) is None:
+                # The signature has no parameters or the regex failed; perform a simple truncation of the text.
+                formatted_signatures.append(textwrap.shorten(signature, max_signature_length, placeholder="..."))
+                continue
+
+            truncated_signature = []
+            parameters_string = parameters_match[1]
+            running_length = len(signature) - len(parameters_string)
+            for parameter in _split_parameters(parameters_string):
+                # Check if including this parameter would still be within the maximum length.
+                if (len(parameter) + running_length) <= max_signature_length - 5:  # account for comma and placeholder
+                    truncated_signature.append(parameter)
+                    running_length += len(parameter) + 1
+                else:
+                    # There's no more room for this parameter. Truncate the parameter list and put it in the signature.
+                    truncated_signature.append(" ...")
+                    formatted_signatures.append(signature.replace(parameters_string, ",".join(truncated_signature)))
+                    break
+        else:
+            # The current signature is under the length limit; no truncation needed.
+            formatted_signatures.append(signature)
+
+    return formatted_signatures
+
+
+def _get_truncated_description(
+    elements: Iterable[Union[Tag, NavigableString]],
+    markdown_converter: DocMarkdownConverter,
+    max_length: int,
+    max_lines: int,
+) -> str:
+    """
+    Truncate the Markdown from `elements` to be at most `max_length` characters when rendered or `max_lines` newlines.
+
+    `max_length` limits the length of the rendered characters in the string,
+    with the real string length limited to `_MAX_DESCRIPTION_LENGTH` to accommodate Discord length limits.
+    """
+    result = ""
+    markdown_element_ends = []  # Stores indices into `result` which point to the end boundary of each Markdown element.
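The bracket tracking above is what lets signature truncation cut parameter lists safely; a plain `str.split(",")` would break inside nested brackets. A simplified standalone illustration (it skips the string-literal handling the real function has):

```python
# Simplified version of the depth tracking above; unlike _split_parameters
# it ignores string literals, but the core idea is the same.
def split_params(signature_params: str) -> list:
    parts, last_split, depth = [], 0, 0
    for index, character in enumerate(signature_params):
        if character in "([{<":
            depth += 1
        elif character in ")]}>":
            depth -= 1
        elif character == "," and depth == 0:
            parts.append(signature_params[last_split:index])
            last_split = index + 1
    parts.append(signature_params[last_split:])
    return parts


sig = "self, mapping: Dict[str, int], default=(1, 2)"
print(sig.split(","))     # naive split breaks inside Dict[...] and the tuple
print(split_params(sig))  # ['self', ' mapping: Dict[str, int]', ' default=(1, 2)']
```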
+    rendered_length = 0
+
+    tag_end_index = 0
+    for element in elements:
+        is_tag = isinstance(element, Tag)
+        element_length = len(element.text) if is_tag else len(element)
+
+        if rendered_length + element_length < max_length:
+            if is_tag:
+                element_markdown = markdown_converter.process_tag(element, convert_as_inline=False)
+            else:
+                element_markdown = markdown_converter.process_text(element)
+
+            rendered_length += element_length
+            tag_end_index += len(element_markdown)
+
+            if not element_markdown.isspace():
+                markdown_element_ends.append(tag_end_index)
+            result += element_markdown
+        else:
+            break
+
+    if not markdown_element_ends:
+        return ""
+
+    # Determine the "hard" truncation index. Account for the ellipsis placeholder for the max length.
+    newline_truncate_index = find_nth_occurrence(result, "\n", max_lines)
+    if newline_truncate_index is not None and newline_truncate_index < _MAX_DESCRIPTION_LENGTH - 3:
+        # Truncate based on maximum lines if there are more than the maximum number of lines.
+        truncate_index = newline_truncate_index
+    else:
+        # There are fewer than the maximum number of lines; truncate based on the max char length.
+        truncate_index = _MAX_DESCRIPTION_LENGTH - 3
+
+    # Nothing needs to be truncated if the last element ends before the truncation index.
+    if truncate_index >= markdown_element_ends[-1]:
+        return result
+
+    # Determine the actual truncation index.
+    possible_truncation_indices = [cut for cut in markdown_element_ends if cut < truncate_index]
+    if not possible_truncation_indices:
+        # In case there is no Markdown element ending before the truncation index, try to find a good cutoff point.
+        force_truncated = result[:truncate_index]
+        # If there is an incomplete codeblock, cut it out.
+        if force_truncated.count("```") % 2:
+            force_truncated = force_truncated[:force_truncated.rfind("```")]
+        # Search for substrings to truncate at, with decreasing desirability.
+        for string_ in ("\n\n", "\n", ". ", ", ", ",", " "):
+            cutoff = force_truncated.rfind(string_)
+
+            if cutoff != -1:
+                truncated_result = force_truncated[:cutoff]
+                break
+        else:
+            truncated_result = force_truncated
+
+    else:
+        # Truncate at the last Markdown element that comes before the truncation index.
+        markdown_truncate_index = possible_truncation_indices[-1]
+        truncated_result = result[:markdown_truncate_index]
+
+    return truncated_result.strip(_TRUNCATE_STRIP_CHARACTERS) + "..."
+
+
+def _create_markdown(signatures: Optional[List[str]], description: Iterable[Tag], url: str) -> str:
+    """
+    Create a Markdown string with the signatures at the top, and the converted HTML description below them.
+
+    The signatures are wrapped in python codeblocks, separated from the description by a newline.
+    The resulting Markdown string has a maximum of 750 rendered characters for the description,
+    with signatures at the start.
+    """
+    description = _get_truncated_description(
+        description,
+        markdown_converter=DocMarkdownConverter(bullets="•", page_url=url),
+        max_length=750,
+        max_lines=13
+    )
+    description = _WHITESPACE_AFTER_NEWLINES_RE.sub("", description)
+    if signatures is not None:
+        signature = "".join(f"```py\n{signature}```" for signature in _truncate_signatures(signatures))
+        return f"{signature}\n{description}"
+    else:
+        return description
+
+
+def get_symbol_markdown(soup: BeautifulSoup, symbol_data: DocItem) -> Optional[str]:
+    """
+    Return the parsed Markdown of the passed item using the passed-in soup, truncated to fit within a Discord message.
+ + The method of parsing and what information gets included depends on the symbol's group. + """ + symbol_heading = soup.find(id=symbol_data.symbol_id) + if symbol_heading is None: + return None + signature = None + # Modules, doc pages and labels don't point to description list tags but to tags like divs, + # no special parsing can be done so we only try to include what's under them. + if symbol_heading.name != "dt": + description = get_general_description(symbol_heading) + + elif symbol_data.group in _NO_SIGNATURE_GROUPS: + description = get_dd_description(symbol_heading) + + else: + signature = get_signatures(symbol_heading) + description = get_dd_description(symbol_heading) + return _create_markdown(signature, description, symbol_data.url).replace("¶", "").strip() diff --git a/bot/exts/info/doc/_redis_cache.py b/bot/exts/info/doc/_redis_cache.py new file mode 100644 index 000000000..ad764816f --- /dev/null +++ b/bot/exts/info/doc/_redis_cache.py @@ -0,0 +1,70 @@ +from __future__ import annotations + +import datetime +from typing import Optional, TYPE_CHECKING + +from async_rediscache.types.base import RedisObject, namespace_lock +if TYPE_CHECKING: + from ._cog import DocItem + +WEEK_SECONDS = datetime.timedelta(weeks=1).total_seconds() + + +class DocRedisCache(RedisObject): + """Interface for redis functionality needed by the Doc cog.""" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._set_expires = set() + + @namespace_lock + async def set(self, item: DocItem, value: str) -> None: + """ + Set the Markdown `value` for the symbol `item`. + + All keys from a single page are stored together, expiring a week after the first set. + """ + url_key = remove_suffix(item.relative_url_path, ".html") + redis_key = f"{self.namespace}:{item.package}:{url_key}" + needs_expire = False + + with await self._get_pool_connection() as connection: + if redis_key not in self._set_expires: + # An expire is only set if the key didn't exist before. + # If this is the first time setting values for this key check if it exists and add it to + # `_set_expires` to prevent redundant checks for subsequent uses with items from the same page. 
+ self._set_expires.add(redis_key) + needs_expire = not await connection.exists(redis_key) + + await connection.hset(redis_key, item.symbol_id, value) + if needs_expire: + await connection.expire(redis_key, WEEK_SECONDS) + + @namespace_lock + async def get(self, item: DocItem) -> Optional[str]: + """Return the Markdown content of the symbol `item` if it exists.""" + url_key = remove_suffix(item.relative_url_path, ".html") + + with await self._get_pool_connection() as connection: + return await connection.hget(f"{self.namespace}:{item.package}:{url_key}", item.symbol_id, encoding="utf8") + + @namespace_lock + async def delete(self, package: str) -> bool: + """Remove all values for `package`; return True if at least one key was deleted, False otherwise.""" + with await self._get_pool_connection() as connection: + package_keys = [ + package_key async for package_key in connection.iscan(match=f"{self.namespace}:{package}:*") + ] + if package_keys: + await connection.delete(*package_keys) + return True + return False + + +def remove_suffix(string: str, suffix: str) -> str: + """Remove `suffix` from end of `string`.""" + # TODO replace usages with str.removesuffix on 3.9 + if string.endswith(suffix): + return string[:-len(suffix)] + else: + return string diff --git a/bot/exts/info/information.py b/bot/exts/info/information.py index c54ca96bf..834fee1b4 100644 --- a/bot/exts/info/information.py +++ b/bot/exts/info/information.py @@ -6,7 +6,7 @@ from collections import defaultdict from typing import Any, DefaultDict, Dict, Mapping, Optional, Tuple, Union import fuzzywuzzy -from discord import Colour, Embed, Guild, Message, Role +from discord import AllowedMentions, Colour, Embed, Guild, Message, Role from discord.ext.commands import BucketType, Cog, Context, Paginator, command, group, has_any_role from bot import constants @@ -230,6 +230,11 @@ class Information(Cog): if on_server and user.nick: name = f"{user.nick} ({name})" + if user.public_flags.verified_bot: + name += f" {constants.Emojis.verified_bot}" + elif user.bot: + name += f" {constants.Emojis.bot}" + badges = [] for badge, is_set in user.public_flags: @@ -284,7 +289,7 @@ class Information(Cog): embed.add_field(name=field_name, value=field_content, inline=False) embed.set_thumbnail(url=user.avatar_url_as(static_format="png")) - embed.colour = user.top_role.colour if roles else Colour.blurple() + embed.colour = user.colour if user.colour != Colour.default() else Colour.blurple() return embed @@ -447,9 +452,9 @@ class Information(Cog): def add_content(title: str, content: str) -> None: paginator.add_line(f'== {title} ==\n') - # replace backticks as it breaks out of code blocks. Spaces seemed to be the most reasonable solution. - # we hope it's not close to 2000 - paginator.add_line(content.replace('```', '`` `')) + # Replace backticks as it breaks out of code blocks. + # An invisible character seemed to be the most reasonable solution. We hope it's not close to 2000. 
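On the `remove_suffix` helper above: it exists because `str.removesuffix` only landed in Python 3.9, and `str.rstrip` is not a substitute, since it strips a character set rather than a literal suffix. A quick demonstration:

```python
def remove_suffix(string: str, suffix: str) -> str:
    """Remove `suffix` from the end of `string` (same logic as the helper above)."""
    if string.endswith(suffix):
        return string[:-len(suffix)]
    return string


print("page.html".rstrip(".html"))           # 'page' -- correct, but only by luck
print("shtml.html".rstrip(".html"))          # 's' -- rstrip strips characters, not a suffix
print(remove_suffix("shtml.html", ".html"))  # 'shtml' -- the intended behaviour
```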
+ paginator.add_line(content.replace('`', '`\u200b')) paginator.close_page() if message.content: @@ -468,7 +473,7 @@ class Information(Cog): add_content(title, transformer(item)) for page in paginator.pages: - await ctx.send(page) + await ctx.send(page, allowed_mentions=AllowedMentions.none()) @raw.command() async def json(self, ctx: Context, message: Message) -> None: diff --git a/bot/exts/info/reddit.py b/bot/exts/info/reddit.py deleted file mode 100644 index 6790be762..000000000 --- a/bot/exts/info/reddit.py +++ /dev/null @@ -1,308 +0,0 @@ -import asyncio -import logging -import random -import textwrap -from collections import namedtuple -from datetime import datetime, timedelta -from typing import List - -from aiohttp import BasicAuth, ClientError -from discord import Colour, Embed, TextChannel -from discord.ext.commands import Cog, Context, group, has_any_role -from discord.ext.tasks import loop -from discord.utils import escape_markdown, sleep_until - -from bot.bot import Bot -from bot.constants import Channels, ERROR_REPLIES, Emojis, Reddit as RedditConfig, STAFF_ROLES, Webhooks -from bot.converters import Subreddit -from bot.pagination import LinePaginator -from bot.utils.messages import sub_clyde - -log = logging.getLogger(__name__) - -AccessToken = namedtuple("AccessToken", ["token", "expires_at"]) - - -class Reddit(Cog): - """Track subreddit posts and show detailed statistics about them.""" - - HEADERS = {"User-Agent": "python3:python-discord/bot:1.0.0 (by /u/PythonDiscord)"} - URL = "https://www.reddit.com" - OAUTH_URL = "https://oauth.reddit.com" - MAX_RETRIES = 3 - - def __init__(self, bot: Bot): - self.bot = bot - - self.webhook = None - self.access_token = None - self.client_auth = BasicAuth(RedditConfig.client_id, RedditConfig.secret) - - bot.loop.create_task(self.init_reddit_ready()) - self.auto_poster_loop.start() - - def cog_unload(self) -> None: - """Stop the loop task and revoke the access token when the cog is unloaded.""" - self.auto_poster_loop.cancel() - if self.access_token and self.access_token.expires_at > datetime.utcnow(): - self.bot.closing_tasks.append(asyncio.create_task(self.revoke_access_token())) - - async def init_reddit_ready(self) -> None: - """Sets the reddit webhook when the cog is loaded.""" - await self.bot.wait_until_guild_available() - if not self.webhook: - self.webhook = await self.bot.fetch_webhook(Webhooks.reddit) - - @property - def channel(self) -> TextChannel: - """Get the #reddit channel object from the bot's cache.""" - return self.bot.get_channel(Channels.reddit) - - async def get_access_token(self) -> None: - """ - Get a Reddit API OAuth2 access token and assign it to self.access_token. - - A token is valid for 1 hour. There will be MAX_RETRIES to get a token, after which the cog - will be unloaded and a ClientError raised if retrieval was still unsuccessful. - """ - for i in range(1, self.MAX_RETRIES + 1): - response = await self.bot.http_session.post( - url=f"{self.URL}/api/v1/access_token", - headers=self.HEADERS, - auth=self.client_auth, - data={ - "grant_type": "client_credentials", - "duration": "temporary" - } - ) - - if response.status == 200 and response.content_type == "application/json": - content = await response.json() - expiration = int(content["expires_in"]) - 60 # Subtract 1 minute for leeway. 
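The zero-width-space escaping introduced in the information.py hunk above means relayed content can no longer terminate the surrounding code block while still looking unchanged to the reader. A small sketch:

```python
fence = "`" * 3  # three backticks, built indirectly to keep this example fence-safe
content = f"end of block: {fence} @everyone"
escaped = content.replace("`", "`\u200b")

# Every backtick now carries a trailing zero-width space, so no three of
# them can line up to close the code block in the paginated output.
assert fence not in escaped
print(escaped)
```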
- self.access_token = AccessToken( - token=content["access_token"], - expires_at=datetime.utcnow() + timedelta(seconds=expiration) - ) - - log.debug(f"New token acquired; expires on UTC {self.access_token.expires_at}") - return - else: - log.debug( - f"Failed to get an access token: " - f"status {response.status} & content type {response.content_type}; " - f"retrying ({i}/{self.MAX_RETRIES})" - ) - - await asyncio.sleep(3) - - self.bot.remove_cog(self.qualified_name) - raise ClientError("Authentication with the Reddit API failed. Unloading the cog.") - - async def revoke_access_token(self) -> None: - """ - Revoke the OAuth2 access token for the Reddit API. - - For security reasons, it's good practice to revoke the token when it's no longer being used. - """ - response = await self.bot.http_session.post( - url=f"{self.URL}/api/v1/revoke_token", - headers=self.HEADERS, - auth=self.client_auth, - data={ - "token": self.access_token.token, - "token_type_hint": "access_token" - } - ) - - if response.status == 204 and response.content_type == "application/json": - self.access_token = None - else: - log.warning(f"Unable to revoke access token: status {response.status}.") - - async def fetch_posts(self, route: str, *, amount: int = 25, params: dict = None) -> List[dict]: - """A helper method to fetch a certain amount of Reddit posts at a given route.""" - # Reddit's JSON responses only provide 25 posts at most. - if not 25 >= amount > 0: - raise ValueError("Invalid amount of subreddit posts requested.") - - # Renew the token if necessary. - if not self.access_token or self.access_token.expires_at < datetime.utcnow(): - await self.get_access_token() - - url = f"{self.OAUTH_URL}/{route}" - for _ in range(self.MAX_RETRIES): - response = await self.bot.http_session.get( - url=url, - headers={**self.HEADERS, "Authorization": f"bearer {self.access_token.token}"}, - params=params - ) - if response.status == 200 and response.content_type == 'application/json': - # Got appropriate response - process and return. - content = await response.json() - posts = content["data"]["children"] - - filtered_posts = [post for post in posts if not post["data"]["over_18"]] - - return filtered_posts[:amount] - - await asyncio.sleep(3) - - log.debug(f"Invalid response from: {url} - status code {response.status}, mimetype {response.content_type}") - return list() # Failed to get appropriate response within allowed number of retries. - - async def get_top_posts(self, subreddit: Subreddit, time: str = "all", amount: int = 5) -> Embed: - """ - Get the top amount of posts for a given subreddit within a specified timeframe. - - A time of "all" will get posts from all time, "day" will get top daily posts and "week" will get the top - weekly posts. - - The amount should be between 0 and 25 as Reddit's JSON requests only provide 25 posts at most. - """ - embed = Embed(description="") - - posts = await self.fetch_posts( - route=f"{subreddit}/top", - amount=amount, - params={"t": time} - ) - if not posts: - embed.title = random.choice(ERROR_REPLIES) - embed.colour = Colour.red() - embed.description = ( - "Sorry! We couldn't find any SFW posts from that subreddit. " - "If this problem persists, please let us know." 
- ) - - return embed - - for post in posts: - data = post["data"] - - text = data["selftext"] - if text: - text = textwrap.shorten(text, width=128, placeholder="...") - text += "\n" # Add newline to separate embed info - - ups = data["ups"] - comments = data["num_comments"] - author = data["author"] - - title = textwrap.shorten(data["title"], width=64, placeholder="...") - # Normal brackets interfere with Markdown. - title = escape_markdown(title).replace("[", "⦋").replace("]", "⦌") - link = self.URL + data["permalink"] - - embed.description += ( - f"**[{title}]({link})**\n" - f"{text}" - f"{Emojis.upvotes} {ups} {Emojis.comments} {comments} {Emojis.user} {author}\n\n" - ) - - embed.colour = Colour.blurple() - return embed - - @loop() - async def auto_poster_loop(self) -> None: - """Post the top 5 posts daily, and the top 5 posts weekly.""" - # once d.py get support for `time` parameter in loop decorator, - # this can be removed and the loop can use the `time=datetime.time.min` parameter - now = datetime.utcnow() - tomorrow = now + timedelta(days=1) - midnight_tomorrow = tomorrow.replace(hour=0, minute=0, second=0) - - await sleep_until(midnight_tomorrow) - - await self.bot.wait_until_guild_available() - if not self.webhook: - await self.bot.fetch_webhook(Webhooks.reddit) - - if datetime.utcnow().weekday() == 0: - await self.top_weekly_posts() - # if it's a monday send the top weekly posts - - for subreddit in RedditConfig.subreddits: - top_posts = await self.get_top_posts(subreddit=subreddit, time="day") - username = sub_clyde(f"{subreddit} Top Daily Posts") - message = await self.webhook.send(username=username, embed=top_posts, wait=True) - - if message.channel.is_news(): - await message.publish() - - async def top_weekly_posts(self) -> None: - """Post a summary of the top posts.""" - for subreddit in RedditConfig.subreddits: - # Send and pin the new weekly posts. - top_posts = await self.get_top_posts(subreddit=subreddit, time="week") - username = sub_clyde(f"{subreddit} Top Weekly Posts") - message = await self.webhook.send(wait=True, username=username, embed=top_posts) - - if subreddit.lower() == "r/python": - if not self.channel: - log.warning("Failed to get #reddit channel to remove pins in the weekly loop.") - return - - # Remove the oldest pins so that only 12 remain at most. 
- pins = await self.channel.pins() - - while len(pins) >= 12: - await pins[-1].unpin() - del pins[-1] - - await message.pin() - - if message.channel.is_news(): - await message.publish() - - @group(name="reddit", invoke_without_command=True) - async def reddit_group(self, ctx: Context) -> None: - """View the top posts from various subreddits.""" - await ctx.send_help(ctx.command) - - @reddit_group.command(name="top") - async def top_command(self, ctx: Context, subreddit: Subreddit = "r/Python") -> None: - """Send the top posts of all time from a given subreddit.""" - async with ctx.typing(): - embed = await self.get_top_posts(subreddit=subreddit, time="all") - - await ctx.send(content=f"Here are the top {subreddit} posts of all time!", embed=embed) - - @reddit_group.command(name="daily") - async def daily_command(self, ctx: Context, subreddit: Subreddit = "r/Python") -> None: - """Send the top posts of today from a given subreddit.""" - async with ctx.typing(): - embed = await self.get_top_posts(subreddit=subreddit, time="day") - - await ctx.send(content=f"Here are today's top {subreddit} posts!", embed=embed) - - @reddit_group.command(name="weekly") - async def weekly_command(self, ctx: Context, subreddit: Subreddit = "r/Python") -> None: - """Send the top posts of this week from a given subreddit.""" - async with ctx.typing(): - embed = await self.get_top_posts(subreddit=subreddit, time="week") - - await ctx.send(content=f"Here are this week's top {subreddit} posts!", embed=embed) - - @has_any_role(*STAFF_ROLES) - @reddit_group.command(name="subreddits", aliases=("subs",)) - async def subreddits_command(self, ctx: Context) -> None: - """Send a paginated embed of all the subreddits we're relaying.""" - embed = Embed() - embed.title = "Relayed subreddits." - embed.colour = Colour.blurple() - - await LinePaginator.paginate( - RedditConfig.subreddits, - ctx, embed, - footer_text="Use the reddit commands along with these to view their posts.", - empty=False, - max_lines=15 - ) - - -def setup(bot: Bot) -> None: - """Load the Reddit cog.""" - if not RedditConfig.secret or not RedditConfig.client_id: - log.error("Credentials not provided, cog not loaded.") - return - bot.add_cog(Reddit(bot)) diff --git a/bot/exts/info/source.py b/bot/exts/info/source.py index 49e74f204..ef07c77a1 100644 --- a/bot/exts/info/source.py +++ b/bot/exts/info/source.py @@ -14,9 +14,10 @@ SourceType = Union[commands.HelpCommand, commands.Command, commands.Cog, str, co class SourceConverter(commands.Converter): """Convert an argument into a help command, tag, command, or cog.""" - async def convert(self, ctx: commands.Context, argument: str) -> SourceType: + @staticmethod + async def convert(ctx: commands.Context, argument: str) -> SourceType: """Convert argument into source object.""" - if argument.lower().startswith("help"): + if argument.lower() == "help": return ctx.bot.help_command cog = ctx.bot.get_cog(argument) @@ -68,7 +69,8 @@ class BotSource(commands.Cog): Raise BadArgument if `source_item` is a dynamically-created object (e.g. via internal eval). 
""" if isinstance(source_item, commands.Command): - src = source_item.callback.__code__ + source_item = inspect.unwrap(source_item.callback) + src = source_item.__code__ filename = src.co_filename elif isinstance(source_item, str): tags_cog = self.bot.get_cog("Tags") diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index bab95405c..dfb1afd19 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -181,7 +181,7 @@ class Defcon(Cog): role = ctx.guild.default_role permissions = role.permissions - permissions.update(send_messages=False, add_reactions=False) + permissions.update(send_messages=False, add_reactions=False, connect=False) await role.edit(reason="DEFCON shutdown", permissions=permissions) await ctx.send(f"{Action.SERVER_SHUTDOWN.value.emoji} Server shut down.") @@ -192,7 +192,7 @@ class Defcon(Cog): role = ctx.guild.default_role permissions = role.permissions - permissions.update(send_messages=True, add_reactions=True) + permissions.update(send_messages=True, add_reactions=True, connect=True) await role.edit(reason="DEFCON unshutdown", permissions=permissions) await ctx.send(f"{Action.SERVER_OPEN.value.emoji} Server reopened.") diff --git a/bot/exts/moderation/dm_relay.py b/bot/exts/moderation/dm_relay.py index 6d081741c..1d2206e27 100644 --- a/bot/exts/moderation/dm_relay.py +++ b/bot/exts/moderation/dm_relay.py @@ -1,132 +1,72 @@ import logging -from typing import Optional import discord -from async_rediscache import RedisCache -from discord import Color -from discord.ext import commands -from discord.ext.commands import Cog +from discord.ext.commands import Cog, Context, command, has_any_role -from bot import constants from bot.bot import Bot -from bot.converters import UserMentionOrID -from bot.utils.checks import in_whitelist_check -from bot.utils.messages import send_attachments -from bot.utils.webhooks import send_webhook +from bot.constants import Emojis, MODERATION_ROLES +from bot.utils.services import send_to_paste_service log = logging.getLogger(__name__) class DMRelay(Cog): - """Relay direct messages to and from the bot.""" - - # RedisCache[str, t.Union[discord.User.id, discord.Member.id]] - dm_cache = RedisCache() + """Inspect messages sent to the bot.""" def __init__(self, bot: Bot): self.bot = bot - self.webhook_id = constants.Webhooks.dm_log - self.webhook = None - self.bot.loop.create_task(self.fetch_webhook()) - - @commands.command(aliases=("reply",)) - async def send_dm(self, ctx: commands.Context, member: Optional[UserMentionOrID], *, message: str) -> None: - """ - Allows you to send a DM to a user from the bot. - - If `member` is not provided, it will send to the last user who DM'd the bot. - - This feature should be used extremely sparingly. Use ModMail if you need to have a serious - conversation with a user. This is just for responding to extraordinary DMs, having a little - fun with users, and telling people they are DMing the wrong bot. - - NOTE: This feature will be removed if it is overused. 
- """ - if not member: - user_id = await self.dm_cache.get("last_user") - member = ctx.guild.get_member(user_id) if user_id else None - - # If we still don't have a Member at this point, give up - if not member: - log.debug("This bot has never gotten a DM, or the RedisCache has been cleared.") - await ctx.message.add_reaction("❌") + + @command(aliases=("relay", "dr")) + async def dmrelay(self, ctx: Context, user: discord.User, limit: int = 100) -> None: + """Relays the direct message history between the bot and given user.""" + log.trace(f"Relaying DMs with {user.name} ({user.id})") + + if user.bot: + await ctx.send(f"{Emojis.cross_mark} No direct message history with bots.") return - if member.id == self.bot.user.id: - log.debug("Not sending message to bot user") - return await ctx.send("🚫 I can't send messages to myself!") - - try: - await member.send(message) - except discord.errors.Forbidden: - log.debug("User has disabled DMs.") - await ctx.message.add_reaction("❌") - else: - await ctx.message.add_reaction("✅") - self.bot.stats.incr("dm_relay.dm_sent") - - async def fetch_webhook(self) -> None: - """Fetches the webhook object, so we can post to it.""" - await self.bot.wait_until_guild_available() - - try: - self.webhook = await self.bot.fetch_webhook(self.webhook_id) - except discord.HTTPException: - log.exception(f"Failed to fetch webhook with id `{self.webhook_id}`") - - @Cog.listener() - async def on_message(self, message: discord.Message) -> None: - """Relays the message's content and attachments to the dm_log channel.""" - # Only relay DMs from humans - if message.author.bot or message.guild or self.webhook is None: + output = "" + async for msg in user.history(limit=limit, oldest_first=True): + created_at = msg.created_at.strftime(r"%Y-%m-%d %H:%M") + + # Metadata (author, created_at, id) + output += f"{msg.author} [{created_at}] ({msg.id}): " + + # Content + if msg.content: + output += msg.content + "\n" + + # Embeds + if (embeds := len(msg.embeds)) > 0: + output += f"<{embeds} embed{'s' if embeds > 1 else ''}>\n" + + # Attachments + attachments = "\n".join(a.url for a in msg.attachments) + if attachments: + output += attachments + "\n" + + if not output: + await ctx.send(f"{Emojis.cross_mark} No direct message history with {user.mention}.") + return + + metadata = ( + f"User: {user} ({user.id})\n" + f"Channel ID: {user.dm_channel.id}\n\n" + ) + + paste_link = await send_to_paste_service(metadata + output, extension="txt") + + if paste_link is None: + await ctx.send(f"{Emojis.cross_mark} Failed to upload output to hastebin.") return - if message.clean_content: - await send_webhook( - webhook=self.webhook, - content=message.clean_content, - username=f"{message.author.display_name} ({message.author.id})", - avatar_url=message.author.avatar_url - ) - await self.dm_cache.set("last_user", message.author.id) - self.bot.stats.incr("dm_relay.dm_received") - - # Handle any attachments - if message.attachments: - try: - await send_attachments( - message, - self.webhook, - username=f"{message.author.display_name} ({message.author.id})" - ) - except (discord.errors.Forbidden, discord.errors.NotFound): - e = discord.Embed( - description=":x: **This message contained an attachment, but it could not be retrieved**", - color=Color.red() - ) - await send_webhook( - webhook=self.webhook, - embed=e, - username=f"{message.author.display_name} ({message.author.id})", - avatar_url=message.author.avatar_url - ) - except discord.HTTPException: - log.exception("Failed to send an attachment to the 
webhook") - - async def cog_check(self, ctx: commands.Context) -> bool: + await ctx.send(paste_link) + + async def cog_check(self, ctx: Context) -> bool: """Only allow moderators to invoke the commands in this cog.""" - checks = [ - await commands.has_any_role(*constants.MODERATION_ROLES).predicate(ctx), - in_whitelist_check( - ctx, - channels=[constants.Channels.dm_log], - redirect=None, - fail_silently=True, - ) - ] - return all(checks) + return await has_any_role(*MODERATION_ROLES).predicate(ctx) def setup(bot: Bot) -> None: - """Load the DMRelay cog.""" + """Load the DMRelay cog.""" bot.add_cog(DMRelay(bot)) diff --git a/bot/exts/moderation/infraction/infractions.py b/bot/exts/moderation/infraction/infractions.py index d89e80acc..f19323c7c 100644 --- a/bot/exts/moderation/infraction/infractions.py +++ b/bot/exts/moderation/infraction/infractions.py @@ -54,8 +54,12 @@ class Infractions(InfractionScheduler, commands.Cog): # region: Permanent infractions @command() - async def warn(self, ctx: Context, user: Member, *, reason: t.Optional[str] = None) -> None: + async def warn(self, ctx: Context, user: FetchedMember, *, reason: t.Optional[str] = None) -> None: """Warn a user for the given reason.""" + if not isinstance(user, Member): + await ctx.send(":x: The user doesn't appear to be on the server.") + return + infraction = await _utils.post_infraction(ctx, user, "warning", reason, active=False) if infraction is None: return @@ -63,36 +67,61 @@ class Infractions(InfractionScheduler, commands.Cog): await self.apply_infraction(ctx, infraction, user) @command() - async def kick(self, ctx: Context, user: Member, *, reason: t.Optional[str] = None) -> None: + async def kick(self, ctx: Context, user: FetchedMember, *, reason: t.Optional[str] = None) -> None: """Kick a user for the given reason.""" + if not isinstance(user, Member): + await ctx.send(":x: The user doesn't appear to be on the server.") + return + await self.apply_kick(ctx, user, reason) @command() - async def ban(self, ctx: Context, user: FetchedMember, *, reason: t.Optional[str] = None) -> None: - """Permanently ban a user for the given reason and stop watching them with Big Brother.""" - await self.apply_ban(ctx, user, reason) + async def ban( + self, + ctx: Context, + user: FetchedMember, + duration: t.Optional[Expiry] = None, + *, + reason: t.Optional[str] = None + ) -> None: + """ + Permanently ban a user for the given reason and stop watching them with Big Brother. + + If duration is specified, it temporarily bans that user for the given duration. + """ + await self.apply_ban(ctx, user, reason, expires_at=duration) @command(aliases=('pban',)) async def purgeban( self, ctx: Context, user: FetchedMember, - purge_days: t.Optional[int] = 1, + duration: t.Optional[Expiry] = None, *, reason: t.Optional[str] = None ) -> None: """ - Same as ban but removes all their messages for the given number of days, default being 1. + Same as ban but removes all their messages of the last 24 hours. - `purge_days` can only be values between 0 and 7. - Anything outside these bounds are automatically adjusted to their respective limits. + If duration is specified, it temporarily bans that user for the given duration. 
""" - await self.apply_ban(ctx, user, reason, max(min(purge_days, 7), 0)) + await self.apply_ban(ctx, user, reason, 1, expires_at=duration) @command(aliases=('vban',)) - async def voiceban(self, ctx: Context, user: FetchedMember, *, reason: t.Optional[str]) -> None: - """Permanently ban user from using voice channels.""" - await self.apply_voice_ban(ctx, user, reason) + async def voiceban( + self, + ctx: Context, + user: FetchedMember, + duration: t.Optional[Expiry] = None, + *, + reason: t.Optional[str] + ) -> None: + """ + Permanently ban user from using voice channels. + + If duration is specified, it temporarily voice bans that user for the given duration. + """ + await self.apply_voice_ban(ctx, user, reason, expires_at=duration) # endregion # region: Temporary infractions @@ -100,7 +129,7 @@ class Infractions(InfractionScheduler, commands.Cog): @command(aliases=["mute"]) async def tempmute( self, ctx: Context, - user: Member, + user: FetchedMember, duration: t.Optional[Expiry] = None, *, reason: t.Optional[str] = None @@ -122,6 +151,10 @@ class Infractions(InfractionScheduler, commands.Cog): If no duration is given, a one hour duration is used by default. """ + if not isinstance(user, Member): + await ctx.send(":x: The user doesn't appear to be on the server.") + return + if duration is None: duration = await Duration().convert(ctx, "1h") await self.apply_mute(ctx, user, reason, expires_at=duration) diff --git a/bot/exts/moderation/infraction/superstarify.py b/bot/exts/moderation/infraction/superstarify.py index 704dddf9c..07e79b9fe 100644 --- a/bot/exts/moderation/infraction/superstarify.py +++ b/bot/exts/moderation/infraction/superstarify.py @@ -11,7 +11,7 @@ from discord.utils import escape_markdown from bot import constants from bot.bot import Bot -from bot.converters import Expiry +from bot.converters import Duration, Expiry from bot.exts.moderation.infraction import _utils from bot.exts.moderation.infraction._scheduler import InfractionScheduler from bot.utils.messages import format_user @@ -19,6 +19,7 @@ from bot.utils.time import format_infraction log = logging.getLogger(__name__) NICKNAME_POLICY_URL = "https://pythondiscord.com/pages/rules/#nickname-policy" +SUPERSTARIFY_DEFAULT_DURATION = "1h" with Path("bot/resources/stars.json").open(encoding="utf-8") as stars_file: STAR_NAMES = json.load(stars_file) @@ -109,7 +110,7 @@ class Superstarify(InfractionScheduler, Cog): self, ctx: Context, member: Member, - duration: Expiry, + duration: t.Optional[Expiry], *, reason: str = '', ) -> None: @@ -134,6 +135,9 @@ class Superstarify(InfractionScheduler, Cog): if await _utils.get_active_infraction(ctx, member, "superstar"): return + # Set to default duration if none was provided. + duration = duration or await Duration().convert(ctx, SUPERSTARIFY_DEFAULT_DURATION) + # Post the infraction to the API old_nick = member.display_name infraction_reason = f'Old nickname: {old_nick}. 
{reason}'
diff --git a/bot/exts/moderation/metabase.py b/bot/exts/moderation/metabase.py
new file mode 100644
index 000000000..db5f04d83
--- /dev/null
+++ b/bot/exts/moderation/metabase.py
@@ -0,0 +1,179 @@
+import csv
+import json
+import logging
+from datetime import timedelta
+from io import StringIO
+from typing import Dict, List, Optional
+
+import arrow
+from aiohttp.client_exceptions import ClientResponseError
+from arrow import Arrow
+from async_rediscache import RedisCache
+from discord.ext.commands import Cog, Context, group, has_any_role
+
+from bot.bot import Bot
+from bot.constants import Metabase as MetabaseConfig, Roles
+from bot.converters import allowed_strings
+from bot.utils import send_to_paste_service
+from bot.utils.channel import is_mod_channel
+from bot.utils.scheduling import Scheduler
+
+log = logging.getLogger(__name__)
+
+BASE_HEADERS = {
+    "Content-Type": "application/json"
+}
+
+
+class Metabase(Cog):
+    """Commands for admins to interact with metabase."""
+
+    session_info = RedisCache()
+
+    def __init__(self, bot: Bot) -> None:
+        self.bot = bot
+        self._session_scheduler = Scheduler(self.__class__.__name__)
+
+        self.session_token: Optional[str] = None  # session_info["session_token"]: str
+        self.session_expiry: Optional[float] = None  # session_info["session_expiry"]: UtcPosixTimestamp
+        self.headers = BASE_HEADERS
+
+        self.exports: Dict[int, List[Dict]] = {}  # Saves the output of each question, so internal eval can access it
+
+        self.init_task = self.bot.loop.create_task(self.init_cog())
+
+    async def init_cog(self) -> None:
+        """Initialise the metabase session."""
+        expiry_time = await self.session_info.get("session_expiry")
+        if expiry_time:
+            expiry_time = Arrow.utcfromtimestamp(expiry_time)
+
+        if expiry_time is None or expiry_time < arrow.utcnow():
+            # Force a refresh and end the task
+            await self.refresh_session()
+            return
+
+        # Cached token is in date, so get it and schedule a refresh for later
+        self.session_token = await self.session_info.get("session_token")
+        self.headers["X-Metabase-Session"] = self.session_token
+
+        self._session_scheduler.schedule_at(expiry_time, 0, self.refresh_session())
+
+    async def refresh_session(self) -> None:
+        """Refresh metabase session token."""
+        data = {
+            "username": MetabaseConfig.username,
+            "password": MetabaseConfig.password
+        }
+        async with self.bot.http_session.post(f"{MetabaseConfig.url}/session", json=data) as resp:
+            json_data = await resp.json()
+            self.session_token = json_data.get("id")
+
+        self.headers["X-Metabase-Session"] = self.session_token
+        log.info("Successfully updated metabase session.")
+
+        # When the creds are going to expire
+        refresh_time = arrow.utcnow() + timedelta(minutes=MetabaseConfig.max_session_age)
+
+        # Cache the session info, since logging in is heavily rate limited
+        await self.session_info.set("session_token", self.session_token)
+        await self.session_info.set("session_expiry", refresh_time.timestamp())
+
+        self._session_scheduler.schedule_at(refresh_time, 0, self.refresh_session())
+
+    @group(name="metabase", invoke_without_command=True)
+    async def metabase_group(self, ctx: Context) -> None:
+        """A group of commands for interacting with metabase."""
+        await ctx.send_help(ctx.command)
+
+    @metabase_group.command(name="extract")
+    async def metabase_extract(
+        self,
+        ctx: Context,
+        question_id: int,
+        extension: allowed_strings("csv", "json") = "csv"
+    ) -> None:
+        """
+        Extract data from a metabase question.
+
+        You can find the question_id at the end of the URL on Metabase, i.e.
/question/{question_id}
+
+        If, instead of an id, there is a long URL, make sure to save the question first.
+
+        If you want to extract data from a question within a dashboard, click the
+        question title at the top left of the chart to go directly to that page.
+
+        Valid extensions are: csv and json.
+        """
+        async with ctx.typing():
+
+            # Make sure we have a session token before running anything
+            await self.init_task
+
+            url = f"{MetabaseConfig.url}/card/{question_id}/query/{extension}"
+            try:
+                async with self.bot.http_session.post(url, headers=self.headers, raise_for_status=True) as resp:
+                    if extension == "csv":
+                        out = await resp.text()
+                        # Save the output for use with internal eval
+                        self.exports[question_id] = list(csv.DictReader(StringIO(out)))
+
+                    elif extension == "json":
+                        out = await resp.json()
+                        # Save the output for use with internal eval
+                        self.exports[question_id] = out
+
+                        # Format it nicely for human eyes
+                        out = json.dumps(out, indent=4, sort_keys=True)
+            except ClientResponseError as e:
+                if e.status == 403:
+                    # User doesn't have access to the given question
+                    log.warning(f"Failed to auth with Metabase for question {question_id}.")
+                    await ctx.send(f":x: {ctx.author.mention} Failed to auth with Metabase for that question.")
+                else:
+                    # User credentials are invalid, or the refresh failed.
+                    # Delete the expiry time, to force a refresh on next startup.
+                    await self.session_info.delete("session_expiry")
+                    log.exception("Session token is invalid or refresh failed.")
+                    await ctx.send(f":x: {ctx.author.mention} Session token is invalid or refresh failed.")
+                return
+
+            paste_link = await send_to_paste_service(out, extension=extension)
+            if paste_link:
+                message = f":+1: {ctx.author.mention} Here's your link: {paste_link}"
+            else:
+                message = f":x: {ctx.author.mention} Link service is unavailable."
+            await ctx.send(
+                f"{message}\nYou can also access this data within internal eval by doing: "
+                f"`bot.get_cog('Metabase').exports[{question_id}]`"
+            )
+
+    # This cannot be static (must have a __func__ attribute).
+    async def cog_check(self, ctx: Context) -> bool:
+        """Only allow admins inside moderator channels to invoke the commands in this cog."""
+        checks = [
+            await has_any_role(Roles.admins).predicate(ctx),
+            is_mod_channel(ctx.channel)
+        ]
+        return all(checks)
+
+    def cog_unload(self) -> None:
+        """
+        Cancel the init task and scheduled tasks.
+
+        It's important to wait for init_task to be cancelled before cancelling scheduled
+        tasks. Otherwise, it's possible for _session_scheduler to schedule another task
+        after cancel_all has finished, despite _init_task.cancel being called first.
+        This is because cancel() on its own doesn't block until the task is cancelled. 
+ """ + self.init_task.cancel() + self.init_task.add_done_callback(lambda _: self._session_scheduler.cancel_all()) + + +def setup(bot: Bot) -> None: + """Load the Metabase cog.""" + if not all((MetabaseConfig.username, MetabaseConfig.password)): + log.error("Credentials not provided, cog not loaded.") + return + bot.add_cog(Metabase(bot)) diff --git a/bot/exts/moderation/modlog.py b/bot/exts/moderation/modlog.py index 2dae9d268..be65ade6e 100644 --- a/bot/exts/moderation/modlog.py +++ b/bot/exts/moderation/modlog.py @@ -12,9 +12,10 @@ from deepdiff import DeepDiff from discord import Colour from discord.abc import GuildChannel from discord.ext.commands import Cog, Context +from discord.utils import escape_markdown from bot.bot import Bot -from bot.constants import Categories, Channels, Colours, Emojis, Event, Guild as GuildConstant, Icons, URLs +from bot.constants import Categories, Channels, Colours, Emojis, Event, Guild as GuildConstant, Icons, Roles, URLs from bot.utils.messages import format_user from bot.utils.time import humanize_delta @@ -115,9 +116,9 @@ class ModLog(Cog, name="ModLog"): if ping_everyone: if content: - content = f"@everyone\n{content}" + content = f"<@&{Roles.moderators}>\n{content}" else: - content = "@everyone" + content = f"<@&{Roles.moderators}>" # Truncate content to 2000 characters and append an ellipsis. if content and len(content) > 2000: @@ -127,8 +128,7 @@ class ModLog(Cog, name="ModLog"): log_message = await channel.send( content=content, embed=embed, - files=files, - allowed_mentions=discord.AllowedMentions(everyone=True) + files=files ) if additional_embeds: @@ -641,9 +641,10 @@ class ModLog(Cog, name="ModLog"): channel = msg_before.channel channel_name = f"{channel.category}/#{channel.name}" if channel.category else f"#{channel.name}" + cleaned_contents = (escape_markdown(msg.clean_content).split() for msg in (msg_before, msg_after)) # Getting the difference per words and group them by type - add, remove, same # Note that this is intended grouping without sorting - diff = difflib.ndiff(msg_before.clean_content.split(), msg_after.clean_content.split()) + diff = difflib.ndiff(*cleaned_contents) diff_groups = tuple( (diff_type, tuple(s[2:] for s in diff_words)) for diff_type, diff_words in itertools.groupby(diff, key=lambda s: s[0]) diff --git a/bot/exts/moderation/modpings.py b/bot/exts/moderation/modpings.py new file mode 100644 index 000000000..1ad5005de --- /dev/null +++ b/bot/exts/moderation/modpings.py @@ -0,0 +1,138 @@ +import datetime +import logging + +from async_rediscache import RedisCache +from dateutil.parser import isoparse +from discord import Embed, Member +from discord.ext.commands import Cog, Context, group, has_any_role + +from bot.bot import Bot +from bot.constants import Colours, Emojis, Guild, Icons, MODERATION_ROLES, Roles +from bot.converters import Expiry +from bot.utils.scheduling import Scheduler + +log = logging.getLogger(__name__) + + +class ModPings(Cog): + """Commands for a moderator to turn moderator pings on and off.""" + + # RedisCache[discord.Member.id, 'Naïve ISO 8601 string'] + # The cache's keys are mods who have pings off. + # The cache's values are the times when the role should be re-applied to them, stored in ISO format. 
+    pings_off_mods = RedisCache()
+
+    def __init__(self, bot: Bot):
+        self.bot = bot
+        self._role_scheduler = Scheduler(self.__class__.__name__)
+
+        self.guild = None
+        self.moderators_role = None
+
+        self.reschedule_task = self.bot.loop.create_task(self.reschedule_roles(), name="mod-pings-reschedule")
+
+    async def reschedule_roles(self) -> None:
+        """Reschedule moderators role re-apply times."""
+        await self.bot.wait_until_guild_available()
+        self.guild = self.bot.get_guild(Guild.id)
+        self.moderators_role = self.guild.get_role(Roles.moderators)
+
+        mod_team = self.guild.get_role(Roles.mod_team)
+        pings_on = self.moderators_role.members
+        pings_off = await self.pings_off_mods.to_dict()
+
+        log.trace("Applying the moderators role to the mod team where necessary.")
+        for mod in mod_team.members:
+            if mod in pings_on:  # Make sure that on-duty mods aren't in the cache.
+                if mod.id in pings_off:
+                    await self.pings_off_mods.delete(mod.id)
+                continue
+
+            # Keep the role off only for those in the cache.
+            if mod.id not in pings_off:
+                await self.reapply_role(mod)
+            else:
+                expiry = isoparse(pings_off[mod.id]).replace(tzinfo=None)
+                self._role_scheduler.schedule_at(expiry, mod.id, self.reapply_role(mod))
+
+    async def reapply_role(self, mod: Member) -> None:
+        """Reapply the moderator's role to the given moderator."""
+        log.trace(f"Re-applying role to mod with ID {mod.id}.")
+        await mod.add_roles(self.moderators_role, reason="Pings off period expired.")
+
+    @group(name='modpings', aliases=('modping',), invoke_without_command=True)
+    @has_any_role(*MODERATION_ROLES)
+    async def modpings_group(self, ctx: Context) -> None:
+        """Allow the removal and re-addition of the pingable moderators role."""
+        await ctx.send_help(ctx.command)
+
+    @modpings_group.command(name='off')
+    @has_any_role(*MODERATION_ROLES)
+    async def off_command(self, ctx: Context, duration: Expiry) -> None:
+        """
+        Temporarily removes the pingable moderators role for a set amount of time.
+
+        A unit of time should be appended to the duration.
+        Units (∗case-sensitive):
+        \u2003`y` - years
+        \u2003`m` - months∗
+        \u2003`w` - weeks
+        \u2003`d` - days
+        \u2003`h` - hours
+        \u2003`M` - minutes∗
+        \u2003`s` - seconds
+
+        Alternatively, an ISO 8601 timestamp can be provided for the duration.
+
+        The duration cannot be longer than 30 days.
+        """
+        duration: datetime.datetime
+        delta = duration - datetime.datetime.utcnow()
+        if delta > datetime.timedelta(days=30):
+            await ctx.send(":x: Cannot remove the role for longer than 30 days.")
+            return
+
+        mod = ctx.author
+
+        until_date = duration.replace(microsecond=0).isoformat()  # Looks noisy with microseconds.
+        await mod.remove_roles(self.moderators_role, reason=f"Turned pings off until {until_date}.")
+
+        await self.pings_off_mods.set(mod.id, duration.isoformat())
+
+        # Allow rescheduling the task without cancelling it separately via the `on` command. 
+ if mod.id in self._role_scheduler: + self._role_scheduler.cancel(mod.id) + self._role_scheduler.schedule_at(duration, mod.id, self.reapply_role(mod)) + + embed = Embed(timestamp=duration, colour=Colours.bright_green) + embed.set_footer(text="Moderators role has been removed until", icon_url=Icons.green_checkmark) + await ctx.send(embed=embed) + + @modpings_group.command(name='on') + @has_any_role(*MODERATION_ROLES) + async def on_command(self, ctx: Context) -> None: + """Re-apply the pingable moderators role.""" + mod = ctx.author + if mod in self.moderators_role.members: + await ctx.send(":question: You already have the role.") + return + + await mod.add_roles(self.moderators_role, reason="Pings off period canceled.") + + await self.pings_off_mods.delete(mod.id) + + # We assume the task exists. Lack of it may indicate a bug. + self._role_scheduler.cancel(mod.id) + + await ctx.send(f"{Emojis.check_mark} Moderators role has been re-applied.") + + def cog_unload(self) -> None: + """Cancel role tasks when the cog unloads.""" + log.trace("Cog unload: canceling role tasks.") + self.reschedule_task.cancel() + self._role_scheduler.cancel_all() + + +def setup(bot: Bot) -> None: + """Load the ModPings cog.""" + bot.add_cog(ModPings(bot)) diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py new file mode 100644 index 000000000..fd856a7f4 --- /dev/null +++ b/bot/exts/moderation/stream.py @@ -0,0 +1,245 @@ +import logging +from datetime import timedelta, timezone +from operator import itemgetter + +import arrow +import discord +from arrow import Arrow +from async_rediscache import RedisCache +from discord.ext import commands + +from bot.bot import Bot +from bot.constants import Colours, Emojis, Guild, MODERATION_ROLES, Roles, STAFF_ROLES, VideoPermission +from bot.converters import Expiry +from bot.pagination import LinePaginator +from bot.utils.scheduling import Scheduler +from bot.utils.time import format_infraction_with_duration + +log = logging.getLogger(__name__) + + +class Stream(commands.Cog): + """Grant and revoke streaming permissions from members.""" + + # Stores tasks to remove streaming permission + # RedisCache[discord.Member.id, UtcPosixTimestamp] + task_cache = RedisCache() + + def __init__(self, bot: Bot): + self.bot = bot + self.scheduler = Scheduler(self.__class__.__name__) + self.reload_task = self.bot.loop.create_task(self._reload_tasks_from_redis()) + + def cog_unload(self) -> None: + """Cancel all scheduled tasks.""" + self.reload_task.cancel() + self.reload_task.add_done_callback(lambda _: self.scheduler.cancel_all()) + + async def _revoke_streaming_permission(self, member: discord.Member) -> None: + """Remove the streaming permission from the given Member.""" + await self.task_cache.delete(member.id) + await member.remove_roles(discord.Object(Roles.video), reason="Streaming access revoked") + + async def _reload_tasks_from_redis(self) -> None: + """Reload outstanding tasks from redis on startup, delete the task if the member has since left the server.""" + await self.bot.wait_until_guild_available() + items = await self.task_cache.items() + for key, value in items: + member = self.bot.get_guild(Guild.id).get_member(key) + + if not member: + # Member isn't found in the cache + try: + member = await self.bot.get_guild(Guild.id).fetch_member(key) + except discord.errors.NotFound: + log.debug( + f"Member {key} left the guild before we could schedule " + "the revoking of their streaming permissions." 
+                    )
+                    await self.task_cache.delete(key)
+                    continue
+                except discord.HTTPException:
+                    log.exception(f"Exception while trying to retrieve member {key} from Discord.")
+                    continue
+
+            revoke_time = Arrow.utcfromtimestamp(value)
+            log.debug(f"Scheduling {member} ({member.id}) to have streaming permission revoked at {revoke_time}")
+            self.scheduler.schedule_at(
+                revoke_time,
+                key,
+                self._revoke_streaming_permission(member)
+            )
+
+    async def _suspend_stream(self, ctx: commands.Context, member: discord.Member) -> None:
+        """Suspend a member's stream."""
+        await self.bot.wait_until_guild_available()
+        voice_state = member.voice
+
+        if not voice_state:
+            return
+
+        # If the user is streaming.
+        if voice_state.self_stream:
+            # End user's stream by moving them to AFK voice channel and back.
+            original_vc = voice_state.channel
+            await member.move_to(ctx.guild.afk_channel)
+            await member.move_to(original_vc)
+
+            # Notify.
+            await ctx.send(f"{member.mention}'s stream has been suspended!")
+            log.debug(f"Successfully suspended stream from {member} ({member.id}).")
+            return
+
+        log.debug(f"No stream found to suspend from {member} ({member.id}).")
+
+    @commands.command(aliases=("streaming",))
+    @commands.has_any_role(*MODERATION_ROLES)
+    async def stream(self, ctx: commands.Context, member: discord.Member, duration: Expiry = None) -> None:
+        """
+        Temporarily grant streaming permissions to a member for a given duration.
+
+        A unit of time should be appended to the duration.
+        Units (∗case-sensitive):
+        \u2003`y` - years
+        \u2003`m` - months∗
+        \u2003`w` - weeks
+        \u2003`d` - days
+        \u2003`h` - hours
+        \u2003`M` - minutes∗
+        \u2003`s` - seconds
+
+        Alternatively, an ISO 8601 timestamp can be provided for the duration.
+        """
+        log.trace(f"Attempting to give temporary streaming permission to {member} ({member.id}).")
+
+        if duration is None:
+            # Use default duration and convert back to datetime as Embed.timestamp doesn't support Arrow
+            duration = arrow.utcnow() + timedelta(minutes=VideoPermission.default_permission_duration)
+            duration = duration.datetime
+        elif duration.tzinfo is None:
+            # Make duration tz-aware.
+            # ISODateTime could already include tzinfo, this check is so it isn't overwritten.
+            duration = duration.replace(tzinfo=timezone.utc)
+
+        # Check if the member already has streaming permission
+        already_allowed = any(Roles.video == role.id for role in member.roles)
+        if already_allowed:
+            await ctx.send(f"{Emojis.cross_mark} {member.mention} can already stream.")
+            log.debug(f"{member} ({member.id}) already has permission to stream.")
+            return
+
+        # Schedule task to remove streaming permission from Member and add it to task cache
+        self.scheduler.schedule_at(duration, member.id, self._revoke_streaming_permission(member))
+        await self.task_cache.set(member.id, duration.timestamp())
+
+        await member.add_roles(discord.Object(Roles.video), reason="Temporary streaming access granted")
+
+        # Use embed as embed timestamps do timezone conversions. 
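+        # (Discord clients render Embed.timestamp in each viewer's local timezone,
+        # so no manual conversion is needed for display.)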
+ embed = discord.Embed( + description=f"{Emojis.check_mark} {member.mention} can now stream.", + colour=Colours.soft_green + ) + embed.set_footer(text=f"Streaming permission has been given to {member} until") + embed.timestamp = duration + + # Mention in content as mentions in embeds don't ping + await ctx.send(content=member.mention, embed=embed) + + # Convert here for nicer logging + revoke_time = format_infraction_with_duration(str(duration)) + log.debug(f"Successfully gave {member} ({member.id}) permission to stream until {revoke_time}.") + + @commands.command(aliases=("pstream",)) + @commands.has_any_role(*MODERATION_ROLES) + async def permanentstream(self, ctx: commands.Context, member: discord.Member) -> None: + """Permanently grants the given member the permission to stream.""" + log.trace(f"Attempting to give permanent streaming permission to {member} ({member.id}).") + + # Check if the member already has streaming permission + if any(Roles.video == role.id for role in member.roles): + if member.id in self.scheduler: + # Member has temp permission, so cancel the task to revoke later and delete from cache + self.scheduler.cancel(member.id) + await self.task_cache.delete(member.id) + + await ctx.send(f"{Emojis.check_mark} Permanently granted {member.mention} the permission to stream.") + log.debug( + f"Successfully upgraded temporary streaming permission for {member} ({member.id}) to permanent." + ) + return + + await ctx.send(f"{Emojis.cross_mark} This member can already stream.") + log.debug(f"{member} ({member.id}) already had permanent streaming permission.") + return + + await member.add_roles(discord.Object(Roles.video), reason="Permanent streaming access granted") + await ctx.send(f"{Emojis.check_mark} Permanently granted {member.mention} the permission to stream.") + log.debug(f"Successfully gave {member} ({member.id}) permanent streaming permission.") + + @commands.command(aliases=("unstream", "rstream")) + @commands.has_any_role(*MODERATION_ROLES) + async def revokestream(self, ctx: commands.Context, member: discord.Member) -> None: + """Revoke the permission to stream from the given member.""" + log.trace(f"Attempting to remove streaming permission from {member} ({member.id}).") + + # Check if the member already has streaming permission + if any(Roles.video == role.id for role in member.roles): + if member.id in self.scheduler: + # Member has temp permission, so cancel the task to revoke later and delete from cache + self.scheduler.cancel(member.id) + await self.task_cache.delete(member.id) + await self._revoke_streaming_permission(member) + + await ctx.send(f"{Emojis.check_mark} Revoked the permission to stream from {member.mention}.") + log.debug(f"Successfully revoked streaming permission from {member} ({member.id}).") + + else: + await ctx.send(f"{Emojis.cross_mark} This member doesn't have video permissions to remove!") + log.debug(f"{member} ({member.id}) didn't have the streaming permission to remove!") + + await self._suspend_stream(ctx, member) + + @commands.command(aliases=('lstream',)) + @commands.has_any_role(*MODERATION_ROLES) + async def liststream(self, ctx: commands.Context) -> None: + """Lists all non-staff users who have permission to stream.""" + non_staff_members_with_stream = [ + member + for member in ctx.guild.get_role(Roles.video).members + if not any(role.id in STAFF_ROLES for role in member.roles) + ] + + # List of tuples (UtcPosixTimestamp, str) + # So that the list can be sorted on the UtcPosixTimestamp before the message is passed to the 
paginator. + streamer_info = [] + for member in non_staff_members_with_stream: + if revoke_time := await self.task_cache.get(member.id): + # Member only has temporary streaming perms + revoke_delta = Arrow.utcfromtimestamp(revoke_time).humanize() + message = f"{member.mention} will have stream permissions revoked {revoke_delta}." + else: + message = f"{member.mention} has permanent streaming permissions." + + # If revoke_time is None use max timestamp to force sort to put them at the end + streamer_info.append( + (revoke_time or Arrow.max.timestamp(), message) + ) + + if streamer_info: + # Sort based on duration left of streaming perms + streamer_info.sort(key=itemgetter(0)) + + # Only output the message in the pagination + lines = [line[1] for line in streamer_info] + embed = discord.Embed( + title=f"Members with streaming permission (`{len(lines)}` total)", + colour=Colours.soft_green + ) + await LinePaginator.paginate(lines, ctx, embed, max_size=400, empty=False) + else: + await ctx.send("No members with stream permissions found.") + + +def setup(bot: Bot) -> None: + """Loads the Stream cog.""" + bot.add_cog(Stream(bot)) diff --git a/bot/exts/recruitment/talentpool/_cog.py b/bot/exts/recruitment/talentpool/_cog.py index 7824d22d7..a5b6de00f 100644 --- a/bot/exts/recruitment/talentpool/_cog.py +++ b/bot/exts/recruitment/talentpool/_cog.py @@ -1,14 +1,16 @@ import logging import textwrap from collections import ChainMap, defaultdict +from io import StringIO from typing import Union -from discord import Color, Embed, Member, User +import discord +from discord import Color, Embed, Member, PartialMessage, RawReactionActionEvent, User from discord.ext.commands import Cog, Context, group, has_any_role from bot.api import ResponseCodeError from bot.bot import Bot -from bot.constants import Guild, MODERATION_ROLES, STAFF_ROLES +from bot.constants import Channels, Emojis, Guild, MODERATION_ROLES, STAFF_ROLES from bot.converters import FetchedMember from bot.exts.recruitment.talentpool._review import Reviewer from bot.pagination import LinePaginator @@ -138,14 +140,39 @@ class TalentPool(Cog, name="Talentpool"): """ await ctx.invoke(self.list_command, oldest_first=True, update_cache=update_cache) - @nomination_group.command(name='add', aliases=('w', 'a', 'watch'), root_aliases=("nominate",)) + @nomination_group.command(name='forcewatch', aliases=('fw', 'forceadd', 'fa'), root_aliases=("forcenominate",)) + @has_any_role(*MODERATION_ROLES) + async def force_watch_command(self, ctx: Context, user: FetchedMember, *, reason: str = '') -> None: + """ + Adds the given `user` to the talent pool, from any channel. + + A `reason` for adding the user to the talent pool is optional. + """ + await self._watch_user(ctx, user, reason) + + @nomination_group.command(name='watch', aliases=('w', 'add', 'a'), root_aliases=("nominate",)) @has_any_role(*STAFF_ROLES) async def add_command(self, ctx: Context, user: FetchedMember, *, reason: str = '') -> None: """ - Adds user nomination (or nomination entry) to Talent Pool. + Adds the given `user` to the talent pool. - If user already have nomination, then entry associated with existing nomination will be created. + A `reason` for adding the user to the talent pool is optional. + This command can only be used in the `#nominations` channel. """ + if ctx.channel.id != Channels.nominations: + if any(role.id in MODERATION_ROLES for role in ctx.author.roles): + await ctx.send( + f":x: Nominations should be run in the <#{Channels.nominations}> channel. 
" + "Use `!tp forcewatch` to override this check." + ) + else: + await ctx.send(f":x: Nominations must be run in the <#{Channels.nominations}> channel") + return + + await self._watch_user(ctx, user, reason) + + async def _watch_user(self, ctx: Context, user: FetchedMember, reason: str) -> None: + """Adds the given user to the talent pool.""" if user.bot: await ctx.send(f":x: I'm sorry {ctx.author}, I'm afraid I can't do that. I only watch humans.") return @@ -330,7 +357,18 @@ class TalentPool(Cog, name="Talentpool"): """Mark a user's nomination as reviewed and cancel the review task.""" if not await self.reviewer.mark_reviewed(ctx, user_id): return - await ctx.send(f"✅ The user with ID `{user_id}` was marked as reviewed.") + await ctx.send(f"{Emojis.check_mark} The user with ID `{user_id}` was marked as reviewed.") + + @nomination_group.command(aliases=('gr',)) + @has_any_role(*MODERATION_ROLES) + async def get_review(self, ctx: Context, user_id: int) -> None: + """Get the user's review as a markdown file.""" + review = (await self.reviewer.make_review(user_id))[0] + if review: + file = discord.File(StringIO(review), f"{user_id}_review.md") + await ctx.send(file=file) + else: + await ctx.send(f"There doesn't appear to be an active nomination for {user_id}") @nomination_group.command(aliases=('review',)) @has_any_role(*MODERATION_ROLES) @@ -340,13 +378,33 @@ class TalentPool(Cog, name="Talentpool"): return await self.reviewer.post_review(user_id, update_database=False) - await ctx.message.add_reaction("✅") + await ctx.message.add_reaction(Emojis.check_mark) @Cog.listener() async def on_member_ban(self, guild: Guild, user: Union[User, Member]) -> None: """Remove `user` from the talent pool after they are banned.""" await self.unwatch(user.id, "User was banned.") + @Cog.listener() + async def on_raw_reaction_add(self, payload: RawReactionActionEvent) -> None: + """ + Watch for reactions in the #nomination-voting channel to automate it. + + Adding a ticket emoji will unpin the message. + Adding an incident reaction will archive the message. 
+ """ + if payload.channel_id != Channels.nomination_voting: + return + + message: PartialMessage = self.bot.get_channel(payload.channel_id).get_partial_message(payload.message_id) + emoji = str(payload.emoji) + + if emoji == "\N{TICKET}": + await message.unpin(reason="Admin task created.") + elif emoji in {Emojis.incident_actioned, Emojis.incident_unactioned}: + log.info(f"Archiving nomination {message.id}") + await self.reviewer.archive_vote(message, emoji == Emojis.incident_actioned) + async def unwatch(self, user_id: int, reason: str) -> bool: """End the active nomination of a user with the given reason and return True on success.""" active_nomination = await self.bot.api_client.get( diff --git a/bot/exts/recruitment/talentpool/_review.py b/bot/exts/recruitment/talentpool/_review.py index bc2878451..585640699 100644 --- a/bot/exts/recruitment/talentpool/_review.py +++ b/bot/exts/recruitment/talentpool/_review.py @@ -1,6 +1,8 @@ import asyncio +import contextlib import logging import random +import re import textwrap import typing from collections import Counter @@ -9,12 +11,13 @@ from typing import List, Optional, Union from dateutil.parser import isoparse from dateutil.relativedelta import relativedelta -from discord import Emoji, Member, Message, TextChannel +from discord import Embed, Emoji, Member, Message, NoMoreItems, PartialMessage, TextChannel from discord.ext.commands import Context from bot.api import ResponseCodeError from bot.bot import Bot -from bot.constants import Channels, Guild, Roles +from bot.constants import Channels, Colours, Emojis, Guild, Roles +from bot.utils.messages import count_unique_users_reaction, pin_no_system_message from bot.utils.scheduling import Scheduler from bot.utils.time import get_time_delta, humanize_delta, time_since @@ -29,6 +32,11 @@ MAX_DAYS_IN_POOL = 30 # Maximum amount of characters allowed in a message MAX_MESSAGE_SIZE = 2000 +# Regex finding the user ID of a user mention +MENTION_RE = re.compile(r"<@!?(\d+?)>") +# Regex matching role pings +ROLE_MENTION_RE = re.compile(r"<@&\d+>") + class Reviewer: """Schedules, formats, and publishes reviews of helper nominees.""" @@ -66,31 +74,54 @@ class Reviewer: self._review_scheduler.schedule_at(review_at, user_id, self.post_review(user_id, update_database=True)) async def post_review(self, user_id: int, update_database: bool) -> None: - """Format a generic review of a user and post it to the nomination voting channel.""" + """Format the review of a user and post it to the nomination voting channel.""" + review, seen_emoji = await self.make_review(user_id) + if not review: + return + + guild = self.bot.get_guild(Guild.id) + channel = guild.get_channel(Channels.nomination_voting) + log.trace(f"Posting the review of {user_id}") + messages = await self._bulk_send(channel, review) + + await pin_no_system_message(messages[0]) + last_message = messages[-1] + if seen_emoji: + for reaction in (seen_emoji, "\N{THUMBS UP SIGN}", "\N{THUMBS DOWN SIGN}"): + await last_message.add_reaction(reaction) + + if update_database: + nomination = self._pool.cache[user_id] + await self.bot.api_client.patch(f"{self._pool.api_endpoint}/{nomination['id']}", json={"reviewed": True}) + + async def make_review(self, user_id: int) -> typing.Tuple[str, Optional[Emoji]]: + """Format a generic review of a user and return it with the seen emoji.""" + log.trace(f"Formatting the review of {user_id}") + + # Since `watched_users` is a defaultdict, we should take care + # not to accidentally insert the IDs of users that have no + # 
active nomination by using the `watched_users.get(user_id)`
+        # instead of `watched_users[user_id]`.
         nomination = self._pool.cache[user_id]
         if not nomination:
             log.trace(f"There doesn't appear to be an active nomination for {user_id}")
-            return
+            return "", None
 
         guild = self.bot.get_guild(Guild.id)
-        channel = guild.get_channel(Channels.nomination_voting)
         member = guild.get_member(user_id)
 
-        if update_database:
-            await self.bot.api_client.patch(f"bot/nominations/{nomination['id']}", json={"reviewed": True})
-
         if not member:
-            await channel.send(
-                f"I tried to review the user with ID `{user_id}`, but they don't appear to be on the server 😔"
-            )
-            return
+            return (
+                f"I tried to review the user with ID `{user_id}`, but they don't appear to be on the server :pensive:"
+            ), None
 
-        opening = f"<@&{Roles.moderators}> <@&{Roles.admins}>\n{member.mention} ({member}) for Helper!"
+        opening = f"<@&{Roles.mod_team}> <@&{Roles.admins}>\n{member.mention} ({member}) for Helper!"
 
         current_nominations = "\n\n".join(
-            f"**<@{entry['actor']}>:** {entry['reason'] or '*no reason given*'}" for entry in nomination['entries']
+            f"**<@{entry['actor']}>:** {entry['reason'] or '*no reason given*'}"
+            for entry in nomination['entries'][::-1]
         )
         current_nominations = f"**Nominated by:**\n{current_nominations}"
@@ -100,14 +131,84 @@ class Reviewer:
         vote_request = (
             "*Refer to their nomination and infraction histories for further details*.\n"
             f"*Please react {seen_emoji} if you've seen this post."
-            " Then react 👍 for approval, or 👎 for disapproval*."
+            " Then react :+1: for approval, or :-1: for disapproval*."
         )
 
-        review = "\n\n".join(part for part in (opening, current_nominations, review_body, vote_request))
+        review = "\n\n".join((opening, current_nominations, review_body, vote_request))
+        return review, seen_emoji
+
+    async def archive_vote(self, message: PartialMessage, passed: bool) -> None:
+        """Archive this vote to #nomination-archive."""
+        message = await message.fetch()
+
+        # We consider the first message in the nomination to contain the two role pings
+        messages = [message]
+        if not len(ROLE_MENTION_RE.findall(message.content)) >= 2:
+            with contextlib.suppress(NoMoreItems):
+                async for new_message in message.channel.history(before=message.created_at):
+                    messages.append(new_message)
+
+                    if len(ROLE_MENTION_RE.findall(new_message.content)) >= 2:
+                        break
+
+        log.debug(f"Found {len(messages)} messages: {', '.join(str(m.id) for m in messages)}")
+
+        parts = []
+        for message_ in messages[::-1]:
+            parts.append(message_.content)
+            parts.append("\n" if message_.content.endswith(".") else " ")
+        content = "".join(parts)
+
+        # We assume that the first user mentioned is the user that we are voting on
+        user_id = int(MENTION_RE.search(content).group(1))
+
+        # Get reaction counts
+        seen = await count_unique_users_reaction(
+            messages[0],
+            lambda r: "ducky" in str(r) or str(r) == "\N{EYES}",
+            count_bots=False
+        )
+        upvotes = await count_unique_users_reaction(
+            messages[0],
+            lambda r: str(r) == "\N{THUMBS UP SIGN}",
+            count_bots=False
+        )
+        downvotes = await count_unique_users_reaction(
+            messages[0],
+            lambda r: str(r) == "\N{THUMBS DOWN SIGN}",
+            count_bots=False
+        )
+
+        # Remove the first and last paragraphs
+        stripped_content = content.split("\n\n", maxsplit=1)[1].rsplit("\n\n", maxsplit=1)[0]
+
+        result = f"**Passed** {Emojis.incident_actioned}" if passed else f"**Rejected** {Emojis.incident_unactioned}"
+        colour = Colours.soft_green if passed else Colours.soft_red
+        timestamp = 
datetime.utcnow().strftime("%Y/%m/%d") + + embed_content = ( + f"{result} on {timestamp}\n" + f"With {seen} {Emojis.ducky_dave} {upvotes} :+1: {downvotes} :-1:\n\n" + f"{stripped_content}" + ) + + if user := await self.bot.fetch_user(user_id): + embed_title = f"Vote for {user} (`{user.id}`)" + else: + embed_title = f"Vote for `{user_id}`" + + channel = self.bot.get_channel(Channels.nomination_archive) + for number, part in enumerate( + textwrap.wrap(embed_content, width=MAX_MESSAGE_SIZE, replace_whitespace=False, placeholder="") + ): + await channel.send(embed=Embed( + title=embed_title if number == 0 else None, + description="[...] " + part if number != 0 else part, + colour=colour + )) - message = (await self._bulk_send(channel, review))[-1] - for reaction in (seen_emoji, "👍", "👎"): - await message.add_reaction(reaction) + for message_ in messages: + await message_.delete() async def _construct_review_body(self, member: Member) -> str: """Formats the body of the nomination, with details of activity, infractions, and previous nominations.""" @@ -256,10 +357,10 @@ class Reviewer: @staticmethod def _random_ducky(guild: Guild) -> Union[Emoji, str]: - """Picks a random ducky emoji to be used to mark the vote as seen. If no duckies found returns 👀.""" + """Picks a random ducky emoji to be used to mark the vote as seen. If no duckies found returns :eyes:.""" duckies = [emoji for emoji in guild.emojis if emoji.name.startswith("ducky")] if not duckies: - return "👀" + return ":eyes:" return random.choice(duckies) @staticmethod @@ -289,12 +390,12 @@ class Reviewer: await self._pool.refresh_cache() if user_id not in self._pool.cache: log.trace(f"Can't find a nominated user with id {user_id}") - await ctx.send(f"❌ Can't find a currently nominated user with id `{user_id}`") + await ctx.send(f":x: Can't find a currently nominated user with id `{user_id}`") return False nomination = self._pool.cache[user_id] if nomination["reviewed"]: - await ctx.send("❌ This nomination was already reviewed, but here's a cookie 🍪") + await ctx.send(":x: This nomination was already reviewed, but here's a cookie :cookie:") return False await self.bot.api_client.patch(f"bot/nominations/{nomination['id']}", json={"reviewed": True}) diff --git a/bot/exts/utils/clean.py b/bot/exts/utils/clean.py index 8acaf9131..cb662e852 100644 --- a/bot/exts/utils/clean.py +++ b/bot/exts/utils/clean.py @@ -3,7 +3,7 @@ import random import re from typing import Iterable, Optional -from discord import Colour, Embed, Message, TextChannel, User +from discord import Colour, Embed, Message, TextChannel, User, errors from discord.ext import commands from discord.ext.commands import Cog, Context, group, has_any_role @@ -115,7 +115,11 @@ class Clean(Cog): # Delete the invocation first self.mod_log.ignore(Event.message_delete, ctx.message.id) - await ctx.message.delete() + try: + await ctx.message.delete() + except errors.NotFound: + # Invocation message has already been deleted + log.info("Tried to delete invocation message, but it was already deleted.") messages = [] message_ids = [] diff --git a/bot/exts/utils/extensions.py b/bot/exts/utils/extensions.py index 418db0150..8a1ed98f4 100644 --- a/bot/exts/utils/extensions.py +++ b/bot/exts/utils/extensions.py @@ -109,7 +109,7 @@ class Extensions(commands.Cog): blacklisted = "\n".join(UNLOAD_BLACKLIST & set(extensions)) if blacklisted: - msg = f":x: The following extension(s) may not be unloaded:```{blacklisted}```" + msg = f":x: The following extension(s) may not be unloaded:```\n{blacklisted}```" 
else: if "*" in extensions or "**" in extensions: extensions = set(self.bot.extensions.keys()) - UNLOAD_BLACKLIST @@ -212,7 +212,7 @@ class Extensions(commands.Cog): if failures: failures = "\n".join(f"{ext}\n {err}" for ext, err in failures.items()) - msg += f"\nFailures:```{failures}```" + msg += f"\nFailures:```\n{failures}```" log.debug(f"Batch {verb}ed extensions.") @@ -239,7 +239,7 @@ class Extensions(commands.Cog): log.exception(f"Extension '{ext}' failed to {verb}.") error_msg = f"{e.__class__.__name__}: {e}" - msg = f":x: Failed to {verb} extension `{ext}`:\n```{error_msg}```" + msg = f":x: Failed to {verb} extension `{ext}`:\n```\n{error_msg}```" else: msg = f":ok_hand: Extension successfully {verb}ed: `{ext}`." log.debug(msg[10:]) diff --git a/bot/exts/utils/ping.py b/bot/exts/utils/ping.py index 572fc934b..750ff46d2 100644 --- a/bot/exts/utils/ping.py +++ b/bot/exts/utils/ping.py @@ -1,4 +1,5 @@ import socket +import urllib.parse from datetime import datetime import aioping @@ -34,11 +35,19 @@ class Latency(commands.Cog): # datetime.datetime objects do not have the "milliseconds" attribute. # It must be converted to seconds before converting to milliseconds. bot_ping = (datetime.utcnow() - ctx.message.created_at).total_seconds() * 1000 - bot_ping = f"{bot_ping:.{ROUND_LATENCY}f} ms" + if bot_ping <= 0: + bot_ping = "Your clock is out of sync, could not calculate ping." + else: + bot_ping = f"{bot_ping:.{ROUND_LATENCY}f} ms" try: - delay = await aioping.ping(URLs.site, family=socket.AddressFamily.AF_INET) * 1000 - site_ping = f"{delay:.{ROUND_LATENCY}f} ms" + url = urllib.parse.urlparse(URLs.site_schema + URLs.site).hostname + try: + delay = await aioping.ping(url, family=socket.AddressFamily.AF_INET) * 1000 + site_ping = f"{delay:.{ROUND_LATENCY}f} ms" + except OSError: + # Some machines do not have permission to run ping + site_ping = "Permission denied, could not ping." except TimeoutError: site_ping = f"{Emojis.cross_mark} Connection timed out." 
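The parsing step above matters because `aioping.ping` wants a bare hostname rather than a full URL, which is why the cog joins `URLs.site_schema` and `URLs.site` before reading `.hostname`. A minimal sketch of that behaviour (the URL below is illustrative, not the real config value):

```py
import urllib.parse

# With a scheme present, urlparse fills in .hostname as expected.
print(urllib.parse.urlparse("https://pythondiscord.com/some/path").hostname)
# -> pythondiscord.com

# Without a scheme, the whole string is treated as a path and .hostname is
# None, which would make the subsequent ping fail; hence the schema prefix.
print(urllib.parse.urlparse("pythondiscord.com").hostname)
# -> None
```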
diff --git a/bot/exts/utils/reminders.py b/bot/exts/utils/reminders.py index 3113a1149..6c21920a1 100644 --- a/bot/exts/utils/reminders.py +++ b/bot/exts/utils/reminders.py @@ -90,15 +90,18 @@ class Reminders(Cog): delivery_dt: t.Optional[datetime], ) -> None: """Send an embed confirming the reminder change was made successfully.""" - embed = discord.Embed() - embed.colour = discord.Colour.green() - embed.title = random.choice(POSITIVE_REPLIES) - embed.description = on_success + embed = discord.Embed( + description=on_success, + colour=discord.Colour.green(), + title=random.choice(POSITIVE_REPLIES) + ) footer_str = f"ID: {reminder_id}" + if delivery_dt: # Reminder deletion will have a `None` `delivery_dt` - footer_str = f"{footer_str}, Due: {delivery_dt.strftime('%Y-%m-%dT%H:%M:%S')}" + footer_str += ', Due' + embed.timestamp = delivery_dt embed.set_footer(text=footer_str) diff --git a/bot/exts/utils/snekbox.py b/bot/exts/utils/snekbox.py index 9f480c067..b1f1ba6a8 100644 --- a/bot/exts/utils/snekbox.py +++ b/bot/exts/utils/snekbox.py @@ -13,7 +13,7 @@ from discord.ext.commands import Cog, Context, command, guild_only from bot.bot import Bot from bot.constants import Categories, Channels, Roles, URLs -from bot.decorators import in_whitelist +from bot.decorators import redirect_output from bot.utils import send_to_paste_service from bot.utils.messages import wait_for_deletion @@ -38,9 +38,9 @@ RAW_CODE_REGEX = re.compile( MAX_PASTE_LEN = 10000 -# `!eval` command whitelists -EVAL_CHANNELS = (Channels.bot_commands, Channels.esoteric) -EVAL_CATEGORIES = (Categories.help_available, Categories.help_in_use, Categories.voice) +# `!eval` command whitelists and blacklists. +NO_EVAL_CHANNELS = (Channels.python_general,) +NO_EVAL_CATEGORIES = () EVAL_ROLES = (Roles.helpers, Roles.moderators, Roles.admins, Roles.owners, Roles.python_community, Roles.partners) SIGKILL = 9 @@ -280,7 +280,13 @@ class Snekbox(Cog): @command(name="eval", aliases=("e",)) @guild_only() - @in_whitelist(channels=EVAL_CHANNELS, categories=EVAL_CATEGORIES, roles=EVAL_ROLES) + @redirect_output( + destination_channel=Channels.bot_commands, + bypass_roles=EVAL_ROLES, + categories=NO_EVAL_CATEGORIES, + channels=NO_EVAL_CHANNELS, + ping_user=False + ) async def eval_command(self, ctx: Context, *, code: str = None) -> None: """ Run Python code and get the results. 
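The snekbox change above flips `!eval` gating from a channel/category whitelist to a blacklist with a role bypass, redirecting output to `Channels.bot_commands` otherwise. A rough sketch of that decision logic (a simplified stand-in, not the real `redirect_output` decorator from `bot.decorators`):

```py
from typing import Container, Iterable

def may_eval_in_place(
    channel_id: int,
    category_id: int,
    author_role_ids: Iterable[int],
    no_eval_channels: Container[int],
    no_eval_categories: Container[int],
    bypass_roles: Container[int],
) -> bool:
    """Return True to run !eval in place, False to redirect its output."""
    # Privileged roles bypass the blacklist entirely.
    if any(role_id in bypass_roles for role_id in author_role_ids):
        return True
    # Everyone else may eval anywhere that isn't explicitly blacklisted.
    return channel_id not in no_eval_channels and category_id not in no_eval_categories

# Illustrative IDs only: role 99 is a bypass role, channel 1 is blacklisted.
print(may_eval_in_place(1, 2, [99], {1}, set(), {99}))  # True, role bypass
print(may_eval_in_place(1, 2, [42], {1}, set(), {99}))  # False, blacklisted channel
```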
diff --git a/bot/exts/utils/utils.py b/bot/exts/utils/utils.py index a5d6f69b9..4c39a7c2a 100644 --- a/bot/exts/utils/utils.py +++ b/bot/exts/utils/utils.py @@ -9,7 +9,7 @@ from discord.ext.commands import BadArgument, Cog, Context, clean_content, comma from discord.utils import snowflake_time from bot.bot import Bot -from bot.constants import Channels, MODERATION_ROLES, STAFF_ROLES +from bot.constants import Channels, MODERATION_ROLES, Roles, STAFF_ROLES from bot.converters import Snowflake from bot.decorators import in_whitelist from bot.pagination import LinePaginator @@ -109,7 +109,7 @@ class Utils(Cog): # handle if it's an index int if isinstance(search_value, int): upper_bound = len(zen_lines) - 1 - lower_bound = -1 * upper_bound + lower_bound = -1 * len(zen_lines) if not (lower_bound <= search_value <= upper_bound): raise BadArgument(f"Please provide an index between {lower_bound} and {upper_bound}.") @@ -162,20 +162,30 @@ class Utils(Cog): if len(snowflakes) > 1 and await has_no_roles_check(ctx, *STAFF_ROLES): raise BadArgument("Cannot process more than one snowflake in one invocation.") + if not snowflakes: + raise BadArgument("At least one snowflake must be provided.") + + embed = Embed(colour=Colour.blue()) + embed.set_author( + name=f"Snowflake{'s'[:len(snowflakes)^1]}", # Deals with pluralisation + icon_url="https://github.com/twitter/twemoji/blob/master/assets/72x72/2744.png?raw=true" + ) + + lines = [] for snowflake in snowflakes: created_at = snowflake_time(snowflake) - embed = Embed( - description=f"**Created at {created_at}** ({time_since(created_at, max_units=3)}).", - colour=Colour.blue() - ) - embed.set_author( - name=f"Snowflake: {snowflake}", - icon_url="https://github.com/twitter/twemoji/blob/master/assets/72x72/2744.png?raw=true" - ) - await ctx.send(embed=embed) + lines.append(f"**{snowflake}**\nCreated at {created_at} ({time_since(created_at, max_units=3)}).") + + await LinePaginator.paginate( + lines, + ctx=ctx, + embed=embed, + max_lines=5, + max_size=1000 + ) @command(aliases=("poll",)) - @has_any_role(*MODERATION_ROLES) + @has_any_role(*MODERATION_ROLES, Roles.project_leads, Roles.domain_leads) async def vote(self, ctx: Context, title: clean_content(fix_channel_mentions=True), *options: str) -> None: """ Build a quick voting poll with matching reactions with the provided options. 
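The `'s'[:len(snowflakes)^1]` expression in the snowflake embed above is a terse pluralisation trick; a standalone demonstration of why it works:

```py
def plural_suffix(count: int) -> str:
    # count ^ 1 flips the lowest bit, so a count of exactly 1 becomes 0 and
    # "s"[:0] is empty; every other count gives a non-zero stop, keeping "s".
    return "s"[:count ^ 1]

print(f"Snowflake{plural_suffix(1)}")  # Snowflake
print(f"Snowflake{plural_suffix(2)}")  # Snowflakes
print(f"Snowflake{plural_suffix(5)}")  # Snowflakes
```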
diff --git a/bot/log.py b/bot/log.py index e92233a33..4e20c005e 100644 --- a/bot/log.py +++ b/bot/log.py @@ -20,7 +20,6 @@ def setup() -> None: logging.addLevelName(TRACE_LEVEL, "TRACE") Logger.trace = _monkeypatch_trace - log_level = TRACE_LEVEL if constants.DEBUG_MODE else logging.INFO format_string = "%(asctime)s | %(name)s | %(levelname)s | %(message)s" log_format = logging.Formatter(format_string) @@ -30,7 +29,6 @@ def setup() -> None: file_handler.setFormatter(log_format) root_log = logging.getLogger() - root_log.setLevel(log_level) root_log.addHandler(file_handler) if "COLOREDLOGS_LEVEL_STYLES" not in os.environ: @@ -44,11 +42,9 @@ def setup() -> None: if "COLOREDLOGS_LOG_FORMAT" not in os.environ: coloredlogs.DEFAULT_LOG_FORMAT = format_string - if "COLOREDLOGS_LOG_LEVEL" not in os.environ: - coloredlogs.DEFAULT_LOG_LEVEL = log_level - - coloredlogs.install(logger=root_log, stream=sys.stdout) + coloredlogs.install(level=logging.TRACE, logger=root_log, stream=sys.stdout) + root_log.setLevel(logging.DEBUG if constants.DEBUG_MODE else logging.INFO) logging.getLogger("discord").setLevel(logging.WARNING) logging.getLogger("websockets").setLevel(logging.WARNING) logging.getLogger("chardet").setLevel(logging.WARNING) @@ -57,6 +53,8 @@ def setup() -> None: # Set back to the default of INFO even if asyncio's debug mode is enabled. logging.getLogger("asyncio").setLevel(logging.INFO) + _set_trace_loggers() + def setup_sentry() -> None: """Set up the Sentry logging integrations.""" @@ -86,3 +84,30 @@ def _monkeypatch_trace(self: logging.Logger, msg: str, *args, **kwargs) -> None: """ if self.isEnabledFor(TRACE_LEVEL): self._log(TRACE_LEVEL, msg, args, **kwargs) + + +def _set_trace_loggers() -> None: + """ + Set loggers to the trace level according to the value from the BOT_TRACE_LOGGERS env var. + + When the env var is a list of logger names delimited by a comma, + each of the listed loggers will be set to the trace level. + + If this list is prefixed with a "!", all of the loggers except the listed ones will be set to the trace level. + + Otherwise if the env var begins with a "*", + the root logger is set to the trace level and other contents are ignored. 
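+
+    Example values (hypothetical): "bot.exts.moderation,bot.api" traces only those
+    two loggers, "!bot.api" traces everything except bot.api, and "*" traces everything.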
+ """ + level_filter = constants.Bot.trace_loggers + if level_filter: + if level_filter.startswith("*"): + logging.getLogger().setLevel(logging.TRACE) + + elif level_filter.startswith("!"): + logging.getLogger().setLevel(logging.TRACE) + for logger_name in level_filter.strip("!,").split(","): + logging.getLogger(logger_name).setLevel(logging.DEBUG) + + else: + for logger_name in level_filter.strip(",").split(","): + logging.getLogger(logger_name).setLevel(logging.TRACE) diff --git a/bot/pagination.py b/bot/pagination.py index 3b16cc9ff..c5c84afd9 100644 --- a/bot/pagination.py +++ b/bot/pagination.py @@ -2,14 +2,14 @@ import asyncio import logging import typing as t from contextlib import suppress +from functools import partial import discord -from discord import Member from discord.abc import User from discord.ext.commands import Context, Paginator from bot import constants -from bot.constants import MODERATION_ROLES +from bot.utils import messages FIRST_EMOJI = "\u23EE" # [:track_previous:] LEFT_EMOJI = "\u2B05" # [:arrow_left:] @@ -220,29 +220,6 @@ class LinePaginator(Paginator): >>> embed.set_author(name="Some Operation", url=url, icon_url=icon) >>> await LinePaginator.paginate([line for line in lines], ctx, embed) """ - def event_check(reaction_: discord.Reaction, user_: discord.Member) -> bool: - """Make sure that this reaction is what we want to operate on.""" - no_restrictions = ( - # The reaction was by a whitelisted user - user_.id == restrict_to_user.id - # The reaction was by a moderator - or isinstance(user_, Member) and any(role.id in MODERATION_ROLES for role in user_.roles) - ) - - return ( - # Conditions for a successful pagination: - all(( - # Reaction is on this message - reaction_.message.id == message.id, - # Reaction is one of the pagination emotes - str(reaction_.emoji) in PAGINATION_EMOJI, - # Reaction was not made by the Bot - user_.id != ctx.bot.user.id, - # There were no restrictions - no_restrictions - )) - ) - paginator = cls(prefix=prefix, suffix=suffix, max_size=max_size, max_lines=max_lines, scale_to_size=scale_to_size) current_page = 0 @@ -303,9 +280,16 @@ class LinePaginator(Paginator): log.trace(f"Adding reaction: {repr(emoji)}") await message.add_reaction(emoji) + check = partial( + messages.reaction_check, + message_id=message.id, + allowed_emoji=PAGINATION_EMOJI, + allowed_users=(restrict_to_user.id,), + ) + while True: try: - reaction, user = await ctx.bot.wait_for("reaction_add", timeout=timeout, check=event_check) + reaction, user = await ctx.bot.wait_for("reaction_add", timeout=timeout, check=check) log.trace(f"Got reaction: {reaction}") except asyncio.TimeoutError: log.debug("Timed out waiting for a reaction") diff --git a/bot/resources/stars.json b/bot/resources/stars.json index 5ecad0213..3eb0a9d0d 100644 --- a/bot/resources/stars.json +++ b/bot/resources/stars.json @@ -20,6 +20,7 @@ "Céline Dion", "Cher", "Christina Aguilera", + "Darude", "David Bowie", "Donna Summer", "Drake", @@ -31,11 +32,14 @@ "Flo Rida", "Frank Sinatra", "Garth Brooks", + "George Harrison", "George Michael", "George Strait", + "Guido Van Rossum", "James Taylor", "Janet Jackson", "Jay-Z", + "John Lennon", "Johnny Cash", "Johnny Hallyday", "Julio Iglesias", @@ -61,13 +65,16 @@ "Pink", "Prince", "Reba McEntire", + "Rick Astley", "Rihanna", + "Ringo Starr", "Robbie Williams", "Rod Stewart", "Santana", "Shania Twain", "Stevie Wonder", "Taylor Swift", + "The Weeknd", "Tim McGraw", "Tina Turner", "Tom Petty", diff --git a/bot/resources/tags/async-await.md 
b/bot/resources/tags/async-await.md
new file mode 100644
index 000000000..ff71ace07
--- /dev/null
+++ b/bot/resources/tags/async-await.md
@@ -0,0 +1,28 @@
+**Concurrency in Python**
+
+Python provides the ability to run multiple tasks and coroutines simultaneously with the use of the `asyncio` library, which is included in the Python standard library.
+
+This works by running these coroutines in an event loop, where the context of which coroutine is being run is switched periodically to allow all of them to run, giving the appearance of running at the same time. This is different to using threads or processes in that all code is run in the main process and thread, although it is possible to run coroutines in threads.
+
+To call an async function we can either `await` it, or run it in an event loop which we get from `asyncio`.
+
+To create a coroutine that can be used with asyncio we need to define a function using the `async` keyword:
+```py
+async def main():
+    await something_awaitable()
+```
+This means we can call `await something_awaitable()` directly from within the function. If this were a non-async function, it would have raised an exception like: `SyntaxError: 'await' outside async function`
+
+To run the top-level async function from outside the event loop we can get an event loop from `asyncio`, and then use that loop to run the function:
+```py
+from asyncio import get_event_loop
+
+async def main():
+    await something_awaitable()
+
+loop = get_event_loop()
+loop.run_until_complete(main())
+```
+Note that although we appear to be calling `main()` inside `run_until_complete()`, this does not execute the code in `main`; rather, it returns a `coroutine` object which is then handled and run by the event loop via `run_until_complete()`.
+
+To learn more about asyncio and its use, see the [asyncio documentation](https://docs.python.org/3/library/asyncio.html).
diff --git a/bot/resources/tags/blocking.md b/bot/resources/tags/blocking.md
new file mode 100644
index 000000000..31d91294c
--- /dev/null
+++ b/bot/resources/tags/blocking.md
@@ -0,0 +1,28 @@
+**Why do we need asynchronous programming?**
+
+Imagine that you're coding a Discord bot and every time somebody uses a command, you need to get some information from a database. But there's a catch: the database servers are acting up today and take a whole 10 seconds to respond. If you do **not** use asynchronous methods, your whole bot will stop running until it gets a response from the database. How do you fix this? Asynchronous programming.
+
+**What is asynchronous programming?**
+
+An asynchronous program utilises the `async` and `await` keywords. Such a program pauses what it's doing and does something else whilst it waits for some third-party service to complete whatever it's supposed to do. Any code within an `async` context manager or function marked with the `await` keyword indicates to Python that, whilst this operation is being completed, it can do something else. For example:
+
+```py
+import discord
+
+# Bunch of bot code
+
+async def ping(ctx):
+    await ctx.send("Pong!")
+```
+
+**What does the term "blocking" mean?**
+
+A blocking operation is anything you do without `await`ing it. This tells Python that this step must be completed before it can do anything else. Common examples of blocking operations, as simple as they may seem, include: outputting text, adding two numbers and appending an item onto a list. 
Many common Python libraries have an asynchronous version available for use in asynchronous contexts.
+
+**`async` libraries**
+
+The standard async library - `asyncio`
+Asynchronous web requests - `aiohttp`
+Talking to PostgreSQL asynchronously - `asyncpg`
+MongoDB interactions asynchronously - `motor`
+Check out [this](https://github.com/timofurrer/awesome-asyncio) list for even more!
diff --git a/bot/resources/tags/customchecks.md b/bot/resources/tags/customchecks.md
new file mode 100644
index 000000000..23ff7a66f
--- /dev/null
+++ b/bot/resources/tags/customchecks.md
@@ -0,0 +1,21 @@
+**Custom Command Checks in discord.py**
+
+Often you may find the need to use checks that don't exist by default in discord.py. Fortunately, discord.py provides `discord.ext.commands.check` which allows you to create your own checks like this:
+```py
+from discord.ext.commands import check, Context
+
+def in_any_channel(*channels):
+    async def predicate(ctx: Context):
+        return ctx.channel.id in channels
+    return check(predicate)
+```
+This check tests whether the invoked command is being used in a given set of channels. The inner function, named `predicate` here, performs the actual check on the command, and your check logic should go in this function. It must be an async function, and it always receives a single `commands.Context` argument which you can use to create the check logic. This check function should return a boolean value indicating whether the check passed (return `True`) or failed (return `False`).
+
+The check can now be used like any other command check as a decorator of a command, such as this:
+```py
+@bot.command(name="ping")
+@in_any_channel(728343273562701984)
+async def ping(ctx: Context):
+    ...
+```
+This would lock the `ping` command to only be used in the channel `728343273562701984`. If this check function fails it will raise a `CheckFailure` exception, which can be handled in your error handler.
diff --git a/bot/resources/tags/dotenv.md b/bot/resources/tags/dotenv.md
new file mode 100644
index 000000000..acb9a216e
--- /dev/null
+++ b/bot/resources/tags/dotenv.md
@@ -0,0 +1,23 @@
+**Using .env files in Python**
+
+`.env` (dotenv) files are commonly used for storing application secrets and variables, for example API tokens and URLs, although they may also store other configurable values. While they are most often used for storing secrets, at a high level their purpose is to load environment variables into a program.
+
+Dotenv files are especially suited for storing secrets as they are a key-value store in a file, which can be easily loaded in most programming languages and ignored by version control systems like Git with a single entry in a `.gitignore` file.
+
+In Python you can use dotenv files with the [`python-dotenv`](https://pypi.org/project/python-dotenv) module from PyPI, which can be installed with `pip install python-dotenv`. 
To use dotenv files you'll first need a file called `.env`, with content such as the following:
+```
+TOKEN=a00418c85bff087b49f23923efe40aa5
+```
+Next, in your main Python file, you need to load the environment variables from the dotenv file you just created:
+```py
+from dotenv import load_dotenv
+
+load_dotenv(".env")
+```
+The variables from the file have now been loaded into your program's environment, and you can access them using `os.getenv()` anywhere in your program, like this:
+```py
+from os import getenv
+
+my_token = getenv("TOKEN")
+```
+For further reading about tokens and secrets, please read [this explanation](https://vcokltfre.dev/tips/tokens).
diff --git a/bot/resources/tags/floats.md b/bot/resources/tags/floats.md
index 7129b91bb..03fcd7268 100644
--- a/bot/resources/tags/floats.md
+++ b/bot/resources/tags/floats.md
@@ -5,7 +5,7 @@ You may have noticed that when doing arithmetic with floats in Python you someti
 0.30000000000000004
 ```
 **Why this happens**
-Internally your computer stores floats as as binary fractions. Many decimal values cannot be stored as exact binary fractions, which means an approximation has to be used.
+Internally your computer stores floats as binary fractions. Many decimal values cannot be stored as exact binary fractions, which means an approximation has to be used.
 **How you can avoid this**
 You can use [math.isclose](https://docs.python.org/3/library/math.html#math.isclose) to check if two floats are close, or to get an exact decimal representation, you can use the [decimal](https://docs.python.org/3/library/decimal.html) or [fractions](https://docs.python.org/3/library/fractions.html) module. Here are some examples:
diff --git a/bot/resources/tags/identity.md b/bot/resources/tags/identity.md
new file mode 100644
index 000000000..fb2010759
--- /dev/null
+++ b/bot/resources/tags/identity.md
@@ -0,0 +1,24 @@
+**Identity vs. Equality**
+
+Should I be using `is` or `==`?
+
+To check if two objects are equal, use the equality operator (`==`).
+```py
+x = 5
+if x == 5:
+    print("x equals 5")
+if x == 3:
+    print("x equals 3")
+# Prints 'x equals 5'
+```
+To check if two objects are actually the same thing in memory, use the identity comparison operator (`is`).
+```py
+list_1 = [1, 2, 3]
+list_2 = [1, 2, 3]
+if list_1 is list_2:
+    print("list_1 is list_2")
+reference_to_list_1 = list_1
+if list_1 is reference_to_list_1:
+    print("list_1 is reference_to_list_1")
+# Prints 'list_1 is reference_to_list_1'
+```
diff --git a/bot/resources/tags/modmail.md b/bot/resources/tags/modmail.md
index 7545419ee..412468174 100644
--- a/bot/resources/tags/modmail.md
+++ b/bot/resources/tags/modmail.md
@@ -6,4 +6,4 @@ It supports attachments, codeblocks, and reactions. As communication happens ove
 
 **To use it, simply send a direct message to the bot.**
 
-Should there be an urgent and immediate need for a moderator or admin to look at a channel, feel free to ping the <@&267629731250176001> or <@&267628507062992896> role instead.
+Should there be an urgent and immediate need for a moderator or admin to look at a channel, feel free to ping the <@&831776746206265384> or <@&267628507062992896> role instead.
diff --git a/bot/resources/tags/star-imports.md b/bot/resources/tags/star-imports.md
index 2be6aab6e..3b1b6a858 100644
--- a/bot/resources/tags/star-imports.md
+++ b/bot/resources/tags/star-imports.md
@@ -16,33 +16,24 @@ Example:
 >>> from math import *
 >>> sin(pi / 2)  # uses sin from math rather than your custom sin
 ```
-
 • Potential namespace collision. 
-
 • Causes ambiguity. From the example, it is unclear which `sin` function is actually being used. From the Zen of Python **[3]**: `Explicit is better than implicit.`
-
 • Makes import order significant, which it shouldn't be. Certain IDEs' `sort imports` functionality may end up breaking code due to namespace collision.
 **How should you import?**
 • Import the module under the module's namespace (import only the name of the module; names defined in the module can then be used by prefixing them with the module's name)
-
 ```python
 >>> import math
 >>> math.sin(math.pi / 2)
 ```
-
 • Explicitly import certain names from the module
-
 ```python
 >>> from math import sin, pi
 >>> sin(pi / 2)
 ```
-
 Conclusion: Namespaces are one honking great idea -- let's do more of those! *[3]*
 **[1]** If the module defines the variable `__all__`, the names defined in `__all__` will get imported by the wildcard import, otherwise all the names in the module get imported (except for names with a leading underscore)
-
 **[2]** [Namespaces and scopes](https://www.programiz.com/python-programming/namespace)
-
 **[3]** [Zen of Python](https://www.python.org/dev/peps/pep-0020/)
diff --git a/bot/resources/tags/str-join.md b/bot/resources/tags/str-join.md
new file mode 100644
index 000000000..c835f9313
--- /dev/null
+++ b/bot/resources/tags/str-join.md
@@ -0,0 +1,28 @@
+**Joining Iterables**
+
+If you want to display a list (or some other iterable), you can write:
+```py
+colors = ['red', 'green', 'blue', 'yellow']
+output = ""
+separator = ", "
+for color in colors:
+    output += color + separator
+print(output)
+# Prints 'red, green, blue, yellow, '
+```
+However, the separator is still added after the last element, and building a string this way is relatively slow.
+
+A better solution is to use `str.join`.
+```py
+colors = ['red', 'green', 'blue', 'yellow']
+separator = ", "
+print(separator.join(colors))
+# Prints 'red, green, blue, yellow'
+```
+An important thing to note is that you can only `str.join` strings. For a list of ints,
+you must convert each element to a string before joining.
+```py
+integers = [1, 3, 6, 10, 15]
+print(", ".join(str(e) for e in integers))
+# Prints '1, 3, 6, 10, 15'
+```
diff --git a/bot/resources/tags/ytdl.md b/bot/resources/tags/ytdl.md
index e34ecff44..f96b7f853 100644
--- a/bot/resources/tags/ytdl.md
+++ b/bot/resources/tags/ytdl.md
@@ -1,12 +1,12 @@
-Per [PyDis' Rule 5](https://pythondiscord.com/pages/rules), we are unable to assist with questions related to youtube-dl, commonly used by Discord bots to stream audio, as its use violates YouTube's Terms of Service.
+Per [Python Discord's Rule 5](https://pythondiscord.com/pages/rules), we are unable to assist with questions related to youtube-dl, pytube, or other YouTube video downloaders as their usage violates YouTube's Terms of Service.
 
-For reference, this usage is covered by the following clauses in [YouTube's TOS](https://www.youtube.com/static?template=terms), as of 2019-07-22:
+For reference, this usage is covered by the following clauses in [YouTube's TOS](https://www.youtube.com/static?gl=GB&template=terms), as of 2021-03-17:
 ```
-The following restrictions apply to your use of the Service. You are not allowed to:
+The following restrictions apply to your use of the Service. You are not allowed to:
 
-1. access, reproduce, download, distribute, transmit, broadcast, display, sell, license, alter, modify or otherwise use any part of the Service or any Content except: (a) as specifically permitted by the Service; (b) with prior written permission from YouTube and, if applicable, the respective rights holders; or (c) as permitted by applicable law;
+1. access, reproduce, download, distribute, transmit, broadcast, display, sell, license, alter, modify or otherwise use any part of the Service or any Content except: (a) as specifically permitted by the Service; (b) with prior written permission from YouTube and, if applicable, the respective rights holders; or (c) as permitted by applicable law;
 
-3. access the Service using any automated means (such as robots, botnets or scrapers) except: (a) in the case of public search engines, in accordance with YouTube’s robots.txt file; (b) with YouTube’s prior written permission; or (c) as permitted by applicable law;
+3. access the Service using any automated means (such as robots, botnets or scrapers) except: (a) in the case of public search engines, in accordance with YouTube’s robots.txt file; (b) with YouTube’s prior written permission; or (c) as permitted by applicable law;
 
 9. use the Service to view or listen to Content other than for personal, non-commercial use (for example, you may not publicly screen videos or stream music from the Service)
 ```
diff --git a/bot/utils/checks.py b/bot/utils/checks.py
index 460a937d8..3d0c8a50c 100644
--- a/bot/utils/checks.py
+++ b/bot/utils/checks.py
@@ -20,8 +20,8 @@ from bot import constants
 log = logging.getLogger(__name__)
 
-class InWhitelistCheckFailure(CheckFailure):
-    """Raised when the `in_whitelist` check fails."""
+class ContextCheckFailure(CheckFailure):
+    """Raised when a context-specific check fails."""
 
     def __init__(self, redirect_channel: Optional[int]) -> None:
         self.redirect_channel = redirect_channel
@@ -36,6 +36,10 @@ class InWhitelistCheckFailure(CheckFailure):
         super().__init__(error_message)
 
 
+class InWhitelistCheckFailure(ContextCheckFailure):
+    """Raised when the `in_whitelist` check fails."""
+
+
 def in_whitelist_check(
     ctx: Context,
     channels: Container[int] = (),
diff --git a/bot/utils/function.py b/bot/utils/function.py
index 3ab32fe3c..9bc44e753 100644
--- a/bot/utils/function.py
+++ b/bot/utils/function.py
@@ -1,14 +1,23 @@
 """Utilities for interaction with functions."""
 
+import functools
 import inspect
+import logging
+import types
 import typing as t
 
+log = logging.getLogger(__name__)
+
 Argument = t.Union[int, str]
 BoundArgs = t.OrderedDict[str, t.Any]
 Decorator = t.Callable[[t.Callable], t.Callable]
 ArgValGetter = t.Callable[[BoundArgs], t.Any]
 
 
+class GlobalNameConflictError(Exception):
+    """Raised when the globals used to resolve annotations conflict between a wrapped function and its wrapper."""
+
+
 def get_arg_value(name_or_pos: Argument, arguments: BoundArgs) -> t.Any:
     """
     Return a value from `arguments` based on a name or position.
@@ -73,3 +82,66 @@ def get_bound_args(func: t.Callable, args: t.Tuple, kwargs: t.Dict[str, t.Any])
     bound_args.apply_defaults()
 
     return bound_args.arguments
+
+
+def update_wrapper_globals(
+    wrapper: types.FunctionType,
+    wrapped: types.FunctionType,
+    *,
+    ignored_conflict_names: t.Set[str] = frozenset(),
+) -> types.FunctionType:
+    """
+    Update globals of `wrapper` with the globals from `wrapped`.
+
+    To resolve forward references in command annotations, discord.py uses the `__globals__` attribute of the
+    function; this breaks with decorators that replace the function, because the replacement has its own
+    globals.
+
+    This function creates a new function functionally identical to `wrapper`, with its globals replaced by
+    a merge of `wrapped`'s globals and `wrapper`'s globals.
+
+    An exception is raised if `wrapper` and `wrapped` share a global name that is used by
+    `wrapped`'s type hints and is not in `ignored_conflict_names`,
+    as this could cause the wrong objects to be used by discord.py's converters.
+    """
+    annotation_global_names = (
+        ann.split(".", maxsplit=1)[0] for ann in wrapped.__annotations__.values() if isinstance(ann, str)
+    )
+    # Conflicting globals from both functions' modules that are also used in the wrapper and in wrapped's annotations.
+    shared_globals = set(wrapper.__code__.co_names) & set(annotation_global_names)
+    shared_globals &= set(wrapped.__globals__) & set(wrapper.__globals__) - ignored_conflict_names
+    if shared_globals:
+        raise GlobalNameConflictError(
+            f"wrapper and the wrapped function share the following "
+            f"global names used by annotations: {', '.join(shared_globals)}. Resolve the conflicts or add "
+            f"the name to the `ignored_conflict_names` set to suppress this error if this is intentional."
+        )
+
+    new_globals = wrapper.__globals__.copy()
+    new_globals.update((k, v) for k, v in wrapped.__globals__.items() if k not in wrapper.__code__.co_names)
+    return types.FunctionType(
+        code=wrapper.__code__,
+        globals=new_globals,
+        name=wrapper.__name__,
+        argdefs=wrapper.__defaults__,
+        closure=wrapper.__closure__,
+    )
+
+
+def command_wraps(
+    wrapped: types.FunctionType,
+    assigned: t.Sequence[str] = functools.WRAPPER_ASSIGNMENTS,
+    updated: t.Sequence[str] = functools.WRAPPER_UPDATES,
+    *,
+    ignored_conflict_names: t.Set[str] = frozenset(),
+) -> t.Callable[[types.FunctionType], types.FunctionType]:
+    """Update the decorated function to look like `wrapped` and update globals for discord.py forward reference evaluation."""
+    def decorator(wrapper: types.FunctionType) -> types.FunctionType:
+        return functools.update_wrapper(
+            update_wrapper_globals(wrapper, wrapped, ignored_conflict_names=ignored_conflict_names),
+            wrapped,
+            assigned,
+            updated,
+        )
+
+    return decorator
diff --git a/bot/utils/lock.py b/bot/utils/lock.py
index e44776340..ec6f92cd4 100644
--- a/bot/utils/lock.py
+++ b/bot/utils/lock.py
@@ -1,13 +1,15 @@
 import asyncio
 import inspect
 import logging
+import types
 from collections import defaultdict
-from functools import partial, wraps
+from functools import partial
 from typing import Any, Awaitable, Callable, Hashable, Union
 from weakref import WeakValueDictionary
 
 from bot.errors import LockedResourceError
 from bot.utils import function
+from bot.utils.function import command_wraps
 
 log = logging.getLogger(__name__)
 __lock_dicts = defaultdict(WeakValueDictionary)
@@ -17,6 +19,35 @@ _IdCallable = Callable[[function.BoundArgs], _IdCallableReturn]
 ResourceId = Union[Hashable, _IdCallable]
 
 
+class SharedEvent:
+    """
+    Context manager managing an internal event exposed through the `wait` coroutine.
+
+    While any code is executing in this context manager, the underlying event will not be set;
+    when all of the holders finish, the event will be set.
+ """ + + def __init__(self): + self._active_count = 0 + self._event = asyncio.Event() + self._event.set() + + def __enter__(self): + """Increment the count of the active holders and clear the internal event.""" + self._active_count += 1 + self._event.clear() + + def __exit__(self, _exc_type, _exc_val, _exc_tb): # noqa: ANN001 + """Decrement the count of the active holders; if 0 is reached set the internal event.""" + self._active_count -= 1 + if not self._active_count: + self._event.set() + + async def wait(self) -> None: + """Wait for all active holders to exit.""" + await self._event.wait() + + def lock( namespace: Hashable, resource_id: ResourceId, @@ -41,10 +72,10 @@ def lock( If decorating a command, this decorator must go before (below) the `command` decorator. """ - def decorator(func: Callable) -> Callable: + def decorator(func: types.FunctionType) -> types.FunctionType: name = func.__name__ - @wraps(func) + @command_wraps(func) async def wrapper(*args, **kwargs) -> Any: log.trace(f"{name}: mutually exclusive decorator called") diff --git a/bot/utils/messages.py b/bot/utils/messages.py index 077dd9569..b6f6c1f66 100644 --- a/bot/utils/messages.py +++ b/bot/utils/messages.py @@ -3,33 +3,77 @@ import contextlib import logging import random import re +from functools import partial from io import BytesIO -from typing import List, Optional, Sequence, Union +from typing import Callable, List, Optional, Sequence, Union import discord +from discord import Message, MessageType, Reaction, User from discord.errors import HTTPException from discord.ext.commands import Context import bot from bot.constants import Emojis, MODERATION_ROLES, NEGATIVE_REPLIES +from bot.utils import scheduling log = logging.getLogger(__name__) +def reaction_check( + reaction: discord.Reaction, + user: discord.abc.User, + *, + message_id: int, + allowed_emoji: Sequence[str], + allowed_users: Sequence[int], + allow_mods: bool = True, +) -> bool: + """ + Check if a reaction's emoji and author are allowed and the message is `message_id`. + + If the user is not allowed, remove the reaction. Ignore reactions made by the bot. + If `allow_mods` is True, allow users with moderator roles even if they're not in `allowed_users`. + """ + right_reaction = ( + user != bot.instance.user + and reaction.message.id == message_id + and str(reaction.emoji) in allowed_emoji + ) + if not right_reaction: + return False + + is_moderator = ( + allow_mods + and any(role.id in MODERATION_ROLES for role in getattr(user, "roles", [])) + ) + + if user.id in allowed_users or is_moderator: + log.trace(f"Allowed reaction {reaction} by {user} on {reaction.message.id}.") + return True + else: + log.trace(f"Removing reaction {reaction} by {user} on {reaction.message.id}: disallowed user.") + scheduling.create_task( + reaction.message.remove_reaction(reaction.emoji, user), + HTTPException, # Suppress the HTTPException if adding the reaction fails + name=f"remove_reaction-{reaction}-{reaction.message.id}-{user}" + ) + return False + + async def wait_for_deletion( message: discord.Message, - user_ids: Sequence[discord.abc.Snowflake], + user_ids: Sequence[int], deletion_emojis: Sequence[str] = (Emojis.trashcan,), timeout: float = 60 * 5, attach_emojis: bool = True, - allow_moderation_roles: bool = True + allow_mods: bool = True ) -> None: """ Wait for up to `timeout` seconds for a reaction by any of the specified `user_ids` to delete the message. 
     An `attach_emojis` bool may be specified to determine whether to attach the given `deletion_emojis`
     to the message.
-    An `allow_moderation_roles` bool may also be specified to allow anyone with a role in `MODERATION_ROLES` to delete
+    An `allow_mods` bool may also be specified to allow anyone with a role in `MODERATION_ROLES` to delete
     the message.
     """
     if message.guild is None:
@@ -43,16 +87,13 @@ async def wait_for_deletion(
         log.trace(f"Aborting wait_for_deletion: message {message.id} deleted prematurely.")
         return
 
-    def check(reaction: discord.Reaction, user: discord.Member) -> bool:
-        """Check that the deletion emoji is reacted by the appropriate user."""
-        return (
-            reaction.message.id == message.id
-            and str(reaction.emoji) in deletion_emojis
-            and (
-                user.id in user_ids
-                or allow_moderation_roles and any(role.id in MODERATION_ROLES for role in user.roles)
-            )
-        )
+    check = partial(
+        reaction_check,
+        message_id=message.id,
+        allowed_emoji=deletion_emojis,
+        allowed_users=user_ids,
+        allow_mods=allow_mods,
+    )
 
     with contextlib.suppress(asyncio.TimeoutError):
         await bot.instance.wait_for('reaction_add', check=check, timeout=timeout)
@@ -124,6 +165,44 @@ async def send_attachments(
     return urls
 
 
+async def count_unique_users_reaction(
+    message: discord.Message,
+    reaction_predicate: Callable[[Reaction], bool] = lambda _: True,
+    user_predicate: Callable[[User], bool] = lambda _: True,
+    count_bots: bool = True
+) -> int:
+    """
+    Count the number of unique users who reacted to the message.
+
+    A `reaction_predicate` function can be passed to check whether a given reaction should be counted,
+    and a `user_predicate` to check whether the reacting user should be counted; the `count_bots` flag
+    controls whether bot accounts are included in the count.
+    """
+    unique_users = set()
+
+    for reaction in message.reactions:
+        if reaction_predicate(reaction):
+            async for user in reaction.users():
+                if (count_bots or not user.bot) and user_predicate(user):
+                    unique_users.add(user.id)
+
+    return len(unique_users)
+
+
+async def pin_no_system_message(message: Message) -> bool:
+    """Pin the given message, wait a couple of seconds, and try to delete the resulting system message."""
+    await message.pin()
+
+    # Make sure that we give Discord enough time to deliver the system message
+    await asyncio.sleep(2)
+    # Search for the system message in the last 10 messages
+    async for historical_message in message.channel.history(limit=10):
+        if historical_message.type == MessageType.pins_add:
+            await historical_message.delete()
+            return True
+
+    return False
+
+
 def sub_clyde(username: Optional[str]) -> Optional[str]:
     """
     Replace "e"/"E" in any "clyde" in `username` with a Cyrillic "е"/"E" and return the new string.
@@ -141,14 +220,14 @@ def sub_clyde(username: Optional[str]) -> Optional[str]: return username # Empty string or None -async def send_denial(ctx: Context, reason: str) -> None: +async def send_denial(ctx: Context, reason: str) -> discord.Message: """Send an embed denying the user with the given reason.""" embed = discord.Embed() embed.colour = discord.Colour.red() embed.title = random.choice(NEGATIVE_REPLIES) embed.description = reason - await ctx.send(embed=embed) + return await ctx.send(embed=embed) def format_user(user: discord.abc.User) -> str: diff --git a/bot/utils/regex.py b/bot/utils/regex.py index 0d2068f90..a8efe1446 100644 --- a/bot/utils/regex.py +++ b/bot/utils/regex.py @@ -5,6 +5,7 @@ INVITE_RE = re.compile( r"discord(?:[\.,]|dot)com(?:\/|slash)invite|" # or discord.com/invite/ r"discordapp(?:[\.,]|dot)com(?:\/|slash)invite|" # or discordapp.com/invite/ r"discord(?:[\.,]|dot)me|" # or discord.me + r"discord(?:[\.,]|dot)li|" # or discord.li r"discord(?:[\.,]|dot)io" # or discord.io. r")(?:[\/]|slash)" # / or 'slash' r"([a-zA-Z0-9\-]+)", # the invite code itself diff --git a/bot/utils/scheduling.py b/bot/utils/scheduling.py index 4dd036e4f..2dc485f24 100644 --- a/bot/utils/scheduling.py +++ b/bot/utils/scheduling.py @@ -59,14 +59,18 @@ class Scheduler: def schedule_at(self, time: datetime, task_id: t.Hashable, coroutine: t.Coroutine) -> None: """ - Schedule `coroutine` to be executed at the given naïve UTC `time`. + Schedule `coroutine` to be executed at the given `time`. + + If `time` is timezone aware, then use that timezone to calculate now() when subtracting. + If `time` is naïve, then use UTC. If `time` is in the past, schedule `coroutine` immediately. If a task with `task_id` already exists, close `coroutine` instead of scheduling it. This prevents unawaited coroutine warnings. Don't pass a coroutine that'll be re-used elsewhere. """ - delay = (time - datetime.utcnow()).total_seconds() + now_datetime = datetime.now(time.tzinfo) if time.tzinfo else datetime.utcnow() + delay = (time - now_datetime).total_seconds() if delay > 0: coroutine = self._await_later(delay, task_id, coroutine) @@ -157,18 +161,18 @@ class Scheduler: self._log.error(f"Error in task #{task_id} {id(done_task)}!", exc_info=exception) -def create_task(*args, **kwargs) -> asyncio.Task: +def create_task(coro: t.Awaitable, *suppressed_exceptions: t.Type[Exception], **kwargs) -> asyncio.Task: """Wrapper for `asyncio.create_task` which logs exceptions raised in the task.""" - task = asyncio.create_task(*args, **kwargs) - task.add_done_callback(_log_task_exception) + task = asyncio.create_task(coro, **kwargs) + task.add_done_callback(partial(_log_task_exception, suppressed_exceptions=suppressed_exceptions)) return task -def _log_task_exception(task: asyncio.Task) -> None: +def _log_task_exception(task: asyncio.Task, *, suppressed_exceptions: t.Tuple[t.Type[Exception]]) -> None: """Retrieve and log the exception raised in `task` if one exists.""" with contextlib.suppress(asyncio.CancelledError): exception = task.exception() # Log the exception if one exists. 
- if exception: + if exception and not isinstance(exception, suppressed_exceptions): log = logging.getLogger(__name__) log.error(f"Error in task {task.get_name()} {id(task)}!", exc_info=exception) diff --git a/bot/utils/services.py b/bot/utils/services.py index 5949c9e48..db9c93d0f 100644 --- a/bot/utils/services.py +++ b/bot/utils/services.py @@ -47,7 +47,14 @@ async def send_to_paste_service(contents: str, *, extension: str = "") -> Option continue elif "key" in response_json: log.info(f"Successfully uploaded contents to paste service behind key {response_json['key']}.") - return URLs.paste_service.format(key=response_json['key']) + extension + + paste_link = URLs.paste_service.format(key=response_json['key']) + extension + + if extension == '.py': + return paste_link + + return paste_link + "?noredirect" + log.warning( f"Got unexpected JSON response from paste service: {response_json}\n" f"trying again ({attempt}/{FAILED_REQUEST_ATTEMPTS})." diff --git a/bot/utils/time.py b/bot/utils/time.py index 466f0adc2..d55a0e532 100644 --- a/bot/utils/time.py +++ b/bot/utils/time.py @@ -1,4 +1,3 @@ -import asyncio import datetime import re from typing import Optional @@ -144,22 +143,6 @@ def parse_rfc1123(stamp: str) -> datetime.datetime: return datetime.datetime.strptime(stamp, RFC1123_FORMAT).replace(tzinfo=datetime.timezone.utc) -# Hey, this could actually be used in the off_topic_names and reddit cogs :) -async def wait_until(time: datetime.datetime, start: Optional[datetime.datetime] = None) -> None: - """ - Wait until a given time. - - :param time: A datetime.datetime object to wait until. - :param start: The start from which to calculate the waiting duration. Defaults to UTC time. - """ - delay = time - (start or datetime.datetime.utcnow()) - delay_seconds = delay.total_seconds() - - # Incorporate a small delay so we don't rapid-fire the event due to time precision errors - if delay_seconds > 1.0: - await asyncio.sleep(delay_seconds) - - def format_infraction(timestamp: str) -> str: """Format an infraction timestamp to a more readable ISO 8601 format.""" return dateutil.parser.isoparse(timestamp).strftime(INFRACTION_FORMAT) diff --git a/config-default.yml b/config-default.yml index 8fc1f6718..3afe9ba3c 100644 --- a/config-default.yml +++ b/config-default.yml @@ -1,7 +1,8 @@ bot: - prefix: "!" - sentry_dsn: !ENV "BOT_SENTRY_DSN" - token: !ENV "BOT_TOKEN" + prefix: "!" 
+ sentry_dsn: !ENV "BOT_SENTRY_DSN" + token: !ENV "BOT_TOKEN" + trace_loggers: !ENV "BOT_TRACE_LOGGERS" clean: # Maximum number of messages to traverse for clean commands @@ -46,6 +47,8 @@ style: badge_partner: "<:partner:748666453242413136>" badge_staff: "<:discord_staff:743882896498098226>" badge_verified_bot_developer: "<:verified_bot_dev:743882897299210310>" + bot: "<:bot:812712599464443914>" + verified_bot: "<:verified_bot:811645219220750347>" defcon_shutdown: "<:defcondisabled:470326273952972810>" defcon_unshutdown: "<:defconenabled:470326274213150730>" @@ -53,15 +56,17 @@ style: failmail: "<:failmail:633660039931887616>" - incident_actioned: "<:incident_actioned:719645530128646266>" - incident_investigating: "<:incident_investigating:719645658671480924>" - incident_unactioned: "<:incident_unactioned:719645583245180960>" + incident_actioned: "<:incident_actioned:714221559279255583>" + incident_investigating: "<:incident_investigating:714224190928191551>" + incident_unactioned: "<:incident_unactioned:714223099645526026>" status_dnd: "<:status_dnd:470326272082313216>" status_idle: "<:status_idle:470326266625785866>" status_offline: "<:status_offline:470326266537705472>" status_online: "<:status_online:470326272351010816>" + ducky_dave: "<:ducky_dave:742058418692423772>" + trashcan: "<:trashcan:637136429717389331>" bullet: "\u2022" @@ -70,11 +75,6 @@ style: new: "\U0001F195" pencil: "\u270F" - # emotes used for #reddit - comments: "<:reddit_comments:755845255001014384>" - upvotes: "<:reddit_upvotes:755845219890757644>" - user: "<:reddit_users:755845303822974997>" - ok_hand: ":ok_hand:" icons: @@ -139,6 +139,7 @@ guild: help_dormant: 691405908919451718 help_in_use: 696958401460043776 logs: &LOGS 468520609152892958 + moderators: &MODS_CATEGORY 749736277464842262 modmail: &MODMAIL 714494672835444826 voice: 356013253765234688 @@ -150,7 +151,6 @@ guild: python_events: &PYEVENTS_CHANNEL 729674110270963822 python_news: &PYNEWS_CHANNEL 704372456592506880 reddit: &REDDIT_CHANNEL 458224812528238616 - user_event_announcements: &USER_EVENT_A 592000283102674944 # Development dev_contrib: &DEV_CONTRIB 635950537262759947 @@ -163,15 +163,16 @@ guild: # Python Help: Available cooldown: 720603994149486673 + how_to_get_help: 704250143020417084 # Topical discord_py: 343944376055103488 # Logs attachment_log: &ATTACH_LOG 649243850006855680 - dm_log: 653713721625018428 message_log: &MESSAGE_LOG 467752170159079424 mod_log: &MOD_LOG 282638479504965634 + nomination_archive: 833371042046148738 user_log: 528976905546760203 voice_log: 640292421988646961 @@ -193,15 +194,12 @@ guild: helpers: &HELPERS 385474242440986624 incidents: 714214212200562749 incidents_archive: 720668923636351037 - mods: &MODS 305126844661760000 mod_alerts: 473092532147060736 - mod_appeals: &MOD_APPEALS 808790025688711198 - mod_meta: &MOD_META 775412552795947058 - mod_spam: &MOD_SPAM 620607373828030464 - mod_tools: &MOD_TOOLS 775413915391098921 + nominations: 822920136150745168 nomination_voting: 822853512709931008 organisation: &ORGANISATION 551789653284356126 staff_lounge: &STAFF_LOUNGE 464905259261755392 + staff_info: &STAFF_INFO 396684402404622347 # Staff announcement channels admin_announcements: &ADMIN_ANNOUNCEMENTS 749736155569848370 @@ -225,17 +223,13 @@ guild: big_brother_logs: &BB_LOGS 468507907357409333 moderation_categories: + - *MODS_CATEGORY - *MODMAIL - *LOGS moderation_channels: - *ADMINS - *ADMIN_SPAM - - *MOD_APPEALS - - *MOD_META - - *MOD_TOOLS - - *MODS - - *MOD_SPAM # Modlog cog ignores events which occur in these channels 
modlog_blacklist: @@ -264,33 +258,38 @@ guild: admins: &ADMINS_ROLE 267628507062992896 core_developers: 587606783669829632 devops: 409416496733880320 + domain_leads: 807415650778742785 helpers: &HELPERS_ROLE 267630620367257601 - moderators: &MODS_ROLE 267629731250176001 + moderators: &MODS_ROLE 831776746206265384 + mod_team: &MOD_TEAM_ROLE 267629731250176001 owners: &OWNERS_ROLE 267627879762755584 + project_leads: 815701647526330398 # Code Jam jammers: 737249140966162473 team_leaders: 737250302834638889 + # Streaming + video: 764245844798079016 + moderation_roles: - *ADMINS_ROLE + - *MOD_TEAM_ROLE - *MODS_ROLE - *OWNERS_ROLE staff_roles: - *ADMINS_ROLE - *HELPERS_ROLE - - *MODS_ROLE + - *MOD_TEAM_ROLE - *OWNERS_ROLE webhooks: big_brother: 569133704568373283 dev_log: 680501655111729222 - dm_log: 654567640664244225 duck_pond: 637821475327311927 incidents_archive: 720671599790915702 python_news: &PYNEWS_WEBHOOK 704381182279942324 - reddit: 635408384794951680 filter: @@ -321,7 +320,6 @@ filter: - *MESSAGE_LOG - *MOD_LOG - *STAFF_LOUNGE - - *USER_EVENT_A role_whitelist: - *ADMINS_ROLE @@ -419,11 +417,14 @@ anti_spam: max: 3 -reddit: - client_id: !ENV "REDDIT_CLIENT_ID" - secret: !ENV "REDDIT_SECRET" - subreddits: - - 'r/Python' + +metabase: + username: !ENV "METABASE_USERNAME" + password: !ENV "METABASE_PASSWORD" + url: "http://metabase.default.svc.cluster.local/api" + # 14 days, see https://www.metabase.com/docs/latest/operations-guide/environment-variables.html#max_session_age + max_session_age: 20160 + big_brother: @@ -459,15 +460,16 @@ free: help_channels: enable: true - # Minimum interval before allowing a certain user to claim a new help channel - claim_minutes: 15 - # Roles which are allowed to use the command which makes channels dormant cmd_whitelist: - *HELPERS_ROLE - # Allowed duration of inactivity before making a channel dormant - idle_minutes: 30 + # Allowed duration of inactivity by claimant before making a channel dormant + idle_minutes_claimant: 30 + + # Allowed duration of inactivity by others before making a channel dormant + # `idle_minutes_claimant` must also be met, before a channel is closed + idle_minutes_others: 10 # Allowed duration of inactivity when channel is empty (due to deleted messages) # before message making a channel dormant @@ -478,7 +480,7 @@ help_channels: # Maximum number of channels across all 3 categories # Note Discord has a hard limit of 50 channels per category, so this shouldn't be > 50 - max_total_channels: 32 + max_total_channels: 42 # Prefix for help channel names name_prefix: 'help-' @@ -503,19 +505,19 @@ redirect_output: duck_pond: - threshold: 5 + threshold: 7 channel_blacklist: - *ANNOUNCEMENTS - *PYNEWS_CHANNEL - *PYEVENTS_CHANNEL - *MAILING_LISTS - *REDDIT_CHANNEL - - *USER_EVENT_A - *DUCK_POND - *CHANGE_LOG - *STAFF_ANNOUNCEMENTS - *MOD_ANNOUNCEMENTS - *ADMIN_ANNOUNCEMENTS + - *STAFF_INFO python_news: @@ -543,3 +545,7 @@ branding: config: required_keys: ['bot.token'] + + +video_permission: + default_permission_duration: 5 # Default duration for stream command in minutes diff --git a/docker-compose.yml b/docker-compose.yml index 8afdd6ef1..bdfedf5c2 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -4,8 +4,20 @@ version: "3.7" +x-logging: &logging + logging: + driver: "json-file" + options: + max-file: "5" + max-size: "10m" + +x-restart-policy: &restart_policy + restart: always + services: postgres: + << : *logging + << : *restart_policy image: postgres:12-alpine environment: POSTGRES_DB: pysite @@ -13,11 +25,15 @@ services: 
POSTGRES_USER: pysite redis: + << : *logging + << : *restart_policy image: redis:5.0.9 ports: - "127.0.0.1:6379:6379" snekbox: + << : *logging + << : *restart_policy image: ghcr.io/python-discord/snekbox:latest init: true ipc: none @@ -26,6 +42,8 @@ services: privileged: true web: + << : *logging + << : *restart_policy image: ghcr.io/python-discord/site:latest command: ["run", "--debug"] networks: @@ -46,6 +64,8 @@ services: STATIC_ROOT: /var/www/static bot: + << : *logging + << : *restart_policy build: context: . dockerfile: Dockerfile diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 000000000..ba8b7af4b --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1602 @@ +[[package]] +name = "aio-pika" +version = "6.8.0" +description = "Wrapper for the aiormq for asyncio and humans." +category = "main" +optional = false +python-versions = ">3.5.*, <4" + +[package.dependencies] +aiormq = ">=3.2.3,<4" +yarl = "*" + +[package.extras] +develop = ["aiomisc (>=10.1.6,<10.2.0)", "async-generator", "coverage (!=4.3)", "coveralls", "pylava", "pytest", "pytest-cov", "shortuuid", "nox", "sphinx", "sphinx-autobuild", "timeout-decorator", "tox (>=2.4)"] + +[[package]] +name = "aiodns" +version = "2.0.0" +description = "Simple DNS resolver for asyncio" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +pycares = ">=3.0.0" + +[[package]] +name = "aiohttp" +version = "3.7.4.post0" +description = "Async http client/server framework (asyncio)" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +async-timeout = ">=3.0,<4.0" +attrs = ">=17.3.0" +chardet = ">=2.0,<5.0" +multidict = ">=4.5,<7.0" +typing-extensions = ">=3.6.5" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["aiodns", "brotlipy", "cchardet"] + +[[package]] +name = "aioping" +version = "0.3.1" +description = "Asyncio ping implementation" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +aiodns = "*" +async-timeout = "*" + +[[package]] +name = "aioredis" +version = "1.3.1" +description = "asyncio (PEP 3156) Redis support" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +async-timeout = "*" +hiredis = "*" + +[[package]] +name = "aiormq" +version = "3.3.1" +description = "Pure python AMQP asynchronous client library" +category = "main" +optional = false +python-versions = ">3.5.*" + +[package.dependencies] +pamqp = "2.3.0" +yarl = "*" + +[package.extras] +develop = ["aiomisc (>=11.0,<12.0)", "async-generator", "coverage (!=4.3)", "coveralls", "pylava", "pytest", "pytest-cov", "tox (>=2.4)"] + +[[package]] +name = "appdirs" +version = "1.4.4" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "arrow" +version = "1.0.3" +description = "Better dates & times for Python" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +python-dateutil = ">=2.7.0" + +[[package]] +name = "async-rediscache" +version = "0.1.4" +description = "An easy to use asynchronous Redis cache" +category = "main" +optional = false +python-versions = "~=3.7" + +[package.dependencies] +aioredis = ">=1" +fakeredis = {version = ">=1.3.1", optional = true, markers = "extra == \"fakeredis\""} + +[package.extras] +fakeredis = ["fakeredis (>=1.3.1)"] + +[[package]] +name = "async-timeout" +version = "3.0.1" +description = "Timeout context manager for asyncio programs" +category = "main" +optional = false +python-versions = ">=3.5.3" + +[[package]] +name = "attrs" +version = "21.2.0" +description = "Classes Without Boilerplate" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.extras] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] +docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] + +[[package]] +name = "beautifulsoup4" +version = "4.9.3" +description = "Screen-scraping library" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +soupsieve = {version = ">1.2", markers = "python_version >= \"3.0\""} + +[package.extras] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "certifi" +version = "2020.12.5" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "cffi" +version = "1.14.5" +description = "Foreign Function Interface for Python calling C code." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "cfgv" +version = "3.2.0" +description = "Validate configuration and produce human readable error messages." +category = "dev" +optional = false +python-versions = ">=3.6.1" + +[[package]] +name = "chardet" +version = "4.0.0" +description = "Universal encoding detector for Python 2 and 3" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "colorama" +version = "0.4.4" +description = "Cross-platform colored terminal text." 
+category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "coloredlogs" +version = "14.3" +description = "Colored terminal output for Python's logging module" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +humanfriendly = ">=7.1" + +[package.extras] +cron = ["capturer (>=2.4)"] + +[[package]] +name = "coverage" +version = "5.5" +description = "Code coverage measurement for Python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" + +[package.extras] +toml = ["toml"] + +[[package]] +name = "coveralls" +version = "2.2.0" +description = "Show coverage stats online via coveralls.io" +category = "dev" +optional = false +python-versions = ">= 3.5" + +[package.dependencies] +coverage = ">=4.1,<6.0" +docopt = ">=0.6.1" +requests = ">=1.0.0" + +[package.extras] +yaml = ["PyYAML (>=3.10)"] + +[[package]] +name = "deepdiff" +version = "4.3.2" +description = "Deep Difference and Search of any Python object/data." +category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +ordered-set = ">=3.1.1" + +[package.extras] +murmur = ["mmh3"] + +[[package]] +name = "discord.py" +version = "1.6.0" +description = "A Python wrapper for the Discord API" +category = "main" +optional = false +python-versions = ">=3.5.3" + +[package.dependencies] +aiohttp = ">=3.6.0,<3.8.0" + +[package.extras] +docs = ["sphinx (==3.0.3)", "sphinxcontrib-trio (==1.1.2)", "sphinxcontrib-websupport"] +voice = ["PyNaCl (>=1.3.0,<1.5)"] + +[[package]] +name = "distlib" +version = "0.3.1" +description = "Distribution utilities" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "docopt" +version = "0.6.2" +description = "Pythonic argument parser, that will make you smile" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "emoji" +version = "0.6.0" +description = "Emoji for Python" +category = "main" +optional = false +python-versions = "*" + +[package.extras] +dev = ["pytest", "coverage", "coveralls"] + +[[package]] +name = "fakeredis" +version = "1.5.0" +description = "Fake implementation of redis API for testing purposes." +category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +redis = "<3.6.0" +six = ">=1.12" +sortedcontainers = "*" + +[package.extras] +aioredis = ["aioredis"] +lua = ["lupa"] + +[[package]] +name = "feedparser" +version = "6.0.2" +description = "Universal feed parser, handles RSS 0.9x, RSS 1.0, RSS 2.0, CDF, Atom 0.3, and Atom 1.0 feeds" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +sgmllib3k = "*" + +[[package]] +name = "filelock" +version = "3.0.12" +description = "A platform independent file lock." 
+category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "flake8" +version = "3.9.2" +description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[package.dependencies] +mccabe = ">=0.6.0,<0.7.0" +pycodestyle = ">=2.7.0,<2.8.0" +pyflakes = ">=2.3.0,<2.4.0" + +[[package]] +name = "flake8-annotations" +version = "2.6.2" +description = "Flake8 Type Annotation Checks" +category = "dev" +optional = false +python-versions = ">=3.6.1,<4.0.0" + +[package.dependencies] +flake8 = ">=3.7,<4.0" + +[[package]] +name = "flake8-bugbear" +version = "20.11.1" +description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +attrs = ">=19.2.0" +flake8 = ">=3.0.0" + +[package.extras] +dev = ["coverage", "black", "hypothesis", "hypothesmith"] + +[[package]] +name = "flake8-docstrings" +version = "1.6.0" +description = "Extension for flake8 which uses pydocstyle to check docstrings" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +flake8 = ">=3" +pydocstyle = ">=2.1" + +[[package]] +name = "flake8-import-order" +version = "0.18.1" +description = "Flake8 and pylama plugin that checks the ordering of import statements." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +pycodestyle = "*" + +[[package]] +name = "flake8-polyfill" +version = "1.0.2" +description = "Polyfill package for Flake8 plugins" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +flake8 = "*" + +[[package]] +name = "flake8-string-format" +version = "0.3.0" +description = "string format checker, plugin for flake8" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +flake8 = "*" + +[[package]] +name = "flake8-tidy-imports" +version = "4.3.0" +description = "A flake8 plugin that helps you write tidier imports." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +flake8 = ">=3.0,<3.2.0 || >3.2.0,<4" + +[[package]] +name = "flake8-todo" +version = "0.7" +description = "TODO notes checker, plugin for flake8" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +pycodestyle = ">=2.0.0,<3.0.0" + +[[package]] +name = "fuzzywuzzy" +version = "0.18.0" +description = "Fuzzy string matching in python" +category = "main" +optional = false +python-versions = "*" + +[package.extras] +speedup = ["python-levenshtein (>=0.12)"] + +[[package]] +name = "hiredis" +version = "2.0.0" +description = "Python wrapper for hiredis" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "humanfriendly" +version = "9.1" +description = "Human friendly output for text interfaces using Python" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +pyreadline = {version = "*", markers = "sys_platform == \"win32\""} + +[[package]] +name = "identify" +version = "2.2.4" +description = "File identification library for Python" +category = "dev" +optional = false +python-versions = ">=3.6.1" + +[package.extras] +license = ["editdistance-s"] + +[[package]] +name = "idna" +version = "3.1" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.4" + +[[package]] +name = "lxml" +version = "4.6.3" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html5 = ["html5lib"] +htmlsoup = ["beautifulsoup4"] +source = ["Cython (>=0.29.7)"] + +[[package]] +name = "markdownify" +version = "0.6.1" +description = "Convert HTML to markdown." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +beautifulsoup4 = "*" +six = "*" + +[[package]] +name = "mccabe" +version = "0.6.1" +description = "McCabe checker, plugin for flake8" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "more-itertools" +version = "8.7.0" +description = "More routines for operating on iterables, beyond itertools" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "mslex" +version = "0.3.0" +description = "shlex for windows" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "multidict" +version = "5.1.0" +description = "multidict implementation" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "nodeenv" +version = "1.6.0" +description = "Node.js virtual environment builder" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "ordered-set" +version = "4.0.2" +description = "A set that remembers its order, and allows looking up its items by their index in that order." 
+category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "pamqp" +version = "2.3.0" +description = "RabbitMQ Focused AMQP low-level library" +category = "main" +optional = false +python-versions = "*" + +[package.extras] +codegen = ["lxml"] + +[[package]] +name = "pep8-naming" +version = "0.11.1" +description = "Check PEP-8 naming conventions, plugin for flake8" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +flake8-polyfill = ">=1.0.2,<2" + +[[package]] +name = "pre-commit" +version = "2.12.1" +description = "A framework for managing and maintaining multi-language pre-commit hooks." +category = "dev" +optional = false +python-versions = ">=3.6.1" + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +toml = "*" +virtualenv = ">=20.0.8" + +[[package]] +name = "psutil" +version = "5.8.0" +description = "Cross-platform lib for process and system monitoring in Python." +category = "dev" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.extras] +test = ["ipaddress", "mock", "unittest2", "enum34", "pywin32", "wmi"] + +[[package]] +name = "pycares" +version = "3.2.3" +description = "Python interface for c-ares" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +cffi = ">=1.5.0" + +[package.extras] +idna = ["idna (>=2.1)"] + +[[package]] +name = "pycodestyle" +version = "2.7.0" +description = "Python style guide checker" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pycparser" +version = "2.20" +description = "C parser in Python" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pydocstyle" +version = "6.0.0" +description = "Python docstring style checker" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +snowballstemmer = "*" + +[[package]] +name = "pyflakes" +version = "2.3.1" +description = "passive checker of Python programs" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pyreadline" +version = "2.1" +description = "A python implmementation of GNU readline." 
+category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "python-dateutil" +version = "2.8.1" +description = "Extensions to the standard Python datetime module" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "0.17.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +category = "dev" +optional = false +python-versions = "*" + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "python-frontmatter" +version = "1.0.0" +description = "Parse and manage posts with YAML (or other) frontmatter" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +PyYAML = "*" + +[package.extras] +docs = ["sphinx"] +test = ["pytest", "toml", "pyaml"] + +[[package]] +name = "pyyaml" +version = "5.4.1" +description = "YAML parser and emitter for Python" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" + +[[package]] +name = "redis" +version = "3.5.3" +description = "Python client for Redis key-value store" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.extras] +hiredis = ["hiredis (>=0.1.3)"] + +[[package]] +name = "regex" +version = "2021.4.4" +description = "Alternative regular expression module, to replace re." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "requests" +version = "2.15.1" +description = "Python HTTP for Humans." +category = "dev" +optional = false +python-versions = "*" + +[package.extras] +security = ["cryptography (>=1.3.4)", "idna (>=2.0.0)", "pyOpenSSL (>=0.14)"] +socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] + +[[package]] +name = "sentry-sdk" +version = "0.20.3" +description = "Python client for Sentry (https://sentry.io)" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +certifi = "*" +urllib3 = ">=1.10.0" + +[package.extras] +aiohttp = ["aiohttp (>=3.5)"] +beam = ["apache-beam (>=2.12)"] +bottle = ["bottle (>=0.12.13)"] +celery = ["celery (>=3)"] +chalice = ["chalice (>=1.16.0)"] +django = ["django (>=1.8)"] +falcon = ["falcon (>=1.4)"] +flask = ["flask (>=0.11)", "blinker (>=1.1)"] +pure_eval = ["pure-eval", "executing", "asttokens"] +pyspark = ["pyspark (>=2.4.4)"] +rq = ["rq (>=0.6)"] +sanic = ["sanic (>=0.8)"] +sqlalchemy = ["sqlalchemy (>=1.2)"] +tornado = ["tornado (>=5)"] + +[[package]] +name = "sgmllib3k" +version = "1.0.0" +description = "Py3k port of sgmllib." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "snowballstemmer" +version = "2.1.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "sortedcontainers" +version = "2.3.0" +description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "soupsieve" +version = "2.2.1" +description = "A modern CSS selector implementation for Beautiful Soup." 
+category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "statsd" +version = "3.3.0" +description = "A simple statsd client." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "taskipy" +version = "1.7.0" +description = "tasks runner for python projects" +category = "dev" +optional = false +python-versions = ">=3.6,<4.0" + +[package.dependencies] +mslex = ">=0.3.0,<0.4.0" +psutil = ">=5.7.2,<6.0.0" +toml = ">=0.10.0,<0.11.0" + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "dev" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "typing-extensions" +version = "3.10.0.0" +description = "Backported and Experimental Type Hints for Python 3.5+" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "urllib3" +version = "1.26.4" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" + +[package.extras] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +brotli = ["brotlipy (>=0.6.0)"] + +[[package]] +name = "virtualenv" +version = "20.4.6" +description = "Virtual Python Environment builder" +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" + +[package.dependencies] +appdirs = ">=1.4.3,<2" +distlib = ">=0.3.1,<1" +filelock = ">=3.0.0,<4" +six = ">=1.9.0,<2" + +[package.extras] +docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=19.9.0rc1)"] +testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)", "xonsh (>=0.9.16)"] + +[[package]] +name = "yarl" +version = "1.6.3" +description = "Yet another URL library" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[metadata] +lock-version = "1.1" +python-versions = "3.9.*" +content-hash = "ece3b915901a62911ff7ff4a616b3972e815c0e1c7097c8994163af13cadde0e" + +[metadata.files] +aio-pika = [ + {file = "aio-pika-6.8.0.tar.gz", hash = "sha256:1d4305a5f78af3857310b4fe48348cdcf6c097e0e275ea88c2cd08570531a369"}, + {file = "aio_pika-6.8.0-py3-none-any.whl", hash = "sha256:e69afef8695f47c5d107bbdba21bdb845d5c249acb3be53ef5c2d497b02657c0"}, +] +aiodns = [ + {file = "aiodns-2.0.0-py2.py3-none-any.whl", hash = "sha256:aaa5ac584f40fe778013df0aa6544bf157799bd3f608364b451840ed2c8688de"}, + {file = "aiodns-2.0.0.tar.gz", hash = "sha256:815fdef4607474295d68da46978a54481dd1e7be153c7d60f9e72773cd38d77d"}, +] +aiohttp = [ + {file = "aiohttp-3.7.4.post0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:3cf75f7cdc2397ed4442594b935a11ed5569961333d49b7539ea741be2cc79d5"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:4b302b45040890cea949ad092479e01ba25911a15e648429c7c5aae9650c67a8"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:fe60131d21b31fd1a14bd43e6bb88256f69dfc3188b3a89d736d6c71ed43ec95"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux2014_i686.whl", hash = 
"sha256:393f389841e8f2dfc86f774ad22f00923fdee66d238af89b70ea314c4aefd290"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:c6e9dcb4cb338d91a73f178d866d051efe7c62a7166653a91e7d9fb18274058f"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:5df68496d19f849921f05f14f31bd6ef53ad4b00245da3195048c69934521809"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:0563c1b3826945eecd62186f3f5c7d31abb7391fedc893b7e2b26303b5a9f3fe"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-win32.whl", hash = "sha256:3d78619672183be860b96ed96f533046ec97ca067fd46ac1f6a09cd9b7484287"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-win_amd64.whl", hash = "sha256:f705e12750171c0ab4ef2a3c76b9a4024a62c4103e3a55dd6f99265b9bc6fcfc"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:230a8f7e24298dea47659251abc0fd8b3c4e38a664c59d4b89cca7f6c09c9e87"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2e19413bf84934d651344783c9f5e22dee452e251cfd220ebadbed2d9931dbf0"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:e4b2b334e68b18ac9817d828ba44d8fcb391f6acb398bcc5062b14b2cbeac970"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:d012ad7911653a906425d8473a1465caa9f8dea7fcf07b6d870397b774ea7c0f"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:40eced07f07a9e60e825554a31f923e8d3997cfc7fb31dbc1328c70826e04cde"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:209b4a8ee987eccc91e2bd3ac36adee0e53a5970b8ac52c273f7f8fd4872c94c"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:14762875b22d0055f05d12abc7f7d61d5fd4fe4642ce1a249abdf8c700bf1fd8"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-win32.whl", hash = "sha256:7615dab56bb07bff74bc865307aeb89a8bfd9941d2ef9d817b9436da3a0ea54f"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-win_amd64.whl", hash = "sha256:d9e13b33afd39ddeb377eff2c1c4f00544e191e1d1dee5b6c51ddee8ea6f0cf5"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:547da6cacac20666422d4882cfcd51298d45f7ccb60a04ec27424d2f36ba3eaf"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:af9aa9ef5ba1fd5b8c948bb11f44891968ab30356d65fd0cc6707d989cd521df"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:64322071e046020e8797117b3658b9c2f80e3267daec409b350b6a7a05041213"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:bb437315738aa441251214dad17428cafda9cdc9729499f1d6001748e1d432f4"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:e54962802d4b8b18b6207d4a927032826af39395a3bd9196a5af43fc4e60b009"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:a00bb73540af068ca7390e636c01cbc4f644961896fa9363154ff43fd37af2f5"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:79ebfc238612123a713a457d92afb4096e2148be17df6c50fb9bf7a81c2f8013"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-win32.whl", hash = "sha256:515dfef7f869a0feb2afee66b957cc7bbe9ad0cdee45aec7fdc623f4ecd4fb16"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-win_amd64.whl", hash = "sha256:114b281e4d68302a324dd33abb04778e8557d88947875cbf4e842c2c01a030c5"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-macosx_10_14_x86_64.whl", hash = 
"sha256:7b18b97cf8ee5452fa5f4e3af95d01d84d86d32c5e2bfa260cf041749d66360b"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:15492a6368d985b76a2a5fdd2166cddfea5d24e69eefed4630cbaae5c81d89bd"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bdb230b4943891321e06fc7def63c7aace16095be7d9cf3b1e01be2f10fba439"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:cffe3ab27871bc3ea47df5d8f7013945712c46a3cc5a95b6bee15887f1675c22"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:f881853d2643a29e643609da57b96d5f9c9b93f62429dcc1cbb413c7d07f0e1a"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:a5ca29ee66f8343ed336816c553e82d6cade48a3ad702b9ffa6125d187e2dedb"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:17c073de315745a1510393a96e680d20af8e67e324f70b42accbd4cb3315c9fb"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-win32.whl", hash = "sha256:932bb1ea39a54e9ea27fc9232163059a0b8855256f4052e776357ad9add6f1c9"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-win_amd64.whl", hash = "sha256:02f46fc0e3c5ac58b80d4d56eb0a7c7d97fcef69ace9326289fb9f1955e65cfe"}, + {file = "aiohttp-3.7.4.post0.tar.gz", hash = "sha256:493d3299ebe5f5a7c66b9819eacdcfbbaaf1a8e84911ddffcdc48888497afecf"}, +] +aioping = [ + {file = "aioping-0.3.1-py3-none-any.whl", hash = "sha256:8900ef2f5a589ba0c12aaa9c2d586f5371820d468d21b374ddb47ef5fc8f297c"}, + {file = "aioping-0.3.1.tar.gz", hash = "sha256:f983d86acab3a04c322731ce88d42c55d04d2842565fc8532fe10c838abfd275"}, +] +aioredis = [ + {file = "aioredis-1.3.1-py3-none-any.whl", hash = "sha256:b61808d7e97b7cd5a92ed574937a079c9387fdadd22bfbfa7ad2fd319ecc26e3"}, + {file = "aioredis-1.3.1.tar.gz", hash = "sha256:15f8af30b044c771aee6787e5ec24694c048184c7b9e54c3b60c750a4b93273a"}, +] +aiormq = [ + {file = "aiormq-3.3.1-py3-none-any.whl", hash = "sha256:e584dac13a242589aaf42470fd3006cb0dc5aed6506cbd20357c7ec8bbe4a89e"}, + {file = "aiormq-3.3.1.tar.gz", hash = "sha256:8218dd9f7198d6e7935855468326bbacf0089f926c70baa8dd92944cb2496573"}, +] +appdirs = [ + {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, + {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, +] +arrow = [ + {file = "arrow-1.0.3-py3-none-any.whl", hash = "sha256:3515630f11a15c61dcb4cdd245883270dd334c83f3e639824e65a4b79cc48543"}, + {file = "arrow-1.0.3.tar.gz", hash = "sha256:399c9c8ae732270e1aa58ead835a79a40d7be8aa109c579898eb41029b5a231d"}, +] +async-rediscache = [ + {file = "async-rediscache-0.1.4.tar.gz", hash = "sha256:6be8a657d724ccbcfb1946d29a80c3478c5f9ecd2f78a0a26d2f4013a622258f"}, + {file = "async_rediscache-0.1.4-py3-none-any.whl", hash = "sha256:c25e4fff73f64d20645254783c3224a4c49e083e3fab67c44f17af944c5e26af"}, +] +async-timeout = [ + {file = "async-timeout-3.0.1.tar.gz", hash = "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f"}, + {file = "async_timeout-3.0.1-py3-none-any.whl", hash = "sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3"}, +] +attrs = [ + {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, + {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, +] +beautifulsoup4 = [ + {file = 
"beautifulsoup4-4.9.3-py2-none-any.whl", hash = "sha256:4c98143716ef1cb40bf7f39a8e3eec8f8b009509e74904ba3a7b315431577e35"}, + {file = "beautifulsoup4-4.9.3-py3-none-any.whl", hash = "sha256:fff47e031e34ec82bf17e00da8f592fe7de69aeea38be00523c04623c04fb666"}, + {file = "beautifulsoup4-4.9.3.tar.gz", hash = "sha256:84729e322ad1d5b4d25f805bfa05b902dd96450f43842c4e99067d5e1369eb25"}, +] +certifi = [ + {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, + {file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"}, +] +cffi = [ + {file = "cffi-1.14.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:bb89f306e5da99f4d922728ddcd6f7fcebb3241fc40edebcb7284d7514741991"}, + {file = "cffi-1.14.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:34eff4b97f3d982fb93e2831e6750127d1355a923ebaeeb565407b3d2f8d41a1"}, + {file = "cffi-1.14.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:99cd03ae7988a93dd00bcd9d0b75e1f6c426063d6f03d2f90b89e29b25b82dfa"}, + {file = "cffi-1.14.5-cp27-cp27m-win32.whl", hash = "sha256:65fa59693c62cf06e45ddbb822165394a288edce9e276647f0046e1ec26920f3"}, + {file = "cffi-1.14.5-cp27-cp27m-win_amd64.whl", hash = "sha256:51182f8927c5af975fece87b1b369f722c570fe169f9880764b1ee3bca8347b5"}, + {file = "cffi-1.14.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:43e0b9d9e2c9e5d152946b9c5fe062c151614b262fda2e7b201204de0b99e482"}, + {file = "cffi-1.14.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:cbde590d4faaa07c72bf979734738f328d239913ba3e043b1e98fe9a39f8b2b6"}, + {file = "cffi-1.14.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:5de7970188bb46b7bf9858eb6890aad302577a5f6f75091fd7cdd3ef13ef3045"}, + {file = "cffi-1.14.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:a465da611f6fa124963b91bf432d960a555563efe4ed1cc403ba5077b15370aa"}, + {file = "cffi-1.14.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:d42b11d692e11b6634f7613ad8df5d6d5f8875f5d48939520d351007b3c13406"}, + {file = "cffi-1.14.5-cp35-cp35m-win32.whl", hash = "sha256:72d8d3ef52c208ee1c7b2e341f7d71c6fd3157138abf1a95166e6165dd5d4369"}, + {file = "cffi-1.14.5-cp35-cp35m-win_amd64.whl", hash = "sha256:29314480e958fd8aab22e4a58b355b629c59bf5f2ac2492b61e3dc06d8c7a315"}, + {file = "cffi-1.14.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3d3dd4c9e559eb172ecf00a2a7517e97d1e96de2a5e610bd9b68cea3925b4892"}, + {file = "cffi-1.14.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:48e1c69bbacfc3d932221851b39d49e81567a4d4aac3b21258d9c24578280058"}, + {file = "cffi-1.14.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:69e395c24fc60aad6bb4fa7e583698ea6cc684648e1ffb7fe85e3c1ca131a7d5"}, + {file = "cffi-1.14.5-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:9e93e79c2551ff263400e1e4be085a1210e12073a31c2011dbbda14bda0c6132"}, + {file = "cffi-1.14.5-cp36-cp36m-win32.whl", hash = "sha256:58e3f59d583d413809d60779492342801d6e82fefb89c86a38e040c16883be53"}, + {file = "cffi-1.14.5-cp36-cp36m-win_amd64.whl", hash = "sha256:005a36f41773e148deac64b08f233873a4d0c18b053d37da83f6af4d9087b813"}, + {file = "cffi-1.14.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2894f2df484ff56d717bead0a5c2abb6b9d2bf26d6960c4604d5c48bbc30ee73"}, + {file = "cffi-1.14.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0857f0ae312d855239a55c81ef453ee8fd24136eaba8e87a2eceba644c0d4c06"}, + {file = "cffi-1.14.5-cp37-cp37m-manylinux1_x86_64.whl", hash = 
"sha256:cd2868886d547469123fadc46eac7ea5253ea7fcb139f12e1dfc2bbd406427d1"}, + {file = "cffi-1.14.5-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:35f27e6eb43380fa080dccf676dece30bef72e4a67617ffda586641cd4508d49"}, + {file = "cffi-1.14.5-cp37-cp37m-win32.whl", hash = "sha256:9ff227395193126d82e60319a673a037d5de84633f11279e336f9c0f189ecc62"}, + {file = "cffi-1.14.5-cp37-cp37m-win_amd64.whl", hash = "sha256:9cf8022fb8d07a97c178b02327b284521c7708d7c71a9c9c355c178ac4bbd3d4"}, + {file = "cffi-1.14.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8b198cec6c72df5289c05b05b8b0969819783f9418e0409865dac47288d2a053"}, + {file = "cffi-1.14.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:ad17025d226ee5beec591b52800c11680fca3df50b8b29fe51d882576e039ee0"}, + {file = "cffi-1.14.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6c97d7350133666fbb5cf4abdc1178c812cb205dc6f41d174a7b0f18fb93337e"}, + {file = "cffi-1.14.5-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8ae6299f6c68de06f136f1f9e69458eae58f1dacf10af5c17353eae03aa0d827"}, + {file = "cffi-1.14.5-cp38-cp38-win32.whl", hash = "sha256:b85eb46a81787c50650f2392b9b4ef23e1f126313b9e0e9013b35c15e4288e2e"}, + {file = "cffi-1.14.5-cp38-cp38-win_amd64.whl", hash = "sha256:1f436816fc868b098b0d63b8920de7d208c90a67212546d02f84fe78a9c26396"}, + {file = "cffi-1.14.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1071534bbbf8cbb31b498d5d9db0f274f2f7a865adca4ae429e147ba40f73dea"}, + {file = "cffi-1.14.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:9de2e279153a443c656f2defd67769e6d1e4163952b3c622dcea5b08a6405322"}, + {file = "cffi-1.14.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:6e4714cc64f474e4d6e37cfff31a814b509a35cb17de4fb1999907575684479c"}, + {file = "cffi-1.14.5-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:158d0d15119b4b7ff6b926536763dc0714313aa59e320ddf787502c70c4d4bee"}, + {file = "cffi-1.14.5-cp39-cp39-win32.whl", hash = "sha256:afb29c1ba2e5a3736f1c301d9d0abe3ec8b86957d04ddfa9d7a6a42b9367e396"}, + {file = "cffi-1.14.5-cp39-cp39-win_amd64.whl", hash = "sha256:f2d45f97ab6bb54753eab54fffe75aaf3de4ff2341c9daee1987ee1837636f1d"}, + {file = "cffi-1.14.5.tar.gz", hash = "sha256:fd78e5fee591709f32ef6edb9a015b4aa1a5022598e36227500c8f4e02328d9c"}, +] +cfgv = [ + {file = "cfgv-3.2.0-py2.py3-none-any.whl", hash = "sha256:32e43d604bbe7896fe7c248a9c2276447dbef840feb28fe20494f62af110211d"}, + {file = "cfgv-3.2.0.tar.gz", hash = "sha256:cf22deb93d4bcf92f345a5c3cd39d3d41d6340adc60c78bbbd6588c384fda6a1"}, +] +chardet = [ + {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, + {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, +] +colorama = [ + {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, +] +coloredlogs = [ + {file = "coloredlogs-14.3-py2.py3-none-any.whl", hash = "sha256:e244a892f9d97ffd2c60f15bf1d2582ef7f9ac0f848d132249004184785702b3"}, + {file = "coloredlogs-14.3.tar.gz", hash = "sha256:7ef1a7219870c7f02c218a2f2877ce68f2f8e087bb3a55bd6fbaa2a4362b4d52"}, +] +coverage = [ + {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = 
"sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, + {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, + {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, + {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, + {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, + {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, + {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, + {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, + {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, + {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, + {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, + {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, + {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = 
"sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, + {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, + {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, + {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, + {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, + {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, + {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, + {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, + {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, + {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, + {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, + {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, + {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, + {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, +] +coveralls = [ + {file = 
"coveralls-2.2.0-py2.py3-none-any.whl", hash = "sha256:2301a19500b06649d2ec4f2858f9c69638d7699a4c63027c5d53daba666147cc"}, + {file = "coveralls-2.2.0.tar.gz", hash = "sha256:b990ba1f7bc4288e63340be0433698c1efe8217f78c689d254c2540af3d38617"}, +] +deepdiff = [ + {file = "deepdiff-4.3.2-py3-none-any.whl", hash = "sha256:59fc1e3e7a28dd0147b0f2b00e3e27181f0f0ef4286b251d5f214a5bcd9a9bc4"}, + {file = "deepdiff-4.3.2.tar.gz", hash = "sha256:91360be1d9d93b1d9c13ae9c5048fa83d9cff17a88eb30afaa0d7ff2d0fee17d"}, +] +"discord.py" = [ + {file = "discord.py-1.6.0-py3-none-any.whl", hash = "sha256:3df148daf6fbcc7ab5b11042368a3cd5f7b730b62f09fb5d3cbceff59bcfbb12"}, + {file = "discord.py-1.6.0.tar.gz", hash = "sha256:ba8be99ff1b8c616f7b6dcb700460d0222b29d4c11048e74366954c465fdd05f"}, +] +distlib = [ + {file = "distlib-0.3.1-py2.py3-none-any.whl", hash = "sha256:8c09de2c67b3e7deef7184574fc060ab8a793e7adbb183d942c389c8b13c52fb"}, + {file = "distlib-0.3.1.zip", hash = "sha256:edf6116872c863e1aa9d5bb7cb5e05a022c519a4594dc703843343a9ddd9bff1"}, +] +docopt = [ + {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, +] +emoji = [ + {file = "emoji-0.6.0.tar.gz", hash = "sha256:e42da4f8d648f8ef10691bc246f682a1ec6b18373abfd9be10ec0b398823bd11"}, +] +fakeredis = [ + {file = "fakeredis-1.5.0-py3-none-any.whl", hash = "sha256:e0416e4941cecd3089b0d901e60c8dc3c944f6384f5e29e2261c0d3c5fa99669"}, + {file = "fakeredis-1.5.0.tar.gz", hash = "sha256:1ac0cef767c37f51718874a33afb5413e69d132988cb6a80c6e6dbeddf8c7623"}, +] +feedparser = [ + {file = "feedparser-6.0.2-py3-none-any.whl", hash = "sha256:f596c4b34fb3e2dc7e6ac3a8191603841e8d5d267210064e94d4238737452ddd"}, + {file = "feedparser-6.0.2.tar.gz", hash = "sha256:1b00a105425f492f3954fd346e5b524ca9cef3a4bbf95b8809470e9857aa1074"}, +] +filelock = [ + {file = "filelock-3.0.12-py3-none-any.whl", hash = "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836"}, + {file = "filelock-3.0.12.tar.gz", hash = "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59"}, +] +flake8 = [ + {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, + {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, +] +flake8-annotations = [ + {file = "flake8-annotations-2.6.2.tar.gz", hash = "sha256:0d6cd2e770b5095f09689c9d84cc054c51b929c41a68969ea1beb4b825cac515"}, + {file = "flake8_annotations-2.6.2-py3-none-any.whl", hash = "sha256:d10c4638231f8a50c0a597c4efce42bd7b7d85df4f620a0ddaca526138936a4f"}, +] +flake8-bugbear = [ + {file = "flake8-bugbear-20.11.1.tar.gz", hash = "sha256:528020129fea2dea33a466b9d64ab650aa3e5f9ffc788b70ea4bc6cf18283538"}, + {file = "flake8_bugbear-20.11.1-py36.py37.py38-none-any.whl", hash = "sha256:f35b8135ece7a014bc0aee5b5d485334ac30a6da48494998cc1fabf7ec70d703"}, +] +flake8-docstrings = [ + {file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"}, + {file = "flake8_docstrings-1.6.0-py2.py3-none-any.whl", hash = "sha256:99cac583d6c7e32dd28bbfbef120a7c0d1b6dde4adb5a9fd441c4227a6534bde"}, +] +flake8-import-order = [ + {file = "flake8-import-order-0.18.1.tar.gz", hash = "sha256:a28dc39545ea4606c1ac3c24e9d05c849c6e5444a50fb7e9cdd430fc94de6e92"}, + {file = "flake8_import_order-0.18.1-py2.py3-none-any.whl", hash = "sha256:90a80e46886259b9c396b578d75c749801a41ee969a235e163cfe1be7afd2543"}, +] 
+flake8-polyfill = [ + {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"}, + {file = "flake8_polyfill-1.0.2-py2.py3-none-any.whl", hash = "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9"}, +] +flake8-string-format = [ + {file = "flake8-string-format-0.3.0.tar.gz", hash = "sha256:65f3da786a1461ef77fca3780b314edb2853c377f2e35069723348c8917deaa2"}, + {file = "flake8_string_format-0.3.0-py2.py3-none-any.whl", hash = "sha256:812ff431f10576a74c89be4e85b8e075a705be39bc40c4b4278b5b13e2afa9af"}, +] +flake8-tidy-imports = [ + {file = "flake8-tidy-imports-4.3.0.tar.gz", hash = "sha256:e66d46f58ed108f36da920e7781a728dc2d8e4f9269e7e764274105700c0a90c"}, + {file = "flake8_tidy_imports-4.3.0-py3-none-any.whl", hash = "sha256:d6e64cb565ca9474d13d5cb3f838b8deafb5fed15906998d4a674daf55bd6d89"}, +] +flake8-todo = [ + {file = "flake8-todo-0.7.tar.gz", hash = "sha256:6e4c5491ff838c06fe5a771b0e95ee15fc005ca57196011011280fc834a85915"}, +] +fuzzywuzzy = [ + {file = "fuzzywuzzy-0.18.0-py2.py3-none-any.whl", hash = "sha256:928244b28db720d1e0ee7587acf660ea49d7e4c632569cad4f1cd7e68a5f0993"}, + {file = "fuzzywuzzy-0.18.0.tar.gz", hash = "sha256:45016e92264780e58972dca1b3d939ac864b78437422beecebb3095f8efd00e8"}, +] +hiredis = [ + {file = "hiredis-2.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b4c8b0bc5841e578d5fb32a16e0c305359b987b850a06964bd5a62739d688048"}, + {file = "hiredis-2.0.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0adea425b764a08270820531ec2218d0508f8ae15a448568109ffcae050fee26"}, + {file = "hiredis-2.0.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3d55e36715ff06cdc0ab62f9591607c4324297b6b6ce5b58cb9928b3defe30ea"}, + {file = "hiredis-2.0.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:5d2a48c80cf5a338d58aae3c16872f4d452345e18350143b3bf7216d33ba7b99"}, + {file = "hiredis-2.0.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:240ce6dc19835971f38caf94b5738092cb1e641f8150a9ef9251b7825506cb05"}, + {file = "hiredis-2.0.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:5dc7a94bb11096bc4bffd41a3c4f2b958257085c01522aa81140c68b8bf1630a"}, + {file = "hiredis-2.0.0-cp36-cp36m-win32.whl", hash = "sha256:139705ce59d94eef2ceae9fd2ad58710b02aee91e7fa0ccb485665ca0ecbec63"}, + {file = "hiredis-2.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:c39c46d9e44447181cd502a35aad2bb178dbf1b1f86cf4db639d7b9614f837c6"}, + {file = "hiredis-2.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:adf4dd19d8875ac147bf926c727215a0faf21490b22c053db464e0bf0deb0485"}, + {file = "hiredis-2.0.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0f41827028901814c709e744060843c77e78a3aca1e0d6875d2562372fcb405a"}, + {file = "hiredis-2.0.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:508999bec4422e646b05c95c598b64bdbef1edf0d2b715450a078ba21b385bcc"}, + {file = "hiredis-2.0.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:0d5109337e1db373a892fdcf78eb145ffb6bbd66bb51989ec36117b9f7f9b579"}, + {file = "hiredis-2.0.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:04026461eae67fdefa1949b7332e488224eac9e8f2b5c58c98b54d29af22093e"}, + {file = "hiredis-2.0.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:a00514362df15af041cc06e97aebabf2895e0a7c42c83c21894be12b84402d79"}, + {file = "hiredis-2.0.0-cp37-cp37m-win32.whl", hash = "sha256:09004096e953d7ebd508cded79f6b21e05dff5d7361771f59269425108e703bc"}, + {file = "hiredis-2.0.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:f8196f739092a78e4f6b1b2172679ed3343c39c61a3e9d722ce6fcf1dac2824a"}, + {file = "hiredis-2.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:294a6697dfa41a8cba4c365dd3715abc54d29a86a40ec6405d677ca853307cfb"}, + {file = "hiredis-2.0.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:3dddf681284fe16d047d3ad37415b2e9ccdc6c8986c8062dbe51ab9a358b50a5"}, + {file = "hiredis-2.0.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:dcef843f8de4e2ff5e35e96ec2a4abbdf403bd0f732ead127bd27e51f38ac298"}, + {file = "hiredis-2.0.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:87c7c10d186f1743a8fd6a971ab6525d60abd5d5d200f31e073cd5e94d7e7a9d"}, + {file = "hiredis-2.0.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:7f0055f1809b911ab347a25d786deff5e10e9cf083c3c3fd2dd04e8612e8d9db"}, + {file = "hiredis-2.0.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:11d119507bb54e81f375e638225a2c057dda748f2b1deef05c2b1a5d42686048"}, + {file = "hiredis-2.0.0-cp38-cp38-win32.whl", hash = "sha256:7492af15f71f75ee93d2a618ca53fea8be85e7b625e323315169977fae752426"}, + {file = "hiredis-2.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:65d653df249a2f95673976e4e9dd7ce10de61cfc6e64fa7eeaa6891a9559c581"}, + {file = "hiredis-2.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae8427a5e9062ba66fc2c62fb19a72276cf12c780e8db2b0956ea909c48acff5"}, + {file = "hiredis-2.0.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:3f5f7e3a4ab824e3de1e1700f05ad76ee465f5f11f5db61c4b297ec29e692b2e"}, + {file = "hiredis-2.0.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:e3447d9e074abf0e3cd85aef8131e01ab93f9f0e86654db7ac8a3f73c63706ce"}, + {file = "hiredis-2.0.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:8b42c0dc927b8d7c0eb59f97e6e34408e53bc489f9f90e66e568f329bff3e443"}, + {file = "hiredis-2.0.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:b84f29971f0ad4adaee391c6364e6f780d5aae7e9226d41964b26b49376071d0"}, + {file = "hiredis-2.0.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:0b39ec237459922c6544d071cdcf92cbb5bc6685a30e7c6d985d8a3e3a75326e"}, + {file = "hiredis-2.0.0-cp39-cp39-win32.whl", hash = "sha256:a7928283143a401e72a4fad43ecc85b35c27ae699cf5d54d39e1e72d97460e1d"}, + {file = "hiredis-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:a4ee8000454ad4486fb9f28b0cab7fa1cd796fc36d639882d0b34109b5b3aec9"}, + {file = "hiredis-2.0.0-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1f03d4dadd595f7a69a75709bc81902673fa31964c75f93af74feac2f134cc54"}, + {file = "hiredis-2.0.0-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = "sha256:04927a4c651a0e9ec11c68e4427d917e44ff101f761cd3b5bc76f86aaa431d27"}, + {file = "hiredis-2.0.0-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:a39efc3ade8c1fb27c097fd112baf09d7fd70b8cb10ef1de4da6efbe066d381d"}, + {file = "hiredis-2.0.0-pp36-pypy36_pp73-win32.whl", hash = "sha256:07bbf9bdcb82239f319b1f09e8ef4bdfaec50ed7d7ea51a56438f39193271163"}, + {file = "hiredis-2.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:807b3096205c7cec861c8803a6738e33ed86c9aae76cac0e19454245a6bbbc0a"}, + {file = "hiredis-2.0.0-pp37-pypy37_pp73-manylinux1_x86_64.whl", hash = "sha256:1233e303645f468e399ec906b6b48ab7cd8391aae2d08daadbb5cad6ace4bd87"}, + {file = "hiredis-2.0.0-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:cb2126603091902767d96bcb74093bd8b14982f41809f85c9b96e519c7e1dc41"}, + {file = "hiredis-2.0.0-pp37-pypy37_pp73-win32.whl", hash = "sha256:f52010e0a44e3d8530437e7da38d11fb822acfb0d5b12e9cd5ba655509937ca0"}, + {file = "hiredis-2.0.0.tar.gz", hash = 
"sha256:81d6d8e39695f2c37954d1011c0480ef7cf444d4e3ae24bc5e89ee5de360139a"}, +] +humanfriendly = [ + {file = "humanfriendly-9.1-py2.py3-none-any.whl", hash = "sha256:d5c731705114b9ad673754f3317d9fa4c23212f36b29bdc4272a892eafc9bc72"}, + {file = "humanfriendly-9.1.tar.gz", hash = "sha256:066562956639ab21ff2676d1fda0b5987e985c534fc76700a19bd54bcb81121d"}, +] +identify = [ + {file = "identify-2.2.4-py2.py3-none-any.whl", hash = "sha256:ad9f3fa0c2316618dc4d840f627d474ab6de106392a4f00221820200f490f5a8"}, + {file = "identify-2.2.4.tar.gz", hash = "sha256:9bcc312d4e2fa96c7abebcdfb1119563b511b5e3985ac52f60d9116277865b2e"}, +] +idna = [ + {file = "idna-3.1-py3-none-any.whl", hash = "sha256:5205d03e7bcbb919cc9c19885f9920d622ca52448306f2377daede5cf3faac16"}, + {file = "idna-3.1.tar.gz", hash = "sha256:c5b02147e01ea9920e6b0a3f1f7bb833612d507592c837a6c49552768f4054e1"}, +] +lxml = [ + {file = "lxml-4.6.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:df7c53783a46febb0e70f6b05df2ba104610f2fb0d27023409734a3ecbb78fb2"}, + {file = "lxml-4.6.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:1b7584d421d254ab86d4f0b13ec662a9014397678a7c4265a02a6d7c2b18a75f"}, + {file = "lxml-4.6.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:079f3ae844f38982d156efce585bc540c16a926d4436712cf4baee0cce487a3d"}, + {file = "lxml-4.6.3-cp27-cp27m-win32.whl", hash = "sha256:bc4313cbeb0e7a416a488d72f9680fffffc645f8a838bd2193809881c67dd106"}, + {file = "lxml-4.6.3-cp27-cp27m-win_amd64.whl", hash = "sha256:8157dadbb09a34a6bd95a50690595e1fa0af1a99445e2744110e3dca7831c4ee"}, + {file = "lxml-4.6.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7728e05c35412ba36d3e9795ae8995e3c86958179c9770e65558ec3fdfd3724f"}, + {file = "lxml-4.6.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:4bff24dfeea62f2e56f5bab929b4428ae6caba2d1eea0c2d6eb618e30a71e6d4"}, + {file = "lxml-4.6.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:74f7d8d439b18fa4c385f3f5dfd11144bb87c1da034a466c5b5577d23a1d9b51"}, + {file = "lxml-4.6.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f90ba11136bfdd25cae3951af8da2e95121c9b9b93727b1b896e3fa105b2f586"}, + {file = "lxml-4.6.3-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:4c61b3a0db43a1607d6264166b230438f85bfed02e8cff20c22e564d0faff354"}, + {file = "lxml-4.6.3-cp35-cp35m-manylinux2014_x86_64.whl", hash = "sha256:5c8c163396cc0df3fd151b927e74f6e4acd67160d6c33304e805b84293351d16"}, + {file = "lxml-4.6.3-cp35-cp35m-win32.whl", hash = "sha256:f2380a6376dfa090227b663f9678150ef27543483055cc327555fb592c5967e2"}, + {file = "lxml-4.6.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c4f05c5a7c49d2fb70223d0d5bcfbe474cf928310ac9fa6a7c6dddc831d0b1d4"}, + {file = "lxml-4.6.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d2e35d7bf1c1ac8c538f88d26b396e73dd81440d59c1ef8522e1ea77b345ede4"}, + {file = "lxml-4.6.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:289e9ca1a9287f08daaf796d96e06cb2bc2958891d7911ac7cae1c5f9e1e0ee3"}, + {file = "lxml-4.6.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:bccbfc27563652de7dc9bdc595cb25e90b59c5f8e23e806ed0fd623755b6565d"}, + {file = "lxml-4.6.3-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d916d31fd85b2f78c76400d625076d9124de3e4bda8b016d25a050cc7d603f24"}, + {file = "lxml-4.6.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:820628b7b3135403540202e60551e741f9b6d3304371712521be939470b454ec"}, + {file = "lxml-4.6.3-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:c47ff7e0a36d4efac9fd692cfa33fbd0636674c102e9e8d9b26e1b93a94e7617"}, + {file = "lxml-4.6.3-cp36-cp36m-win32.whl", 
hash = "sha256:5a0a14e264069c03e46f926be0d8919f4105c1623d620e7ec0e612a2e9bf1c04"}, + {file = "lxml-4.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:92e821e43ad382332eade6812e298dc9701c75fe289f2a2d39c7960b43d1e92a"}, + {file = "lxml-4.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:efd7a09678fd8b53117f6bae4fa3825e0a22b03ef0a932e070c0bdbb3a35e654"}, + {file = "lxml-4.6.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:efac139c3f0bf4f0939f9375af4b02c5ad83a622de52d6dfa8e438e8e01d0eb0"}, + {file = "lxml-4.6.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:0fbcf5565ac01dff87cbfc0ff323515c823081c5777a9fc7703ff58388c258c3"}, + {file = "lxml-4.6.3-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:36108c73739985979bf302006527cf8a20515ce444ba916281d1c43938b8bb96"}, + {file = "lxml-4.6.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:122fba10466c7bd4178b07dba427aa516286b846b2cbd6f6169141917283aae2"}, + {file = "lxml-4.6.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:cdaf11d2bd275bf391b5308f86731e5194a21af45fbaaaf1d9e8147b9160ea92"}, + {file = "lxml-4.6.3-cp37-cp37m-win32.whl", hash = "sha256:3439c71103ef0e904ea0a1901611863e51f50b5cd5e8654a151740fde5e1cade"}, + {file = "lxml-4.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:4289728b5e2000a4ad4ab8da6e1db2e093c63c08bdc0414799ee776a3f78da4b"}, + {file = "lxml-4.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b007cbb845b28db4fb8b6a5cdcbf65bacb16a8bd328b53cbc0698688a68e1caa"}, + {file = "lxml-4.6.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:76fa7b1362d19f8fbd3e75fe2fb7c79359b0af8747e6f7141c338f0bee2f871a"}, + {file = "lxml-4.6.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:26e761ab5b07adf5f555ee82fb4bfc35bf93750499c6c7614bd64d12aaa67927"}, + {file = "lxml-4.6.3-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:e1cbd3f19a61e27e011e02f9600837b921ac661f0c40560eefb366e4e4fb275e"}, + {file = "lxml-4.6.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:66e575c62792c3f9ca47cb8b6fab9e35bab91360c783d1606f758761810c9791"}, + {file = "lxml-4.6.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:1b38116b6e628118dea5b2186ee6820ab138dbb1e24a13e478490c7db2f326ae"}, + {file = "lxml-4.6.3-cp38-cp38-win32.whl", hash = "sha256:89b8b22a5ff72d89d48d0e62abb14340d9e99fd637d046c27b8b257a01ffbe28"}, + {file = "lxml-4.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:2a9d50e69aac3ebee695424f7dbd7b8c6d6eb7de2a2eb6b0f6c7db6aa41e02b7"}, + {file = "lxml-4.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ce256aaa50f6cc9a649c51be3cd4ff142d67295bfc4f490c9134d0f9f6d58ef0"}, + {file = "lxml-4.6.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:7610b8c31688f0b1be0ef882889817939490a36d0ee880ea562a4e1399c447a1"}, + {file = "lxml-4.6.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f8380c03e45cf09f8557bdaa41e1fa7c81f3ae22828e1db470ab2a6c96d8bc23"}, + {file = "lxml-4.6.3-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:3082c518be8e97324390614dacd041bb1358c882d77108ca1957ba47738d9d59"}, + {file = "lxml-4.6.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:884ab9b29feaca361f7f88d811b1eea9bfca36cf3da27768d28ad45c3ee6f969"}, + {file = "lxml-4.6.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:6f12e1427285008fd32a6025e38e977d44d6382cf28e7201ed10d6c1698d2a9a"}, + {file = "lxml-4.6.3-cp39-cp39-win32.whl", hash = "sha256:33bb934a044cf32157c12bfcfbb6649807da20aa92c062ef51903415c704704f"}, + {file = "lxml-4.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:542d454665a3e277f76954418124d67516c5f88e51a900365ed54a9806122b83"}, + {file = "lxml-4.6.3.tar.gz", 
hash = "sha256:39b78571b3b30645ac77b95f7c69d1bffc4cf8c3b157c435a34da72e78c82468"}, +] +markdownify = [ + {file = "markdownify-0.6.1-py3-none-any.whl", hash = "sha256:7489fd5c601536996a376c4afbcd1dd034db7690af807120681461e82fbc0acc"}, + {file = "markdownify-0.6.1.tar.gz", hash = "sha256:31d7c13ac2ada8bfc7535a25fee6622ca720e1b5f2d4a9cbc429d167c21f886d"}, +] +mccabe = [ + {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, + {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, +] +more-itertools = [ + {file = "more-itertools-8.7.0.tar.gz", hash = "sha256:c5d6da9ca3ff65220c3bfd2a8db06d698f05d4d2b9be57e1deb2be5a45019713"}, + {file = "more_itertools-8.7.0-py3-none-any.whl", hash = "sha256:5652a9ac72209ed7df8d9c15daf4e1aa0e3d2ccd3c87f8265a0673cd9cbc9ced"}, +] +mslex = [ + {file = "mslex-0.3.0-py2.py3-none-any.whl", hash = "sha256:380cb14abf8fabf40e56df5c8b21a6d533dc5cbdcfe42406bbf08dda8f42e42a"}, + {file = "mslex-0.3.0.tar.gz", hash = "sha256:4a1ac3f25025cad78ad2fe499dd16d42759f7a3801645399cce5c404415daa97"}, +] +multidict = [ + {file = "multidict-5.1.0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:9dd6e9b1a913d096ac95d0399bd737e00f2af1e1594a787e00f7975778c8b2bf"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:1ab820665e67373de5802acae069a6a05567ae234ddb129f31d290fc3d1aa56d"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:9436dc58c123f07b230383083855593550c4d301d2532045a17ccf6eca505f6d"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:830f57206cc96ed0ccf68304141fec9481a096c4d2e2831f311bde1c404401da"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:2e68965192c4ea61fff1b81c14ff712fc7dc15d2bd120602e4a3494ea6584224"}, + {file = "multidict-5.1.0-cp36-cp36m-win32.whl", hash = "sha256:2f1a132f1c88724674271d636e6b7351477c27722f2ed789f719f9e3545a3d26"}, + {file = "multidict-5.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:3a4f32116f8f72ecf2a29dabfb27b23ab7cdc0ba807e8459e59a93a9be9506f6"}, + {file = "multidict-5.1.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:46c73e09ad374a6d876c599f2328161bcd95e280f84d2060cf57991dec5cfe76"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:4b186eb7d6ae7c06eb4392411189469e6a820da81447f46c0072a41c748ab73f"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:3a041b76d13706b7fff23b9fc83117c7b8fe8d5fe9e6be45eee72b9baa75f348"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:051012ccee979b2b06be928a6150d237aec75dd6bf2d1eeeb190baf2b05abc93"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:6a4d5ce640e37b0efcc8441caeea8f43a06addace2335bd11151bc02d2ee31f9"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:5cf3443199b83ed9e955f511b5b241fd3ae004e3cb81c58ec10f4fe47c7dce37"}, + {file = "multidict-5.1.0-cp37-cp37m-win32.whl", 
hash = "sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5"}, + {file = "multidict-5.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:05c20b68e512166fddba59a918773ba002fdd77800cad9f55b59790030bab632"}, + {file = "multidict-5.1.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:54fd1e83a184e19c598d5e70ba508196fd0bbdd676ce159feb412a4a6664f952"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:0e3c84e6c67eba89c2dbcee08504ba8644ab4284863452450520dad8f1e89b79"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:dc862056f76443a0db4509116c5cd480fe1b6a2d45512a653f9a855cc0517456"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:0e929169f9c090dae0646a011c8b058e5e5fb391466016b39d21745b48817fd7"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:d81eddcb12d608cc08081fa88d046c78afb1bf8107e6feab5d43503fea74a635"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:585fd452dd7782130d112f7ddf3473ffdd521414674c33876187e101b588738a"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:37e5438e1c78931df5d3c0c78ae049092877e5e9c02dd1ff5abb9cf27a5914ea"}, + {file = "multidict-5.1.0-cp38-cp38-win32.whl", hash = "sha256:07b42215124aedecc6083f1ce6b7e5ec5b50047afa701f3442054373a6deb656"}, + {file = "multidict-5.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:929006d3c2d923788ba153ad0de8ed2e5ed39fdbe8e7be21e2f22ed06c6783d3"}, + {file = "multidict-5.1.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:b797515be8743b771aa868f83563f789bbd4b236659ba52243b735d80b29ed93"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:d5c65bdf4484872c4af3150aeebe101ba560dcfb34488d9a8ff8dbcd21079647"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b47a43177a5e65b771b80db71e7be76c0ba23cc8aa73eeeb089ed5219cdbe27d"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:806068d4f86cb06af37cd65821554f98240a19ce646d3cd24e1c33587f313eb8"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:46dd362c2f045095c920162e9307de5ffd0a1bfbba0a6e990b344366f55a30c1"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:ace010325c787c378afd7f7c1ac66b26313b3344628652eacd149bdd23c68841"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda"}, + {file = "multidict-5.1.0-cp39-cp39-win32.whl", hash = "sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80"}, + {file = "multidict-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:7df80d07818b385f3129180369079bd6934cf70469f99daaebfac89dca288359"}, + {file = "multidict-5.1.0.tar.gz", hash = "sha256:25b4e5f22d3a37ddf3effc0710ba692cfc792c2b9edfb9c05aefe823256e84d5"}, +] +nodeenv = [ + {file = "nodeenv-1.6.0-py2.py3-none-any.whl", hash = "sha256:621e6b7076565ddcacd2db0294c0381e01fd28945ab36bcf00f41c5daf63bef7"}, + {file = "nodeenv-1.6.0.tar.gz", hash = "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b"}, +] +ordered-set = [ + {file = "ordered-set-4.0.2.tar.gz", hash = "sha256:ba93b2df055bca202116ec44b9bead3df33ea63a7d5827ff8e16738b97f33a95"}, +] +pamqp = [ + {file = "pamqp-2.3.0-py2.py3-none-any.whl", hash = "sha256:2f81b5c186f668a67f165193925b6bfd83db4363a6222f599517f29ecee60b02"}, + {file = "pamqp-2.3.0.tar.gz", hash = 
"sha256:5cd0f5a85e89f20d5f8e19285a1507788031cfca4a9ea6f067e3cf18f5e294e8"}, +] +pep8-naming = [ + {file = "pep8-naming-0.11.1.tar.gz", hash = "sha256:a1dd47dd243adfe8a83616e27cf03164960b507530f155db94e10b36a6cd6724"}, + {file = "pep8_naming-0.11.1-py2.py3-none-any.whl", hash = "sha256:f43bfe3eea7e0d73e8b5d07d6407ab47f2476ccaeff6937c84275cd30b016738"}, +] +pre-commit = [ + {file = "pre_commit-2.12.1-py2.py3-none-any.whl", hash = "sha256:70c5ec1f30406250b706eda35e868b87e3e4ba099af8787e3e8b4b01e84f4712"}, + {file = "pre_commit-2.12.1.tar.gz", hash = "sha256:900d3c7e1bf4cf0374bb2893c24c23304952181405b4d88c9c40b72bda1bb8a9"}, +] +psutil = [ + {file = "psutil-5.8.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0066a82f7b1b37d334e68697faba68e5ad5e858279fd6351c8ca6024e8d6ba64"}, + {file = "psutil-5.8.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:0ae6f386d8d297177fd288be6e8d1afc05966878704dad9847719650e44fc49c"}, + {file = "psutil-5.8.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:12d844996d6c2b1d3881cfa6fa201fd635971869a9da945cf6756105af73d2df"}, + {file = "psutil-5.8.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:02b8292609b1f7fcb34173b25e48d0da8667bc85f81d7476584d889c6e0f2131"}, + {file = "psutil-5.8.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6ffe81843131ee0ffa02c317186ed1e759a145267d54fdef1bc4ea5f5931ab60"}, + {file = "psutil-5.8.0-cp27-none-win32.whl", hash = "sha256:ea313bb02e5e25224e518e4352af4bf5e062755160f77e4b1767dd5ccb65f876"}, + {file = "psutil-5.8.0-cp27-none-win_amd64.whl", hash = "sha256:5da29e394bdedd9144c7331192e20c1f79283fb03b06e6abd3a8ae45ffecee65"}, + {file = "psutil-5.8.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:74fb2557d1430fff18ff0d72613c5ca30c45cdbfcddd6a5773e9fc1fe9364be8"}, + {file = "psutil-5.8.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:74f2d0be88db96ada78756cb3a3e1b107ce8ab79f65aa885f76d7664e56928f6"}, + {file = "psutil-5.8.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:99de3e8739258b3c3e8669cb9757c9a861b2a25ad0955f8e53ac662d66de61ac"}, + {file = "psutil-5.8.0-cp36-cp36m-win32.whl", hash = "sha256:36b3b6c9e2a34b7d7fbae330a85bf72c30b1c827a4366a07443fc4b6270449e2"}, + {file = "psutil-5.8.0-cp36-cp36m-win_amd64.whl", hash = "sha256:52de075468cd394ac98c66f9ca33b2f54ae1d9bff1ef6b67a212ee8f639ec06d"}, + {file = "psutil-5.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c6a5fd10ce6b6344e616cf01cc5b849fa8103fbb5ba507b6b2dee4c11e84c935"}, + {file = "psutil-5.8.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:61f05864b42fedc0771d6d8e49c35f07efd209ade09a5afe6a5059e7bb7bf83d"}, + {file = "psutil-5.8.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:0dd4465a039d343925cdc29023bb6960ccf4e74a65ad53e768403746a9207023"}, + {file = "psutil-5.8.0-cp37-cp37m-win32.whl", hash = "sha256:1bff0d07e76114ec24ee32e7f7f8d0c4b0514b3fae93e3d2aaafd65d22502394"}, + {file = "psutil-5.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:fcc01e900c1d7bee2a37e5d6e4f9194760a93597c97fee89c4ae51701de03563"}, + {file = "psutil-5.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6223d07a1ae93f86451d0198a0c361032c4c93ebd4bf6d25e2fb3edfad9571ef"}, + {file = "psutil-5.8.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d225cd8319aa1d3c85bf195c4e07d17d3cd68636b8fc97e6cf198f782f99af28"}, + {file = "psutil-5.8.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:28ff7c95293ae74bf1ca1a79e8805fcde005c18a122ca983abf676ea3466362b"}, + {file = "psutil-5.8.0-cp38-cp38-win32.whl", hash = 
"sha256:ce8b867423291cb65cfc6d9c4955ee9bfc1e21fe03bb50e177f2b957f1c2469d"}, + {file = "psutil-5.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:90f31c34d25b1b3ed6c40cdd34ff122b1887a825297c017e4cbd6796dd8b672d"}, + {file = "psutil-5.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6323d5d845c2785efb20aded4726636546b26d3b577aded22492908f7c1bdda7"}, + {file = "psutil-5.8.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:245b5509968ac0bd179287d91210cd3f37add77dad385ef238b275bad35fa1c4"}, + {file = "psutil-5.8.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:90d4091c2d30ddd0a03e0b97e6a33a48628469b99585e2ad6bf21f17423b112b"}, + {file = "psutil-5.8.0-cp39-cp39-win32.whl", hash = "sha256:ea372bcc129394485824ae3e3ddabe67dc0b118d262c568b4d2602a7070afdb0"}, + {file = "psutil-5.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:f4634b033faf0d968bb9220dd1c793b897ab7f1189956e1aa9eae752527127d3"}, + {file = "psutil-5.8.0.tar.gz", hash = "sha256:0c9ccb99ab76025f2f0bbecf341d4656e9c1351db8cc8a03ccd62e318ab4b5c6"}, +] +pycares = [ + {file = "pycares-3.2.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ebff743643e54aa70dce0b7098094edefd371641cf79d9c944e9f4a25e9242b0"}, + {file = "pycares-3.2.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:55272411b46787936e8db475b9b6e9b81a8d8cdc253fa8779a45ef979f554fab"}, + {file = "pycares-3.2.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:f33ed0e403f98e746f721aeacde917f1bdc7558cb714d713c264848bddff660f"}, + {file = "pycares-3.2.3-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:72807e0c80b705e21c3a39347c12edf43aa4f80373bb37777facf810169372ed"}, + {file = "pycares-3.2.3-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:a51df0a8b3eaf225e0dae3a737fd6ce6f3cb2a3bc947e884582fdda9a159d55f"}, + {file = "pycares-3.2.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:663b5c7bd0f66436adac7257ee22ccfe185c3e7830b9bada3d19b79870e1d134"}, + {file = "pycares-3.2.3-cp36-cp36m-win32.whl", hash = "sha256:c2b1e19262ce91c3288b1905b0d41f7ad0fff4b258ce37b517aa2c8d22eb82f1"}, + {file = "pycares-3.2.3-cp36-cp36m-win_amd64.whl", hash = "sha256:e16399654a6c81cfaee2745857c119c20357b5d93de2f169f506b048b5e75d1d"}, + {file = "pycares-3.2.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88e5131570d7323b29866aa5ac245a9a5788d64677111daa1bde5817acdf012f"}, + {file = "pycares-3.2.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:1552ffd823dc595fa8744c996926097a594f4f518d7c147657234b22cf17649d"}, + {file = "pycares-3.2.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f9e28b917373818817aca746238fcd621ec7e4ae9cbc8615f1a045e234eec298"}, + {file = "pycares-3.2.3-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:206d5a652990f10a1f1f3f62bc23d7fe46d99c2dc4b8b8a5101e5a472986cd02"}, + {file = "pycares-3.2.3-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:b8c9670225cdeeeb2b85ea92a807484622ca59f8f578ec73e8ec292515f35a91"}, + {file = "pycares-3.2.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:6329160885fc318f80692d4d0a83a8854f9144e7a80c4f25245d0c26f11a4b84"}, + {file = "pycares-3.2.3-cp37-cp37m-win32.whl", hash = "sha256:cd0f7fb40e1169f00b26a12793136bf5c711f155e647cd045a0ce6c98a527b57"}, + {file = "pycares-3.2.3-cp37-cp37m-win_amd64.whl", hash = "sha256:a5d419215543d154587590d9d4485e985387ca10c7d3e1a2e5689dd6c0f20e5f"}, + {file = "pycares-3.2.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54f1c0642935515f27549f09486e72b6b2b1d51ad27a90ce17b760e9ce5e86d"}, + {file = "pycares-3.2.3-cp38-cp38-manylinux1_i686.whl", hash = 
"sha256:6ce80eed538dd6106cd7e6136ceb3af10178d1254f07096a827c12e82e5e45c8"}, + {file = "pycares-3.2.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:ed972a04067e91f552da84945d38b94c3984c898f699faa8bb066e9f3a114c32"}, + {file = "pycares-3.2.3-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:99a62b101cfb36ab6ebf19cb1ad60db2f9b080dc52db4ca985fe90924f60c758"}, + {file = "pycares-3.2.3-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:2246adcbc948dd31925c9bff5cc41c06fc640f7d982e6b41b6d09e4f201e5c11"}, + {file = "pycares-3.2.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:7fd15d3f32be5548f38f95f4762ca73eef9fd623b101218a35d433ee0d4e3b58"}, + {file = "pycares-3.2.3-cp38-cp38-win32.whl", hash = "sha256:4bb0c708d8713741af7c4649d2f11e47c5f4e43131831243aeb18cff512c5469"}, + {file = "pycares-3.2.3-cp38-cp38-win_amd64.whl", hash = "sha256:a53d921956d1e985e510ca0ffa84fbd7ecc6ac7d735d8355cba4395765efcd31"}, + {file = "pycares-3.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0312d25fa9d7c242f66115c4b3ae6ed8aedb457513ba33acef31fa265fc602b4"}, + {file = "pycares-3.2.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:9960de8254525d9c3b485141809910c39d5eb1bb8119b1453702aacf72234934"}, + {file = "pycares-3.2.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:929f708a7bb4b2548cbbfc2094b2f90c4d8712056cdc0204788b570ab69c8838"}, + {file = "pycares-3.2.3-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4dd1237f01037cf5b90dd599c7fa79d9d8fb2ab2f401e19213d24228b2d17838"}, + {file = "pycares-3.2.3-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:5eea61a74097976502ce377bb75c4fed381d4986bc7fb85e70b691165133d3da"}, + {file = "pycares-3.2.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:1c72c0fda4b08924fe04680475350e09b8d210365d950a6dcdde8c449b8d5b98"}, + {file = "pycares-3.2.3-cp39-cp39-win32.whl", hash = "sha256:b1555d51ce29510ffd20f9e0339994dff8c5d1cb093c8e81d5d98f474e345aa7"}, + {file = "pycares-3.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:43c15138f620ed28e61e51b884490eb8387e5954668f919313753f88dd8134fd"}, + {file = "pycares-3.2.3.tar.gz", hash = "sha256:da1899fde778f9b8736712283eccbf7b654248779b349d139cd28eb30b0fa8cd"}, +] +pycodestyle = [ + {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, + {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, +] +pycparser = [ + {file = "pycparser-2.20-py2.py3-none-any.whl", hash = "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"}, + {file = "pycparser-2.20.tar.gz", hash = "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"}, +] +pydocstyle = [ + {file = "pydocstyle-6.0.0-py3-none-any.whl", hash = "sha256:d4449cf16d7e6709f63192146706933c7a334af7c0f083904799ccb851c50f6d"}, + {file = "pydocstyle-6.0.0.tar.gz", hash = "sha256:164befb520d851dbcf0e029681b91f4f599c62c5cd8933fd54b1bfbd50e89e1f"}, +] +pyflakes = [ + {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, + {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, +] +pyreadline = [ + {file = "pyreadline-2.1.win-amd64.exe", hash = "sha256:9ce5fa65b8992dfa373bddc5b6e0864ead8f291c94fbfec05fbd5c836162e67b"}, + {file = "pyreadline-2.1.win32.exe", hash = "sha256:65540c21bfe14405a3a77e4c085ecfce88724743a4ead47c66b84defcf82c32e"}, + {file = "pyreadline-2.1.zip", hash = 
"sha256:4530592fc2e85b25b1a9f79664433da09237c1a270e4d78ea5aa3a2c7229e2d1"}, +] +python-dateutil = [ + {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, + {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, +] +python-dotenv = [ + {file = "python-dotenv-0.17.1.tar.gz", hash = "sha256:b1ae5e9643d5ed987fc57cc2583021e38db531946518130777734f9589b3141f"}, + {file = "python_dotenv-0.17.1-py2.py3-none-any.whl", hash = "sha256:00aa34e92d992e9f8383730816359647f358f4a3be1ba45e5a5cefd27ee91544"}, +] +python-frontmatter = [ + {file = "python-frontmatter-1.0.0.tar.gz", hash = "sha256:e98152e977225ddafea6f01f40b4b0f1de175766322004c826ca99842d19a7cd"}, + {file = "python_frontmatter-1.0.0-py3-none-any.whl", hash = "sha256:766ae75f1b301ffc5fe3494339147e0fd80bc3deff3d7590a93991978b579b08"}, +] +pyyaml = [ + {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, + {file = "PyYAML-5.4.1-cp27-cp27m-win32.whl", hash = "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393"}, + {file = "PyYAML-5.4.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8"}, + {file = "PyYAML-5.4.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185"}, + {file = "PyYAML-5.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253"}, + {file = "PyYAML-5.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc"}, + {file = "PyYAML-5.4.1-cp36-cp36m-win32.whl", hash = "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5"}, + {file = "PyYAML-5.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df"}, + {file = "PyYAML-5.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018"}, + {file = "PyYAML-5.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63"}, + {file = "PyYAML-5.4.1-cp37-cp37m-win32.whl", hash = "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b"}, + {file = "PyYAML-5.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf"}, + {file = "PyYAML-5.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46"}, + {file = "PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb"}, + {file = "PyYAML-5.4.1-cp38-cp38-win32.whl", hash = "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc"}, + {file = "PyYAML-5.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696"}, + {file = "PyYAML-5.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77"}, + {file = "PyYAML-5.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183"}, + {file = "PyYAML-5.4.1-cp39-cp39-win32.whl", hash = 
"sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10"}, + {file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"}, + {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, +] +redis = [ + {file = "redis-3.5.3-py2.py3-none-any.whl", hash = "sha256:432b788c4530cfe16d8d943a09d40ca6c16149727e4afe8c2c9d5580c59d9f24"}, + {file = "redis-3.5.3.tar.gz", hash = "sha256:0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2"}, +] +regex = [ + {file = "regex-2021.4.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7"}, + {file = "regex-2021.4.4-cp36-cp36m-win32.whl", hash = "sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29"}, + {file = "regex-2021.4.4-cp36-cp36m-win_amd64.whl", hash = "sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79"}, + {file = "regex-2021.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439"}, + {file = "regex-2021.4.4-cp37-cp37m-win32.whl", hash = "sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d"}, + {file = "regex-2021.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3"}, + {file = "regex-2021.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87"}, + {file = "regex-2021.4.4-cp38-cp38-win32.whl", hash = "sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac"}, + {file = "regex-2021.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2"}, + {file = "regex-2021.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042"}, + {file = "regex-2021.4.4-cp39-cp39-win32.whl", hash = "sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6"}, + {file = "regex-2021.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07"}, + {file = "regex-2021.4.4.tar.gz", hash = "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb"}, +] +requests = [ + {file = "requests-2.15.1-py2.py3-none-any.whl", hash = "sha256:ff753b2196cd18b1bbeddc9dcd5c864056599f7a7d9a4fb5677e723efa2b7fb9"}, + {file = "requests-2.15.1.tar.gz", hash = "sha256:e5659b9315a0610505e050bb7190bf6fa2ccee1ac295f2b760ef9d8a03ebbb2e"}, +] +sentry-sdk = [ + {file = "sentry-sdk-0.20.3.tar.gz", hash = "sha256:4ae8d1ced6c67f1c8ea51d82a16721c166c489b76876c9f2c202b8a50334b237"}, + {file = "sentry_sdk-0.20.3-py2.py3-none-any.whl", hash = "sha256:e75c8c58932bda8cd293ea8e4b242527129e1caaec91433d21b8b2f20fee030b"}, +] +sgmllib3k = [ + {file = "sgmllib3k-1.0.0.tar.gz", hash = 
"sha256:7868fb1c8bfa764c1ac563d3cf369c381d1325d36124933a726f29fcdaa812e9"}, +] +six = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] +snowballstemmer = [ + {file = "snowballstemmer-2.1.0-py2.py3-none-any.whl", hash = "sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2"}, + {file = "snowballstemmer-2.1.0.tar.gz", hash = "sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914"}, +] +sortedcontainers = [ + {file = "sortedcontainers-2.3.0-py2.py3-none-any.whl", hash = "sha256:37257a32add0a3ee490bb170b599e93095eed89a55da91fa9f48753ea12fd73f"}, + {file = "sortedcontainers-2.3.0.tar.gz", hash = "sha256:59cc937650cf60d677c16775597c89a960658a09cf7c1a668f86e1e4464b10a1"}, +] +soupsieve = [ + {file = "soupsieve-2.2.1-py3-none-any.whl", hash = "sha256:c2c1c2d44f158cdbddab7824a9af8c4f83c76b1e23e049479aa432feb6c4c23b"}, + {file = "soupsieve-2.2.1.tar.gz", hash = "sha256:052774848f448cf19c7e959adf5566904d525f33a3f8b6ba6f6f8f26ec7de0cc"}, +] +statsd = [ + {file = "statsd-3.3.0-py2.py3-none-any.whl", hash = "sha256:c610fb80347fca0ef62666d241bce64184bd7cc1efe582f9690e045c25535eaa"}, + {file = "statsd-3.3.0.tar.gz", hash = "sha256:e3e6db4c246f7c59003e51c9720a51a7f39a396541cb9b147ff4b14d15b5dd1f"}, +] +taskipy = [ + {file = "taskipy-1.7.0-py3-none-any.whl", hash = "sha256:9e284c10898e9dee01a3e72220b94b192b1daa0f560271503a6df1da53d03844"}, + {file = "taskipy-1.7.0.tar.gz", hash = "sha256:960e480b1004971e76454ecd1a0484e640744a30073a1069894a311467f85ed8"}, +] +toml = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] +typing-extensions = [ + {file = "typing_extensions-3.10.0.0-py2-none-any.whl", hash = "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497"}, + {file = "typing_extensions-3.10.0.0-py3-none-any.whl", hash = "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84"}, + {file = "typing_extensions-3.10.0.0.tar.gz", hash = "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342"}, +] +urllib3 = [ + {file = "urllib3-1.26.4-py2.py3-none-any.whl", hash = "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df"}, + {file = "urllib3-1.26.4.tar.gz", hash = "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937"}, +] +virtualenv = [ + {file = "virtualenv-20.4.6-py2.py3-none-any.whl", hash = "sha256:307a555cf21e1550885c82120eccaf5acedf42978fd362d32ba8410f9593f543"}, + {file = "virtualenv-20.4.6.tar.gz", hash = "sha256:72cf267afc04bf9c86ec932329b7e94db6a0331ae9847576daaa7ca3c86b29a4"}, +] +yarl = [ + {file = "yarl-1.6.3-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:0355a701b3998dcd832d0dc47cc5dedf3874f966ac7f870e0f3a6788d802d434"}, + {file = "yarl-1.6.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:bafb450deef6861815ed579c7a6113a879a6ef58aed4c3a4be54400ae8871478"}, + {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:547f7665ad50fa8563150ed079f8e805e63dd85def6674c97efd78eed6c224a6"}, + {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:63f90b20ca654b3ecc7a8d62c03ffa46999595f0167d6450fa8383bab252987e"}, + {file = 
"yarl-1.6.3-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:97b5bdc450d63c3ba30a127d018b866ea94e65655efaf889ebeabc20f7d12406"}, + {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:d8d07d102f17b68966e2de0e07bfd6e139c7c02ef06d3a0f8d2f0f055e13bb76"}, + {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:15263c3b0b47968c1d90daa89f21fcc889bb4b1aac5555580d74565de6836366"}, + {file = "yarl-1.6.3-cp36-cp36m-win32.whl", hash = "sha256:b5dfc9a40c198334f4f3f55880ecf910adebdcb2a0b9a9c23c9345faa9185721"}, + {file = "yarl-1.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:b2e9a456c121e26d13c29251f8267541bd75e6a1ccf9e859179701c36a078643"}, + {file = "yarl-1.6.3-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:ce3beb46a72d9f2190f9e1027886bfc513702d748047b548b05dab7dfb584d2e"}, + {file = "yarl-1.6.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2ce4c621d21326a4a5500c25031e102af589edb50c09b321049e388b3934eec3"}, + {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:d26608cf178efb8faa5ff0f2d2e77c208f471c5a3709e577a7b3fd0445703ac8"}, + {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:4c5bcfc3ed226bf6419f7a33982fb4b8ec2e45785a0561eb99274ebbf09fdd6a"}, + {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:4736eaee5626db8d9cda9eb5282028cc834e2aeb194e0d8b50217d707e98bb5c"}, + {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:68dc568889b1c13f1e4745c96b931cc94fdd0defe92a72c2b8ce01091b22e35f"}, + {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:7356644cbed76119d0b6bd32ffba704d30d747e0c217109d7979a7bc36c4d970"}, + {file = "yarl-1.6.3-cp37-cp37m-win32.whl", hash = "sha256:00d7ad91b6583602eb9c1d085a2cf281ada267e9a197e8b7cae487dadbfa293e"}, + {file = "yarl-1.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:69ee97c71fee1f63d04c945f56d5d726483c4762845400a6795a3b75d56b6c50"}, + {file = "yarl-1.6.3-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:e46fba844f4895b36f4c398c5af062a9808d1f26b2999c58909517384d5deda2"}, + {file = "yarl-1.6.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:31ede6e8c4329fb81c86706ba8f6bf661a924b53ba191b27aa5fcee5714d18ec"}, + {file = "yarl-1.6.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71"}, + {file = "yarl-1.6.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:72a660bdd24497e3e84f5519e57a9ee9220b6f3ac4d45056961bf22838ce20cc"}, + {file = "yarl-1.6.3-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:324ba3d3c6fee56e2e0b0d09bf5c73824b9f08234339d2b788af65e60040c959"}, + {file = "yarl-1.6.3-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:e6b5460dc5ad42ad2b36cca524491dfcaffbfd9c8df50508bddc354e787b8dc2"}, + {file = "yarl-1.6.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:6d6283d8e0631b617edf0fd726353cb76630b83a089a40933043894e7f6721e2"}, + {file = "yarl-1.6.3-cp38-cp38-win32.whl", hash = "sha256:9ede61b0854e267fd565e7527e2f2eb3ef8858b301319be0604177690e1a3896"}, + {file = "yarl-1.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a"}, + {file = "yarl-1.6.3-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:329412812ecfc94a57cd37c9d547579510a9e83c516bc069470db5f75684629e"}, + {file = "yarl-1.6.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:c49ff66d479d38ab863c50f7bb27dee97c6627c5fe60697de15529da9c3de724"}, + {file = "yarl-1.6.3-cp39-cp39-manylinux2014_aarch64.whl", hash = 
"sha256:f040bcc6725c821a4c0665f3aa96a4d0805a7aaf2caf266d256b8ed71b9f041c"}, + {file = "yarl-1.6.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:d5c32c82990e4ac4d8150fd7652b972216b204de4e83a122546dce571c1bdf25"}, + {file = "yarl-1.6.3-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:d597767fcd2c3dc49d6eea360c458b65643d1e4dbed91361cf5e36e53c1f8c96"}, + {file = "yarl-1.6.3-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:8aa3decd5e0e852dc68335abf5478a518b41bf2ab2f330fe44916399efedfae0"}, + {file = "yarl-1.6.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:73494d5b71099ae8cb8754f1df131c11d433b387efab7b51849e7e1e851f07a4"}, + {file = "yarl-1.6.3-cp39-cp39-win32.whl", hash = "sha256:5b883e458058f8d6099e4420f0cc2567989032b5f34b271c0827de9f1079a424"}, + {file = "yarl-1.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:4953fb0b4fdb7e08b2f3b3be80a00d28c5c8a2056bb066169de00e6501b986b6"}, + {file = "yarl-1.6.3.tar.gz", hash = "sha256:8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10"}, +] diff --git a/Pipfile b/pyproject.toml index 0a94fb888..320bf88cc 100644 --- a/Pipfile +++ b/pyproject.toml @@ -1,36 +1,40 @@ -[[source]] -url = "https://pypi.python.org/simple" -verify_ssl = true -name = "pypi" +[tool.poetry] +name = "bot" +version = "1.0.0" +description = "The community bot for the Python Discord community." +authors = ["Python Discord <[email protected]>"] +license = "MIT" -[packages] +[tool.poetry.dependencies] +python = "3.9.*" aio-pika = "~=6.1" aiodns = "~=2.0" aiohttp = "~=3.7" aioping = "~=0.3.1" aioredis = "~=1.3.1" -"async-rediscache[fakeredis]" = "~=0.1.2" +arrow = "~=1.0.3" +async-rediscache = { version = "~=0.1.2", extras = ["fakeredis"] } beautifulsoup4 = "~=4.9" -colorama = {version = "~=0.4.3",sys_platform = "== 'win32'"} +colorama = { version = "~=0.4.3", markers = "sys_platform == 'win32'" } coloredlogs = "~=14.0" deepdiff = "~=4.0" "discord.py" = "~=1.6.0" -feedparser = "~=5.2" +emoji = "~=0.6" +feedparser = "~=6.0.2" fuzzywuzzy = "~=0.17" lxml = "~=4.4" -markdownify = "==0.5.3" +markdownify = "==0.6.1" more_itertools = "~=8.2" python-dateutil = "~=2.8" +python-frontmatter = "~=1.0.0" pyyaml = "~=5.1" -requests = "~=2.22" +regex = "==2021.4.4" sentry-sdk = "~=0.19" -sphinx = "~=2.2" statsd = "~=3.3" -arrow = "~=0.17" -emoji = "~=0.6" -[dev-packages] +[tool.poetry.dev-dependencies] coverage = "~=5.0" +coveralls = "~=2.1" flake8 = "~=3.8" flake8-annotations = "~=2.0" flake8-bugbear = "~=20.1" @@ -41,12 +45,14 @@ flake8-tidy-imports = "~=4.0" flake8-todo = "~=0.7" pep8-naming = "~=0.9" pre-commit = "~=2.1" -coveralls = "~=2.1" +taskipy = "~=1.7.0" +python-dotenv = "~=0.17.1" -[requires] -python_version = "3.8" +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" -[scripts] +[tool.taskipy.tasks] start = "python -m bot" lint = "pre-commit run --all-files" precommit = "pre-commit install" diff --git a/tests/README.md b/tests/README.md index 4f62edd68..1a17c09bd 100644 --- a/tests/README.md +++ b/tests/README.md @@ -12,13 +12,13 @@ We are using the following modules and packages for our unit tests: - [unittest.mock](https://docs.python.org/3/library/unittest.mock.html) (standard library) - [coverage.py](https://coverage.readthedocs.io/en/stable/) -To ensure the results you obtain on your personal machine are comparable to those generated in the Azure pipeline, please make sure to run your tests with the virtual environment defined by our [Pipfile](/Pipfile). 
To run your tests with `pipenv`, we've provided two "scripts" shortcuts: +To ensure the results you obtain on your personal machine are comparable to those generated in the CI, please make sure to run your tests with the virtual environment defined by our [Poetry Project](/pyproject.toml). To run your tests with `poetry`, we've provided two "scripts" shortcuts: -- `pipenv run test` will run `unittest` with `coverage.py` -- `pipenv run test path/to/test.py` will run a specific test. -- `pipenv run report` will generate a coverage report of the tests you've run with `pipenv run test`. If you append the `-m` flag to this command, the report will include the lines and branches not covered by tests in addition to the test coverage report. +- `poetry run task test` will run `unittest` with `coverage.py` +- `poetry run task test path/to/test.py` will run a specific test. +- `poetry run task report` will generate a coverage report of the tests you've run with `poetry run task test`. If you append the `-m` flag to this command, the report will include the lines and branches not covered by tests in addition to the test coverage report. -If you want a coverage report, make sure to run the tests with `pipenv run test` *first*. +If you want a coverage report, make sure to run the tests with `poetry run task test` *first*. ## Writing tests @@ -114,7 +114,7 @@ class BotCogTests(unittest.TestCase): ### Mocking coroutines -By default, the `unittest.mock.Mock` and `unittest.mock.MagicMock` classes cannot mock coroutines, since the `__call__` method they provide is synchronous. In anticipation of the `AsyncMock` that will be [introduced in Python 3.8](https://docs.python.org/3.9/whatsnew/3.8.html#unittest), we have added an `AsyncMock` helper to [`helpers.py`](/tests/helpers.py). Do note that this drop-in replacement only implements an asynchronous `__call__` method, not the additional assertions that will come with the new `AsyncMock` type in Python 3.8. +By default, the `unittest.mock.Mock` and `unittest.mock.MagicMock` classes cannot mock coroutines, since the `__call__` method they provide is synchronous. The [`AsyncMock`](https://docs.python.org/3/library/unittest.mock.html#unittest.mock.AsyncMock) that has been [introduced in Python 3.8](https://docs.python.org/3.9/whatsnew/3.8.html#unittest) is an asynchronous version of `MagicMock` that can be used anywhere a coroutine is expected. 
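As a quick, editorial illustration of that paragraph (this snippet is not part of the changeset): calling an `AsyncMock` returns a coroutine, so it can stand in for any awaited dependency, and it ships await-specific assertions that a plain `MagicMock` lacks.

```python
import unittest
from unittest.mock import AsyncMock


class AsyncMockDemo(unittest.IsolatedAsyncioTestCase):
    async def test_fetch_is_awaited(self):
        # Calling an AsyncMock returns a coroutine, so the code under
        # test can await it like any real async callable.
        fetch = AsyncMock(return_value={"status": "ok"})

        result = await fetch("https://example.com")

        self.assertEqual(result, {"status": "ok"})
        # Await-specific assertion, unavailable on a synchronous mock.
        fetch.assert_awaited_once_with("https://example.com")
```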
### Special mocks for some `discord.py` types

diff --git a/tests/bot/exts/backend/test_error_handler.py b/tests/bot/exts/backend/test_error_handler.py
new file mode 100644
index 000000000..bd4fb5942
--- /dev/null
+++ b/tests/bot/exts/backend/test_error_handler.py
@@ -0,0 +1,550 @@
+import unittest
+from unittest.mock import AsyncMock, MagicMock, call, patch
+
+from discord.ext.commands import errors
+
+from bot.api import ResponseCodeError
+from bot.errors import InvalidInfractedUser, LockedResourceError
+from bot.exts.backend.error_handler import ErrorHandler, setup
+from bot.exts.info.tags import Tags
+from bot.exts.moderation.silence import Silence
+from bot.utils.checks import InWhitelistCheckFailure
+from tests.helpers import MockBot, MockContext, MockGuild, MockRole
+
+
+class ErrorHandlerTests(unittest.IsolatedAsyncioTestCase):
+    """Tests for error handler functionality."""
+
+    def setUp(self):
+        self.bot = MockBot()
+        self.ctx = MockContext(bot=self.bot)
+
+    async def test_error_handler_already_handled(self):
+        """Should not do anything when the error has already been handled by a local error handler."""
+        self.ctx.reset_mock()
+        cog = ErrorHandler(self.bot)
+        error = errors.CommandError()
+        error.handled = "foo"
+        self.assertIsNone(await cog.on_command_error(self.ctx, error))
+        self.ctx.send.assert_not_awaited()
+
+    async def test_error_handler_command_not_found_error_not_invoked_by_handler(self):
+        """Should first try to (un)silence the channel and, if that fails, try to get a tag."""
+        error = errors.CommandNotFound()
+        test_cases = (
+            {
+                "try_silence_return": True,
+                "called_try_get_tag": False
+            },
+            {
+                "try_silence_return": False,
+                "called_try_get_tag": False
+            },
+            {
+                "try_silence_return": False,
+                "called_try_get_tag": True
+            }
+        )
+        cog = ErrorHandler(self.bot)
+        cog.try_silence = AsyncMock()
+        cog.try_get_tag = AsyncMock()
+
+        for case in test_cases:
+            with self.subTest(try_silence_return=case["try_silence_return"], try_get_tag=case["called_try_get_tag"]):
+                self.ctx.reset_mock()
+                cog.try_silence.reset_mock(return_value=True)
+                cog.try_get_tag.reset_mock()
+
+                cog.try_silence.return_value = case["try_silence_return"]
+                self.ctx.channel.id = 1234
+
+                self.assertIsNone(await cog.on_command_error(self.ctx, error))
+
+                if case["try_silence_return"]:
+                    cog.try_get_tag.assert_not_awaited()
+                    cog.try_silence.assert_awaited_once()
+                else:
+                    cog.try_silence.assert_awaited_once()
+                    cog.try_get_tag.assert_awaited_once()
+
+                self.ctx.send.assert_not_awaited()
+
+    async def test_error_handler_command_not_found_error_invoked_by_handler(self):
+        """Should do nothing when the error is `CommandNotFound` and the context has the `invoked_from_error_handler` attribute."""
+        ctx = MockContext(bot=self.bot, invoked_from_error_handler=True)
+
+        cog = ErrorHandler(self.bot)
+        cog.try_silence = AsyncMock()
+        cog.try_get_tag = AsyncMock()
+
+        error = errors.CommandNotFound()
+
+        self.assertIsNone(await cog.on_command_error(ctx, error))
+
+        cog.try_silence.assert_not_awaited()
+        cog.try_get_tag.assert_not_awaited()
+        self.ctx.send.assert_not_awaited()
+
+    async def test_error_handler_user_input_error(self):
+        """Should await `ErrorHandler.handle_user_input_error` when the error is `UserInputError`."""
+        self.ctx.reset_mock()
+        cog = ErrorHandler(self.bot)
+        cog.handle_user_input_error = AsyncMock()
+        error = errors.UserInputError()
+        self.assertIsNone(await cog.on_command_error(self.ctx, error))
+        cog.handle_user_input_error.assert_awaited_once_with(self.ctx, error)
+
+    async def test_error_handler_check_failure(self):
+        """Should await `ErrorHandler.handle_check_failure` when the error is `CheckFailure`."""
+        self.ctx.reset_mock()
+        cog = ErrorHandler(self.bot)
+        cog.handle_check_failure = AsyncMock()
+        error = errors.CheckFailure()
+        self.assertIsNone(await cog.on_command_error(self.ctx, error))
+        cog.handle_check_failure.assert_awaited_once_with(self.ctx, error)
+
+    async def test_error_handler_command_on_cooldown(self):
+        """Should send the error with `ctx.send` when the error is `CommandOnCooldown`."""
+        self.ctx.reset_mock()
+        cog = ErrorHandler(self.bot)
+        error = errors.CommandOnCooldown(10, 9)
+        self.assertIsNone(await cog.on_command_error(self.ctx, error))
+        self.ctx.send.assert_awaited_once_with(error)
+
+    async def test_error_handler_command_invoke_error(self):
+        """Should call `handle_api_error` or `handle_unexpected_error` depending on the original error."""
+        cog = ErrorHandler(self.bot)
+        cog.handle_api_error = AsyncMock()
+        cog.handle_unexpected_error = AsyncMock()
+        test_cases = (
+            {
+                "args": (self.ctx, errors.CommandInvokeError(ResponseCodeError(AsyncMock()))),
+                "expect_mock_call": cog.handle_api_error
+            },
+            {
+                "args": (self.ctx, errors.CommandInvokeError(TypeError)),
+                "expect_mock_call": cog.handle_unexpected_error
+            },
+            {
+                "args": (self.ctx, errors.CommandInvokeError(LockedResourceError("abc", "test"))),
+                "expect_mock_call": "send"
+            },
+            {
+                "args": (self.ctx, errors.CommandInvokeError(InvalidInfractedUser(self.ctx.author))),
+                "expect_mock_call": "send"
+            }
+        )
+
+        for case in test_cases:
+            with self.subTest(args=case["args"], expect_mock_call=case["expect_mock_call"]):
+                self.ctx.send.reset_mock()
+                self.assertIsNone(await cog.on_command_error(*case["args"]))
+                if case["expect_mock_call"] == "send":
+                    self.ctx.send.assert_awaited_once()
+                else:
+                    case["expect_mock_call"].assert_awaited_once_with(
+                        self.ctx, case["args"][1].original
+                    )
+
+    async def test_error_handler_conversion_error(self):
+        """Should call `handle_api_error` or `handle_unexpected_error` depending on the original error."""
+        cog = ErrorHandler(self.bot)
+        cog.handle_api_error = AsyncMock()
+        cog.handle_unexpected_error = AsyncMock()
+        cases = (
+            {
+                "error": errors.ConversionError(AsyncMock(), ResponseCodeError(AsyncMock())),
+                "mock_function_to_call": cog.handle_api_error
+            },
+            {
+                "error": errors.ConversionError(AsyncMock(), TypeError),
+                "mock_function_to_call": cog.handle_unexpected_error
+            }
+        )
+
+        for case in cases:
+            with self.subTest(**case):
+                self.assertIsNone(await cog.on_command_error(self.ctx, case["error"]))
+                case["mock_function_to_call"].assert_awaited_once_with(self.ctx, case["error"].original)
+
+    async def test_error_handler_two_other_errors(self):
+        """Should call `handle_unexpected_error` if the error is `MaxConcurrencyReached` or `ExtensionError`."""
+        cog = ErrorHandler(self.bot)
+        cog.handle_unexpected_error = AsyncMock()
+        errs = (
+            errors.MaxConcurrencyReached(1, MagicMock()),
+            errors.ExtensionError(name="foo")
+        )
+
+        for err in errs:
+            with self.subTest(error=err):
+                cog.handle_unexpected_error.reset_mock()
+                self.assertIsNone(await cog.on_command_error(self.ctx, err))
+                cog.handle_unexpected_error.assert_awaited_once_with(self.ctx, err)
+
+    @patch("bot.exts.backend.error_handler.log")
+    async def test_error_handler_other_errors(self, log_mock):
+        """Should `log.debug` any other errors."""
+        cog = ErrorHandler(self.bot)
+        error = errors.DisabledCommand()  # Use this as just another error
+        self.assertIsNone(await cog.on_command_error(self.ctx, error))
+        log_mock.debug.assert_called_once()
+
+
+class TrySilenceTests(unittest.IsolatedAsyncioTestCase):
+    """Tests for the helper functions that handle the `CommandNotFound` error."""
+
+    def setUp(self):
+        self.bot = MockBot()
+        self.silence = Silence(self.bot)
+        self.bot.get_command.return_value = self.silence.silence
+        self.ctx = MockContext(bot=self.bot)
+        self.cog = ErrorHandler(self.bot)
+
+    async def test_try_silence_context_invoked_from_error_handler(self):
+        """Should set `Context.invoked_from_error_handler` to `True`."""
+        self.ctx.invoked_with = "foo"
+        await self.cog.try_silence(self.ctx)
+        self.assertTrue(hasattr(self.ctx, "invoked_from_error_handler"))
+        self.assertTrue(self.ctx.invoked_from_error_handler)
+
+    async def test_try_silence_get_command(self):
+        """Should call `get_command` with `silence`."""
+        self.ctx.invoked_with = "foo"
+        await self.cog.try_silence(self.ctx)
+        self.bot.get_command.assert_called_once_with("silence")
+
+    async def test_try_silence_no_permissions_to_run(self):
+        """Should return `False` because of missing permissions."""
+        self.ctx.invoked_with = "foo"
+        self.bot.get_command.return_value.can_run = AsyncMock(return_value=False)
+        self.assertFalse(await self.cog.try_silence(self.ctx))
+
+    async def test_try_silence_no_permissions_to_run_command_error(self):
+        """Should return `False` because `CommandError` was raised (no permissions)."""
+        self.ctx.invoked_with = "foo"
+        self.bot.get_command.return_value.can_run = AsyncMock(side_effect=errors.CommandError())
+        self.assertFalse(await self.cog.try_silence(self.ctx))
+
+    async def test_try_silence_silencing(self):
+        """Should run the silence command with the correct arguments."""
+        self.bot.get_command.return_value.can_run = AsyncMock(return_value=True)
+        test_cases = ("shh", "shhh", "shhhhhh", "shhhhhhhhhhhhhhhhhhh")
+
+        for case in test_cases:
+            with self.subTest(message=case):
+                self.ctx.reset_mock()
+                self.ctx.invoked_with = case
+                self.assertTrue(await self.cog.try_silence(self.ctx))
+                self.ctx.invoke.assert_awaited_once_with(
+                    self.bot.get_command.return_value,
+                    duration=min(case.count("h")*2, 15)
+                )
+
+    async def test_try_silence_unsilence(self):
+        """Should call the unsilence command."""
+        self.silence.silence.can_run = AsyncMock(return_value=True)
+        test_cases = ("unshh", "unshhhhh", "unshhhhhhhhh")
+
+        for case in test_cases:
+            with self.subTest(message=case):
+                self.bot.get_command.side_effect = (self.silence.silence, self.silence.unsilence)
+                self.ctx.reset_mock()
+                self.ctx.invoked_with = case
+                self.assertTrue(await self.cog.try_silence(self.ctx))
+                self.ctx.invoke.assert_awaited_once_with(self.silence.unsilence)
+
+    async def test_try_silence_no_match(self):
+        """Should return `False` when the message doesn't match."""
+        self.ctx.invoked_with = "foo"
+        self.assertFalse(await self.cog.try_silence(self.ctx))
+
+
+class TryGetTagTests(unittest.IsolatedAsyncioTestCase):
+    """Tests for the `try_get_tag` function."""
+
+    def setUp(self):
+        self.bot = MockBot()
+        self.ctx = MockContext()
+        self.tag = Tags(self.bot)
+        self.cog = ErrorHandler(self.bot)
+        self.bot.get_command.return_value = self.tag.get_command
+
+    async def test_try_get_tag_get_command(self):
+        """Should call `Bot.get_command` with the `tags get` argument."""
+        self.bot.get_command.reset_mock()
+        self.ctx.invoked_with = "foo"
+        await self.cog.try_get_tag(self.ctx)
+        self.bot.get_command.assert_called_once_with("tags get")
+
+    async def test_try_get_tag_invoked_from_error_handler(self):
+        """`self.ctx` should have `invoked_from_error_handler` set to `True`."""
+        self.ctx.invoked_from_error_handler = False
+        self.ctx.invoked_with = "foo"
+        await self.cog.try_get_tag(self.ctx)
+        self.assertTrue(self.ctx.invoked_from_error_handler)
+
+    async def test_try_get_tag_no_permissions(self):
+        """Should return `None` when the command checks fail."""
+        self.tag.get_command.can_run = AsyncMock(return_value=False)
+        self.ctx.invoked_with = "foo"
+        self.assertIsNone(await self.cog.try_get_tag(self.ctx))
+
+    async def test_try_get_tag_command_error(self):
+        """Should call `on_command_error` when `CommandError` is raised."""
+        err = errors.CommandError()
+        self.tag.get_command.can_run = AsyncMock(side_effect=err)
+        self.cog.on_command_error = AsyncMock()
+        self.ctx.invoked_with = "foo"
+        self.assertIsNone(await self.cog.try_get_tag(self.ctx))
+        self.cog.on_command_error.assert_awaited_once_with(self.ctx, err)
+
+    @patch("bot.exts.backend.error_handler.TagNameConverter")
+    async def test_try_get_tag_convert_success(self, tag_converter):
+        """Converting the tag should succeed."""
+        self.ctx.invoked_with = "foo"
+        tag_converter.convert = AsyncMock(return_value="foo")
+        self.assertIsNone(await self.cog.try_get_tag(self.ctx))
+        tag_converter.convert.assert_awaited_once_with(self.ctx, "foo")
+        self.ctx.invoke.assert_awaited_once()
+
+    @patch("bot.exts.backend.error_handler.TagNameConverter")
+    async def test_try_get_tag_convert_fail(self, tag_converter):
+        """Converting the tag should raise `BadArgument`."""
+        self.ctx.reset_mock()
+        self.ctx.invoked_with = "bar"
+        tag_converter.convert = AsyncMock(side_effect=errors.BadArgument())
+        self.assertIsNone(await self.cog.try_get_tag(self.ctx))
+        self.ctx.invoke.assert_not_awaited()
+
+    async def test_try_get_tag_ctx_invoke(self):
+        """Should call `ctx.invoke` with the proper args/kwargs."""
+        self.ctx.reset_mock()
+        self.ctx.invoked_with = "foo"
+        self.assertIsNone(await self.cog.try_get_tag(self.ctx))
+        self.ctx.invoke.assert_awaited_once_with(self.tag.get_command, tag_name="foo")
+
+    async def test_dont_call_suggestion_tag_sent(self):
+        """Should never call the command suggestion if the tag was already sent."""
+        self.ctx.invoked_with = "foo"
+        self.ctx.invoke = AsyncMock(return_value=True)
+        self.cog.send_command_suggestion = AsyncMock()
+
+        await self.cog.try_get_tag(self.ctx)
+        self.cog.send_command_suggestion.assert_not_awaited()
+
+    @patch("bot.exts.backend.error_handler.MODERATION_ROLES", new=[1234])
+    async def test_dont_call_suggestion_if_user_mod(self):
+        """Should not call the command suggestion if the user is a mod."""
+        self.ctx.invoked_with = "foo"
+        self.ctx.invoke = AsyncMock(return_value=False)
+        self.ctx.author.roles = [MockRole(id=1234)]
+        self.cog.send_command_suggestion = AsyncMock()
+
+        await self.cog.try_get_tag(self.ctx)
+        self.cog.send_command_suggestion.assert_not_awaited()
+
+    async def test_call_suggestion(self):
+        """Should call the command suggestion if the user is not a mod."""
+        self.ctx.invoked_with = "foo"
+        self.ctx.invoke = AsyncMock(return_value=False)
+        self.cog.send_command_suggestion = AsyncMock()
+
+        await self.cog.try_get_tag(self.ctx)
+        self.cog.send_command_suggestion.assert_awaited_once_with(self.ctx, "foo")
+
+
+class IndividualErrorHandlerTests(unittest.IsolatedAsyncioTestCase):
+    """Tests for the individual error category handlers."""
+
+    def setUp(self):
+        self.bot = MockBot()
+        self.ctx = MockContext(bot=self.bot)
+        self.cog = ErrorHandler(self.bot)
+
+    async def test_handle_input_error_handler_errors(self):
+        """Should handle each error properly."""
+        test_cases = (
+            {
+                "error": errors.MissingRequiredArgument(MagicMock()),
+                "call_prepared": True
+            },
+            {
+                "error": errors.TooManyArguments(),
+                "call_prepared": True
+            },
+            {
+                "error": errors.BadArgument(),
+                "call_prepared": True
+            },
+            {
+                "error": errors.BadUnionArgument(MagicMock(), MagicMock(), MagicMock()),
+                "call_prepared": True
+            },
+            {
+                "error": errors.ArgumentParsingError(),
+                "call_prepared": False
+            },
+            {
+                "error": errors.UserInputError(),
+                "call_prepared": True
+            }
+        )
+
+        for case in test_cases:
+            with self.subTest(error=case["error"], call_prepared=case["call_prepared"]):
+                self.ctx.reset_mock()
+                self.assertIsNone(await self.cog.handle_user_input_error(self.ctx, case["error"]))
+                self.ctx.send.assert_awaited_once()
+                if case["call_prepared"]:
+                    self.ctx.send_help.assert_awaited_once()
+                else:
+                    self.ctx.send_help.assert_not_awaited()
+
+    async def test_handle_check_failure_errors(self):
+        """Should await `ctx.send` when the error is a check failure."""
+        test_cases = (
+            {
+                "error": errors.BotMissingPermissions(MagicMock()),
+                "call_ctx_send": True
+            },
+            {
+                "error": errors.BotMissingRole(MagicMock()),
+                "call_ctx_send": True
+            },
+            {
+                "error": errors.BotMissingAnyRole(MagicMock()),
+                "call_ctx_send": True
+            },
+            {
+                "error": errors.NoPrivateMessage(),
+                "call_ctx_send": True
+            },
+            {
+                "error": InWhitelistCheckFailure(1234),
+                "call_ctx_send": True
+            },
+            {
+                "error": ResponseCodeError(MagicMock()),
+                "call_ctx_send": False
+            }
+        )
+
+        for case in test_cases:
+            with self.subTest(error=case["error"], call_ctx_send=case["call_ctx_send"]):
+                self.ctx.reset_mock()
+                await self.cog.handle_check_failure(self.ctx, case["error"])
+                if case["call_ctx_send"]:
+                    self.ctx.send.assert_awaited_once()
+                else:
+                    self.ctx.send.assert_not_awaited()
+
+    @patch("bot.exts.backend.error_handler.log")
+    async def test_handle_api_error(self, log_mock):
+        """Should `ctx.send` on HTTP error codes and call `log.debug` or `log.warning` depending on the code."""
+        test_cases = (
+            {
+                "error": ResponseCodeError(AsyncMock(status=400)),
+                "log_level": "debug"
+            },
+            {
+                "error": ResponseCodeError(AsyncMock(status=404)),
+                "log_level": "debug"
+            },
+            {
+                "error": ResponseCodeError(AsyncMock(status=550)),
+                "log_level": "warning"
+            },
+            {
+                "error": ResponseCodeError(AsyncMock(status=1000)),
+                "log_level": "warning"
+            }
+        )
+
+        for case in test_cases:
+            with self.subTest(error=case["error"], log_level=case["log_level"]):
+                self.ctx.reset_mock()
+                log_mock.reset_mock()
+                await self.cog.handle_api_error(self.ctx, case["error"])
+                self.ctx.send.assert_awaited_once()
+                if case["log_level"] == "warning":
+                    log_mock.warning.assert_called_once()
+                else:
+                    log_mock.debug.assert_called_once()
+
+    @patch("bot.exts.backend.error_handler.push_scope")
+    @patch("bot.exts.backend.error_handler.log")
+    async def test_handle_unexpected_error(self, log_mock, push_scope_mock):
+        """Should send the error with `ctx.send`, log it as an error, and report it to Sentry."""
+        for case in (None, MockGuild()):
+            with self.subTest(guild=case):
+                self.ctx.reset_mock()
+                log_mock.reset_mock()
+                push_scope_mock.reset_mock()
+
+                self.ctx.guild = case
+                await self.cog.handle_unexpected_error(self.ctx, errors.CommandError())
+
+                self.ctx.send.assert_awaited_once()
+                log_mock.error.assert_called_once()
+                push_scope_mock.assert_called_once()
+
+                set_tag_calls = [
+                    call("command", self.ctx.command.qualified_name),
+                    call("message_id", self.ctx.message.id),
+                    call("channel_id", self.ctx.channel.id),
+                ]
+                set_extra_calls = [
+                    call("full_message", self.ctx.message.content)
+                ]
+                if case:
+                    url = (
+                        f"https://discordapp.com/channels/"
+                        f"{self.ctx.guild.id}/{self.ctx.channel.id}/{self.ctx.message.id}"
+                    )
+                    set_extra_calls.append(call("jump_to", url))
+
+                push_scope_mock.set_tag.has_calls(set_tag_calls)
+                push_scope_mock.set_extra.has_calls(set_extra_calls)
+
+
+class OtherErrorHandlerTests(unittest.IsolatedAsyncioTestCase):
+    """Other `ErrorHandler` tests."""
+
+    def setUp(self):
+        self.bot = MockBot()
+        self.ctx = MockContext()
+
+    async def test_get_help_command_command_specified(self):
+        """Should return the help command coroutine for the specified command."""
+        self.ctx.command = "foo"
+        result = ErrorHandler.get_help_command(self.ctx)
+        expected = self.ctx.send_help("foo")
+        self.assertEqual(result.__qualname__, expected.__qualname__)
+        self.assertEqual(result.cr_frame.f_locals, expected.cr_frame.f_locals)
+
+        # Await coroutines to avoid warnings
+        await result
+        await expected
+
+    async def test_get_help_command_no_command_specified(self):
+        """Should return the help command coroutine."""
+        self.ctx.command = None
+        result = ErrorHandler.get_help_command(self.ctx)
+        expected = self.ctx.send_help()
+        self.assertEqual(result.__qualname__, expected.__qualname__)
+        self.assertEqual(result.cr_frame.f_locals, expected.cr_frame.f_locals)
+
+        # Await coroutines to avoid warnings
+        await result
+        await expected
+
+
+class ErrorHandlerSetupTests(unittest.TestCase):
+    """Tests for the `ErrorHandler` `setup` function."""
+
+    def test_setup(self):
+        """Should call `bot.add_cog` with `ErrorHandler`."""
+        bot = MockBot()
+        setup(bot)
+        bot.add_cog.assert_called_once()
diff --git a/tests/bot/exts/info/doc/__init__.py b/tests/bot/exts/info/doc/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/tests/bot/exts/info/doc/__init__.py
diff --git a/tests/bot/exts/info/doc/test_parsing.py b/tests/bot/exts/info/doc/test_parsing.py
new file mode 100644
index 000000000..1663d8491
--- /dev/null
+++ b/tests/bot/exts/info/doc/test_parsing.py
@@ -0,0 +1,66 @@
+from unittest import TestCase
+
+from bot.exts.info.doc import _parsing as parsing
+
+
+class SignatureSplitter(TestCase):
+
+    def test_basic_split(self):
+        test_cases = (
+            ("0,0,0", ["0", "0", "0"]),
+            ("0,a=0,a=0", ["0", "a=0", "a=0"]),
+        )
+        self._run_tests(test_cases)
+
+    def test_commas_ignored_in_brackets(self):
+        test_cases = (
+            ("0,[0,0],0,[0,0],0", ["0", "[0,0]", "0", "[0,0]", "0"]),
+            ("(0,),0,(0,(0,),0),0", ["(0,)", "0", "(0,(0,),0)", "0"]),
+        )
+        self._run_tests(test_cases)
+
+    def test_mixed_brackets(self):
+        test_cases = (
+            ("[0,{0},0],0,{0:0},0", ["[0,{0},0]", "0", "{0:0}", "0"]),
+            ("([0],0,0),0,(0,0),0", ["([0],0,0)", "0", "(0,0)", "0"]),
+            ("([(0,),(0,)],0),0", ["([(0,),(0,)],0)", "0"]),
+        )
+        self._run_tests(test_cases)
+
+    def test_string_contents_ignored(self):
+        test_cases = (
+            ("'0,0',0,',',0", ["'0,0'", "0", "','", "0"]),
+            ("0,[']',0],0", ["0", "[']',0]", "0"]),
+            ("{0,0,'}}',0,'{'},0", ["{0,0,'}}',0,'{'}", "0"]),
+        )
+        self._run_tests(test_cases)
+
+    def test_mixed_quotes(self):
+        test_cases = (
+            ("\"0',0',\",'0,0',0", ["\"0',0',\"", "'0,0'", "0"]),
+            ("\",',\",'\",',0", ["\",',\"", "'\",'", "0"]),
+        )
+        self._run_tests(test_cases)
+
+    def test_quote_escaped(self):
+        test_cases = (
+            (r"'\',','\\',0", [r"'\','", r"'\\'", "0"]),
+            (r"'0\',0\\\'\\',0", [r"'0\',0\\\'\\'", "0"]),
+        )
+        self._run_tests(test_cases)
+
+    def test_real_signatures(self):
+        test_cases = (
+            ("start, stop[, step]", ["start", " stop[, step]"]),
+            ("object=b'', encoding='utf-8', errors='strict'", ["object=b''", " encoding='utf-8'", " errors='strict'"]),
+            (
+                "typename, field_names, *, rename=False, defaults=None, module=None",
+                ["typename", " field_names", " *", " rename=False", " defaults=None", " module=None"]
+            ),
+        )
+        self._run_tests(test_cases)
+
+    def _run_tests(self, test_cases):
+        for input_string, expected_output in test_cases:
+            with self.subTest(input_string=input_string):
+                self.assertEqual(list(parsing._split_parameters(input_string)), expected_output)
diff --git a/tests/bot/exts/info/test_information.py b/tests/bot/exts/info/test_information.py
index 80731c9f0..770660fe3 100644
--- a/tests/bot/exts/info/test_information.py
+++ b/tests/bot/exts/info/test_information.py
@@ -281,8 +281,10 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
         """The embed should use the string representation of the user if they don't have a nick."""
         ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=1))
         user = helpers.MockMember()
+        user.public_flags = unittest.mock.MagicMock(verified_bot=False)
         user.nick = None
         user.__str__ = unittest.mock.Mock(return_value="Mr. Hemlock")
+        user.colour = 0

         embed = await self.cog.create_user_embed(ctx, user)

@@ -296,8 +298,10 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
         """The embed should use the nick if it's available."""
         ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=1))
         user = helpers.MockMember()
+        user.public_flags = unittest.mock.MagicMock(verified_bot=False)
         user.nick = "Cat lover"
         user.__str__ = unittest.mock.Mock(return_value="Mr. Hemlock")
+        user.colour = 0

         embed = await self.cog.create_user_embed(ctx, user)

@@ -311,10 +315,9 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
         """Created `!user` embeds should not contain mention of the @everyone-role."""
         ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=1))
         admins_role = helpers.MockRole(name='Admins')
-        admins_role.colour = 100

         # A `MockMember` has the @Everyone role by default; we add the Admins to that.
-        user = helpers.MockMember(roles=[admins_role], top_role=admins_role)
+        user = helpers.MockMember(roles=[admins_role], colour=100)

         embed = await self.cog.create_user_embed(ctx, user)

@@ -332,12 +335,11 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
         ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=50))

         moderators_role = helpers.MockRole(name='Moderators')
-        moderators_role.colour = 100

         infraction_counts.return_value = ("Infractions", "expanded infractions info")
         nomination_counts.return_value = ("Nominations", "nomination info")

-        user = helpers.MockMember(id=314, roles=[moderators_role], top_role=moderators_role)
+        user = helpers.MockMember(id=314, roles=[moderators_role], colour=100)
         embed = await self.cog.create_user_embed(ctx, user)

         infraction_counts.assert_called_once_with(user)
@@ -367,11 +369,10 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
         ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=100))

         moderators_role = helpers.MockRole(name='Moderators')
-        moderators_role.colour = 100

         infraction_counts.return_value = ("Infractions", "basic infractions info")

-        user = helpers.MockMember(id=314, roles=[moderators_role], top_role=moderators_role)
+        user = helpers.MockMember(id=314, roles=[moderators_role], colour=100)
         embed = await self.cog.create_user_embed(ctx, user)

         infraction_counts.assert_called_once_with(user)
@@ -407,12 +408,11 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
         ctx = helpers.MockContext()

         moderators_role = helpers.MockRole(name='Moderators')
-        moderators_role.colour = 100
-        user = helpers.MockMember(id=314, roles=[moderators_role], top_role=moderators_role)
+        user = helpers.MockMember(id=314, roles=[moderators_role], colour=100)
         embed = await self.cog.create_user_embed(ctx, user)

-        self.assertEqual(embed.colour, discord.Colour(moderators_role.colour))
+        self.assertEqual(embed.colour, discord.Colour(100))

     @unittest.mock.patch(
         f"{COG_PATH}.basic_user_infraction_counts",
@@ -422,7 +422,7 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
         """The embed should be created with a blurple colour if the user has no assigned roles."""
         ctx = helpers.MockContext()

-        user = helpers.MockMember(id=217)
+        user = helpers.MockMember(id=217, colour=discord.Colour.default())
         embed = await self.cog.create_user_embed(ctx, user)

         self.assertEqual(embed.colour, discord.Colour.blurple())
@@ -435,7 +435,7 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
         """The embed thumbnail should be set to the user's avatar in `png` format."""
         ctx = helpers.MockContext()

-        user = helpers.MockMember(id=217)
+        user = helpers.MockMember(id=217, colour=0)
         user.avatar_url_as.return_value = "avatar url"

         embed = await self.cog.create_user_embed(ctx, user)
diff --git a/tests/bot/exts/moderation/infraction/test_infractions.py b/tests/bot/exts/moderation/infraction/test_infractions.py
index 08f39cd50..b9d527770 100644
--- a/tests/bot/exts/moderation/infraction/test_infractions.py
+++ b/tests/bot/exts/moderation/infraction/test_infractions.py
@@ -74,7 +74,7 @@ class VoiceBanTests(unittest.IsolatedAsyncioTestCase):
         """Should call voice ban applying function without expiry."""
         self.cog.apply_voice_ban = AsyncMock()
         self.assertIsNone(await self.cog.voiceban(self.cog, self.ctx, self.user, reason="foobar"))
-        self.cog.apply_voice_ban.assert_awaited_once_with(self.ctx, self.user, "foobar")
+        self.cog.apply_voice_ban.assert_awaited_once_with(self.ctx, self.user, "foobar", expires_at=None)

     async def test_temporary_voice_ban(self):
         """Should call voice ban applying function with expiry."""
@@ -184,7 +184,7 @@ class VoiceBanTests(unittest.IsolatedAsyncioTestCase):
         user = MockUser()
         await self.cog.voiceban(self.cog, self.ctx, user, reason=None)

-        post_infraction_mock.assert_called_once_with(self.ctx, user, "voice_ban", None, active=True)
+        post_infraction_mock.assert_called_once_with(self.ctx, user, "voice_ban", None, active=True, expires_at=None)
         apply_infraction_mock.assert_called_once_with(self.cog, self.ctx, infraction, user, ANY)

     # Test action
diff --git a/tests/bot/test_converters.py b/tests/bot/test_converters.py
index c42111f3f..4af84dde5 100644
--- a/tests/bot/test_converters.py
+++ b/tests/bot/test_converters.py
@@ -10,9 +10,9 @@ from bot.converters import (
     Duration,
     HushDurationConverter,
     ISODateTime,
+    PackageName,
     TagContentConverter,
     TagNameConverter,
-    ValidPythonIdentifier,
 )

@@ -78,24 +78,23 @@ class ConverterTests(unittest.IsolatedAsyncioTestCase):
             with self.assertRaisesRegex(BadArgument, re.escape(exception_message)):
                 await TagNameConverter.convert(self.context, invalid_name)

-    async def test_valid_python_identifier_for_valid(self):
-        """ValidPythonIdentifier returns valid identifiers unchanged."""
-        test_values = ('foo', 'lemon')
+    async def test_package_name_for_valid(self):
+        """PackageName returns valid package names unchanged."""
+        test_values = ('foo', 'le_mon', 'num83r')

         for name in test_values:
             with self.subTest(identifier=name):
-                conversion = await ValidPythonIdentifier.convert(self.context, name)
+                conversion = await PackageName.convert(self.context, name)
                 self.assertEqual(name, conversion)

-    async def test_valid_python_identifier_for_invalid(self):
-        """ValidPythonIdentifier raises the proper exception for invalid identifiers."""
-        test_values = ('nested.stuff', '#####')
+    async def test_package_name_for_invalid(self):
+        """PackageName raises the proper exception for invalid package names."""
+        test_values = ('text_with_a_dot.', 'UpperCaseName', 'dashed-name')

         for name in test_values:
             with self.subTest(identifier=name):
-                exception_message = f'`{name}` is not a valid Python identifier'
-                with self.assertRaisesRegex(BadArgument, re.escape(exception_message)):
-                    await ValidPythonIdentifier.convert(self.context, name)
+                with self.assertRaises(BadArgument):
+                    await PackageName.convert(self.context, name)

     async def test_duration_converter_for_valid(self):
         """Duration returns the correct `datetime` for valid duration strings."""
diff --git a/tests/bot/utils/test_services.py b/tests/bot/utils/test_services.py
index 1b48f6560..3b71022db 100644
--- a/tests/bot/utils/test_services.py
+++ b/tests/bot/utils/test_services.py
@@ -30,9 +30,9 @@ class PasteTests(unittest.IsolatedAsyncioTestCase):
         """Url with specified extension is returned on successful requests."""
         key = "paste_key"
         test_cases = (
-            (f"https://paste_service.com/{key}.txt", "txt"),
+            (f"https://paste_service.com/{key}.txt?noredirect", "txt"),
             (f"https://paste_service.com/{key}.py", "py"),
-            (f"https://paste_service.com/{key}", ""),
+            (f"https://paste_service.com/{key}?noredirect", ""),
         )
         response = MagicMock(
             json=AsyncMock(return_value={"key": key})
diff --git a/tests/bot/utils/test_time.py b/tests/bot/utils/test_time.py
index 694d3a40f..115ddfb0d 100644
--- a/tests/bot/utils/test_time.py
+++ b/tests/bot/utils/test_time.py
@@ -1,7 +1,5 @@
-import asyncio
 import unittest
 from datetime import datetime, timezone
-from unittest.mock import AsyncMock, patch

 from dateutil.relativedelta import relativedelta

@@ -56,17 +54,6 @@ class TimeTests(unittest.TestCase):
         """Testing format_infraction."""
         self.assertEqual(time.format_infraction('2019-12-12T00:01:00Z'), '2019-12-12 00:01')

-    @patch('asyncio.sleep', new_callable=AsyncMock)
-    def test_wait_until(self, mock):
-        """Testing wait_until."""
-        start = datetime(2019, 1, 1, 0, 0)
-        then = datetime(2019, 1, 1, 0, 10)
-
-        # No return value
-        self.assertIs(asyncio.run(time.wait_until(then, start)), None)
-
-        mock.assert_called_once_with(10 * 60)
-
     def test_format_infraction_with_duration_none_expiry(self):
         """format_infraction_with_duration should work for None expiry."""
         test_cases = (
diff --git a/tests/helpers.py b/tests/helpers.py
index 496363ae3..e3dc5fe5b 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -385,6 +385,7 @@ message_instance = discord.Message(state=state, channel=channel, data=message_da

 # Create a Context instance to get a realistic MagicMock of `discord.ext.commands.Context`
 context_instance = Context(message=unittest.mock.MagicMock(), prefix=unittest.mock.MagicMock())
+context_instance.invoked_from_error_handler = None

 class MockContext(CustomMockMixin, unittest.mock.MagicMock):
@@ -402,6 +403,7 @@ class MockContext(CustomMockMixin, unittest.mock.MagicMock):
         self.guild = kwargs.get('guild', MockGuild())
         self.author = kwargs.get('author', MockMember())
         self.channel = kwargs.get('channel', MockTextChannel())
+        self.invoked_from_error_handler = kwargs.get('invoked_from_error_handler', False)

 attachment_instance = discord.Attachment(data=unittest.mock.MagicMock(id=1), state=unittest.mock.MagicMock())