author     Matteo Bertucci <[email protected]>  2021-04-17 18:24:06 +0200
committer  Matteo Bertucci <[email protected]>  2021-04-17 18:24:06 +0200
commit     b8fb422501c408945e70037427b9d85a702ca0c6
tree       4c3b2f4f40f833aee010030fe356cc119f07f6af
parent     Merge branch 'master' into feat/1365/add-bot-badges-to-user
parent     Merge pull request #1528 from python-discord/mbaruh/allow_eval
Merge remote-tracking branch 'origin/main' into feat/1365/add-bot-badges-to-user
-rw-r--r--  .gitattributes  1
-rw-r--r--  .github/CODEOWNERS  19
-rw-r--r--  .github/FUNDING.yml  2
-rw-r--r--  .github/workflows/build.yml  4
-rw-r--r--  .github/workflows/deploy.yml  3
-rw-r--r--  .github/workflows/lint-test.yml  2
-rw-r--r--  .github/workflows/sentry_release.yml  4
-rw-r--r--  .pre-commit-config.yaml  2
-rw-r--r--  CODE_OF_CONDUCT.md  3
-rw-r--r--  CONTRIBUTING.md  124
-rw-r--r--  LICENSE-THIRD-PARTY  30
-rw-r--r--  Pipfile  14
-rw-r--r--  Pipfile.lock  891
-rw-r--r--  README.md  14
-rw-r--r--  SECURITY.md  3
-rw-r--r--  bot/__main__.py  26
-rw-r--r--  bot/bot.py  36
-rw-r--r--  bot/constants.py  37
-rw-r--r--  bot/converters.py  86
-rw-r--r--  bot/decorators.py  80
-rw-r--r--  bot/errors.py  6
-rw-r--r--  bot/exts/backend/branding/__init__.py  6
-rw-r--r--  bot/exts/backend/branding/_cog.py  895
-rw-r--r--  bot/exts/backend/branding/_constants.py  51
-rw-r--r--  bot/exts/backend/branding/_decorators.py  27
-rw-r--r--  bot/exts/backend/branding/_errors.py  2
-rw-r--r--  bot/exts/backend/branding/_repository.py  240
-rw-r--r--  bot/exts/backend/branding/_seasons.py  175
-rw-r--r--  bot/exts/backend/error_handler.py  13
-rw-r--r--  bot/exts/backend/logging.py  2
-rw-r--r--  bot/exts/filters/antispam.py  19
-rw-r--r--  bot/exts/filters/filtering.py  88
-rw-r--r--  bot/exts/filters/webhook_remover.py  2
-rw-r--r--  bot/exts/fun/off_topic_names.py  18
-rw-r--r--  bot/exts/help_channels/_caches.py  17
-rw-r--r--  bot/exts/help_channels/_channel.py  92
-rw-r--r--  bot/exts/help_channels/_cog.py  117
-rw-r--r--  bot/exts/help_channels/_message.py  76
-rw-r--r--  bot/exts/help_channels/_name.py  12
-rw-r--r--  bot/exts/help_channels/_stats.py  13
-rw-r--r--  bot/exts/info/codeblock/_parsing.py  3
-rw-r--r--  bot/exts/info/doc.py  485
-rw-r--r--  bot/exts/info/doc/__init__.py  16
-rw-r--r--  bot/exts/info/doc/_batch_parser.py  186
-rw-r--r--  bot/exts/info/doc/_cog.py  442
-rw-r--r--  bot/exts/info/doc/_html.py  136
-rw-r--r--  bot/exts/info/doc/_inventory_parser.py  126
-rw-r--r--  bot/exts/info/doc/_markdown.py  58
-rw-r--r--  bot/exts/info/doc/_parsing.py  256
-rw-r--r--  bot/exts/info/doc/_redis_cache.py  70
-rw-r--r--  bot/exts/info/information.py  28
-rw-r--r--  bot/exts/info/pypi.py  68
-rw-r--r--  bot/exts/info/source.py  10
-rw-r--r--  bot/exts/info/tags.py  7
-rw-r--r--  bot/exts/moderation/defcon.py  315
-rw-r--r--  bot/exts/moderation/dm_relay.py  160
-rw-r--r--  bot/exts/moderation/infraction/_scheduler.py  6
-rw-r--r--  bot/exts/moderation/infraction/_utils.py  46
-rw-r--r--  bot/exts/moderation/infraction/infractions.py  6
-rw-r--r--  bot/exts/moderation/infraction/superstarify.py  4
-rw-r--r--  bot/exts/moderation/slowmode.py  4
-rw-r--r--  bot/exts/moderation/stream.py  179
-rw-r--r--  bot/exts/moderation/watchchannels/_watchchannel.py  86
-rw-r--r--  bot/exts/recruitment/__init__.py  0
-rw-r--r--  bot/exts/recruitment/talentpool/__init__.py  8
-rw-r--r--  bot/exts/recruitment/talentpool/_cog.py (renamed from bot/exts/moderation/watchchannels/talentpool.py)  232
-rw-r--r--  bot/exts/recruitment/talentpool/_review.py  335
-rw-r--r--  bot/exts/utils/clean.py  8
-rw-r--r--  bot/exts/utils/internal.py  4
-rw-r--r--  bot/exts/utils/snekbox.py  10
-rw-r--r--  bot/exts/utils/utils.py  35
-rw-r--r--  bot/pagination.py  36
-rw-r--r--  bot/resources/elements.json  119
-rw-r--r--  bot/resources/foods.json  52
-rw-r--r--  bot/resources/stars.json  2
-rw-r--r--  bot/resources/tags/comparison.md  12
-rw-r--r--  bot/resources/tags/customchecks.md  21
-rw-r--r--  bot/resources/tags/customhelp.md  3
-rw-r--r--  bot/resources/tags/empty-json.md  11
-rw-r--r--  bot/resources/tags/inline.md  15
-rw-r--r--  bot/resources/tags/intents.md  19
-rw-r--r--  bot/resources/tags/off-topic.md  2
-rw-r--r--  bot/resources/tags/pep8.md  6
-rw-r--r--  bot/resources/tags/ytdl.md  8
-rw-r--r--  bot/utils/checks.py  8
-rw-r--r--  bot/utils/function.py  72
-rw-r--r--  bot/utils/lock.py  37
-rw-r--r--  bot/utils/messages.py  70
-rw-r--r--  bot/utils/scheduling.py  18
-rw-r--r--  bot/utils/services.py  9
-rw-r--r--  bot/utils/time.py  50
-rw-r--r--  config-default.yml  61
-rw-r--r--  docker-compose.yml  5
-rw-r--r--  tests/bot/exts/info/doc/__init__.py  0
-rw-r--r--  tests/bot/exts/info/doc/test_parsing.py  66
-rw-r--r--  tests/bot/exts/info/test_information.py  20
-rw-r--r--  tests/bot/exts/moderation/infraction/test_infractions.py  2
-rw-r--r--  tests/bot/exts/moderation/infraction/test_utils.py  12
-rw-r--r--  tests/bot/test_converters.py  21
-rw-r--r--  tests/bot/utils/test_services.py  4
100 files changed, 4712 insertions, 2630 deletions
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 000000000..176a458f9
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1 @@
+* text=auto
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 7217cb443..1df05e990 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -4,14 +4,15 @@
**/bot/exts/moderation/*silence.py @MarkKoz
bot/exts/info/codeblock/** @MarkKoz
bot/exts/utils/extensions.py @MarkKoz
-bot/exts/utils/snekbox.py @MarkKoz @Akarys42
+bot/exts/utils/snekbox.py @MarkKoz @Akarys42 @jb3
bot/exts/help_channels/** @MarkKoz @Akarys42
-bot/exts/moderation/** @Akarys42 @mbaruh @Den4200 @ks129
-bot/exts/info/** @Akarys42 @Den4200
-bot/exts/info/information.py @mbaruh
-bot/exts/filters/** @mbaruh
+bot/exts/moderation/** @Akarys42 @mbaruh @Den4200 @ks129 @jb3
+bot/exts/info/** @Akarys42 @Den4200 @jb3
+bot/exts/info/information.py @mbaruh @jb3
+bot/exts/filters/** @mbaruh @jb3
bot/exts/fun/** @ks129
-bot/exts/utils/** @ks129
+bot/exts/utils/** @ks129 @jb3
+bot/exts/recruitment/** @wookie184
# Rules
bot/rules/** @mbaruh
@@ -29,9 +30,9 @@ tests/bot/exts/test_cogs.py @MarkKoz
tests/** @Akarys42
# CI & Docker
-.github/workflows/** @MarkKoz @Akarys42 @SebastiaanZ @Den4200
-Dockerfile @MarkKoz @Akarys42 @Den4200
-docker-compose.yml @MarkKoz @Akarys42 @Den4200
+.github/workflows/** @MarkKoz @Akarys42 @SebastiaanZ @Den4200 @jb3
+Dockerfile @MarkKoz @Akarys42 @Den4200 @jb3
+docker-compose.yml @MarkKoz @Akarys42 @Den4200 @jb3
# Tools
Pipfile* @Akarys42
diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml
deleted file mode 100644
index 6d9919ef2..000000000
--- a/.github/FUNDING.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-patreon: python_discord
-custom: https://www.redbubble.com/people/pythondiscord
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 6c97e8784..84a671917 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -4,7 +4,7 @@ on:
workflow_run:
workflows: ["Lint & Test"]
branches:
- - master
+ - main
types:
- completed
@@ -39,7 +39,7 @@ jobs:
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
- password: ${{ secrets.GHCR_TOKEN }}
+ password: ${{ secrets.GITHUB_TOKEN }}
# Build and push the container to the GitHub Container
# Repository. The container will be tagged as "latest"
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index 5a4aede30..8b809b777 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -4,12 +4,13 @@ on:
workflow_run:
workflows: ["Build"]
branches:
- - master
+ - main
types:
- completed
jobs:
build:
+ environment: production
if: github.event.workflow_run.conclusion == 'success'
name: Build & Push
runs-on: ubuntu-latest
diff --git a/.github/workflows/lint-test.yml b/.github/workflows/lint-test.yml
index 6fa8e8333..95bed2e14 100644
--- a/.github/workflows/lint-test.yml
+++ b/.github/workflows/lint-test.yml
@@ -3,7 +3,7 @@ name: Lint & Test
on:
push:
branches:
- - master
+ - main
pull_request:
diff --git a/.github/workflows/sentry_release.yml b/.github/workflows/sentry_release.yml
index b8d92e90a..f6a1e1f0e 100644
--- a/.github/workflows/sentry_release.yml
+++ b/.github/workflows/sentry_release.yml
@@ -3,14 +3,14 @@ name: Create Sentry release
on:
push:
branches:
- - master
+ - main
jobs:
create_sentry_release:
runs-on: ubuntu-latest
steps:
- name: Checkout code
- uses: actions/checkout@master
+ uses: actions/checkout@main
- name: Create a Sentry.io release
uses: tclindner/[email protected]
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 1597592ca..52500a282 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -7,8 +7,6 @@ repos:
- id: check-yaml
args: [--unsafe] # Required due to custom constructors (e.g. !ENV)
- id: end-of-file-fixer
- - id: mixed-line-ending
- args: [--fix=lf]
- id: trailing-whitespace
args: [--markdown-linebreak-ext=md]
- repo: https://github.com/pre-commit/pygrep-hooks
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
new file mode 100644
index 000000000..57ccd80e7
--- /dev/null
+++ b/CODE_OF_CONDUCT.md
@@ -0,0 +1,3 @@
+# Code of Conduct
+
+The Python Discord Code of Conduct can be found [on our website](https://pydis.com/coc).
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index be591d17e..f20b53162 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,123 +1,3 @@
-# Contributing to one of Our Projects
+# Contributing Guidelines
-Our projects are open-source and are automatically deployed whenever commits are pushed to the `master` branch on each repository, so we've created a set of guidelines in order to keep everything clean and in working order.
-
-Note that contributions may be rejected on the basis of a contributor failing to follow these guidelines.
-
-## Rules
-
-1. **No force-pushes** or modifying the Git history in any way.
-2. If you have direct access to the repository, **create a branch for your changes** and create a pull request for that branch. If not, create a branch on a fork of the repository and create a pull request from there.
- * It's common practice for a repository to reject direct pushes to `master`, so make branching a habit!
- * If PRing from your own fork, **ensure that "Allow edits from maintainers" is checked**. This gives permission for maintainers to commit changes directly to your fork, speeding up the review process.
-3. **Adhere to the prevailing code style**, which we enforce using [`flake8`](http://flake8.pycqa.org/en/latest/index.html) and [`pre-commit`](https://pre-commit.com/).
- * Run `flake8` and `pre-commit` against your code [**before** you push it](https://soundcloud.com/lemonsaurusrex/lint-before-you-push). Your commit will be rejected by the build server if it fails to lint.
- * [Git Hooks](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks) are a powerful git feature for executing custom scripts when certain important git actions occur. The pre-commit hook is the first hook executed during the commit process and can be used to check the code being committed & abort the commit if issues, such as linting failures, are detected. While git hooks can seem daunting to configure, the `pre-commit` framework abstracts this process away from you and is provided as a dev dependency for this project. Run `pipenv run precommit` when setting up the project and you'll never have to worry about committing code that fails linting. A minimal sketch of the hook mechanism follows this list.
-4. **Make great commits**. A well structured git log is key to a project's maintainability; it efficiently provides insight into when and *why* things were done for future maintainers of the project.
- * Commits should be as narrow in scope as possible. Commits that span hundreds of lines across multiple unrelated functions and/or files are very hard for maintainers to follow. After about a week they'll probably be hard for you to follow too.
- * Avoid making minor commits for fixing typos or linting errors. Since you've already set up a `pre-commit` hook to run the linting pipeline before a commit, you shouldn't be committing linting issues anyway.
- * A more in-depth guide to writing great commit messages can be found in Chris Beam's [*How to Write a Git Commit Message*](https://chris.beams.io/posts/git-commit/)
-5. **Avoid frequent pushes to the main repository**. This goes for PRs opened against your fork as well. Our test build pipelines are triggered every time a push to the repository (or PR) is made. Try to batch your commits until you've finished working for that session, or you've reached a point where collaborators need your commits to continue their own work. This also provides you the opportunity to amend commits for minor changes rather than having to commit them on their own because you've already pushed.
- * This includes merging master into your branch. Try to leave merging from master for after your PR passes review; a maintainer will bring your PR up to date before merging. Exceptions to this include: resolving merge conflicts, needing something that was pushed to master for your branch, or when something pushed to master could potentially affect the functionality of what you're writing.
-6. **Don't fight the framework**. Every framework has its flaws, but the frameworks we've picked out have been carefully chosen for their particular merits. If you can avoid it, please resist reimplementing swathes of framework logic - the work has already been done for you!
-7. If someone is working on an issue or pull request, **do not open your own pull request for the same task**. Instead, collaborate with the author(s) of the existing pull request. Duplicate PRs opened without communicating with the other author(s) and/or PyDis staff will be closed. Communication is key, and there's no point in two separate implementations of the same thing.
- * One option is to fork the other contributor's repository and submit your changes to their branch with your own pull request. We suggest following these guidelines when interacting with their repository as well.
- * The author(s) of inactive PRs and claimed issues will be pinged after a week of inactivity for an update. Continued inactivity may result in the issue being released back to the community and/or PR closure.
-8. **Work as a team** and collaborate wherever possible. Keep things friendly and help each other out - these are shared projects and nobody likes to have their feet trodden on.
-9. All static content, such as images or audio, **must be licensed for open public use**.
- * Static content must be hosted by a service designed to do so. Failing to do so is known as "leeching" and is frowned upon, as it generates extra bandwidth costs to the host without providing benefit. It would be best if appropriately licensed content were added to the repository itself so it can be served by PyDis' infrastructure.
-
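To make the hook mechanism from rule 3 concrete, here is a minimal sketch of the kind of script git runs from `.git/hooks/pre-commit`. It is illustrative only; the `pre-commit` framework generates and installs a more robust equivalent of this from `.pre-commit-config.yaml`:

```py
#!/usr/bin/env python
"""Minimal sketch of a git pre-commit hook (saved as .git/hooks/pre-commit)."""
import subprocess
import sys

# Run the linter over the working tree. If it finds problems it exits
# non-zero, and a non-zero exit from this hook aborts the commit.
result = subprocess.run(["flake8"])
sys.exit(result.returncode)
```

Saving an executable script at that path is all git requires; installing `pre-commit` automates exactly this step for every hook listed in the project's config.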
-Above all, the needs of our community should come before the wants of an individual. Work together, build solutions to problems and try to do so in a way that people can learn from easily. Abuse of our trust may result in the loss of your Contributor role.
-
-## Changes to this Arrangement
-
-All projects evolve over time, and this contribution guide is no different. This document is open to pull requests or changes by contributors. If you believe you have something valuable to add or change, please don't hesitate to do so in a PR.
-
-## Supplemental Information
-### Developer Environment
-Instructions for setting up the bot developer environment can be found on the [PyDis wiki](https://pythondiscord.com/pages/contributing/bot/).
-
-To provide a standalone development environment for this project, Docker Compose is used to pull the current version of the [site backend](https://github.com/python-discord/site). While this is appropriate for bot-only contributions, any contribution that necessitates backend changes will also require the site repository to be appropriately configured. Instructions for setting up the site environment can be found on the [PyDis site](https://pythondiscord.com/pages/contributing/site/).
-
-When pulling down changes from GitHub, remember to sync your environment using `pipenv sync --dev` to ensure you're using the most up-to-date versions of the project's dependencies.
-
-### Type Hinting
-[PEP 484](https://www.python.org/dev/peps/pep-0484/) formally specifies type hints for Python functions, added to the Python Standard Library in version 3.5. Type hints are recognized by most modern code editing tools and provide useful insight into both the input and output types of a function, preventing the user from having to go through the codebase to determine these types.
-
-For example:
-
-```py
-import typing as t
-
-
-def foo(input_1: int, input_2: t.Dict[str, str]) -> bool:
- ...
-```
-
-This tells us that `foo` accepts an `int` and a `dict` with `str` keys and values, and returns a `bool`.
-
-All function declarations should be type hinted in code contributed to the PyDis organization.
-
-For more information, see *[PEP 483](https://www.python.org/dev/peps/pep-0483/) - The Theory of Type Hints* and Python's documentation for the [`typing`](https://docs.python.org/3/library/typing.html) module.
-
-### AutoDoc Formatting Directives
-Many documentation packages provide support for automatic documentation generation from the codebase's docstrings. These tools utilize special formatting directives to enable richer formatting in the generated documentation.
-
-For example:
-
-```py
-import typing as t
-
-
-def foo(bar: int, baz: t.Optional[t.Dict[str, str]] = None) -> bool:
- """
- Does some things with some stuff.
-
- :param bar: Some input
- :param baz: Optional, some dictionary with string keys and values
-
- :return: Some boolean
- """
- ...
-```
-
-Since PyDis does not utilize automatic documentation generation, this syntax should not be used in code contributed to the organization. Should the purpose and type of the input variables not be easily discernible from the variable name and type annotation, a prose explanation can be used. Explicit references to variables, functions, classes, etc. should be wrapped with backticks (`` ` ``).
-
-For example, the above docstring would become:
-
-```py
-import typing as t
-
-
-def foo(bar: int, baz: t.Optional[t.Dict[str, str]] = None) -> bool:
- """
- Does some things with some stuff.
-
- This function takes an index, `bar`, and checks for its presence in the database `baz`, passed as a dictionary. Returns `False` if `baz` is not passed.
- """
- ...
-```
-
-### Logging Levels
-The project currently defines [`logging`](https://docs.python.org/3/library/logging.html) levels as follows, from lowest to highest severity:
-* **TRACE:** These events should be used to provide a *verbose* trace of every step of a complex process. This is essentially the `logging` equivalent of sprinkling `print` statements throughout the code.
- * **Note:** This is a PyDis-implemented logging level.
-* **DEBUG:** These events should add context to what's happening in a development setup to make it easier to follow what's going on while working on a project. This is in the same vein as **TRACE** logging but at a much lower level of verbosity.
-* **INFO:** These events are normal and don't need direct attention but are worth keeping track of in production, like checking which cogs were loaded during a start-up.
-* **WARNING:** These events are out of the ordinary and should be fixed, but have not caused a failure.
- * **NOTE:** Events at this logging level and higher should be reserved for events that require the attention of the DevOps team.
-* **ERROR:** These events have caused a failure in a specific part of the application and require urgent attention.
-* **CRITICAL:** These events have caused the whole application to fail and require immediate intervention.
-
-Ensure that log messages are succinct. Should you want to pass additional useful information that would otherwise make the log message overly verbose, the `logging` module accepts an `extra` kwarg, which can be used to pass a dictionary. This is used to populate the `__dict__` of the `LogRecord` created for the logging event with user-defined attributes that can be accessed by a log handler. Additional information and caveats may be found [in Python's `logging` documentation](https://docs.python.org/3/library/logging.html#logging.Logger.debug).
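As a brief sketch of both points, assuming a custom TRACE level registered at numeric value 5 (the value and the registration mechanics here are illustrative, not necessarily how the bot implements them):

```py
import logging

# Assumed for illustration: register a TRACE level below DEBUG (10).
TRACE = 5
logging.addLevelName(TRACE, "TRACE")

logging.basicConfig(level=TRACE)
log = logging.getLogger(__name__)

# Verbose step-by-step tracing, in place of sprinkled print statements.
log.log(TRACE, "entered tag-parsing loop")

# Keep the message itself succinct; pass supporting detail via `extra`.
# Each key becomes an attribute on the LogRecord, so a handler or a
# formatter using e.g. "%(cog)s" can surface it without bloating the message.
log.info("Cog loaded", extra={"cog": "Moderation", "load_time_ms": 42})
```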
-
-### Work in Progress (WIP) PRs
-GitHub [provides a PR feature](https://github.blog/2019-02-14-introducing-draft-pull-requests/) that allows the PR author to mark it as a WIP. This provides both a visual and functional indicator that the contents of the PR are in a draft state and not yet ready for formal review.
-
-This feature should be utilized in place of the traditional method of prepending `[WIP]` to the PR title.
-
-As stated earlier, **ensure that "Allow edits from maintainers" is checked**. This gives permission for maintainers to commit changes directly to your fork, speeding up the review process.
-
-## Footnotes
-
-This document was inspired by the [Glowstone contribution guidelines](https://github.com/GlowstoneMC/Glowstone/blob/dev/docs/CONTRIBUTING.md).
+The Contributing Guidelines for Python Discord projects can be found [on our website](https://pydis.com/contributing.md).
diff --git a/LICENSE-THIRD-PARTY b/LICENSE-THIRD-PARTY
index eacd9b952..ab715630d 100644
--- a/LICENSE-THIRD-PARTY
+++ b/LICENSE-THIRD-PARTY
@@ -35,6 +35,36 @@ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
---------------------------------------------------------------------------------------------------
+ BSD 2-Clause License
+Applies to:
+ - Copyright (c) 2007-2020 by the Sphinx team (see AUTHORS file). All rights reserved.
+ - bot/cogs/doc/inventory_parser.py: _load_v1, _load_v2 and ZlibStreamReader.__aiter__.
+---------------------------------------------------------------------------------------------------
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+* Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+---------------------------------------------------------------------------------------------------
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
Applies to:
- Copyright © 2001-2020 Python Software Foundation. All rights reserved.
diff --git a/Pipfile b/Pipfile
index efdd46522..e924f5ddb 100644
--- a/Pipfile
+++ b/Pipfile
@@ -6,31 +6,32 @@ name = "pypi"
[packages]
aio-pika = "~=6.1"
aiodns = "~=2.0"
-aiohttp = "~=3.5"
+aiohttp = "~=3.7"
aioping = "~=0.3.1"
aioredis = "~=1.3.1"
+arrow = "~=1.0.3"
"async-rediscache[fakeredis]" = "~=0.1.2"
beautifulsoup4 = "~=4.9"
colorama = {version = "~=0.4.3",sys_platform = "== 'win32'"}
coloredlogs = "~=14.0"
deepdiff = "~=4.0"
"discord.py" = "~=1.6.0"
+emoji = "~=0.6"
feedparser = "~=5.2"
fuzzywuzzy = "~=0.17"
lxml = "~=4.4"
-markdownify = "==0.5.3"
+markdownify = "==0.6.1"
more_itertools = "~=8.2"
python-dateutil = "~=2.8"
+python-frontmatter = "~=1.0.0"
pyyaml = "~=5.1"
-requests = "~=2.22"
+regex = "==2021.4.4"
sentry-sdk = "~=0.19"
-sphinx = "~=2.2"
statsd = "~=3.3"
-arrow = "~=0.17"
-emoji = "~=0.6"
[dev-packages]
coverage = "~=5.0"
+coveralls = "~=2.1"
flake8 = "~=3.8"
flake8-annotations = "~=2.0"
flake8-bugbear = "~=20.1"
@@ -41,7 +42,6 @@ flake8-tidy-imports = "~=4.0"
flake8-todo = "~=0.7"
pep8-naming = "~=0.9"
pre-commit = "~=2.1"
-coveralls = "~=2.1"
[requires]
python_version = "3.8"
diff --git a/Pipfile.lock b/Pipfile.lock
index 636d07b1a..1e1a8167b 100644
--- a/Pipfile.lock
+++ b/Pipfile.lock
@@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
- "sha256": "26c8089f17d6d6bac11dbed366b1b46818b4546f243af756a106a32af5d9d8f6"
+ "sha256": "e35c9bad81b01152ad3e10b85f1abf5866aa87b9d87e03bc30bdb9d37668ccae"
},
"pipfile-spec": 6,
"requires": {
@@ -18,11 +18,11 @@
"default": {
"aio-pika": {
"hashes": [
- "sha256:9773440a89840941ac3099a7720bf9d51e8764a484066b82ede4d395660ff430",
- "sha256:a8065be3c722eb8f9fff8c0e7590729e7782202cdb9363d9830d7d5d47b45c7c"
+ "sha256:1d4305a5f78af3857310b4fe48348cdcf6c097e0e275ea88c2cd08570531a369",
+ "sha256:e69afef8695f47c5d107bbdba21bdb845d5c249acb3be53ef5c2d497b02657c0"
],
"index": "pypi",
- "version": "==6.7.1"
+ "version": "==6.8.0"
},
"aiodns": {
"hashes": [
@@ -34,46 +34,46 @@
},
"aiohttp": {
"hashes": [
- "sha256:0b795072bb1bf87b8620120a6373a3c61bfcb8da7e5c2377f4bb23ff4f0b62c9",
- "sha256:0d438c8ca703b1b714e82ed5b7a4412c82577040dadff479c08405e2a715564f",
- "sha256:16a3cb5df5c56f696234ea9e65e227d1ebe9c18aa774d36ff42f532139066a5f",
- "sha256:1edfd82a98c5161497bbb111b2b70c0813102ad7e0aa81cbeb34e64c93863005",
- "sha256:2406dc1dda01c7f6060ab586e4601f18affb7a6b965c50a8c90ff07569cf782a",
- "sha256:2858b2504c8697beb9357be01dc47ef86438cc1cb36ecb6991796d19475faa3e",
- "sha256:2a7b7640167ab536c3cb90cfc3977c7094f1c5890d7eeede8b273c175c3910fd",
- "sha256:3228b7a51e3ed533f5472f54f70fd0b0a64c48dc1649a0f0e809bec312934d7a",
- "sha256:328b552513d4f95b0a2eea4c8573e112866107227661834652a8984766aa7656",
- "sha256:39f4b0a6ae22a1c567cb0630c30dd082481f95c13ca528dc501a7766b9c718c0",
- "sha256:3b0036c978cbcc4a4512278e98e3e6d9e6b834dc973206162eddf98b586ef1c6",
- "sha256:3ea8c252d8df5e9166bcf3d9edced2af132f4ead8ac422eac723c5781063709a",
- "sha256:41608c0acbe0899c852281978492f9ce2c6fbfaf60aff0cefc54a7c4516b822c",
- "sha256:59d11674964b74a81b149d4ceaff2b674b3b0e4d0f10f0be1533e49c4a28408b",
- "sha256:5e479df4b2d0f8f02133b7e4430098699450e1b2a826438af6bec9a400530957",
- "sha256:684850fb1e3e55c9220aad007f8386d8e3e477c4ec9211ae54d968ecdca8c6f9",
- "sha256:6ccc43d68b81c424e46192a778f97da94ee0630337c9bbe5b2ecc9b0c1c59001",
- "sha256:6d42debaf55450643146fabe4b6817bb2a55b23698b0434107e892a43117285e",
- "sha256:710376bf67d8ff4500a31d0c207b8941ff4fba5de6890a701d71680474fe2a60",
- "sha256:756ae7efddd68d4ea7d89c636b703e14a0c686688d42f588b90778a3c2fc0564",
- "sha256:77149002d9386fae303a4a162e6bce75cc2161347ad2ba06c2f0182561875d45",
- "sha256:78e2f18a82b88cbc37d22365cf8d2b879a492faedb3f2975adb4ed8dfe994d3a",
- "sha256:7d9b42127a6c0bdcc25c3dcf252bb3ddc70454fac593b1b6933ae091396deb13",
- "sha256:8389d6044ee4e2037dca83e3f6994738550f6ee8cfb746762283fad9b932868f",
- "sha256:9c1a81af067e72261c9cbe33ea792893e83bc6aa987bfbd6fdc1e5e7b22777c4",
- "sha256:c1e0920909d916d3375c7a1fdb0b1c78e46170e8bb42792312b6eb6676b2f87f",
- "sha256:c68fdf21c6f3573ae19c7ee65f9ff185649a060c9a06535e9c3a0ee0bbac9235",
- "sha256:c733ef3bdcfe52a1a75564389bad4064352274036e7e234730526d155f04d914",
- "sha256:c9c58b0b84055d8bc27b7df5a9d141df4ee6ff59821f922dd73155861282f6a3",
- "sha256:d03abec50df423b026a5aa09656bd9d37f1e6a49271f123f31f9b8aed5dc3ea3",
- "sha256:d2cfac21e31e841d60dc28c0ec7d4ec47a35c608cb8906435d47ef83ffb22150",
- "sha256:dcc119db14757b0c7bce64042158307b9b1c76471e655751a61b57f5a0e4d78e",
- "sha256:df3a7b258cc230a65245167a202dd07320a5af05f3d41da1488ba0fa05bc9347",
- "sha256:df48a623c58180874d7407b4d9ec06a19b84ed47f60a3884345b1a5099c1818b",
- "sha256:e1b95972a0ae3f248a899cdbac92ba2e01d731225f566569311043ce2226f5e7",
- "sha256:f326b3c1bbfda5b9308252ee0dcb30b612ee92b0e105d4abec70335fab5b1245",
- "sha256:f411cb22115cb15452d099fec0ee636b06cf81bfb40ed9c02d30c8dc2bc2e3d1"
+ "sha256:02f46fc0e3c5ac58b80d4d56eb0a7c7d97fcef69ace9326289fb9f1955e65cfe",
+ "sha256:0563c1b3826945eecd62186f3f5c7d31abb7391fedc893b7e2b26303b5a9f3fe",
+ "sha256:114b281e4d68302a324dd33abb04778e8557d88947875cbf4e842c2c01a030c5",
+ "sha256:14762875b22d0055f05d12abc7f7d61d5fd4fe4642ce1a249abdf8c700bf1fd8",
+ "sha256:15492a6368d985b76a2a5fdd2166cddfea5d24e69eefed4630cbaae5c81d89bd",
+ "sha256:17c073de315745a1510393a96e680d20af8e67e324f70b42accbd4cb3315c9fb",
+ "sha256:209b4a8ee987eccc91e2bd3ac36adee0e53a5970b8ac52c273f7f8fd4872c94c",
+ "sha256:230a8f7e24298dea47659251abc0fd8b3c4e38a664c59d4b89cca7f6c09c9e87",
+ "sha256:2e19413bf84934d651344783c9f5e22dee452e251cfd220ebadbed2d9931dbf0",
+ "sha256:393f389841e8f2dfc86f774ad22f00923fdee66d238af89b70ea314c4aefd290",
+ "sha256:3cf75f7cdc2397ed4442594b935a11ed5569961333d49b7539ea741be2cc79d5",
+ "sha256:3d78619672183be860b96ed96f533046ec97ca067fd46ac1f6a09cd9b7484287",
+ "sha256:40eced07f07a9e60e825554a31f923e8d3997cfc7fb31dbc1328c70826e04cde",
+ "sha256:493d3299ebe5f5a7c66b9819eacdcfbbaaf1a8e84911ddffcdc48888497afecf",
+ "sha256:4b302b45040890cea949ad092479e01ba25911a15e648429c7c5aae9650c67a8",
+ "sha256:515dfef7f869a0feb2afee66b957cc7bbe9ad0cdee45aec7fdc623f4ecd4fb16",
+ "sha256:547da6cacac20666422d4882cfcd51298d45f7ccb60a04ec27424d2f36ba3eaf",
+ "sha256:5df68496d19f849921f05f14f31bd6ef53ad4b00245da3195048c69934521809",
+ "sha256:64322071e046020e8797117b3658b9c2f80e3267daec409b350b6a7a05041213",
+ "sha256:7615dab56bb07bff74bc865307aeb89a8bfd9941d2ef9d817b9436da3a0ea54f",
+ "sha256:79ebfc238612123a713a457d92afb4096e2148be17df6c50fb9bf7a81c2f8013",
+ "sha256:7b18b97cf8ee5452fa5f4e3af95d01d84d86d32c5e2bfa260cf041749d66360b",
+ "sha256:932bb1ea39a54e9ea27fc9232163059a0b8855256f4052e776357ad9add6f1c9",
+ "sha256:a00bb73540af068ca7390e636c01cbc4f644961896fa9363154ff43fd37af2f5",
+ "sha256:a5ca29ee66f8343ed336816c553e82d6cade48a3ad702b9ffa6125d187e2dedb",
+ "sha256:af9aa9ef5ba1fd5b8c948bb11f44891968ab30356d65fd0cc6707d989cd521df",
+ "sha256:bb437315738aa441251214dad17428cafda9cdc9729499f1d6001748e1d432f4",
+ "sha256:bdb230b4943891321e06fc7def63c7aace16095be7d9cf3b1e01be2f10fba439",
+ "sha256:c6e9dcb4cb338d91a73f178d866d051efe7c62a7166653a91e7d9fb18274058f",
+ "sha256:cffe3ab27871bc3ea47df5d8f7013945712c46a3cc5a95b6bee15887f1675c22",
+ "sha256:d012ad7911653a906425d8473a1465caa9f8dea7fcf07b6d870397b774ea7c0f",
+ "sha256:d9e13b33afd39ddeb377eff2c1c4f00544e191e1d1dee5b6c51ddee8ea6f0cf5",
+ "sha256:e4b2b334e68b18ac9817d828ba44d8fcb391f6acb398bcc5062b14b2cbeac970",
+ "sha256:e54962802d4b8b18b6207d4a927032826af39395a3bd9196a5af43fc4e60b009",
+ "sha256:f705e12750171c0ab4ef2a3c76b9a4024a62c4103e3a55dd6f99265b9bc6fcfc",
+ "sha256:f881853d2643a29e643609da57b96d5f9c9b93f62429dcc1cbb413c7d07f0e1a",
+ "sha256:fe60131d21b31fd1a14bd43e6bb88256f69dfc3188b3a89d736d6c71ed43ec95"
],
"index": "pypi",
- "version": "==3.7.3"
+ "version": "==3.7.4.post0"
},
"aioping": {
"hashes": [
@@ -99,20 +99,13 @@
"markers": "python_version >= '3.6'",
"version": "==3.3.1"
},
- "alabaster": {
- "hashes": [
- "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359",
- "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"
- ],
- "version": "==0.7.12"
- },
"arrow": {
"hashes": [
- "sha256:e098abbd9af3665aea81bdd6c869e93af4feb078e98468dd351c383af187aac5",
- "sha256:ff08d10cda1d36c68657d6ad20d74fbea493d980f8b2d45344e00d6ed2bf6ed4"
+ "sha256:3515630f11a15c61dcb4cdd245883270dd334c83f3e639824e65a4b79cc48543",
+ "sha256:399c9c8ae732270e1aa58ead835a79a40d7be8aa109c579898eb41029b5a231d"
],
"index": "pypi",
- "version": "==0.17.0"
+ "version": "==1.0.3"
},
"async-rediscache": {
"extras": [
@@ -142,14 +135,6 @@
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==20.3.0"
},
- "babel": {
- "hashes": [
- "sha256:9d35c22fcc79893c3ecc85ac4a56cde1ecf3f19c540bba0922308a6c06ca6fa5",
- "sha256:da031ab54472314f210b0adcff1588ee5d1d1d0ba4dbd07b94dba82bde791e05"
- ],
- "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
- "version": "==2.9.0"
- },
"beautifulsoup4": {
"hashes": [
"sha256:4c98143716ef1cb40bf7f39a8e3eec8f8b009509e74904ba3a7b315431577e35",
@@ -168,51 +153,53 @@
},
"cffi": {
"hashes": [
- "sha256:00a1ba5e2e95684448de9b89888ccd02c98d512064b4cb987d48f4b40aa0421e",
- "sha256:00e28066507bfc3fe865a31f325c8391a1ac2916219340f87dfad602c3e48e5d",
- "sha256:045d792900a75e8b1e1b0ab6787dd733a8190ffcf80e8c8ceb2fb10a29ff238a",
- "sha256:0638c3ae1a0edfb77c6765d487fee624d2b1ee1bdfeffc1f0b58c64d149e7eec",
- "sha256:105abaf8a6075dc96c1fe5ae7aae073f4696f2905fde6aeada4c9d2926752362",
- "sha256:155136b51fd733fa94e1c2ea5211dcd4c8879869008fc811648f16541bf99668",
- "sha256:1a465cbe98a7fd391d47dce4b8f7e5b921e6cd805ef421d04f5f66ba8f06086c",
- "sha256:1d2c4994f515e5b485fd6d3a73d05526aa0fcf248eb135996b088d25dfa1865b",
- "sha256:2c24d61263f511551f740d1a065eb0212db1dbbbbd241db758f5244281590c06",
- "sha256:51a8b381b16ddd370178a65360ebe15fbc1c71cf6f584613a7ea08bfad946698",
- "sha256:594234691ac0e9b770aee9fcdb8fa02c22e43e5c619456efd0d6c2bf276f3eb2",
- "sha256:5cf4be6c304ad0b6602f5c4e90e2f59b47653ac1ed9c662ed379fe48a8f26b0c",
- "sha256:64081b3f8f6f3c3de6191ec89d7dc6c86a8a43911f7ecb422c60e90c70be41c7",
- "sha256:6bc25fc545a6b3d57b5f8618e59fc13d3a3a68431e8ca5fd4c13241cd70d0009",
- "sha256:798caa2a2384b1cbe8a2a139d80734c9db54f9cc155c99d7cc92441a23871c03",
- "sha256:7c6b1dece89874d9541fc974917b631406233ea0440d0bdfbb8e03bf39a49b3b",
- "sha256:840793c68105fe031f34d6a086eaea153a0cd5c491cde82a74b420edd0a2b909",
- "sha256:8d6603078baf4e11edc4168a514c5ce5b3ba6e3e9c374298cb88437957960a53",
- "sha256:9cc46bc107224ff5b6d04369e7c595acb700c3613ad7bcf2e2012f62ece80c35",
- "sha256:9f7a31251289b2ab6d4012f6e83e58bc3b96bd151f5b5262467f4bb6b34a7c26",
- "sha256:9ffb888f19d54a4d4dfd4b3f29bc2c16aa4972f1c2ab9c4ab09b8ab8685b9c2b",
- "sha256:a5ed8c05548b54b998b9498753fb9cadbfd92ee88e884641377d8a8b291bcc01",
- "sha256:a7711edca4dcef1a75257b50a2fbfe92a65187c47dab5a0f1b9b332c5919a3fb",
- "sha256:af5c59122a011049aad5dd87424b8e65a80e4a6477419c0c1015f73fb5ea0293",
- "sha256:b18e0a9ef57d2b41f5c68beefa32317d286c3d6ac0484efd10d6e07491bb95dd",
- "sha256:b4e248d1087abf9f4c10f3c398896c87ce82a9856494a7155823eb45a892395d",
- "sha256:ba4e9e0ae13fc41c6b23299545e5ef73055213e466bd107953e4a013a5ddd7e3",
- "sha256:c6332685306b6417a91b1ff9fae889b3ba65c2292d64bd9245c093b1b284809d",
- "sha256:d5ff0621c88ce83a28a10d2ce719b2ee85635e85c515f12bac99a95306da4b2e",
- "sha256:d9efd8b7a3ef378dd61a1e77367f1924375befc2eba06168b6ebfa903a5e59ca",
- "sha256:df5169c4396adc04f9b0a05f13c074df878b6052430e03f50e68adf3a57aa28d",
- "sha256:ebb253464a5d0482b191274f1c8bf00e33f7e0b9c66405fbffc61ed2c839c775",
- "sha256:ec80dc47f54e6e9a78181ce05feb71a0353854cc26999db963695f950b5fb375",
- "sha256:f032b34669220030f905152045dfa27741ce1a6db3324a5bc0b96b6c7420c87b",
- "sha256:f60567825f791c6f8a592f3c6e3bd93dd2934e3f9dac189308426bd76b00ef3b",
- "sha256:f803eaa94c2fcda012c047e62bc7a51b0bdabda1cad7a92a522694ea2d76e49f"
- ],
- "version": "==1.14.4"
+ "sha256:005a36f41773e148deac64b08f233873a4d0c18b053d37da83f6af4d9087b813",
+ "sha256:0857f0ae312d855239a55c81ef453ee8fd24136eaba8e87a2eceba644c0d4c06",
+ "sha256:1071534bbbf8cbb31b498d5d9db0f274f2f7a865adca4ae429e147ba40f73dea",
+ "sha256:158d0d15119b4b7ff6b926536763dc0714313aa59e320ddf787502c70c4d4bee",
+ "sha256:1f436816fc868b098b0d63b8920de7d208c90a67212546d02f84fe78a9c26396",
+ "sha256:2894f2df484ff56d717bead0a5c2abb6b9d2bf26d6960c4604d5c48bbc30ee73",
+ "sha256:29314480e958fd8aab22e4a58b355b629c59bf5f2ac2492b61e3dc06d8c7a315",
+ "sha256:34eff4b97f3d982fb93e2831e6750127d1355a923ebaeeb565407b3d2f8d41a1",
+ "sha256:35f27e6eb43380fa080dccf676dece30bef72e4a67617ffda586641cd4508d49",
+ "sha256:3d3dd4c9e559eb172ecf00a2a7517e97d1e96de2a5e610bd9b68cea3925b4892",
+ "sha256:43e0b9d9e2c9e5d152946b9c5fe062c151614b262fda2e7b201204de0b99e482",
+ "sha256:48e1c69bbacfc3d932221851b39d49e81567a4d4aac3b21258d9c24578280058",
+ "sha256:51182f8927c5af975fece87b1b369f722c570fe169f9880764b1ee3bca8347b5",
+ "sha256:58e3f59d583d413809d60779492342801d6e82fefb89c86a38e040c16883be53",
+ "sha256:5de7970188bb46b7bf9858eb6890aad302577a5f6f75091fd7cdd3ef13ef3045",
+ "sha256:65fa59693c62cf06e45ddbb822165394a288edce9e276647f0046e1ec26920f3",
+ "sha256:69e395c24fc60aad6bb4fa7e583698ea6cc684648e1ffb7fe85e3c1ca131a7d5",
+ "sha256:6c97d7350133666fbb5cf4abdc1178c812cb205dc6f41d174a7b0f18fb93337e",
+ "sha256:6e4714cc64f474e4d6e37cfff31a814b509a35cb17de4fb1999907575684479c",
+ "sha256:72d8d3ef52c208ee1c7b2e341f7d71c6fd3157138abf1a95166e6165dd5d4369",
+ "sha256:8ae6299f6c68de06f136f1f9e69458eae58f1dacf10af5c17353eae03aa0d827",
+ "sha256:8b198cec6c72df5289c05b05b8b0969819783f9418e0409865dac47288d2a053",
+ "sha256:99cd03ae7988a93dd00bcd9d0b75e1f6c426063d6f03d2f90b89e29b25b82dfa",
+ "sha256:9cf8022fb8d07a97c178b02327b284521c7708d7c71a9c9c355c178ac4bbd3d4",
+ "sha256:9de2e279153a443c656f2defd67769e6d1e4163952b3c622dcea5b08a6405322",
+ "sha256:9e93e79c2551ff263400e1e4be085a1210e12073a31c2011dbbda14bda0c6132",
+ "sha256:9ff227395193126d82e60319a673a037d5de84633f11279e336f9c0f189ecc62",
+ "sha256:a465da611f6fa124963b91bf432d960a555563efe4ed1cc403ba5077b15370aa",
+ "sha256:ad17025d226ee5beec591b52800c11680fca3df50b8b29fe51d882576e039ee0",
+ "sha256:afb29c1ba2e5a3736f1c301d9d0abe3ec8b86957d04ddfa9d7a6a42b9367e396",
+ "sha256:b85eb46a81787c50650f2392b9b4ef23e1f126313b9e0e9013b35c15e4288e2e",
+ "sha256:bb89f306e5da99f4d922728ddcd6f7fcebb3241fc40edebcb7284d7514741991",
+ "sha256:cbde590d4faaa07c72bf979734738f328d239913ba3e043b1e98fe9a39f8b2b6",
+ "sha256:cd2868886d547469123fadc46eac7ea5253ea7fcb139f12e1dfc2bbd406427d1",
+ "sha256:d42b11d692e11b6634f7613ad8df5d6d5f8875f5d48939520d351007b3c13406",
+ "sha256:f2d45f97ab6bb54753eab54fffe75aaf3de4ff2341c9daee1987ee1837636f1d",
+ "sha256:fd78e5fee591709f32ef6edb9a015b4aa1a5022598e36227500c8f4e02328d9c"
+ ],
+ "version": "==1.14.5"
},
"chardet": {
"hashes": [
- "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
- "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa",
+ "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"
],
- "version": "==3.0.4"
+ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
+ "version": "==4.0.0"
},
"colorama": {
"hashes": [
@@ -246,14 +233,6 @@
"index": "pypi",
"version": "==1.6.0"
},
- "docutils": {
- "hashes": [
- "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af",
- "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"
- ],
- "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
- "version": "==0.16"
- },
"emoji": {
"hashes": [
"sha256:e42da4f8d648f8ef10691bc246f682a1ec6b18373abfd9be10ec0b398823bd11"
@@ -263,10 +242,10 @@
},
"fakeredis": {
"hashes": [
- "sha256:01cb47d2286825a171fb49c0e445b1fa9307087e07cbb3d027ea10dbff108b6a",
- "sha256:2c6041cf0225889bc403f3949838b2c53470a95a9e2d4272422937786f5f8f73"
+ "sha256:1ac0cef767c37f51718874a33afb5413e69d132988cb6a80c6e6dbeddf8c7623",
+ "sha256:e0416e4941cecd3089b0d901e60c8dc3c944f6384f5e29e2261c0d3c5fa99669"
],
- "version": "==1.4.5"
+ "version": "==1.5.0"
},
"feedparser": {
"hashes": [
@@ -287,55 +266,50 @@
},
"hiredis": {
"hashes": [
- "sha256:06a039208f83744a702279b894c8cf24c14fd63c59cd917dcde168b79eef0680",
- "sha256:0a909bf501459062aa1552be1461456518f367379fdc9fdb1f2ca5e4a1fdd7c0",
- "sha256:18402d9e54fb278cb9a8c638df6f1550aca36a009d47ecf5aa263a38600f35b0",
- "sha256:1e4cbbc3858ec7e680006e5ca590d89a5e083235988f26a004acf7244389ac01",
- "sha256:23344e3c2177baf6975fbfa361ed92eb7d36d08f454636e5054b3faa7c2aff8a",
- "sha256:289b31885b4996ce04cadfd5fc03d034dce8e2a8234479f7c9e23b9e245db06b",
- "sha256:2c1c570ae7bf1bab304f29427e2475fe1856814312c4a1cf1cd0ee133f07a3c6",
- "sha256:2c227c0ed371771ffda256034427320870e8ea2e4fd0c0a618c766e7c49aad73",
- "sha256:3bb9b63d319402cead8bbd9dd55dca3b667d2997e9a0d8a1f9b6cc274db4baee",
- "sha256:3ef2183de67b59930d2db8b8e8d4d58e00a50fcc5e92f4f678f6eed7a1c72d55",
- "sha256:43b8ed3dbfd9171e44c554cb4acf4ee4505caa84c5e341858b50ea27dd2b6e12",
- "sha256:47bcf3c5e6c1e87ceb86cdda2ee983fa0fe56a999e6185099b3c93a223f2fa9b",
- "sha256:5263db1e2e1e8ae30500cdd75a979ff99dcc184201e6b4b820d0de74834d2323",
- "sha256:5b1451727f02e7acbdf6aae4e06d75f66ee82966ff9114550381c3271a90f56c",
- "sha256:6996883a8a6ff9117cbb3d6f5b0dcbbae6fb9e31e1a3e4e2f95e0214d9a1c655",
- "sha256:6c96f64a54f030366657a54bb90b3093afc9c16c8e0dfa29fc0d6dbe169103a5",
- "sha256:7332d5c3e35154cd234fd79573736ddcf7a0ade7a986db35b6196b9171493e75",
- "sha256:7885b6f32c4a898e825bb7f56f36a02781ac4a951c63e4169f0afcf9c8c30dfb",
- "sha256:7b0f63f10a166583ab744a58baad04e0f52cfea1ac27bfa1b0c21a48d1003c23",
- "sha256:819f95d4eba3f9e484dd115ab7ab72845cf766b84286a00d4ecf76d33f1edca1",
- "sha256:8968eeaa4d37a38f8ca1f9dbe53526b69628edc9c42229a5b2f56d98bb828c1f",
- "sha256:89ebf69cb19a33d625db72d2ac589d26e936b8f7628531269accf4a3196e7872",
- "sha256:8daecd778c1da45b8bd54fd41ffcd471a86beed3d8e57a43acf7a8d63bba4058",
- "sha256:955ba8ea73cf3ed8bd2f963b4cb9f8f0dcb27becd2f4b3dd536fd24c45533454",
- "sha256:964f18a59f5a64c0170f684c417f4fe3e695a536612e13074c4dd5d1c6d7c882",
- "sha256:969843fbdfbf56cdb71da6f0bdf50f9985b8b8aeb630102945306cf10a9c6af2",
- "sha256:996021ef33e0f50b97ff2d6b5f422a0fe5577de21a8873b58a779a5ddd1c3132",
- "sha256:9e9c9078a7ce07e6fce366bd818be89365a35d2e4b163268f0ca9ba7e13bb2f6",
- "sha256:a04901757cb0fb0f5602ac11dda48f5510f94372144d06c2563ba56c480b467c",
- "sha256:a7bf1492429f18d205f3a818da3ff1f242f60aa59006e53dee00b4ef592a3363",
- "sha256:aa0af2deb166a5e26e0d554b824605e660039b161e37ed4f01b8d04beec184f3",
- "sha256:abfb15a6a7822f0fae681785cb38860e7a2cb1616a708d53df557b3d76c5bfd4",
- "sha256:b253fe4df2afea4dfa6b1fa8c5fef212aff8bcaaeb4207e81eed05cb5e4a7919",
- "sha256:b27f082f47d23cffc4cf1388b84fdc45c4ef6015f906cd7e0d988d9e35d36349",
- "sha256:b33aea449e7f46738811fbc6f0b3177c6777a572207412bbbf6f525ffed001ae",
- "sha256:b44f9421c4505c548435244d74037618f452844c5d3c67719d8a55e2613549da",
- "sha256:bcc371151d1512201d0214c36c0c150b1dc64f19c2b1a8c9cb1d7c7c15ebd93f",
- "sha256:c2851deeabd96d3f6283e9c6b26e0bfed4de2dc6fb15edf913e78b79fc5909ed",
- "sha256:cdfd501c7ac5b198c15df800a3a34c38345f5182e5f80770caf362bccca65628",
- "sha256:d2c0caffa47606d6d7c8af94ba42547bd2a441f06c74fd90a1ffe328524a6c64",
- "sha256:dcb2db95e629962db5a355047fb8aefb012df6c8ae608930d391619dbd96fd86",
- "sha256:e0eeb9c112fec2031927a1745788a181d0eecbacbed941fc5c4f7bc3f7b273bf",
- "sha256:e154891263306200260d7f3051982774d7b9ef35af3509d5adbbe539afd2610c",
- "sha256:e2e023a42dcbab8ed31f97c2bcdb980b7fbe0ada34037d87ba9d799664b58ded",
- "sha256:e64be68255234bb489a574c4f2f8df7029c98c81ec4d160d6cd836e7f0679390",
- "sha256:e82d6b930e02e80e5109b678c663a9ed210680ded81c1abaf54635d88d1da298"
+ "sha256:04026461eae67fdefa1949b7332e488224eac9e8f2b5c58c98b54d29af22093e",
+ "sha256:04927a4c651a0e9ec11c68e4427d917e44ff101f761cd3b5bc76f86aaa431d27",
+ "sha256:07bbf9bdcb82239f319b1f09e8ef4bdfaec50ed7d7ea51a56438f39193271163",
+ "sha256:09004096e953d7ebd508cded79f6b21e05dff5d7361771f59269425108e703bc",
+ "sha256:0adea425b764a08270820531ec2218d0508f8ae15a448568109ffcae050fee26",
+ "sha256:0b39ec237459922c6544d071cdcf92cbb5bc6685a30e7c6d985d8a3e3a75326e",
+ "sha256:0d5109337e1db373a892fdcf78eb145ffb6bbd66bb51989ec36117b9f7f9b579",
+ "sha256:0f41827028901814c709e744060843c77e78a3aca1e0d6875d2562372fcb405a",
+ "sha256:11d119507bb54e81f375e638225a2c057dda748f2b1deef05c2b1a5d42686048",
+ "sha256:1233e303645f468e399ec906b6b48ab7cd8391aae2d08daadbb5cad6ace4bd87",
+ "sha256:139705ce59d94eef2ceae9fd2ad58710b02aee91e7fa0ccb485665ca0ecbec63",
+ "sha256:1f03d4dadd595f7a69a75709bc81902673fa31964c75f93af74feac2f134cc54",
+ "sha256:240ce6dc19835971f38caf94b5738092cb1e641f8150a9ef9251b7825506cb05",
+ "sha256:294a6697dfa41a8cba4c365dd3715abc54d29a86a40ec6405d677ca853307cfb",
+ "sha256:3d55e36715ff06cdc0ab62f9591607c4324297b6b6ce5b58cb9928b3defe30ea",
+ "sha256:3dddf681284fe16d047d3ad37415b2e9ccdc6c8986c8062dbe51ab9a358b50a5",
+ "sha256:3f5f7e3a4ab824e3de1e1700f05ad76ee465f5f11f5db61c4b297ec29e692b2e",
+ "sha256:508999bec4422e646b05c95c598b64bdbef1edf0d2b715450a078ba21b385bcc",
+ "sha256:5d2a48c80cf5a338d58aae3c16872f4d452345e18350143b3bf7216d33ba7b99",
+ "sha256:5dc7a94bb11096bc4bffd41a3c4f2b958257085c01522aa81140c68b8bf1630a",
+ "sha256:65d653df249a2f95673976e4e9dd7ce10de61cfc6e64fa7eeaa6891a9559c581",
+ "sha256:7492af15f71f75ee93d2a618ca53fea8be85e7b625e323315169977fae752426",
+ "sha256:7f0055f1809b911ab347a25d786deff5e10e9cf083c3c3fd2dd04e8612e8d9db",
+ "sha256:807b3096205c7cec861c8803a6738e33ed86c9aae76cac0e19454245a6bbbc0a",
+ "sha256:81d6d8e39695f2c37954d1011c0480ef7cf444d4e3ae24bc5e89ee5de360139a",
+ "sha256:87c7c10d186f1743a8fd6a971ab6525d60abd5d5d200f31e073cd5e94d7e7a9d",
+ "sha256:8b42c0dc927b8d7c0eb59f97e6e34408e53bc489f9f90e66e568f329bff3e443",
+ "sha256:a00514362df15af041cc06e97aebabf2895e0a7c42c83c21894be12b84402d79",
+ "sha256:a39efc3ade8c1fb27c097fd112baf09d7fd70b8cb10ef1de4da6efbe066d381d",
+ "sha256:a4ee8000454ad4486fb9f28b0cab7fa1cd796fc36d639882d0b34109b5b3aec9",
+ "sha256:a7928283143a401e72a4fad43ecc85b35c27ae699cf5d54d39e1e72d97460e1d",
+ "sha256:adf4dd19d8875ac147bf926c727215a0faf21490b22c053db464e0bf0deb0485",
+ "sha256:ae8427a5e9062ba66fc2c62fb19a72276cf12c780e8db2b0956ea909c48acff5",
+ "sha256:b4c8b0bc5841e578d5fb32a16e0c305359b987b850a06964bd5a62739d688048",
+ "sha256:b84f29971f0ad4adaee391c6364e6f780d5aae7e9226d41964b26b49376071d0",
+ "sha256:c39c46d9e44447181cd502a35aad2bb178dbf1b1f86cf4db639d7b9614f837c6",
+ "sha256:cb2126603091902767d96bcb74093bd8b14982f41809f85c9b96e519c7e1dc41",
+ "sha256:dcef843f8de4e2ff5e35e96ec2a4abbdf403bd0f732ead127bd27e51f38ac298",
+ "sha256:e3447d9e074abf0e3cd85aef8131e01ab93f9f0e86654db7ac8a3f73c63706ce",
+ "sha256:f52010e0a44e3d8530437e7da38d11fb822acfb0d5b12e9cd5ba655509937ca0",
+ "sha256:f8196f739092a78e4f6b1b2172679ed3343c39c61a3e9d722ce6fcf1dac2824a"
],
- "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
- "version": "==1.1.0"
+ "markers": "python_version >= '3.6'",
+ "version": "==2.0.0"
},
"humanfriendly": {
"hashes": [
@@ -347,125 +321,69 @@
},
"idna": {
"hashes": [
- "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6",
- "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"
- ],
- "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
- "version": "==2.10"
- },
- "imagesize": {
- "hashes": [
- "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1",
- "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"
+ "sha256:5205d03e7bcbb919cc9c19885f9920d622ca52448306f2377daede5cf3faac16",
+ "sha256:c5b02147e01ea9920e6b0a3f1f7bb833612d507592c837a6c49552768f4054e1"
],
- "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
- "version": "==1.2.0"
- },
- "jinja2": {
- "hashes": [
- "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0",
- "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035"
- ],
- "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
- "version": "==2.11.2"
+ "markers": "python_version >= '3.4'",
+ "version": "==3.1"
},
"lxml": {
"hashes": [
- "sha256:0448576c148c129594d890265b1a83b9cd76fd1f0a6a04620753d9a6bcfd0a4d",
- "sha256:127f76864468d6630e1b453d3ffbbd04b024c674f55cf0a30dc2595137892d37",
- "sha256:1471cee35eba321827d7d53d104e7b8c593ea3ad376aa2df89533ce8e1b24a01",
- "sha256:2363c35637d2d9d6f26f60a208819e7eafc4305ce39dc1d5005eccc4593331c2",
- "sha256:2e5cc908fe43fe1aa299e58046ad66981131a66aea3129aac7770c37f590a644",
- "sha256:2e6fd1b8acd005bd71e6c94f30c055594bbd0aa02ef51a22bbfa961ab63b2d75",
- "sha256:366cb750140f221523fa062d641393092813b81e15d0e25d9f7c6025f910ee80",
- "sha256:42ebca24ba2a21065fb546f3e6bd0c58c3fe9ac298f3a320147029a4850f51a2",
- "sha256:4e751e77006da34643ab782e4a5cc21ea7b755551db202bc4d3a423b307db780",
- "sha256:4fb85c447e288df535b17ebdebf0ec1cf3a3f1a8eba7e79169f4f37af43c6b98",
- "sha256:50c348995b47b5a4e330362cf39fc503b4a43b14a91c34c83b955e1805c8e308",
- "sha256:535332fe9d00c3cd455bd3dd7d4bacab86e2d564bdf7606079160fa6251caacf",
- "sha256:535f067002b0fd1a4e5296a8f1bf88193080ff992a195e66964ef2a6cfec5388",
- "sha256:5be4a2e212bb6aa045e37f7d48e3e1e4b6fd259882ed5a00786f82e8c37ce77d",
- "sha256:60a20bfc3bd234d54d49c388950195d23a5583d4108e1a1d47c9eef8d8c042b3",
- "sha256:648914abafe67f11be7d93c1a546068f8eff3c5fa938e1f94509e4a5d682b2d8",
- "sha256:681d75e1a38a69f1e64ab82fe4b1ed3fd758717bed735fb9aeaa124143f051af",
- "sha256:68a5d77e440df94011214b7db907ec8f19e439507a70c958f750c18d88f995d2",
- "sha256:69a63f83e88138ab7642d8f61418cf3180a4d8cd13995df87725cb8b893e950e",
- "sha256:6e4183800f16f3679076dfa8abf2db3083919d7e30764a069fb66b2b9eff9939",
- "sha256:6fd8d5903c2e53f49e99359b063df27fdf7acb89a52b6a12494208bf61345a03",
- "sha256:791394449e98243839fa822a637177dd42a95f4883ad3dec2a0ce6ac99fb0a9d",
- "sha256:7a7669ff50f41225ca5d6ee0a1ec8413f3a0d8aa2b109f86d540887b7ec0d72a",
- "sha256:7e9eac1e526386df7c70ef253b792a0a12dd86d833b1d329e038c7a235dfceb5",
- "sha256:7ee8af0b9f7de635c61cdd5b8534b76c52cd03536f29f51151b377f76e214a1a",
- "sha256:8246f30ca34dc712ab07e51dc34fea883c00b7ccb0e614651e49da2c49a30711",
- "sha256:8c88b599e226994ad4db29d93bc149aa1aff3dc3a4355dd5757569ba78632bdf",
- "sha256:923963e989ffbceaa210ac37afc9b906acebe945d2723e9679b643513837b089",
- "sha256:94d55bd03d8671686e3f012577d9caa5421a07286dd351dfef64791cf7c6c505",
- "sha256:97db258793d193c7b62d4e2586c6ed98d51086e93f9a3af2b2034af01450a74b",
- "sha256:a9d6bc8642e2c67db33f1247a77c53476f3a166e09067c0474facb045756087f",
- "sha256:cd11c7e8d21af997ee8079037fff88f16fda188a9776eb4b81c7e4c9c0a7d7fc",
- "sha256:d8d3d4713f0c28bdc6c806a278d998546e8efc3498949e3ace6e117462ac0a5e",
- "sha256:e0bfe9bb028974a481410432dbe1b182e8191d5d40382e5b8ff39cdd2e5c5931",
- "sha256:f4822c0660c3754f1a41a655e37cb4dbbc9be3d35b125a37fab6f82d47674ebc",
- "sha256:f83d281bb2a6217cd806f4cf0ddded436790e66f393e124dfe9731f6b3fb9afe",
- "sha256:fc37870d6716b137e80d19241d0e2cff7a7643b925dfa49b4c8ebd1295eb506e"
+ "sha256:079f3ae844f38982d156efce585bc540c16a926d4436712cf4baee0cce487a3d",
+ "sha256:0fbcf5565ac01dff87cbfc0ff323515c823081c5777a9fc7703ff58388c258c3",
+ "sha256:122fba10466c7bd4178b07dba427aa516286b846b2cbd6f6169141917283aae2",
+ "sha256:1b7584d421d254ab86d4f0b13ec662a9014397678a7c4265a02a6d7c2b18a75f",
+ "sha256:26e761ab5b07adf5f555ee82fb4bfc35bf93750499c6c7614bd64d12aaa67927",
+ "sha256:289e9ca1a9287f08daaf796d96e06cb2bc2958891d7911ac7cae1c5f9e1e0ee3",
+ "sha256:2a9d50e69aac3ebee695424f7dbd7b8c6d6eb7de2a2eb6b0f6c7db6aa41e02b7",
+ "sha256:33bb934a044cf32157c12bfcfbb6649807da20aa92c062ef51903415c704704f",
+ "sha256:3439c71103ef0e904ea0a1901611863e51f50b5cd5e8654a151740fde5e1cade",
+ "sha256:39b78571b3b30645ac77b95f7c69d1bffc4cf8c3b157c435a34da72e78c82468",
+ "sha256:4289728b5e2000a4ad4ab8da6e1db2e093c63c08bdc0414799ee776a3f78da4b",
+ "sha256:4bff24dfeea62f2e56f5bab929b4428ae6caba2d1eea0c2d6eb618e30a71e6d4",
+ "sha256:542d454665a3e277f76954418124d67516c5f88e51a900365ed54a9806122b83",
+ "sha256:5a0a14e264069c03e46f926be0d8919f4105c1623d620e7ec0e612a2e9bf1c04",
+ "sha256:66e575c62792c3f9ca47cb8b6fab9e35bab91360c783d1606f758761810c9791",
+ "sha256:74f7d8d439b18fa4c385f3f5dfd11144bb87c1da034a466c5b5577d23a1d9b51",
+ "sha256:7610b8c31688f0b1be0ef882889817939490a36d0ee880ea562a4e1399c447a1",
+ "sha256:76fa7b1362d19f8fbd3e75fe2fb7c79359b0af8747e6f7141c338f0bee2f871a",
+ "sha256:7728e05c35412ba36d3e9795ae8995e3c86958179c9770e65558ec3fdfd3724f",
+ "sha256:8157dadbb09a34a6bd95a50690595e1fa0af1a99445e2744110e3dca7831c4ee",
+ "sha256:820628b7b3135403540202e60551e741f9b6d3304371712521be939470b454ec",
+ "sha256:884ab9b29feaca361f7f88d811b1eea9bfca36cf3da27768d28ad45c3ee6f969",
+ "sha256:89b8b22a5ff72d89d48d0e62abb14340d9e99fd637d046c27b8b257a01ffbe28",
+ "sha256:92e821e43ad382332eade6812e298dc9701c75fe289f2a2d39c7960b43d1e92a",
+ "sha256:b007cbb845b28db4fb8b6a5cdcbf65bacb16a8bd328b53cbc0698688a68e1caa",
+ "sha256:bc4313cbeb0e7a416a488d72f9680fffffc645f8a838bd2193809881c67dd106",
+ "sha256:bccbfc27563652de7dc9bdc595cb25e90b59c5f8e23e806ed0fd623755b6565d",
+ "sha256:c4f05c5a7c49d2fb70223d0d5bcfbe474cf928310ac9fa6a7c6dddc831d0b1d4",
+ "sha256:ce256aaa50f6cc9a649c51be3cd4ff142d67295bfc4f490c9134d0f9f6d58ef0",
+ "sha256:d2e35d7bf1c1ac8c538f88d26b396e73dd81440d59c1ef8522e1ea77b345ede4",
+ "sha256:df7c53783a46febb0e70f6b05df2ba104610f2fb0d27023409734a3ecbb78fb2",
+ "sha256:efac139c3f0bf4f0939f9375af4b02c5ad83a622de52d6dfa8e438e8e01d0eb0",
+ "sha256:efd7a09678fd8b53117f6bae4fa3825e0a22b03ef0a932e070c0bdbb3a35e654",
+ "sha256:f2380a6376dfa090227b663f9678150ef27543483055cc327555fb592c5967e2",
+ "sha256:f8380c03e45cf09f8557bdaa41e1fa7c81f3ae22828e1db470ab2a6c96d8bc23",
+ "sha256:f90ba11136bfdd25cae3951af8da2e95121c9b9b93727b1b896e3fa105b2f586"
],
"index": "pypi",
- "version": "==4.6.2"
+ "version": "==4.6.3"
},
"markdownify": {
"hashes": [
- "sha256:30be8340724e706c9e811c27fe8c1542cf74a15b46827924fff5c54b40dd9b0d",
- "sha256:a69588194fd76634f0139d6801b820fd652dc5eeba9530e90d323dfdc0155252"
+ "sha256:31d7c13ac2ada8bfc7535a25fee6622ca720e1b5f2d4a9cbc429d167c21f886d",
+ "sha256:7489fd5c601536996a376c4afbcd1dd034db7690af807120681461e82fbc0acc"
],
"index": "pypi",
- "version": "==0.5.3"
- },
- "markupsafe": {
- "hashes": [
- "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473",
- "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161",
- "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235",
- "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5",
- "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42",
- "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff",
- "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b",
- "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1",
- "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e",
- "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183",
- "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66",
- "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b",
- "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1",
- "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15",
- "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1",
- "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e",
- "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b",
- "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905",
- "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735",
- "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d",
- "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e",
- "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d",
- "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c",
- "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21",
- "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2",
- "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5",
- "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b",
- "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6",
- "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f",
- "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f",
- "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2",
- "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7",
- "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"
- ],
- "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
- "version": "==1.1.1"
+ "version": "==0.6.1"
},
"more-itertools": {
"hashes": [
- "sha256:8e1a2a43b2f2727425f2b5839587ae37093f19153dc26c0927d1048ff6557330",
- "sha256:b3a9005928e5bed54076e6e549c792b306fddfe72b2d1d22dd63d42d5d3899cf"
+ "sha256:5652a9ac72209ed7df8d9c15daf4e1aa0e3d2ccd3c87f8265a0673cd9cbc9ced",
+ "sha256:c5d6da9ca3ff65220c3bfd2a8db06d698f05d4d2b9be57e1deb2be5a45019713"
],
"index": "pypi",
- "version": "==8.6.0"
+ "version": "==8.7.0"
},
"multidict": {
"hashes": [
@@ -517,14 +435,6 @@
"markers": "python_version >= '3.5'",
"version": "==4.0.2"
},
- "packaging": {
- "hashes": [
- "sha256:24e0da08660a87484d1602c30bb4902d74816b6985b93de36926f5bc95741858",
- "sha256:78598185a7008a470d64526a8059de9aaa449238f280fc9eb6b13ba6c4109093"
- ],
- "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
- "version": "==20.8"
- },
"pamqp": {
"hashes": [
"sha256:2f81b5c186f668a67f165193925b6bfd83db4363a6222f599517f29ecee60b02",
@@ -574,22 +484,6 @@
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.20"
},
- "pygments": {
- "hashes": [
- "sha256:bc9591213a8f0e0ca1a5e68a479b4887fdc3e75d0774e5c71c31920c427de435",
- "sha256:df49d09b498e83c1a73128295860250b0b7edd4c723a32e9bc0d295c7c2ec337"
- ],
- "markers": "python_version >= '3.5'",
- "version": "==2.7.4"
- },
- "pyparsing": {
- "hashes": [
- "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1",
- "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"
- ],
- "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2'",
- "version": "==2.4.7"
- },
"python-dateutil": {
"hashes": [
"sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c",
@@ -598,31 +492,48 @@
"index": "pypi",
"version": "==2.8.1"
},
- "pytz": {
+ "python-frontmatter": {
"hashes": [
- "sha256:16962c5fb8db4a8f63a26646d8886e9d769b6c511543557bc84e9569fb9a9cb4",
- "sha256:180befebb1927b16f6b57101720075a984c019ac16b1b7575673bea42c6c3da5"
+ "sha256:766ae75f1b301ffc5fe3494339147e0fd80bc3deff3d7590a93991978b579b08",
+ "sha256:e98152e977225ddafea6f01f40b4b0f1de175766322004c826ca99842d19a7cd"
],
- "version": "==2020.5"
+ "index": "pypi",
+ "version": "==1.0.0"
},
"pyyaml": {
"hashes": [
- "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97",
- "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76",
- "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2",
- "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e",
- "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648",
- "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf",
- "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f",
- "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2",
- "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee",
- "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a",
- "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d",
- "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c",
- "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a"
+ "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf",
+ "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696",
+ "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393",
+ "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77",
+ "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922",
+ "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5",
+ "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8",
+ "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10",
+ "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc",
+ "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018",
+ "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e",
+ "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253",
+ "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347",
+ "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183",
+ "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541",
+ "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb",
+ "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185",
+ "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc",
+ "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db",
+ "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa",
+ "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46",
+ "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122",
+ "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b",
+ "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63",
+ "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df",
+ "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc",
+ "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247",
+ "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6",
+ "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0"
],
"index": "pypi",
- "version": "==5.3.1"
+ "version": "==5.4.1"
},
"redis": {
"hashes": [
@@ -632,21 +543,60 @@
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==3.5.3"
},
- "requests": {
- "hashes": [
- "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804",
- "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"
+ "regex": {
+ "hashes": [
+ "sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5",
+ "sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79",
+ "sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31",
+ "sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500",
+ "sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11",
+ "sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14",
+ "sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3",
+ "sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439",
+ "sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c",
+ "sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82",
+ "sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711",
+ "sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093",
+ "sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a",
+ "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb",
+ "sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8",
+ "sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17",
+ "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000",
+ "sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d",
+ "sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480",
+ "sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc",
+ "sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0",
+ "sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9",
+ "sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765",
+ "sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e",
+ "sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a",
+ "sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07",
+ "sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f",
+ "sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac",
+ "sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7",
+ "sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed",
+ "sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968",
+ "sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7",
+ "sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2",
+ "sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4",
+ "sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87",
+ "sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8",
+ "sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10",
+ "sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29",
+ "sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605",
+ "sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6",
+ "sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042"
],
"index": "pypi",
- "version": "==2.25.1"
+ "version": "==2021.4.4"
},
"sentry-sdk": {
"hashes": [
- "sha256:0a711ec952441c2ec89b8f5d226c33bc697914f46e876b44a4edd3e7864cf4d0",
- "sha256:737a094e49a529dd0fdcaafa9e97cf7c3d5eb964bd229821d640bc77f3502b3f"
+ "sha256:4ae8d1ced6c67f1c8ea51d82a16721c166c489b76876c9f2c202b8a50334b237",
+ "sha256:e75c8c58932bda8cd293ea8e4b242527129e1caaec91433d21b8b2f20fee030b"
],
"index": "pypi",
- "version": "==0.19.5"
+ "version": "==0.20.3"
},
"six": {
"hashes": [
@@ -656,13 +606,6 @@
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'",
"version": "==1.15.0"
},
- "snowballstemmer": {
- "hashes": [
- "sha256:209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0",
- "sha256:df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52"
- ],
- "version": "==2.0.0"
- },
"sortedcontainers": {
"hashes": [
"sha256:37257a32add0a3ee490bb170b599e93095eed89a55da91fa9f48753ea12fd73f",
@@ -672,67 +615,11 @@
},
"soupsieve": {
"hashes": [
- "sha256:4bb21a6ee4707bf43b61230e80740e71bfe56e55d1f1f50924b087bb2975c851",
- "sha256:6dc52924dc0bc710a5d16794e6b3480b2c7c08b07729505feab2b2c16661ff6e"
+ "sha256:052774848f448cf19c7e959adf5566904d525f33a3f8b6ba6f6f8f26ec7de0cc",
+ "sha256:c2c1c2d44f158cdbddab7824a9af8c4f83c76b1e23e049479aa432feb6c4c23b"
],
"markers": "python_version >= '3.0'",
- "version": "==2.1"
- },
- "sphinx": {
- "hashes": [
- "sha256:b4c750d546ab6d7e05bdff6ac24db8ae3e8b8253a3569b754e445110a0a12b66",
- "sha256:fc312670b56cb54920d6cc2ced455a22a547910de10b3142276495ced49231cb"
- ],
- "index": "pypi",
- "version": "==2.4.4"
- },
- "sphinxcontrib-applehelp": {
- "hashes": [
- "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a",
- "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"
- ],
- "markers": "python_version >= '3.5'",
- "version": "==1.0.2"
- },
- "sphinxcontrib-devhelp": {
- "hashes": [
- "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e",
- "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"
- ],
- "markers": "python_version >= '3.5'",
- "version": "==1.0.2"
- },
- "sphinxcontrib-htmlhelp": {
- "hashes": [
- "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f",
- "sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b"
- ],
- "markers": "python_version >= '3.5'",
- "version": "==1.0.3"
- },
- "sphinxcontrib-jsmath": {
- "hashes": [
- "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178",
- "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"
- ],
- "markers": "python_version >= '3.5'",
- "version": "==1.0.1"
- },
- "sphinxcontrib-qthelp": {
- "hashes": [
- "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72",
- "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"
- ],
- "markers": "python_version >= '3.5'",
- "version": "==1.0.3"
- },
- "sphinxcontrib-serializinghtml": {
- "hashes": [
- "sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc",
- "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a"
- ],
- "markers": "python_version >= '3.5'",
- "version": "==1.1.4"
+ "version": "==2.2.1"
},
"statsd": {
"hashes": [
@@ -752,11 +639,11 @@
},
"urllib3": {
"hashes": [
- "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08",
- "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473"
+ "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df",
+ "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'",
- "version": "==1.26.2"
+ "version": "==1.26.4"
},
"yarl": {
"hashes": [
@@ -835,65 +722,69 @@
},
"chardet": {
"hashes": [
- "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
- "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa",
+ "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"
],
- "version": "==3.0.4"
+ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
+ "version": "==4.0.0"
},
"coverage": {
"hashes": [
- "sha256:08b3ba72bd981531fd557f67beee376d6700fba183b167857038997ba30dd297",
- "sha256:2757fa64e11ec12220968f65d086b7a29b6583d16e9a544c889b22ba98555ef1",
- "sha256:3102bb2c206700a7d28181dbe04d66b30780cde1d1c02c5f3c165cf3d2489497",
- "sha256:3498b27d8236057def41de3585f317abae235dd3a11d33e01736ffedb2ef8606",
- "sha256:378ac77af41350a8c6b8801a66021b52da8a05fd77e578b7380e876c0ce4f528",
- "sha256:38f16b1317b8dd82df67ed5daa5f5e7c959e46579840d77a67a4ceb9cef0a50b",
- "sha256:3911c2ef96e5ddc748a3c8b4702c61986628bb719b8378bf1e4a6184bbd48fe4",
- "sha256:3a3c3f8863255f3c31db3889f8055989527173ef6192a283eb6f4db3c579d830",
- "sha256:3b14b1da110ea50c8bcbadc3b82c3933974dbeea1832e814aab93ca1163cd4c1",
- "sha256:535dc1e6e68fad5355f9984d5637c33badbdc987b0c0d303ee95a6c979c9516f",
- "sha256:6f61319e33222591f885c598e3e24f6a4be3533c1d70c19e0dc59e83a71ce27d",
- "sha256:723d22d324e7997a651478e9c5a3120a0ecbc9a7e94071f7e1954562a8806cf3",
- "sha256:76b2775dda7e78680d688daabcb485dc87cf5e3184a0b3e012e1d40e38527cc8",
- "sha256:782a5c7df9f91979a7a21792e09b34a658058896628217ae6362088b123c8500",
- "sha256:7e4d159021c2029b958b2363abec4a11db0ce8cd43abb0d9ce44284cb97217e7",
- "sha256:8dacc4073c359f40fcf73aede8428c35f84639baad7e1b46fce5ab7a8a7be4bb",
- "sha256:8f33d1156241c43755137288dea619105477961cfa7e47f48dbf96bc2c30720b",
- "sha256:8ffd4b204d7de77b5dd558cdff986a8274796a1e57813ed005b33fd97e29f059",
- "sha256:93a280c9eb736a0dcca19296f3c30c720cb41a71b1f9e617f341f0a8e791a69b",
- "sha256:9a4f66259bdd6964d8cf26142733c81fb562252db74ea367d9beb4f815478e72",
- "sha256:9a9d4ff06804920388aab69c5ea8a77525cf165356db70131616acd269e19b36",
- "sha256:a2070c5affdb3a5e751f24208c5c4f3d5f008fa04d28731416e023c93b275277",
- "sha256:a4857f7e2bc6921dbd487c5c88b84f5633de3e7d416c4dc0bb70256775551a6c",
- "sha256:a607ae05b6c96057ba86c811d9c43423f35e03874ffb03fbdcd45e0637e8b631",
- "sha256:a66ca3bdf21c653e47f726ca57f46ba7fc1f260ad99ba783acc3e58e3ebdb9ff",
- "sha256:ab110c48bc3d97b4d19af41865e14531f300b482da21783fdaacd159251890e8",
- "sha256:b239711e774c8eb910e9b1ac719f02f5ae4bf35fa0420f438cdc3a7e4e7dd6ec",
- "sha256:be0416074d7f253865bb67630cf7210cbc14eb05f4099cc0f82430135aaa7a3b",
- "sha256:c46643970dff9f5c976c6512fd35768c4a3819f01f61169d8cdac3f9290903b7",
- "sha256:c5ec71fd4a43b6d84ddb88c1df94572479d9a26ef3f150cef3dacefecf888105",
- "sha256:c6e5174f8ca585755988bc278c8bb5d02d9dc2e971591ef4a1baabdf2d99589b",
- "sha256:c89b558f8a9a5a6f2cfc923c304d49f0ce629c3bd85cb442ca258ec20366394c",
- "sha256:cc44e3545d908ecf3e5773266c487ad1877be718d9dc65fc7eb6e7d14960985b",
- "sha256:cc6f8246e74dd210d7e2b56c76ceaba1cc52b025cd75dbe96eb48791e0250e98",
- "sha256:cd556c79ad665faeae28020a0ab3bda6cd47d94bec48e36970719b0b86e4dcf4",
- "sha256:ce6f3a147b4b1a8b09aae48517ae91139b1b010c5f36423fa2b866a8b23df879",
- "sha256:ceb499d2b3d1d7b7ba23abe8bf26df5f06ba8c71127f188333dddcf356b4b63f",
- "sha256:cef06fb382557f66d81d804230c11ab292d94b840b3cb7bf4450778377b592f4",
- "sha256:e448f56cfeae7b1b3b5bcd99bb377cde7c4eb1970a525c770720a352bc4c8044",
- "sha256:e52d3d95df81c8f6b2a1685aabffadf2d2d9ad97203a40f8d61e51b70f191e4e",
- "sha256:ee2f1d1c223c3d2c24e3afbb2dd38be3f03b1a8d6a83ee3d9eb8c36a52bee899",
- "sha256:f2c6888eada180814b8583c3e793f3f343a692fc802546eed45f40a001b1169f",
- "sha256:f51dbba78d68a44e99d484ca8c8f604f17e957c1ca09c3ebc2c7e3bbd9ba0448",
- "sha256:f54de00baf200b4539a5a092a759f000b5f45fd226d6d25a76b0dff71177a714",
- "sha256:fa10fee7e32213f5c7b0d6428ea92e3a3fdd6d725590238a3f92c0de1c78b9d2",
- "sha256:fabeeb121735d47d8eab8671b6b031ce08514c86b7ad8f7d5490a7b6dcd6267d",
- "sha256:fac3c432851038b3e6afe086f777732bcf7f6ebbfd90951fa04ee53db6d0bcdd",
- "sha256:fda29412a66099af6d6de0baa6bd7c52674de177ec2ad2630ca264142d69c6c7",
- "sha256:ff1330e8bc996570221b450e2d539134baa9465f5cb98aff0e0f73f34172e0ae"
+ "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c",
+ "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6",
+ "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45",
+ "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a",
+ "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03",
+ "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529",
+ "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a",
+ "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a",
+ "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2",
+ "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6",
+ "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759",
+ "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53",
+ "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a",
+ "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4",
+ "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff",
+ "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502",
+ "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793",
+ "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb",
+ "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905",
+ "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821",
+ "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b",
+ "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81",
+ "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0",
+ "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b",
+ "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3",
+ "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184",
+ "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701",
+ "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a",
+ "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82",
+ "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638",
+ "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5",
+ "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083",
+ "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6",
+ "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90",
+ "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465",
+ "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a",
+ "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3",
+ "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e",
+ "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066",
+ "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf",
+ "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b",
+ "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae",
+ "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669",
+ "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873",
+ "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b",
+ "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6",
+ "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb",
+ "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160",
+ "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c",
+ "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079",
+ "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d",
+ "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"
],
"index": "pypi",
- "version": "==5.3.1"
+ "version": "==5.5"
},
"coveralls": {
"hashes": [
@@ -925,19 +816,19 @@
},
"flake8": {
"hashes": [
- "sha256:749dbbd6bfd0cf1318af27bf97a14e28e5ff548ef8e5b1566ccfb25a11e7c839",
- "sha256:aadae8761ec651813c24be05c6f7b4680857ef6afaae4651a4eccaef97ce6c3b"
+ "sha256:12d05ab02614b6aee8df7c36b97d1a3b2372761222b19b58621355e82acddcff",
+ "sha256:78873e372b12b093da7b5e5ed302e8ad9e988b38b063b61ad937f26ca58fc5f0"
],
"index": "pypi",
- "version": "==3.8.4"
+ "version": "==3.9.0"
},
"flake8-annotations": {
"hashes": [
- "sha256:3a377140556aecf11fa9f3bb18c10db01f5ea56dc79a730e2ec9b4f1f49e2055",
- "sha256:e17947a48a5b9f632fe0c72682fc797c385e451048e7dfb20139f448a074cb3e"
+ "sha256:0d6cd2e770b5095f09689c9d84cc054c51b929c41a68969ea1beb4b825cac515",
+ "sha256:d10c4638231f8a50c0a597c4efce42bd7b7d85df4f620a0ddaca526138936a4f"
],
"index": "pypi",
- "version": "==2.5.0"
+ "version": "==2.6.2"
},
"flake8-bugbear": {
"hashes": [
@@ -949,11 +840,11 @@
},
"flake8-docstrings": {
"hashes": [
- "sha256:3d5a31c7ec6b7367ea6506a87ec293b94a0a46c0bce2bb4975b7f1d09b6f3717",
- "sha256:a256ba91bc52307bef1de59e2a009c3cf61c3d0952dbe035d6ff7208940c2edc"
+ "sha256:99cac583d6c7e32dd28bbfbef120a7c0d1b6dde4adb5a9fd441c4227a6534bde",
+ "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"
],
"index": "pypi",
- "version": "==1.5.0"
+ "version": "==1.6.0"
},
"flake8-import-order": {
"hashes": [
@@ -995,19 +886,19 @@
},
"identify": {
"hashes": [
- "sha256:18994e850ba50c37bcaed4832be8b354d6a06c8fb31f54e0e7ece76d32f69bc8",
- "sha256:892473bf12e655884132a3a32aca737a3cbefaa34a850ff52d501773a45837bc"
+ "sha256:398cb92a7599da0b433c65301a1b62b9b1f4bb8248719b84736af6c0b22289d6",
+ "sha256:4537474817e0bbb8cea3e5b7504b7de6d44e3f169a90846cbc6adb0fc8294502"
],
- "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
- "version": "==1.5.12"
+ "markers": "python_full_version >= '3.6.1'",
+ "version": "==2.2.3"
},
"idna": {
"hashes": [
- "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6",
- "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"
+ "sha256:5205d03e7bcbb919cc9c19885f9920d622ca52448306f2377daede5cf3faac16",
+ "sha256:c5b02147e01ea9920e6b0a3f1f7bb833612d507592c837a6c49552768f4054e1"
],
- "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
- "version": "==2.10"
+ "markers": "python_version >= '3.4'",
+ "version": "==3.1"
},
"mccabe": {
"hashes": [
@@ -1018,10 +909,10 @@
},
"nodeenv": {
"hashes": [
- "sha256:5304d424c529c997bc888453aeaa6362d242b6b4631e90f3d4bf1b290f1c84a9",
- "sha256:ab45090ae383b716c4ef89e690c41ff8c2b257b85b309f01f3654df3d084bd7c"
+ "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b",
+ "sha256:621e6b7076565ddcacd2db0294c0381e01fd28945ab36bcf00f41c5daf63bef7"
],
- "version": "==1.5.0"
+ "version": "==1.6.0"
},
"pep8-naming": {
"hashes": [
@@ -1033,61 +924,77 @@
},
"pre-commit": {
"hashes": [
- "sha256:6c86d977d00ddc8a60d68eec19f51ef212d9462937acf3ea37c7adec32284ac0",
- "sha256:ee784c11953e6d8badb97d19bc46b997a3a9eded849881ec587accd8608d74a4"
+ "sha256:029d53cb83c241fe7d66eeee1e24db426f42c858f15a38d20bcefd8d8e05c9da",
+ "sha256:46b6ffbab37986c47d0a35e40906ae029376deed89a0eb2e446fb6e67b220427"
],
"index": "pypi",
- "version": "==2.9.3"
+ "version": "==2.12.0"
},
"pycodestyle": {
"hashes": [
- "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367",
- "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e"
+ "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068",
+ "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
- "version": "==2.6.0"
+ "version": "==2.7.0"
},
"pydocstyle": {
"hashes": [
- "sha256:19b86fa8617ed916776a11cd8bc0197e5b9856d5433b777f51a3defe13075325",
- "sha256:aca749e190a01726a4fb472dd4ef23b5c9da7b9205c0a7857c06533de13fd678"
+ "sha256:164befb520d851dbcf0e029681b91f4f599c62c5cd8933fd54b1bfbd50e89e1f",
+ "sha256:d4449cf16d7e6709f63192146706933c7a334af7c0f083904799ccb851c50f6d"
],
- "markers": "python_version >= '3.5'",
- "version": "==5.1.1"
+ "markers": "python_version >= '3.6'",
+ "version": "==6.0.0"
},
"pyflakes": {
"hashes": [
- "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92",
- "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8"
+ "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3",
+ "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
- "version": "==2.2.0"
+ "version": "==2.3.1"
},
"pyyaml": {
"hashes": [
- "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97",
- "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76",
- "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2",
- "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e",
- "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648",
- "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf",
- "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f",
- "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2",
- "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee",
- "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a",
- "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d",
- "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c",
- "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a"
+ "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf",
+ "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696",
+ "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393",
+ "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77",
+ "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922",
+ "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5",
+ "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8",
+ "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10",
+ "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc",
+ "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018",
+ "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e",
+ "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253",
+ "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347",
+ "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183",
+ "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541",
+ "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb",
+ "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185",
+ "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc",
+ "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db",
+ "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa",
+ "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46",
+ "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122",
+ "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b",
+ "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63",
+ "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df",
+ "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc",
+ "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247",
+ "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6",
+ "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0"
],
"index": "pypi",
- "version": "==5.3.1"
+ "version": "==5.4.1"
},
"requests": {
"hashes": [
"sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804",
"sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"
],
- "index": "pypi",
+ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==2.25.1"
},
"six": {
@@ -1100,10 +1007,10 @@
},
"snowballstemmer": {
"hashes": [
- "sha256:209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0",
- "sha256:df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52"
+ "sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2",
+ "sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914"
],
- "version": "==2.0.0"
+ "version": "==2.1.0"
},
"toml": {
"hashes": [
@@ -1115,19 +1022,19 @@
},
"urllib3": {
"hashes": [
- "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08",
- "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473"
+ "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df",
+ "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'",
- "version": "==1.26.2"
+ "version": "==1.26.4"
},
"virtualenv": {
"hashes": [
- "sha256:0c111a2236b191422b37fe8c28b8c828ced39aab4bf5627fa5c331aeffb570d9",
- "sha256:14b34341e742bdca219e10708198e704e8a7064dd32f474fc16aca68ac53a306"
+ "sha256:49ec4eb4c224c6f7dd81bb6d0a28a09ecae5894f4e593c89b0db0885f565a107",
+ "sha256:83f95875d382c7abafe06bd2a4cdd1b363e1bb77e02f155ebe8ac082a916b37c"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
- "version": "==20.3.1"
+ "version": "==20.4.3"
}
}
}
diff --git a/README.md b/README.md
index ac45e6340..9df905dc8 100644
--- a/README.md
+++ b/README.md
@@ -12,11 +12,11 @@ and other tools to help keep the server running like a well-oiled machine.
Read the [Contributing Guide](https://pythondiscord.com/pages/contributing/bot/) on our website if you're interested in helping out.
-[1]: https://github.com/python-discord/bot/workflows/Lint%20&%20Test/badge.svg?branch=master
-[2]: https://github.com/python-discord/bot/actions?query=workflow%3A%22Lint+%26+Test%22+branch%3Amaster
-[3]: https://github.com/python-discord/bot/workflows/Build/badge.svg?branch=master
-[4]: https://github.com/python-discord/bot/actions?query=workflow%3ABuild+branch%3Amaster
-[5]: https://github.com/python-discord/bot/workflows/Deploy/badge.svg?branch=master
-[6]: https://github.com/python-discord/bot/actions?query=workflow%3ADeploy+branch%3Amaster
-[7]: https://raw.githubusercontent.com/python-discord/branding/master/logos/badge/badge_github.svg
+[1]: https://github.com/python-discord/bot/workflows/Lint%20&%20Test/badge.svg?branch=main
+[2]: https://github.com/python-discord/bot/actions?query=workflow%3A%22Lint+%26+Test%22+branch%3Amain
+[3]: https://github.com/python-discord/bot/workflows/Build/badge.svg?branch=main
+[4]: https://github.com/python-discord/bot/actions?query=workflow%3ABuild+branch%3Amain
+[5]: https://github.com/python-discord/bot/workflows/Deploy/badge.svg?branch=main
+[6]: https://github.com/python-discord/bot/actions?query=workflow%3ADeploy+branch%3Amain
+[7]: https://raw.githubusercontent.com/python-discord/branding/main/logos/badge/badge_github.svg
[8]: https://discord.gg/python
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 000000000..fa5a88a39
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,3 @@
+# Security Notice
+
+The Security Notice for Python Discord projects can be found [on our website](https://pydis.com/security.md).
diff --git a/bot/__main__.py b/bot/__main__.py
index 257216fa7..9317563c8 100644
--- a/bot/__main__.py
+++ b/bot/__main__.py
@@ -1,10 +1,28 @@
+import logging
+
+import aiohttp
+
import bot
from bot import constants
-from bot.bot import Bot
+from bot.bot import Bot, StartupError
from bot.log import setup_sentry
setup_sentry()
-bot.instance = Bot.create()
-bot.instance.load_extensions()
-bot.instance.run(constants.Bot.token)
+try:
+ bot.instance = Bot.create()
+ bot.instance.load_extensions()
+ bot.instance.run(constants.Bot.token)
+except StartupError as e:
+ message = "Unknown Startup Error Occurred."
+ if isinstance(e.exception, (aiohttp.ClientConnectorError, aiohttp.ServerDisconnectedError)):
+ message = "Could not connect to site API. Is it running?"
+ elif isinstance(e.exception, OSError):
+ message = "Could not connect to Redis. Is it running?"
+
+ # The exception is logged with an empty message so the actual message is visible at the bottom
+ log = logging.getLogger("bot")
+ log.fatal("", exc_info=e.exception)
+ log.fatal(message)
+
+ exit(69)
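
The new entry point branches on the wrapped exception rather than catching each failure type at the top level; exit code 69 matches os.EX_UNAVAILABLE ("service unavailable") on Unix, which is presumably why it was chosen. A minimal, self-contained sketch of the same wrap-and-inspect pattern, with the real start-up call stubbed out:

    import logging
    import sys

    class StartupError(Exception):
        """Carry the root cause of a failed start-up."""

        def __init__(self, base: Exception):
            super().__init__()
            self.exception = base

    def start() -> None:
        raise StartupError(OSError("redis unreachable"))  # Stub for Bot.create()/run().

    try:
        start()
    except StartupError as e:
        log = logging.getLogger("bot")
        log.fatal("", exc_info=e.exception)  # Full traceback of the root cause first.
        log.fatal("Could not connect to Redis. Is it running?")
        sys.exit(69)
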
diff --git a/bot/bot.py b/bot/bot.py
index d5f108575..914da9c98 100644
--- a/bot/bot.py
+++ b/bot/bot.py
@@ -19,6 +19,14 @@ log = logging.getLogger('bot')
LOCALHOST = "127.0.0.1"
+class StartupError(Exception):
+ """Exception class for startup errors."""
+
+ def __init__(self, base: Exception):
+ super().__init__()
+ self.exception = base
+
+
class Bot(commands.Bot):
"""A subclass of `discord.ext.commands.Bot` with an aiohttp session and an API client."""
@@ -81,13 +89,29 @@ class Bot(commands.Bot):
for item in full_cache:
self.insert_item_into_filter_list_cache(item)
+ async def ping_services(self) -> None:
+ """A helper to make sure all the services the bot relies on are available on startup."""
+ # Connect Site/API
+ attempts = 0
+ while True:
+ try:
+ log.info(f"Attempting site connection: {attempts + 1}/{constants.URLs.connect_max_retries}")
+ await self.api_client.get("healthcheck")
+ break
+
+ except (aiohttp.ClientConnectorError, aiohttp.ServerDisconnectedError):
+ attempts += 1
+ if attempts == constants.URLs.connect_max_retries:
+ raise
+ await asyncio.sleep(constants.URLs.connect_cooldown)
+
@classmethod
def create(cls) -> "Bot":
"""Create and return an instance of a Bot."""
loop = asyncio.get_event_loop()
allowed_roles = [discord.Object(id_) for id_ in constants.MODERATION_ROLES]
- intents = discord.Intents().all()
+ intents = discord.Intents.all()
intents.presences = False
intents.dm_typing = False
intents.dm_reactions = False
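
ping_services sleeps between attempts but not after the last one, so the worst case before the error propagates is (connect_max_retries - 1) * connect_cooldown seconds of waiting; with illustrative values of 5 retries and a 10-second cooldown (the real numbers live in the config YAML, not in this diff), that is roughly 40 seconds. The same loop, generalised:

    import asyncio
    import typing as t

    async def retry(ping: t.Callable[[], t.Awaitable[None]], attempts: int, cooldown: float) -> None:
        """Await ping() until it succeeds or `attempts` tries are exhausted."""
        for attempt in range(1, attempts + 1):
            try:
                await ping()
                return
            except ConnectionError:
                if attempt == attempts:
                    raise  # The caller wraps this in StartupError.
                await asyncio.sleep(cooldown)
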
@@ -223,6 +247,11 @@ class Bot(commands.Bot):
# here. Normally, this shouldn't happen.
await self.redis_session.connect()
+ try:
+ await self.ping_services()
+ except Exception as e:
+ raise StartupError(e)
+
# Build the FilterList cache
await self.cache_filter_list_data()
@@ -318,5 +347,8 @@ def _create_redis_session(loop: asyncio.AbstractEventLoop) -> RedisSession:
use_fakeredis=constants.Redis.use_fakeredis,
global_namespace="bot",
)
- loop.run_until_complete(redis_session.connect())
+ try:
+ loop.run_until_complete(redis_session.connect())
+ except OSError as e:
+ raise StartupError(e)
return redis_session
diff --git a/bot/constants.py b/bot/constants.py
index 91d425b1d..dc9cd4dfb 100644
--- a/bot/constants.py
+++ b/bot/constants.py
@@ -282,9 +282,9 @@ class Emojis(metaclass=YAMLGetter):
badge_verified_bot: str
bot: str
- defcon_disabled: str # noqa: E704
- defcon_enabled: str # noqa: E704
- defcon_updated: str # noqa: E704
+ defcon_shutdown: str # noqa: E704
+ defcon_unshutdown: str # noqa: E704
+ defcon_update: str # noqa: E704
failmail: str
@@ -321,13 +321,14 @@ class Icons(metaclass=YAMLGetter):
crown_red: str
defcon_denied: str # noqa: E704
- defcon_disabled: str # noqa: E704
- defcon_enabled: str # noqa: E704
- defcon_updated: str # noqa: E704
+ defcon_shutdown: str # noqa: E704
+ defcon_unshutdown: str # noqa: E704
+ defcon_update: str # noqa: E704
filtering: str
green_checkmark: str
+ green_questionmark: str
guild_update: str
hash_blurple: str
@@ -389,6 +390,7 @@ class Categories(metaclass=YAMLGetter):
help_available: int
help_dormant: int
help_in_use: int
+ moderators: int
modmail: int
voice: int
@@ -403,7 +405,6 @@ class Channels(metaclass=YAMLGetter):
python_events: int
python_news: int
reddit: int
- user_event_announcements: int
dev_contrib: int
dev_core: int
@@ -413,9 +414,9 @@ class Channels(metaclass=YAMLGetter):
python_general: int
cooldown: int
+ how_to_get_help: int
attachment_log: int
- dm_log: int
message_log: int
mod_log: int
user_log: int
@@ -436,9 +437,9 @@ class Channels(metaclass=YAMLGetter):
helpers: int
incidents: int
incidents_archive: int
- mods: int
mod_alerts: int
- mod_spam: int
+ nominations: int
+ nomination_voting: int
organisation: int
admin_announcements: int
@@ -466,7 +467,6 @@ class Webhooks(metaclass=YAMLGetter):
big_brother: int
dev_log: int
- dm_log: int
duck_pond: int
incidents_archive: int
reddit: int
@@ -485,12 +485,16 @@ class Roles(metaclass=YAMLGetter):
python_community: int
sprinters: int
voice_verified: int
+ video: int
admins: int
core_developers: int
+ devops: int
+ domain_leads: int
helpers: int
moderators: int
owners: int
+ project_leads: int
jammers: int
team_leaders: int
@@ -532,6 +536,8 @@ class URLs(metaclass=YAMLGetter):
github_bot_repo: str
# Base site vars
+ connect_max_retries: int
+ connect_cooldown: int
site: str
site_api: str
site_schema: str
@@ -590,7 +596,8 @@ class HelpChannels(metaclass=YAMLGetter):
enable: bool
claim_minutes: int
cmd_whitelist: List[int]
- idle_minutes: int
+ idle_minutes_claimant: int
+ idle_minutes_others: int
deleted_idle_minutes: int
max_available: int
max_total_channels: int
@@ -658,6 +665,12 @@ class Event(Enum):
voice_state_update = "voice_state_update"
+class VideoPermission(metaclass=YAMLGetter):
+ section = "video_permission"
+
+ default_permission_duration: int
+
+
# Debug mode
DEBUG_MODE = 'local' in os.environ.get("SITE_URL", "local")
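
The constants classes above only declare annotated attribute names under a `section`; the YAMLGetter metaclass (defined elsewhere in constants.py, not shown in this diff) resolves attribute access against the parsed config file. A rough sketch of that mechanism, assuming the config parses to a nested dict:

    import yaml

    with open("config-default.yml") as f:  # Illustrative path.
        _CONFIG = yaml.safe_load(f)

    class YAMLGetter(type):
        """Look up class attributes in the YAML section named by `section`."""

        def __getattr__(cls, name: str):
            # Annotated names such as `connect_max_retries` land here, because
            # annotations alone do not create class attributes.
            return _CONFIG[cls.section][name]

    class VideoPermission(metaclass=YAMLGetter):
        section = "video_permission"

        default_permission_duration: int

    # VideoPermission.default_permission_duration is now read from the YAML file.
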
diff --git a/bot/converters.py b/bot/converters.py
index 0d9a519df..3bf05cfb3 100644
--- a/bot/converters.py
+++ b/bot/converters.py
@@ -15,7 +15,9 @@ from discord.utils import DISCORD_EPOCH, snowflake_time
from bot.api import ResponseCodeError
from bot.constants import URLs
+from bot.exts.info.doc import _inventory_parser
from bot.utils.regex import INVITE_RE
+from bot.utils.time import parse_duration_string
log = logging.getLogger(__name__)
@@ -126,22 +128,20 @@ class ValidFilterListType(Converter):
return list_type
-class ValidPythonIdentifier(Converter):
+class PackageName(Converter):
"""
- A converter that checks whether the given string is a valid Python identifier.
+ A converter that checks whether the given string is a valid package name.
- This is used to have package names that correspond to how you would use the package in your
- code, e.g. `import package`.
-
- Raises `BadArgument` if the argument is not a valid Python identifier, and simply passes through
- the given argument otherwise.
+ Package names are used for stats and are restricted to the a-z, 0-9, and _ characters.
"""
- @staticmethod
- async def convert(ctx: Context, argument: str) -> str:
- """Checks whether the given string is a valid Python identifier."""
- if not argument.isidentifier():
- raise BadArgument(f"`{argument}` is not a valid Python identifier")
+ PACKAGE_NAME_RE = re.compile(r"[^a-z0-9_]")
+
+ @classmethod
+ async def convert(cls, ctx: Context, argument: str) -> str:
+ """Checks whether the given string is a valid package name."""
+ if cls.PACKAGE_NAME_RE.search(argument):
+ raise BadArgument("The provided package name is not valid; please only use the _, 0-9, and a-z characters.")
return argument
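
The converter accepts an argument only when a search for any character outside [a-z0-9_] finds nothing, matching the wording of the error message above. For example:

    import re

    PACKAGE_NAME_RE = re.compile(r"[^a-z0-9_]")

    assert PACKAGE_NAME_RE.search("python_dateutil") is None      # Valid: passes through.
    assert PACKAGE_NAME_RE.search("python-dateutil") is not None  # Rejected: '-' not allowed.
    assert PACKAGE_NAME_RE.search("Requests") is not None         # Rejected: uppercase.
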
@@ -177,6 +177,27 @@ class ValidURL(Converter):
return url
+class Inventory(Converter):
+ """
+ Represents an Intersphinx inventory URL.
+
+ This converter checks whether intersphinx accepts the given inventory URL, and raises
+ `BadArgument` if that is not the case or if the url is unreachable.
+
+ Otherwise, it returns the url and the fetched inventory dict in a tuple.
+ """
+
+ @staticmethod
+ async def convert(ctx: Context, url: str) -> t.Tuple[str, _inventory_parser.InventoryDict]:
+ """Convert url to Intersphinx inventory URL."""
+ await ctx.trigger_typing()
+ if (inventory := await _inventory_parser.fetch_inventory(url)) is None:
+ raise BadArgument(
+ f"Failed to fetch inventory file after {_inventory_parser.FAILED_REQUEST_ATTEMPTS} attempts."
+ )
+ return url, inventory
+
+
class Snowflake(IDConverter):
"""
Converts to an int if the argument is a valid Discord snowflake.
@@ -301,16 +322,6 @@ class TagContentConverter(Converter):
class DurationDelta(Converter):
"""Convert duration strings into dateutil.relativedelta.relativedelta objects."""
- duration_parser = re.compile(
- r"((?P<years>\d+?) ?(years|year|Y|y) ?)?"
- r"((?P<months>\d+?) ?(months|month|m) ?)?"
- r"((?P<weeks>\d+?) ?(weeks|week|W|w) ?)?"
- r"((?P<days>\d+?) ?(days|day|D|d) ?)?"
- r"((?P<hours>\d+?) ?(hours|hour|H|h) ?)?"
- r"((?P<minutes>\d+?) ?(minutes|minute|M) ?)?"
- r"((?P<seconds>\d+?) ?(seconds|second|S|s))?"
- )
-
async def convert(self, ctx: Context, duration: str) -> relativedelta:
"""
Converts a `duration` string to a relativedelta object.
@@ -326,13 +337,9 @@ class DurationDelta(Converter):
The units need to be provided in descending order of magnitude.
"""
- match = self.duration_parser.fullmatch(duration)
- if not match:
+ if not (delta := parse_duration_string(duration)):
raise BadArgument(f"`{duration}` is not a valid duration string.")
- duration_dict = {unit: int(amount) for unit, amount in match.groupdict(default=0).items()}
- delta = relativedelta(**duration_dict)
-
return delta
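
The regex removed here (its behaviour now lives behind bot.utils.time.parse_duration_string, imported at the top of this file's diff) treats lowercase m as months and uppercase M as minutes, with units given in descending order of magnitude. Checking the removed pattern stand-alone:

    import re

    from dateutil.relativedelta import relativedelta

    DURATION_RE = re.compile(
        r"((?P<years>\d+?) ?(years|year|Y|y) ?)?"
        r"((?P<months>\d+?) ?(months|month|m) ?)?"
        r"((?P<weeks>\d+?) ?(weeks|week|W|w) ?)?"
        r"((?P<days>\d+?) ?(days|day|D|d) ?)?"
        r"((?P<hours>\d+?) ?(hours|hour|H|h) ?)?"
        r"((?P<minutes>\d+?) ?(minutes|minute|M) ?)?"
        r"((?P<seconds>\d+?) ?(seconds|second|S|s))?"
    )

    match = DURATION_RE.fullmatch("2d3h5M")
    parts = {unit: int(amount) for unit, amount in match.groupdict(default=0).items()}
    assert relativedelta(**parts) == relativedelta(days=2, hours=3, minutes=5)
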
@@ -357,27 +364,38 @@ class Duration(DurationDelta):
class OffTopicName(Converter):
"""A converter that ensures an added off-topic name is valid."""
+ ALLOWED_CHARACTERS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ!?'`-"
+
+ @classmethod
+ def translate_name(cls, name: str, *, from_unicode: bool = True) -> str:
+ """
+ Translates `name` into a format that is allowed in discord channel names.
+
+ If `from_unicode` is False, the name is translated from the discord-safe format back to normalized text.
+ """
+ if from_unicode:
+ table = str.maketrans(cls.ALLOWED_CHARACTERS, '𝖠𝖡𝖢𝖣𝖤𝖥𝖦𝖧𝖨𝖩𝖪𝖫𝖬𝖭𝖮𝖯𝖰𝖱𝖲𝖳𝖴𝖵𝖶𝖷𝖸𝖹ǃ?’’-')
+ else:
+ table = str.maketrans('𝖠𝖡𝖢𝖣𝖤𝖥𝖦𝖧𝖨𝖩𝖪𝖫𝖬𝖭𝖮𝖯𝖰𝖱𝖲𝖳𝖴𝖵𝖶𝖷𝖸𝖹ǃ?’’-', cls.ALLOWED_CHARACTERS)
+
+ return name.translate(table)
+
async def convert(self, ctx: Context, argument: str) -> str:
"""Attempt to replace any invalid characters with their approximate Unicode equivalent."""
- allowed_characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ!?'`-"
-
# Chain multiple words to a single one
argument = "-".join(argument.split())
if not (2 <= len(argument) <= 96):
raise BadArgument("Channel name must be between 2 and 96 chars long")
- elif not all(c.isalnum() or c in allowed_characters for c in argument):
+ elif not all(c.isalnum() or c in self.ALLOWED_CHARACTERS for c in argument):
raise BadArgument(
"Channel name must only consist of "
"alphanumeric characters, minus signs or apostrophes."
)
# Replace invalid characters with unicode alternatives.
- table = str.maketrans(
- allowed_characters, '𝖠𝖡𝖢𝖣𝖤𝖥𝖦𝖧𝖨𝖩𝖪𝖫𝖬𝖭𝖮𝖯𝖰𝖱𝖲𝖳𝖴𝖵𝖶𝖷𝖸𝖹ǃ?’’-'
- )
- return argument.translate(table)
+ return self.translate_name(argument)
class ISODateTime(Converter):
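
The translation table pairs each allowed ASCII character with a look-alike code point that Discord does accept in channel names (mathematical sans-serif capitals, U+01C3 for '!', and so on), and from_unicode=False applies the reverse table. A round trip, assuming the tables above; note that ' and ` both map to ’, so the reverse direction is lossy for those two characters:

    fancy = OffTopicName.translate_name("PYTHON-IS-GREAT!")
    # fancy == '𝖯𝖸𝖳𝖧𝖮𝖭-𝖨𝖲-𝖦𝖱𝖤𝖠𝖳ǃ'
    assert OffTopicName.translate_name(fancy, from_unicode=False) == "PYTHON-IS-GREAT!"
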
diff --git a/bot/decorators.py b/bot/decorators.py
index 063c8f878..e971a5bd3 100644
--- a/bot/decorators.py
+++ b/bot/decorators.py
@@ -1,16 +1,18 @@
import asyncio
+import functools
import logging
+import types
import typing as t
from contextlib import suppress
-from functools import wraps
from discord import Member, NotFound
from discord.ext import commands
from discord.ext.commands import Cog, Context
-from bot.constants import Channels, RedirectOutput
+from bot.constants import Channels, DEBUG_MODE, RedirectOutput
from bot.utils import function
-from bot.utils.checks import in_whitelist_check
+from bot.utils.checks import ContextCheckFailure, in_whitelist_check
+from bot.utils.function import command_wraps
log = logging.getLogger(__name__)
@@ -43,6 +45,49 @@ def in_whitelist(
return commands.check(predicate)
+class NotInBlacklistCheckFailure(ContextCheckFailure):
+ """Raised when the 'not_in_blacklist' check fails."""
+
+
+def not_in_blacklist(
+ *,
+ channels: t.Container[int] = (),
+ categories: t.Container[int] = (),
+ roles: t.Container[int] = (),
+ override_roles: t.Container[int] = (),
+ redirect: t.Optional[int] = Channels.bot_commands,
+ fail_silently: bool = False,
+) -> t.Callable:
+ """
+ Check if a command was not issued in a blacklisted context.
+
+ The blacklists that can be provided are:
+
+ - `channels`: a container with channel ids for blacklisted channels
+ - `categories`: a container with category ids for blacklisted categories
+ - `roles`: a container with role ids for blacklisted roles
+
+ If the command was invoked in a context that was blacklisted, the member is either
+ redirected to the `redirect` channel that was passed (default: #bot-commands) or simply
+ told that they're not allowed to use this particular command (if `None` was passed).
+
+ The blacklist can be overridden through the roles specified in `override_roles`.
+ """
+ def predicate(ctx: Context) -> bool:
+ """Check if command was issued in a blacklisted context."""
+ not_blacklisted = not in_whitelist_check(ctx, channels, categories, roles, fail_silently=True)
+ overridden = in_whitelist_check(ctx, roles=override_roles, fail_silently=True)
+
+ success = not_blacklisted or overridden
+
+ if not success and not fail_silently:
+ raise NotInBlacklistCheckFailure(redirect)
+
+ return success
+
+ return commands.check(predicate)
+
+
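
A hypothetical command using the new check (all IDs are placeholders): members holding the override role can invoke it even in a blacklisted channel, while everyone else is pointed to the redirect channel:

    from discord.ext import commands

    class Fun(commands.Cog):
        @commands.command()
        @not_in_blacklist(channels=(111111111111111111,), override_roles=(222222222222222222,))
        async def roll(self, ctx: commands.Context) -> None:
            await ctx.send("You rolled a 4.")
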
def has_no_roles(*roles: t.Union[str, int]) -> t.Callable:
"""
Returns True if the user does not have any of the roles specified.
@@ -70,8 +115,8 @@ def redirect_output(destination_channel: int, bypass_roles: t.Container[int] = N
This decorator must go before (below) the `command` decorator.
"""
- def wrap(func: t.Callable) -> t.Callable:
- @wraps(func)
+ def wrap(func: types.FunctionType) -> types.FunctionType:
+ @command_wraps(func)
async def inner(self: Cog, ctx: Context, *args, **kwargs) -> None:
if ctx.channel.id == destination_channel:
log.trace(f"Command {ctx.command.name} was invoked in destination_channel, not redirecting")
@@ -105,7 +150,6 @@ def redirect_output(destination_channel: int, bypass_roles: t.Container[int] = N
with suppress(NotFound):
await ctx.message.delete()
log.trace("Redirect output: Deleted invocation message")
-
return inner
return wrap
@@ -122,8 +166,8 @@ def respect_role_hierarchy(member_arg: function.Argument) -> t.Callable:
This decorator must go before (below) the `command` decorator.
"""
- def decorator(func: t.Callable) -> t.Callable:
- @wraps(func)
+ def decorator(func: types.FunctionType) -> types.FunctionType:
+ @command_wraps(func)
async def wrapper(*args, **kwargs) -> None:
log.trace(f"{func.__name__}: respect role hierarchy decorator called")
@@ -153,3 +197,23 @@ def respect_role_hierarchy(member_arg: function.Argument) -> t.Callable:
await func(*args, **kwargs)
return wrapper
return decorator
+
+
+def mock_in_debug(return_value: t.Any) -> t.Callable:
+ """
+ Short-circuit function execution if in debug mode and return `return_value`.
+
+ The original function name and the incoming args and kwargs are logged at DEBUG level
+ upon each call. This is useful for expensive operations, e.g. media asset uploads
+ that are prone to rate-limits but need to be tested extensively.
+ """
+ def decorator(func: t.Callable) -> t.Callable:
+ @functools.wraps(func)
+ async def wrapped(*args, **kwargs) -> t.Any:
+ """Short-circuit and log if in debug mode."""
+ if DEBUG_MODE:
+ log.debug(f"Function {func.__name__} called with args: {args}, kwargs: {kwargs}")
+ return return_value
+ return await func(*args, **kwargs)
+ return wrapped
+ return decorator
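
mock_in_debug is applied later in this diff to Branding.apply_asset, where it stubs out guild asset uploads during development. The general shape of a use site (upload_banner is a made-up example):

    @mock_in_debug(return_value=True)
    async def upload_banner(url: str) -> bool:
        """Expensive, rate-limited upload; short-circuited whenever DEBUG_MODE is set."""
        ...
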
diff --git a/bot/errors.py b/bot/errors.py
index ab0adcd42..3544c6320 100644
--- a/bot/errors.py
+++ b/bot/errors.py
@@ -35,3 +35,9 @@ class InvalidInfractedUser(Exception):
self.reason = reason
super().__init__(reason)
+
+
+class BrandingMisconfiguration(RuntimeError):
+ """Raised by the Branding cog when a misconfigured event is encountered."""
+
+ pass
diff --git a/bot/exts/backend/branding/__init__.py b/bot/exts/backend/branding/__init__.py
index 81ea3bf49..20a747b7f 100644
--- a/bot/exts/backend/branding/__init__.py
+++ b/bot/exts/backend/branding/__init__.py
@@ -1,7 +1,7 @@
from bot.bot import Bot
-from bot.exts.backend.branding._cog import BrandingManager
+from bot.exts.backend.branding._cog import Branding
def setup(bot: Bot) -> None:
- """Loads BrandingManager cog."""
- bot.add_cog(BrandingManager(bot))
+ """Load Branding cog."""
+ bot.add_cog(Branding(bot))
diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py
index 20df83a89..47c379a34 100644
--- a/bot/exts/backend/branding/_cog.py
+++ b/bot/exts/backend/branding/_cog.py
@@ -1,566 +1,653 @@
import asyncio
-import itertools
+import contextlib
import logging
import random
import typing as t
-from datetime import datetime, time, timedelta
+from datetime import timedelta
+from enum import Enum
+from operator import attrgetter
-import arrow
import async_timeout
import discord
+from arrow import Arrow
from async_rediscache import RedisCache
-from discord.ext import commands
+from discord.ext import commands, tasks
from bot.bot import Bot
-from bot.constants import Branding, Colours, Emojis, Guild, MODERATION_ROLES
-from bot.exts.backend.branding import _constants, _decorators, _errors, _seasons
+from bot.constants import Branding as BrandingConfig, Channels, Colours, Guild, MODERATION_ROLES
+from bot.decorators import mock_in_debug
+from bot.exts.backend.branding._repository import BrandingRepository, Event, RemoteObject
log = logging.getLogger(__name__)
-class GitHubFile(t.NamedTuple):
+class AssetType(Enum):
"""
- Represents a remote file on GitHub.
+ Recognised Discord guild asset types.
- The `sha` hash is kept so that we can determine that a file has changed,
- despite its filename remaining unchanged.
+ The value of each member corresponds exactly to a kwarg that can be passed to `Guild.edit`.
"""
- download_url: str
- path: str
- sha: str
+ BANNER = "banner"
+ ICON = "icon"
-def pretty_files(files: t.Iterable[GitHubFile]) -> str:
- """Provide a human-friendly representation of `files`."""
- return "\n".join(file.path for file in files)
+def compound_hash(objects: t.Iterable[RemoteObject]) -> str:
+ """
+ Join SHA attributes of `objects` into a single string.
+
+ Compound hashes are cached to check for change in any of the member `objects`.
+ """
+ return "-".join(item.sha for item in objects)
+
+
+def make_embed(title: str, description: str, *, success: bool) -> discord.Embed:
+ """
+ Construct simple response embed.
+
+ If `success` is True, use green colour, otherwise red.
+
+ For both `title` and `description`, empty strings are valid values ~ fields will be empty.
+ """
+ colour = Colours.soft_green if success else Colours.soft_red
+ return discord.Embed(title=title[:256], description=description[:2048], colour=colour)
-def time_until_midnight() -> timedelta:
+def extract_event_duration(event: Event) -> str:
"""
- Determine amount of time until the next-up UTC midnight.
+ Extract a human-readable, year-agnostic duration string from `event`.
- The exact `midnight` moment is actually delayed to 5 seconds after, in order
- to avoid potential problems due to imprecise sleep.
+ If `event` is a fallback event, this resolves to 'Fallback'.
+
+ For 1-day events, only the single date is shown, instead of a period.
"""
- now = datetime.utcnow()
- tomorrow = now + timedelta(days=1)
- midnight = datetime.combine(tomorrow, time(second=5))
+ if event.meta.is_fallback:
+ return "Fallback"
+
+ fmt = "%B %d" # Ex: August 23
+ start_date = event.meta.start_date.strftime(fmt)
+ end_date = event.meta.end_date.strftime(fmt)
+
+ if start_date == end_date:
+ return start_date
- return midnight - now
+ return f"{start_date} - {end_date}"
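
The output is year-agnostic because "%B %d" omits the year; note that %d zero-pads single-digit days, so only same-day events collapse to a single date:

    from datetime import date

    fmt = "%B %d"
    assert date(2021, 8, 23).strftime(fmt) == "August 23"
    assert date(2021, 6, 1).strftime(fmt) == "June 01"  # %d zero-pads.
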
-class BrandingManager(commands.Cog):
+def extract_event_name(event: Event) -> str:
"""
- Manages the guild's branding.
-
- The purpose of this cog is to help automate the synchronization of the branding
- repository with the guild. It is capable of discovering assets in the repository
- via GitHub's API, resolving download urls for them, and delegating
- to the `bot` instance to upload them to the guild.
-
- BrandingManager is designed to be entirely autonomous. Its `daemon` background task awakens
- once a day (see `time_until_midnight`) to detect new seasons, or to cycle icons within a single
- season. The daemon can be turned on and off via the `daemon` cmd group. The value set via
- its `start` and `stop` commands is persisted across sessions. If turned on, the daemon will
- automatically start on the next bot start-up. Otherwise, it will wait to be started manually.
-
- All supported operations, e.g. setting seasons, applying the branding, or cycling icons, can
- also be invoked manually, via the following API:
-
- branding list
- - Show all available seasons
-
- branding set <season_name>
- - Set the cog's internal state to represent `season_name`, if it exists
- - If no `season_name` is given, set chronologically current season
- - This will not automatically apply the season's branding to the guild,
- the cog's state can be detached from the guild
- - Seasons can therefore be 'previewed' using this command
-
- branding info
- - View detailed information about resolved assets for current season
-
- branding refresh
- - Refresh internal state, i.e. synchronize with branding repository
-
- branding apply
- - Apply the current internal state to the guild, i.e. upload the assets
-
- branding cycle
- - If there are multiple available icons for current season, randomly pick
- and apply the next one
-
- The daemon calls these methods autonomously as appropriate. The use of this cog
- is locked to moderation roles. As it performs media asset uploads, it is prone to
- rate-limits - the `apply` command should be used with caution. The `set` command can,
- however, be used freely to 'preview' seasonal branding and check whether paths have been
- resolved as appropriate.
-
- While the bot is in debug mode, it will 'mock' asset uploads by logging the passed
- download urls and pretending that the upload was successful. Make use of this
- to test this cog's behaviour.
+ Extract title-cased event name from the path of `event`.
+
+ An event with a path of 'events/black_history_month' will resolve to 'Black History Month'.
"""
+ name = event.path.split("/")[-1] # Inner-most directory name.
+ words = name.split("_") # Words from snake case.
+
+ return " ".join(word.title() for word in words)
+
+
+class Branding(commands.Cog):
+ """
+ Guild branding management.
+
+ Extension responsible for automatic synchronisation of the guild's branding with the branding repository.
+ Event definitions and assets are automatically discovered and applied as appropriate.
- current_season: t.Type[_seasons.SeasonBase]
+ All state is stored in Redis. The cog should therefore seamlessly transition across restarts and maintain
+ a consistent icon rotation schedule for events with multiple icon assets.
- banner: t.Optional[GitHubFile]
+ By caching hashes of banner & icon assets, we discover changes in currently applied assets and always keep
+ the latest version applied.
- available_icons: t.List[GitHubFile]
- remaining_icons: t.List[GitHubFile]
+ The command interface allows moderators+ to control the daemon or request asset synchronisation, while
+ regular users can see information about the current event and the overall event schedule.
+ """
- days_since_cycle: t.Iterator
+ # RedisCache[
+ # "daemon_active": bool | If True, daemon starts on start-up. Controlled via commands.
+ # "event_path": str | Current event's path in the branding repo.
+ # "event_description": str | Current event's Markdown description.
+ # "event_duration": str | Current event's human-readable date range.
+ # "banner_hash": str | SHA of the currently applied banner.
+ # "icons_hash": str | Compound SHA of all icons in current rotation.
+ # "last_rotation_timestamp": float | POSIX UTC timestamp.
+ # ]
+ cache_information = RedisCache()
- daemon: t.Optional[asyncio.Task]
+ # Icons in current rotation. Keys (str) are download URLs, values (int) track the number of times each
+ # icon has been used in the current rotation.
+ cache_icons = RedisCache()
- # Branding configuration
- branding_configuration = RedisCache()
+ # All available event names & durations. Cached by the daemon nightly; read by the calendar command.
+ cache_events = RedisCache()
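
A minimal sketch of how these namespaces are queried elsewhere in this diff, assuming the async_rediscache API (`get`/`set`/`to_dict`) used throughout:

    # Inside any coroutine of the cog:
    daemon_active = await self.cache_information.get("daemon_active")  # None if the key was never set
    await self.cache_information.set("event_path", "events/pride")  # hypothetical event path
    icon_counts = await self.cache_icons.to_dict()  # {download_url: times_used}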
def __init__(self, bot: Bot) -> None:
+ """Instantiate repository abstraction & allow daemon to start."""
+ self.bot = bot
+ self.repository = BrandingRepository(bot)
+
+ self.bot.loop.create_task(self.maybe_start_daemon()) # Start depending on cache.
+
+ # region: Internal logic & state management
+
+ @mock_in_debug(return_value=True) # Mocked in development environment to prevent API spam.
+ async def apply_asset(self, asset_type: AssetType, download_url: str) -> bool:
"""
- Assign safe default values on init.
+ Download asset from `download_url` and apply it to PyDis as `asset_type`.
- At this point, we don't have information about currently available branding.
- Most of these attributes will be overwritten once the daemon connects, or once
- the `refresh` command is used.
+ Return a boolean indicating whether the application was successful.
"""
- self.bot = bot
- self.current_season = _seasons.get_current_season()
+ log.info(f"Applying '{asset_type.value}' asset to the guild.")
+
+ try:
+ file = await self.repository.fetch_file(download_url)
+ except Exception:
+ log.exception(f"Failed to fetch '{asset_type.value}' asset.")
+ return False
- self.banner = None
+ await self.bot.wait_until_guild_available()
+ pydis: discord.Guild = self.bot.get_guild(Guild.id)
- self.available_icons = []
- self.remaining_icons = []
+ timeout = 10 # Seconds.
+ try:
+ async with async_timeout.timeout(timeout): # Raise after `timeout` seconds.
+ await pydis.edit(**{asset_type.value: file})
+ except discord.HTTPException:
+ log.exception("Asset upload to Discord failed.")
+ return False
+ except asyncio.TimeoutError:
+ log.error(f"Asset upload to Discord timed out after {timeout} seconds.")
+ return False
+ else:
+ log.trace("Asset uploaded successfully.")
+ return True
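
The kwarg unpacking above maps the enum value directly onto `Guild.edit`'s keyword. Assuming `AssetType.BANNER.value == "banner"` (the enum is defined earlier in this file, outside this hunk), the call is equivalent to:

    await pydis.edit(banner=file)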
- self.days_since_cycle = itertools.cycle([None])
+ async def apply_banner(self, banner: RemoteObject) -> bool:
+ """
+ Apply `banner` to the guild and cache its hash if successful.
- self.daemon = None
- self._startup_task = self.bot.loop.create_task(self._initial_start_daemon())
+ Banners should always be applied via this method to ensure that the last hash is cached.
- async def _initial_start_daemon(self) -> None:
- """Checks is daemon active and when is, start it at cog load."""
- if await self.branding_configuration.get("daemon_active"):
- self.daemon = self.bot.loop.create_task(self._daemon_func())
+ Return a boolean indicating whether the application was successful.
+ """
+ success = await self.apply_asset(AssetType.BANNER, banner.download_url)
- @property
- def _daemon_running(self) -> bool:
- """True if the daemon is currently active, False otherwise."""
- return self.daemon is not None and not self.daemon.done()
+ if success:
+ await self.cache_information.set("banner_hash", banner.sha)
- async def _daemon_func(self) -> None:
+ return success
+
+ async def rotate_icons(self) -> bool:
"""
- Manage all automated behaviour of the BrandingManager cog.
+ Choose and apply the next-up icon in rotation.
+
+ We keep track of the number of times each icon has been used. The values in `cache_icons` can be understood
+ as iteration IDs. When an icon is chosen & applied, we bump its count, pushing it into the next iteration.
- Once a day, the daemon will perform the following tasks:
- - Update `current_season`
- - Poll GitHub API to see if the available branding for `current_season` has changed
- - Update assets if changes are detected (banner, guild icon, bot avatar, bot nickname)
- - Check whether it's time to cycle guild icons
+ Once the current iteration (lowest count in the cache) depletes, we move onto the next iteration.
- The internal loop runs once when activated, then periodically at the time
- given by `time_until_midnight`.
+ In the case that there is only 1 icon in the rotation and it has already been applied, do nothing.
- All method calls in the internal loop are considered safe, i.e. no errors propagate
- to the daemon's loop. The daemon itself does not perform any error handling on its own.
+ Return a boolean indicating whether a new icon was applied successfully.
"""
- await self.bot.wait_until_guild_available()
+ log.debug("Rotating icons.")
- while True:
- self.current_season = _seasons.get_current_season()
- branding_changed = await self.refresh()
+ state = await self.cache_icons.to_dict()
+ log.trace(f"Total icons in rotation: {len(state)}.")
- if branding_changed:
- await self.apply()
+ if not state: # This should only happen if the rotation was never initiated, but we can handle it gracefully.
+ log.warning("Attempted icon rotation with an empty icon cache. This indicates a logic error.")
+ return False
- elif next(self.days_since_cycle) == Branding.cycle_frequency:
- await self.cycle()
+ if len(state) == 1 and 1 in state.values():
+ log.debug("Aborting icon rotation: only 1 icon is available and has already been applied.")
+ return False
- until_midnight = time_until_midnight()
- await asyncio.sleep(until_midnight.total_seconds())
+ current_iteration = min(state.values()) # Choose iteration to draw from.
+ options = [download_url for download_url, times_used in state.items() if times_used == current_iteration]
- async def _info_embed(self) -> discord.Embed:
- """Make an informative embed representing current season."""
- info_embed = discord.Embed(description=self.current_season.description, colour=self.current_season.colour)
+ log.trace(f"Choosing from {len(options)} icons in iteration {current_iteration}.")
+ next_icon = random.choice(options)
- # If we're in a non-evergreen season, also show active months
- if self.current_season is not _seasons.SeasonBase:
- title = f"{self.current_season.season_name} ({', '.join(str(m) for m in self.current_season.months)})"
- else:
- title = self.current_season.season_name
+ success = await self.apply_asset(AssetType.ICON, next_icon)
+
+ if success:
+ await self.cache_icons.increment(next_icon) # Push the icon into the next iteration.
+
+ timestamp = Arrow.utcnow().timestamp()
+ await self.cache_information.set("last_rotation_timestamp", timestamp)
- # Use the author field to show the season's name and avatar if available
- info_embed.set_author(name=title)
+ return success
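
A worked example of the selection logic above, with a hypothetical cache state:

    state = {"url_a": 1, "url_b": 0, "url_c": 0}  # times each icon was used
    current_iteration = min(state.values())  # 0
    options = [url for url, used in state.items() if used == current_iteration]
    # options == ["url_b", "url_c"]; one is chosen at random and its count bumped to 1.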
+
+ async def maybe_rotate_icons(self) -> None:
+ """
+ Call `rotate_icons` if the configured amount of time has passed since last rotation.
+
+ We pad the calculated time difference by a few minutes to avoid off-by-a-little-bit errors. Because work
+ happens between the moment the timestamp is read and the moment it is written, the next read will likely
+ come slightly under 24 hours after the last write.
+ """
+ log.debug("Checking whether it's time for icons to rotate.")
- banner = self.banner.path if self.banner is not None else "Unavailable"
- info_embed.add_field(name="Banner", value=banner, inline=False)
+ last_rotation_timestamp = await self.cache_information.get("last_rotation_timestamp")
- icons = pretty_files(self.available_icons) or "Unavailable"
- info_embed.add_field(name="Available icons", value=icons, inline=False)
+ if last_rotation_timestamp is None: # Maiden case ~ never rotated.
+ await self.rotate_icons()
+ return
- # Only display cycle frequency if we're actually cycling
- if len(self.available_icons) > 1 and Branding.cycle_frequency:
- info_embed.set_footer(text=f"Icon cycle frequency: {Branding.cycle_frequency}")
+ last_rotation = Arrow.utcfromtimestamp(last_rotation_timestamp)
+ difference = (Arrow.utcnow() - last_rotation) + timedelta(minutes=5)
- return info_embed
+ log.trace(f"Icons last rotated at {last_rotation} (difference: {difference}).")
- async def _reset_remaining_icons(self) -> None:
- """Set `remaining_icons` to a shuffled copy of `available_icons`."""
- self.remaining_icons = random.sample(self.available_icons, k=len(self.available_icons))
+ if difference.days >= BrandingConfig.cycle_frequency:
+ await self.rotate_icons()
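
A sketch of the padded comparison with concrete values, assuming `BrandingConfig.cycle_frequency` is 1 day:

    from datetime import timedelta
    from arrow import Arrow

    last_rotation = Arrow.utcnow().shift(hours=-23, minutes=-58)
    difference = (Arrow.utcnow() - last_rotation) + timedelta(minutes=5)
    difference.days >= 1  # True: 23h58m plus the 5m padding crosses the 24h threshold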
- async def _reset_days_since_cycle(self) -> None:
+ async def initiate_icon_rotation(self, available_icons: t.List[RemoteObject]) -> None:
"""
- Reset the `days_since_cycle` iterator based on configured frequency.
+ Set up a new icon rotation.
- If the current season only has 1 icon, or if `Branding.cycle_frequency` is falsey,
- the iterator will always yield None. This signals that the icon shouldn't be cycled.
+ This function should be called whenever available icons change. This is generally the case when we enter
+ a new event, but potentially also when the assets of an ongoing event change. In such cases, a reset
+ of `cache_icons` is necessary, because it contains download URLs which may have become stale.
- Otherwise, it will yield ints in range [1, `Branding.cycle_frequency`] indefinitely.
- When the iterator yields a value equal to `Branding.cycle_frequency`, it is time to cycle.
+ This function does not upload a new icon!
"""
- if len(self.available_icons) > 1 and Branding.cycle_frequency:
- sequence = range(1, Branding.cycle_frequency + 1)
- else:
- sequence = [None]
+ log.debug("Initiating new icon rotation.")
+
+ await self.cache_icons.clear()
+
+ new_state = {icon.download_url: 0 for icon in available_icons}
+ await self.cache_icons.update(new_state)
+
+ log.trace(f"Icon rotation initiated for {len(new_state)} icons.")
- self.days_since_cycle = itertools.cycle(sequence)
+ await self.cache_information.set("icons_hash", compound_hash(available_icons))
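
`compound_hash` is defined earlier in this file and is not shown in this hunk; a plausible reading, consistent with its use for change detection here and in the daemon, is a concatenation of the icons' SHAs:

    # Presumed behaviour, for illustration only:
    # compound_hash(icons) ~ "".join(icon.sha for icon in icons)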
- async def _get_files(self, path: str, include_dirs: bool = False) -> t.Dict[str, GitHubFile]:
+ async def send_info_embed(self, channel_id: int, *, is_notification: bool) -> None:
"""
- Get files at `path` in the branding repository.
+ Send the currently cached event description to `channel_id`.
- If `include_dirs` is False (default), only returns files at `path`.
- Otherwise, will return both files and directories. Never returns symlinks.
+ When `is_notification` is True, a short contextual message for the #changelog channel is added.
- Return dict mapping from filename to corresponding `GitHubFile` instance.
- This may return an empty dict if the response status is non-200,
- or if the target directory is empty.
+ We read event information from `cache_information`. The caller is therefore responsible for making
+ sure that the cache is up-to-date before calling this function.
"""
- url = f"{_constants.BRANDING_URL}/{path}"
- async with self.bot.http_session.get(
- url, headers=_constants.HEADERS, params=_constants.PARAMS
- ) as resp:
- # Short-circuit if we get non-200 response
- if resp.status != _constants.STATUS_OK:
- log.error(f"GitHub API returned non-200 response: {resp}")
- return {}
- directory = await resp.json() # Directory at `path`
+ log.debug(f"Sending event information event to channel: {channel_id} ({is_notification=}).")
- allowed_types = {"file", "dir"} if include_dirs else {"file"}
- return {
- file["name"]: GitHubFile(file["download_url"], file["path"], file["sha"])
- for file in directory
- if file["type"] in allowed_types
- }
+ await self.bot.wait_until_guild_available()
+ channel: t.Optional[discord.TextChannel] = self.bot.get_channel(channel_id)
+
+ if channel is None:
+ log.warning(f"Cannot send event information: channel {channel_id} not found!")
+ return
+
+ log.trace(f"Destination channel: #{channel.name}.")
+
+ description = await self.cache_information.get("event_description")
+ duration = await self.cache_information.get("event_duration")
+
+ if None in (description, duration):
+ content = None
+ embed = make_embed("No event in cache", "Is the daemon enabled?", success=False)
- async def refresh(self) -> bool:
+ else:
+ content = "Python Discord is entering a new event!" if is_notification else None
+ embed = discord.Embed(description=description[:2048], colour=discord.Colour.blurple())
+ embed.set_footer(text=duration[:2048])
+
+ await channel.send(content=content, embed=embed)
+
+ async def enter_event(self, event: Event) -> t.Tuple[bool, bool]:
"""
- Synchronize available assets with branding repository.
+ Apply `event` assets and update information cache.
+
+ We cache `event` information to ensure that we:
+ * Remember which event we're currently in across restarts
+ * Provide an on-demand informational embed without re-querying the branding repository
- If the current season is not the evergreen, and lacks at least one asset,
- we use the evergreen seasonal dir as fallback for missing assets.
+ An event change should always be handled via this function, as it ensures that the cache is populated.
- Finally, if neither the seasonal nor fallback branding directories contain
- an asset, it will simply be ignored.
+ The #changelog notification is omitted when `event` is fallback, or already applied.
- Return True if the branding has changed. This will be the case when we enter
- a new season, or when something changes in the current seasons's directory
- in the branding repository.
+ Return a 2-tuple indicating whether the banner and the icon were applied successfully.
"""
- old_branding = (self.banner, self.available_icons)
- seasonal_dir = await self._get_files(self.current_season.branding_path, include_dirs=True)
+ log.info(f"Entering event: '{event.path}'.")
- # Only make a call to the fallback directory if there is something to be gained
- branding_incomplete = any(
- asset not in seasonal_dir
- for asset in (_constants.FILE_BANNER, _constants.FILE_AVATAR, _constants.SERVER_ICONS)
- )
- if branding_incomplete and self.current_season is not _seasons.SeasonBase:
- fallback_dir = await self._get_files(
- _seasons.SeasonBase.branding_path, include_dirs=True
- )
- else:
- fallback_dir = {}
+ banner_success = await self.apply_banner(event.banner) # Only one asset ~ apply directly.
- # Resolve assets in this directory, None is a safe value
- self.banner = (
- seasonal_dir.get(_constants.FILE_BANNER)
- or fallback_dir.get(_constants.FILE_BANNER)
- )
+ await self.initiate_icon_rotation(event.icons) # Prepare a new rotation.
+ icon_success = await self.rotate_icons() # Apply an icon from the new rotation.
+
+ # This will only be False in the case of a manual same-event re-synchronisation.
+ event_changed = event.path != await self.cache_information.get("event_path")
- # Now resolve server icons by making a call to the proper sub-directory
- if _constants.SERVER_ICONS in seasonal_dir:
- icons_dir = await self._get_files(
- f"{self.current_season.branding_path}/{_constants.SERVER_ICONS}"
- )
- self.available_icons = list(icons_dir.values())
+ # Cache event identity to avoid re-entry in case of restart.
+ await self.cache_information.set("event_path", event.path)
- elif _constants.SERVER_ICONS in fallback_dir:
- icons_dir = await self._get_files(
- f"{_seasons.SeasonBase.branding_path}/{_constants.SERVER_ICONS}"
- )
- self.available_icons = list(icons_dir.values())
+ # Cache information shown in the 'about' embed.
+ await self.populate_cache_event_description(event)
+ # Notify guild of new event ~ this reads the information that we cached above.
+ if event_changed and not event.meta.is_fallback:
+ await self.send_info_embed(Channels.change_log, is_notification=True)
else:
- self.available_icons = [] # This should never be the case, but an empty list is a safe value
+ log.trace("Omitting #changelog notification. Event has not changed, or new event is fallback.")
- # GitHubFile instances carry a `sha` attr so this will pick up if a file changes
- branding_changed = old_branding != (self.banner, self.available_icons)
+ return banner_success, icon_success
- if branding_changed:
- log.info(f"New branding detected (season: {self.current_season.season_name})")
- await self._reset_remaining_icons()
- await self._reset_days_since_cycle()
+ async def synchronise(self) -> t.Tuple[bool, bool]:
+ """
+ Fetch the current event and delegate to `enter_event`.
- return branding_changed
+ This is a convenience function to force synchronisation via a command. It should generally only be used
+ in a recovery scenario. In the usual case, the daemon already has an `Event` instance and can pass it
+ to `enter_event` directly.
- async def cycle(self) -> bool:
+ Return a 2-tuple indicating whether the banner and the icon were applied successfully.
"""
- Apply the next-up server icon.
+ log.debug("Synchronise: fetching current event.")
- Returns True if an icon is available and successfully gets applied, False otherwise.
- """
- if not self.available_icons:
- log.info("Cannot cycle: no icons for this season")
- return False
+ current_event, available_events = await self.repository.get_current_event()
- if not self.remaining_icons:
- log.info("Reset & shuffle remaining icons")
- await self._reset_remaining_icons()
+ await self.populate_cache_events(available_events)
- next_up = self.remaining_icons.pop(0)
- success = await self.set_icon(next_up.download_url)
+ if current_event is None:
+ log.error("Failed to fetch event. Cannot synchronise!")
+ return False, False
- return success
+ return await self.enter_event(current_event)
- async def apply(self) -> t.List[str]:
+ async def populate_cache_events(self, events: t.List[Event]) -> None:
"""
- Apply current branding to the guild and bot.
+ Clear `cache_events` and re-populate with names and durations of `events`.
- This delegates to the bot instance to do all the work. We only provide download urls
- for available assets. Assets unavailable in the branding repo will be ignored.
+ For each event, we store its name and duration string. This is the information presented to users in the
+ calendar command. If a format change is needed, it has to be done here.
- Returns a list of names of all failed assets. An asset is considered failed
- if it isn't found in the branding repo, or if something goes wrong while the
- bot is trying to apply it.
-
- An empty list denotes that all assets have been applied successfully.
+ The cache does not store the fallback event, as it is not shown in the calendar.
"""
- report = {asset: False for asset in ("banner", "icon")}
+ log.debug("Populating events cache.")
- if self.banner is not None:
- report["banner"] = await self.set_banner(self.banner.download_url)
+ await self.cache_events.clear()
- report["icon"] = await self.cycle()
+ no_fallback = [event for event in events if not event.meta.is_fallback]
+ chronological_events = sorted(no_fallback, key=attrgetter("meta.start_date"))
- failed_assets = [asset for asset, succeeded in report.items() if not succeeded]
- return failed_assets
+ log.trace(f"Writing {len(chronological_events)} events (fallback omitted).")
- @commands.has_any_role(*MODERATION_ROLES)
- @commands.group(name="branding")
- async def branding_cmds(self, ctx: commands.Context) -> None:
- """Manual branding control."""
- if not ctx.invoked_subcommand:
- await ctx.send_help(ctx.command)
+ with contextlib.suppress(ValueError): # Cache raises when updated with an empty dict.
+ await self.cache_events.update({
+ extract_event_name(event): extract_event_duration(event)
+ for event in chronological_events
+ })
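
The resulting cache maps display names to duration strings; hypothetical contents (the exact duration format comes from `extract_event_duration`, defined earlier in this file):

    # {"Black History Month": "February 1 - February 28", "Pride": "June 1 - June 30"}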
- @branding_cmds.command(name="list", aliases=["ls"])
- async def branding_list(self, ctx: commands.Context) -> None:
- """List all available seasons and branding sources."""
- embed = discord.Embed(title="Available seasons", colour=Colours.soft_green)
+ async def populate_cache_event_description(self, event: Event) -> None:
+ """
+ Cache `event` description & duration.
+
+ This should be called when entering a new event, and can be called periodically to ensure that the cache
+ holds fresh information in the case that the event remains the same, but its description changes.
- for season in _seasons.get_all_seasons():
- if season is _seasons.SeasonBase:
- active_when = "always"
- else:
- active_when = f"in {', '.join(str(m) for m in season.months)}"
+ The duration is stored formatted for the frontend. It is not intended to be used programmatically.
+ """
+ log.debug("Caching event description & duration.")
- description = (
- f"Active {active_when}\n"
- f"Branding: {season.branding_path}"
- )
- embed.add_field(name=season.season_name, value=description, inline=False)
+ await self.cache_information.set("event_description", event.meta.description)
+ await self.cache_information.set("event_duration", extract_event_duration(event))
- await ctx.send(embed=embed)
+ # endregion
+ # region: Daemon
- @branding_cmds.command(name="set")
- async def branding_set(self, ctx: commands.Context, *, season_name: t.Optional[str] = None) -> None:
+ async def maybe_start_daemon(self) -> None:
"""
- Manually set season, or reset to current if none given.
+ Start the daemon depending on cache state.
- Season search is a case-less comparison against both seasonal class name,
- and its `season_name` attr.
+ The daemon will only start if it has been explicitly enabled via a command.
+ """
+ log.debug("Checking whether daemon should start.")
- This only pre-loads the cog's internal state to the chosen season, but does not
- automatically apply the branding. As that is an expensive operation, the `apply`
- command must be called explicitly after this command finishes.
+ should_begin: t.Optional[bool] = await self.cache_information.get("daemon_active") # None if never set!
- This means that this command can be used to 'preview' a season gathering info
- about its available assets, without applying them to the guild.
+ if should_begin:
+ self.daemon_loop.start()
- If the daemon is running, it will automatically reset the season to current when
- it wakes up. The season set via this command can therefore remain 'detached' from
- what it should be - the daemon will make sure that it's set back properly.
+ def cog_unload(self) -> None:
"""
- if season_name is None:
- new_season = _seasons.get_current_season()
- else:
- new_season = _seasons.get_season(season_name)
- if new_season is None:
- raise _errors.BrandingError("No such season exists")
+ Cancel the daemon in case of cog unload.
- if self.current_season is new_season:
- raise _errors.BrandingError(f"Season {self.current_season.season_name} already active")
+ This is **not** done automatically! The daemon otherwise remains active in the background.
+ """
+ log.debug("Cog unload: cancelling daemon.")
- self.current_season = new_season
- await self.branding_refresh(ctx)
+ self.daemon_loop.cancel()
- @branding_cmds.command(name="info", aliases=["status"])
- async def branding_info(self, ctx: commands.Context) -> None:
+ async def daemon_main(self) -> None:
"""
- Show available assets for current season.
+ Synchronise guild & caches with branding repository.
+
+ Pull the currently active event from the branding repository and check whether it matches the currently
+ active event in the cache. If not, apply the new event.
- This can be used to confirm that assets have been resolved properly.
- When `apply` is used, it attempts to upload exactly the assets listed here.
+ However, it is also possible that an event's assets change while it is active. To account for such cases,
+ we check the banner & icon hashes against the currently cached values. If there is a mismatch, the
+ affected asset is re-applied.
"""
- await ctx.send(embed=await self._info_embed())
+ log.info("Daemon main: checking current event.")
- @branding_cmds.command(name="refresh")
- async def branding_refresh(self, ctx: commands.Context) -> None:
- """Sync currently available assets with branding repository."""
- async with ctx.typing():
- await self.refresh()
- await self.branding_info(ctx)
+ new_event, available_events = await self.repository.get_current_event()
+
+ await self.populate_cache_events(available_events)
+
+ if new_event is None:
+ log.warning("Daemon main: failed to get current event from branding repository, will do nothing.")
+ return
+
+ if new_event.path != await self.cache_information.get("event_path"):
+ log.debug("Daemon main: new event detected!")
+ await self.enter_event(new_event)
+ return
- @branding_cmds.command(name="apply")
- async def branding_apply(self, ctx: commands.Context) -> None:
+ await self.populate_cache_event_description(new_event) # Cache fresh frontend info in case of change.
+
+ log.trace("Daemon main: event has not changed, checking for change in assets.")
+
+ if new_event.banner.sha != await self.cache_information.get("banner_hash"):
+ log.debug("Daemon main: detected banner change.")
+ await self.apply_banner(new_event.banner)
+
+ if compound_hash(new_event.icons) != await self.cache_information.get("icons_hash"):
+ log.debug("Daemon main: detected icon change.")
+ await self.initiate_icon_rotation(new_event.icons)
+ await self.rotate_icons()
+ else:
+ await self.maybe_rotate_icons()
+
+ @tasks.loop(hours=24)
+ async def daemon_loop(self) -> None:
"""
- Apply current season's branding to the guild.
+ Call `daemon_main` every 24 hours.
- Use `info` to check which assets will be applied. Shows which assets have
- failed to be applied, if any.
+ The scheduler maintains an exact 24-hour frequency even if this coroutine takes time to complete. If the
+ coroutine is started at 00:01 and completes at 00:05, it will still be started at 00:01 the next day.
"""
- async with ctx.typing():
- failed_assets = await self.apply()
- if failed_assets:
- raise _errors.BrandingError(
- f"Failed to apply following assets: {', '.join(failed_assets)}"
- )
+ log.trace("Daemon loop: calling daemon main.")
- response = discord.Embed(description=f"All assets applied {Emojis.ok_hand}", colour=Colours.soft_green)
- await ctx.send(embed=response)
+ try:
+ await self.daemon_main()
+ except Exception:
+ log.exception("Daemon loop: failed with an unhandled exception!")
- @branding_cmds.command(name="cycle")
- async def branding_cycle(self, ctx: commands.Context) -> None:
+ @daemon_loop.before_loop
+ async def daemon_before(self) -> None:
"""
- Apply the next-up guild icon, if multiple are available.
+ Call `daemon_loop` immediately, then block the loop until the next-up UTC midnight.
- The order is random.
+ The first iteration is invoked directly such that synchronisation happens immediately after daemon start.
+ We then calculate the time until the next-up midnight and sleep before letting `daemon_loop` begin.
"""
- async with ctx.typing():
- success = await self.cycle()
- if not success:
- raise _errors.BrandingError("Failed to cycle icon")
+ log.trace("Daemon before: performing start-up iteration.")
+
+ await self.daemon_loop()
- response = discord.Embed(description=f"Success {Emojis.ok_hand}", colour=Colours.soft_green)
- await ctx.send(embed=response)
+ log.trace("Daemon before: calculating time to sleep before loop begins.")
+ now = Arrow.utcnow()
- @branding_cmds.group(name="daemon", aliases=["d", "task"])
- async def daemon_group(self, ctx: commands.Context) -> None:
- """Control the background daemon."""
+ # The actual midnight moment is offset into the future to prevent issues with imprecise sleep.
+ tomorrow = now.shift(days=1)
+ midnight = tomorrow.replace(hour=0, minute=1, second=0, microsecond=0)
+
+ sleep_secs = (midnight - now).total_seconds()
+ log.trace(f"Daemon before: sleeping {sleep_secs} seconds before next-up midnight: {midnight}.")
+
+ await asyncio.sleep(sleep_secs)
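
A worked example of the sleep computation, with illustrative times:

    from arrow import Arrow

    now = Arrow(2021, 4, 17, 15, 30)  # 15:30 UTC
    midnight = now.shift(days=1).replace(hour=0, minute=1, second=0, microsecond=0)
    (midnight - now).total_seconds()  # 30660.0 ~ sleep roughly 8.5 hours until 00:01 UTC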
+
+ # endregion
+ # region: Command interface (branding)
+
+ @commands.group(name="branding")
+ async def branding_group(self, ctx: commands.Context) -> None:
+ """Control the branding cog."""
if not ctx.invoked_subcommand:
await ctx.send_help(ctx.command)
- @daemon_group.command(name="status")
- async def daemon_status(self, ctx: commands.Context) -> None:
- """Check whether daemon is currently active."""
- if self._daemon_running:
- remaining_time = (arrow.utcnow() + time_until_midnight()).humanize()
- response = discord.Embed(description=f"Daemon running {Emojis.ok_hand}", colour=Colours.soft_green)
- response.set_footer(text=f"Next refresh {remaining_time}")
- else:
- response = discord.Embed(description="Daemon not running", colour=Colours.soft_red)
+ @branding_group.command(name="about", aliases=("current", "event"))
+ async def branding_about_cmd(self, ctx: commands.Context) -> None:
+ """Show the current event's description and duration."""
+ await self.send_info_embed(ctx.channel.id, is_notification=False)
+
+ @commands.has_any_role(*MODERATION_ROLES)
+ @branding_group.command(name="sync")
+ async def branding_sync_cmd(self, ctx: commands.Context) -> None:
+ """
+ Force branding synchronisation.
- await ctx.send(embed=response)
+ Show which assets have failed to synchronise, if any.
+ """
+ async with ctx.typing():
+ banner_success, icon_success = await self.synchronise()
- @daemon_group.command(name="start")
- async def daemon_start(self, ctx: commands.Context) -> None:
- """If the daemon isn't running, start it."""
- if self._daemon_running:
- raise _errors.BrandingError("Daemon already running!")
+ failed_assets = ", ".join(
+ name
+ for name, status in [("banner", banner_success), ("icon", icon_success)]
+ if status is False
+ )
- self.daemon = self.bot.loop.create_task(self._daemon_func())
- await self.branding_configuration.set("daemon_active", True)
+ if failed_assets:
+ resp = make_embed("Synchronisation unsuccessful", f"Failed to apply: {failed_assets}.", success=False)
+ resp.set_footer(text="Check log for details.")
+ else:
+ resp = make_embed("Synchronisation successful", "Assets have been applied.", success=True)
- response = discord.Embed(description=f"Daemon started {Emojis.ok_hand}", colour=Colours.soft_green)
- await ctx.send(embed=response)
+ await ctx.send(embed=resp)
- @daemon_group.command(name="stop")
- async def daemon_stop(self, ctx: commands.Context) -> None:
- """If the daemon is running, stop it."""
- if not self._daemon_running:
- raise _errors.BrandingError("Daemon not running!")
+ # endregion
+ # region: Command interface (branding calendar)
- self.daemon.cancel()
- await self.branding_configuration.set("daemon_active", False)
+ @branding_group.group(name="calendar", aliases=("schedule", "events"))
+ async def branding_calendar_group(self, ctx: commands.Context) -> None:
+ """
+ Show the current event calendar.
- response = discord.Embed(description=f"Daemon stopped {Emojis.ok_hand}", colour=Colours.soft_green)
- await ctx.send(embed=response)
+ We draw event information from `cache_events` and use each key-value pair to create a field in the response
+ embed. As such, we do not need to query the API to get event information. The cache is automatically
+ re-populated by the daemon whenever it makes a request. A moderator+ can also explicitly request a cache
+ refresh using the 'refresh' subcommand.
- async def _fetch_image(self, url: str) -> bytes:
- """Retrieve and read image from `url`."""
- log.debug(f"Getting image from: {url}")
- async with self.bot.http_session.get(url) as resp:
- return await resp.read()
+ Due to Discord limitations, we only show up to 25 events. This is entirely sufficient at the time of writing.
+ In the case that we find ourselves with more than 25 events, a warning log will alert core devs.
- async def _apply_asset(self, target: discord.Guild, asset: _constants.AssetType, url: str) -> bool:
+ In the future, we may be interested in a field-paginating solution.
"""
- Internal method for applying media assets to the guild.
+ if ctx.invoked_subcommand:
+ # If you're wondering why this works: when the 'refresh' subcommand eventually re-invokes
+ # this group, the attribute will be automatically set to None by the framework.
+ return
+
+ available_events = await self.cache_events.to_dict()
+ log.trace(f"Found {len(available_events)} cached events available for calendar view.")
+
+ if not available_events:
+ resp = make_embed("No events found!", "Cache may be empty, try `branding calendar refresh`.", success=False)
+ await ctx.send(embed=resp)
+ return
+
+ embed = discord.Embed(title="Current event calendar", colour=discord.Colour.blurple())
- This shouldn't be called directly. The purpose of this method is mainly generic
- error handling to reduce needless code repetition.
+ # Because Discord embeds can only contain up to 25 fields, we only show the first 25.
+ first_25 = list(available_events.items())[:25]
- Return True if upload was successful, False otherwise.
+ if len(first_25) != len(available_events): # Alert core devs that a paginating solution is now necessary.
+ log.warning(f"There are {len(available_events)} events, but the calendar view can only display 25.")
+
+ for name, duration in first_25:
+ embed.add_field(name=name[:256], value=duration[:1024])
+
+ embed.set_footer(text="Otherwise, the fallback season is used.")
+
+ await ctx.send(embed=embed)
+
+ @commands.has_any_role(*MODERATION_ROLES)
+ @branding_calendar_group.command(name="refresh")
+ async def branding_calendar_refresh_cmd(self, ctx: commands.Context) -> None:
"""
- log.info(f"Attempting to set {asset.name}: {url}")
+ Refresh event cache and show current event calendar.
- kwargs = {asset.value: await self._fetch_image(url)}
- try:
- async with async_timeout.timeout(5):
- await target.edit(**kwargs)
+ Supplementary subcommand allowing force-refreshing the event cache. Implemented as a subcommand because
+ unlike the supergroup, it requires moderator privileges.
+ """
+ log.info("Performing command-requested event cache refresh.")
- except asyncio.TimeoutError:
- log.info("Asset upload timed out")
- return False
+ async with ctx.typing():
+ available_events = await self.repository.get_events()
+ await self.populate_cache_events(available_events)
- except discord.HTTPException as discord_error:
- log.exception("Asset upload failed", exc_info=discord_error)
- return False
+ await ctx.invoke(self.branding_calendar_group)
+
+ # endregion
+ # region: Command interface (branding daemon)
+
+ @commands.has_any_role(*MODERATION_ROLES)
+ @branding_group.group(name="daemon", aliases=("d",))
+ async def branding_daemon_group(self, ctx: commands.Context) -> None:
+ """Control the branding cog's daemon."""
+ if not ctx.invoked_subcommand:
+ await ctx.send_help(ctx.command)
+
+ @branding_daemon_group.command(name="enable", aliases=("start", "on"))
+ async def branding_daemon_enable_cmd(self, ctx: commands.Context) -> None:
+ """Enable the branding daemon."""
+ await self.cache_information.set("daemon_active", True)
+ if self.daemon_loop.is_running():
+ resp = make_embed("Daemon is already enabled!", "", success=False)
else:
- log.info("Asset successfully applied")
- return True
+ self.daemon_loop.start()
+ resp = make_embed("Daemon enabled!", "It will now automatically awaken on start-up.", success=True)
- @_decorators.mock_in_debug(return_value=True)
- async def set_banner(self, url: str) -> bool:
- """Set the guild's banner to image at `url`."""
- guild = self.bot.get_guild(Guild.id)
- if guild is None:
- log.info("Failed to get guild instance, aborting asset upload")
- return False
+ await ctx.send(embed=resp)
- return await self._apply_asset(guild, _constants.AssetType.BANNER, url)
+ @branding_daemon_group.command(name="disable", aliases=("stop", "off"))
+ async def branding_daemon_disable_cmd(self, ctx: commands.Context) -> None:
+ """Disable the branding daemon."""
+ await self.cache_information.set("daemon_active", False)
- @_decorators.mock_in_debug(return_value=True)
- async def set_icon(self, url: str) -> bool:
- """Sets the guild's icon to image at `url`."""
- guild = self.bot.get_guild(Guild.id)
- if guild is None:
- log.info("Failed to get guild instance, aborting asset upload")
- return False
+ if self.daemon_loop.is_running():
+ self.daemon_loop.cancel()
+ resp = make_embed("Daemon disabled!", "It will not awaken on start-up.", success=True)
+ else:
+ resp = make_embed("Daemon is already disabled!", "", success=False)
- return await self._apply_asset(guild, _constants.AssetType.SERVER_ICON, url)
+ await ctx.send(embed=resp)
- def cog_unload(self) -> None:
- """Cancels startup and daemon task."""
- self._startup_task.cancel()
- if self.daemon is not None:
- self.daemon.cancel()
+ @branding_daemon_group.command(name="status")
+ async def branding_daemon_status_cmd(self, ctx: commands.Context) -> None:
+ """Check whether the daemon is currently enabled."""
+ if self.daemon_loop.is_running():
+ resp = make_embed("Daemon is enabled", "Use `branding daemon disable` to stop.", success=True)
+ else:
+ resp = make_embed("Daemon is disabled", "Use `branding daemon enable` to start.", success=False)
+
+ await ctx.send(embed=resp)
+
+ # endregion
diff --git a/bot/exts/backend/branding/_constants.py b/bot/exts/backend/branding/_constants.py
deleted file mode 100644
index dbc7615f2..000000000
--- a/bot/exts/backend/branding/_constants.py
+++ /dev/null
@@ -1,51 +0,0 @@
-from enum import Enum, IntEnum
-
-from bot.constants import Keys
-
-
-class Month(IntEnum):
- """All month constants for seasons."""
-
- JANUARY = 1
- FEBRUARY = 2
- MARCH = 3
- APRIL = 4
- MAY = 5
- JUNE = 6
- JULY = 7
- AUGUST = 8
- SEPTEMBER = 9
- OCTOBER = 10
- NOVEMBER = 11
- DECEMBER = 12
-
- def __str__(self) -> str:
- return self.name.title()
-
-
-class AssetType(Enum):
- """
- Discord media assets.
-
- The values match exactly the kwarg keys that can be passed to `Guild.edit`.
- """
-
- BANNER = "banner"
- SERVER_ICON = "icon"
-
-
-STATUS_OK = 200 # HTTP status code
-
-FILE_BANNER = "banner.png"
-FILE_AVATAR = "avatar.png"
-SERVER_ICONS = "server_icons"
-
-BRANDING_URL = "https://api.github.com/repos/python-discord/branding/contents"
-
-PARAMS = {"ref": "master"} # Target branch
-HEADERS = {"Accept": "application/vnd.github.v3+json"} # Ensure we use API v3
-
-# A GitHub token is not necessary for the cog to operate,
-# unauthorized requests are however limited to 60 per hour
-if Keys.github:
- HEADERS["Authorization"] = f"token {Keys.github}"
diff --git a/bot/exts/backend/branding/_decorators.py b/bot/exts/backend/branding/_decorators.py
deleted file mode 100644
index 6a1e7e869..000000000
--- a/bot/exts/backend/branding/_decorators.py
+++ /dev/null
@@ -1,27 +0,0 @@
-import functools
-import logging
-import typing as t
-
-from bot.constants import DEBUG_MODE
-
-log = logging.getLogger(__name__)
-
-
-def mock_in_debug(return_value: t.Any) -> t.Callable:
- """
- Short-circuit function execution if in debug mode and return `return_value`.
-
- The original function name, and the incoming args and kwargs are DEBUG level logged
- upon each call. This is useful for expensive operations, i.e. media asset uploads
- that are prone to rate-limits but need to be tested extensively.
- """
- def decorator(func: t.Callable) -> t.Callable:
- @functools.wraps(func)
- async def wrapped(*args, **kwargs) -> t.Any:
- """Short-circuit and log if in debug mode."""
- if DEBUG_MODE:
- log.debug(f"Function {func.__name__} called with args: {args}, kwargs: {kwargs}")
- return return_value
- return await func(*args, **kwargs)
- return wrapped
- return decorator
diff --git a/bot/exts/backend/branding/_errors.py b/bot/exts/backend/branding/_errors.py
deleted file mode 100644
index 7cd271af3..000000000
--- a/bot/exts/backend/branding/_errors.py
+++ /dev/null
@@ -1,2 +0,0 @@
-class BrandingError(Exception):
- """Exception raised by the BrandingManager cog."""
diff --git a/bot/exts/backend/branding/_repository.py b/bot/exts/backend/branding/_repository.py
new file mode 100644
index 000000000..7b09d4641
--- /dev/null
+++ b/bot/exts/backend/branding/_repository.py
@@ -0,0 +1,240 @@
+import logging
+import typing as t
+from datetime import date, datetime
+
+import frontmatter
+
+from bot.bot import Bot
+from bot.constants import Keys
+from bot.errors import BrandingMisconfiguration
+
+# Base URL for requests into the branding repository.
+BRANDING_URL = "https://api.github.com/repos/python-discord/branding/contents"
+
+PARAMS = {"ref": "main"} # Target branch.
+HEADERS = {"Accept": "application/vnd.github.v3+json"} # Ensure we use API v3.
+
+# A GitHub token is not necessary. However, unauthorized requests are limited to 60 per hour.
+if Keys.github:
+ HEADERS["Authorization"] = f"token {Keys.github}"
+
+# Since event periods are year-agnostic, we parse them into `datetime` objects with a manually inserted year.
+# Please note that this is intentionally a leap year to allow Feb 29 to be valid.
+ARBITRARY_YEAR = 2020
+
+# Format used to parse date strings after we inject `ARBITRARY_YEAR` at the end.
+DATE_FMT = "%B %d %Y" # Ex: July 10 2020
+
+log = logging.getLogger(__name__)
+
+
+class RemoteObject:
+ """
+ Remote file or directory on GitHub.
+
+ The annotations match keys in the response JSON that we're interested in.
+ """
+
+ sha: str # Hash helps us detect asset change.
+ name: str # Filename.
+ path: str # Path from repo root.
+ type: str # Either 'file' or 'dir'.
+ download_url: t.Optional[str] # If type is 'dir', this is None!
+
+ def __init__(self, dictionary: t.Dict[str, t.Any]) -> None:
+ """Initialize by grabbing annotated attributes from `dictionary`."""
+ missing_keys = self.__annotations__.keys() - dictionary.keys()
+ if missing_keys:
+ raise KeyError(f"Fetched object lacks expected keys: {missing_keys}")
+ for annotation in self.__annotations__:
+ setattr(self, annotation, dictionary[annotation])
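
A sketch of constructing one of these from a hypothetical, truncated GitHub contents entry:

    payload = {
        "sha": "7b09d4641...", "name": "banner.png", "path": "events/pride/banner.png",
        "type": "file", "download_url": "https://raw.githubusercontent.com/...",
    }
    banner = RemoteObject(payload)  # banner.sha, banner.download_url, etc. are now set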
+
+
+class MetaFile(t.NamedTuple):
+ """Attributes defined in a 'meta.md' file."""
+
+ is_fallback: bool
+ start_date: t.Optional[date]
+ end_date: t.Optional[date]
+ description: str # Markdown event description.
+
+
+class Event(t.NamedTuple):
+ """Event defined in the branding repository."""
+
+ path: str # Path from repo root where event lives. This is the event's identity.
+ meta: MetaFile
+ banner: RemoteObject
+ icons: t.List[RemoteObject]
+
+ def __str__(self) -> str:
+ return f"<Event at '{self.path}'>"
+
+
+class BrandingRepository:
+ """
+ Branding repository abstraction.
+
+ This class represents the branding repository's main branch and exposes available events and assets
+ as objects. It performs the necessary amount of validation to ensure that a misconfigured event
+ isn't returned. Such events are simply ignored, and will be substituted with the fallback event,
+ if available. Warning logs will inform core developers if a misconfigured event is encountered.
+
+ Colliding events cause no special behaviour. In such cases, the first found active event is returned.
+ We work with the assumption that the branding repository checks for such conflicts and prevents them
+ from reaching the main branch.
+
+ This class keeps no internal state. All `get_current_event` calls will result in GitHub API requests.
+ The caller is therefore responsible for caching information to prevent API abuse.
+
+ Requests are made using the HTTP session looked up on the bot instance.
+ """
+
+ def __init__(self, bot: Bot) -> None:
+ self.bot = bot
+
+ async def fetch_directory(self, path: str, types: t.Container[str] = ("file", "dir")) -> t.Dict[str, RemoteObject]:
+ """
+ Fetch directory found at `path` in the branding repository.
+
+ Raise an exception if the request fails, or if the response lacks the expected keys.
+
+ Passing custom `types` allows getting only files or directories. By default, both are included.
+ """
+ full_url = f"{BRANDING_URL}/{path}"
+ log.debug(f"Fetching directory from branding repository: '{full_url}'.")
+
+ async with self.bot.http_session.get(full_url, params=PARAMS, headers=HEADERS) as response:
+ if response.status != 200:
+ raise RuntimeError(f"Failed to fetch directory due to status: {response.status}")
+
+ log.debug("Fetch successful, reading JSON response.")
+ json_directory = await response.json()
+
+ return {file["name"]: RemoteObject(file) for file in json_directory if file["type"] in types}
+
+ async def fetch_file(self, download_url: str) -> bytes:
+ """
+ Fetch file as bytes from `download_url`.
+
+ Raise an exception if the request does not succeed.
+ """
+ log.debug(f"Fetching file from branding repository: '{download_url}'.")
+
+ async with self.bot.http_session.get(download_url, params=PARAMS, headers=HEADERS) as response:
+ if response.status != 200:
+ raise RuntimeError(f"Failed to fetch file due to status: {response.status}")
+
+ log.debug("Fetch successful, reading payload.")
+ return await response.read()
+
+ def parse_meta_file(self, raw_file: bytes) -> MetaFile:
+ """
+ Parse a 'meta.md' file from raw bytes.
+
+ The caller is responsible for handling errors caused by misconfiguration.
+ """
+ attrs, description = frontmatter.parse(raw_file, encoding="UTF-8")
+
+ if not description:
+ raise BrandingMisconfiguration("No description found in 'meta.md'!")
+
+ if attrs.get("fallback", False):
+ return MetaFile(is_fallback=True, start_date=None, end_date=None, description=description)
+
+ start_date_raw = attrs.get("start_date")
+ end_date_raw = attrs.get("end_date")
+
+ if None in (start_date_raw, end_date_raw):
+ raise BrandingMisconfiguration("Non-fallback event doesn't have start and end dates defined!")
+
+ # We extend the configured month & day with an arbitrary leap year, allowing a datetime object to exist.
+ # This may raise errors if misconfigured. We let the caller handle such cases.
+ start_date = datetime.strptime(f"{start_date_raw} {ARBITRARY_YEAR}", DATE_FMT).date()
+ end_date = datetime.strptime(f"{end_date_raw} {ARBITRARY_YEAR}", DATE_FMT).date()
+
+ return MetaFile(is_fallback=False, start_date=start_date, end_date=end_date, description=description)
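
A hypothetical 'meta.md' that this parser would accept, with YAML front matter followed by the Markdown description:

    ---
    start_date: June 1
    end_date: June 30
    ---
    Markdown description shown in the 'about' embed.

A fallback event would instead set `fallback: true` and omit the dates.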
+
+ async def construct_event(self, directory: RemoteObject) -> Event:
+ """
+ Construct an `Event` instance from an event `directory`.
+
+ The caller is responsible for handling errors caused by misconfiguration.
+ """
+ contents = await self.fetch_directory(directory.path)
+
+ missing_assets = {"meta.md", "banner.png", "server_icons"} - contents.keys()
+
+ if missing_assets:
+ raise BrandingMisconfiguration(f"Directory is missing following assets: {missing_assets}")
+
+ server_icons = await self.fetch_directory(contents["server_icons"].path, types=("file",))
+
+ if len(server_icons) == 0:
+ raise BrandingMisconfiguration("Found no server icons!")
+
+ meta_bytes = await self.fetch_file(contents["meta.md"].download_url)
+
+ meta_file = self.parse_meta_file(meta_bytes)
+
+ return Event(directory.path, meta_file, contents["banner.png"], list(server_icons.values()))
+
+ async def get_events(self) -> t.List[Event]:
+ """
+ Discover available events in the branding repository.
+
+ Misconfigured events are skipped. May return an empty list in the catastrophic case.
+ """
+ log.debug("Discovering events in branding repository.")
+
+ try:
+ event_directories = await self.fetch_directory("events", types=("dir",)) # Skip files.
+ except Exception:
+ log.exception("Failed to fetch 'events' directory.")
+ return []
+
+ instances: t.List[Event] = []
+
+ for event_directory in event_directories.values():
+ log.trace(f"Attempting to construct event from directory: '{event_directory.path}'.")
+ try:
+ instance = await self.construct_event(event_directory)
+ except Exception as exc:
+ log.warning(f"Could not construct event '{event_directory.path}'.", exc_info=exc)
+ else:
+ instances.append(instance)
+
+ return instances
+
+ async def get_current_event(self) -> t.Tuple[t.Optional[Event], t.List[Event]]:
+ """
+ Get the currently active event, or the fallback event.
+
+ The second return value is a list of all available events. The caller may discard it, if not needed.
+ Returning all events alongside the current one prevents having to query the API twice in some cases.
+
+ The current event may be None in the case that no event is active, and no fallback event is found.
+ """
+ utc_now = datetime.utcnow()
+ log.debug(f"Finding active event for: {utc_now}.")
+
+ # Construct an object in the arbitrary year for the purpose of comparison.
+ lookup_now = date(year=ARBITRARY_YEAR, month=utc_now.month, day=utc_now.day)
+ log.trace(f"Lookup object in arbitrary year: {lookup_now}.")
+
+ available_events = await self.get_events()
+ log.trace(f"Found {len(available_events)} available events.")
+
+ for event in available_events:
+ meta = event.meta
+ if not meta.is_fallback and (meta.start_date <= lookup_now <= meta.end_date):
+ return event, available_events
+
+ log.trace("No active event found. Looking for fallback event.")
+
+ for event in available_events:
+ if event.meta.is_fallback:
+ return event, available_events
+
+ log.warning("No event is currently active and no fallback event was found!")
+ return None, available_events
diff --git a/bot/exts/backend/branding/_seasons.py b/bot/exts/backend/branding/_seasons.py
deleted file mode 100644
index 5f6256b30..000000000
--- a/bot/exts/backend/branding/_seasons.py
+++ /dev/null
@@ -1,175 +0,0 @@
-import logging
-import typing as t
-from datetime import datetime
-
-from bot.constants import Colours
-from bot.exts.backend.branding._constants import Month
-from bot.exts.backend.branding._errors import BrandingError
-
-log = logging.getLogger(__name__)
-
-
-class SeasonBase:
- """
- Base for Seasonal classes.
-
- This serves as the off-season fallback for when no specific
- seasons are active.
-
- Seasons are 'registered' simply by inheriting from `SeasonBase`.
- We discover them by calling `__subclasses__`.
- """
-
- season_name: str = "Evergreen"
-
- colour: str = Colours.soft_green
- description: str = "The default season!"
-
- branding_path: str = "seasonal/evergreen"
-
- months: t.Set[Month] = set(Month)
-
-
-class Christmas(SeasonBase):
- """Branding for December."""
-
- season_name = "Festive season"
-
- colour = Colours.soft_red
- description = (
- "The time is here to get into the festive spirit! No matter who you are, where you are, "
- "or what beliefs you may follow, we hope every one of you enjoy this festive season!"
- )
-
- branding_path = "seasonal/christmas"
-
- months = {Month.DECEMBER}
-
-
-class Easter(SeasonBase):
- """Branding for April."""
-
- season_name = "Easter"
-
- colour = Colours.bright_green
- description = (
- "Bunny here, bunny there, bunny everywhere! Here at Python Discord, we celebrate "
- "our version of Easter during the entire month of April."
- )
-
- branding_path = "seasonal/easter"
-
- months = {Month.APRIL}
-
-
-class Halloween(SeasonBase):
- """Branding for October."""
-
- season_name = "Halloween"
-
- colour = Colours.orange
- description = "Trick or treat?!"
-
- branding_path = "seasonal/halloween"
-
- months = {Month.OCTOBER}
-
-
-class Pride(SeasonBase):
- """Branding for June."""
-
- season_name = "Pride"
-
- colour = Colours.pink
- description = (
- "The month of June is a special month for us at Python Discord. It is very important to us "
- "that everyone feels welcome here, no matter their origin, identity or sexuality. During the "
- "month of June, while some of you are participating in Pride festivals across the world, "
- "we will be celebrating individuality and commemorating the history and challenges "
- "of the LGBTQ+ community with a Pride event of our own!"
- )
-
- branding_path = "seasonal/pride"
-
- months = {Month.JUNE}
-
-
-class Valentines(SeasonBase):
- """Branding for February."""
-
- season_name = "Valentines"
-
- colour = Colours.pink
- description = "Love is in the air!"
-
- branding_path = "seasonal/valentines"
-
- months = {Month.FEBRUARY}
-
-
-class Wildcard(SeasonBase):
- """Branding for August."""
-
- season_name = "Wildcard"
-
- colour = Colours.purple
- description = "A season full of surprises!"
-
- months = {Month.AUGUST}
-
-
-def get_all_seasons() -> t.List[t.Type[SeasonBase]]:
- """Give all available season classes."""
- return [SeasonBase] + SeasonBase.__subclasses__()
-
-
-def get_current_season() -> t.Type[SeasonBase]:
- """Give active season, based on current UTC month."""
- current_month = Month(datetime.utcnow().month)
-
- active_seasons = tuple(
- season
- for season in SeasonBase.__subclasses__()
- if current_month in season.months
- )
-
- if not active_seasons:
- return SeasonBase
-
- return active_seasons[0]
-
-
-def get_season(name: str) -> t.Optional[t.Type[SeasonBase]]:
- """
- Give season such that its class name or its `season_name` attr match `name` (caseless).
-
- If no such season exists, return None.
- """
- name = name.casefold()
-
- for season in get_all_seasons():
- matches = (season.__name__.casefold(), season.season_name.casefold())
-
- if name in matches:
- return season
-
-
-def _validate_season_overlap() -> None:
- """
- Raise BrandingError if there are any colliding seasons.
-
- This serves as a local test to ensure that seasons haven't been misconfigured.
- """
- month_to_season = {}
-
- for season in SeasonBase.__subclasses__():
- for month in season.months:
- colliding_season = month_to_season.get(month)
-
- if colliding_season:
- raise BrandingError(f"Season {season} collides with {colliding_season} in {month.name}")
- else:
- month_to_season[month] = season
-
-
-_validate_season_overlap()
diff --git a/bot/exts/backend/error_handler.py b/bot/exts/backend/error_handler.py
index d2cce5558..da0e94a7e 100644
--- a/bot/exts/backend/error_handler.py
+++ b/bot/exts/backend/error_handler.py
@@ -1,7 +1,6 @@
import contextlib
import difflib
import logging
-import random
import typing as t
from discord import Embed
@@ -10,11 +9,10 @@ from sentry_sdk import push_scope
from bot.api import ResponseCodeError
from bot.bot import Bot
-from bot.constants import Colours, ERROR_REPLIES, Icons, MODERATION_ROLES
+from bot.constants import Colours, Icons, MODERATION_ROLES
from bot.converters import TagNameConverter
from bot.errors import InvalidInfractedUser, LockedResourceError
-from bot.exts.backend.branding._errors import BrandingError
-from bot.utils.checks import InWhitelistCheckFailure
+from bot.utils.checks import ContextCheckFailure
log = logging.getLogger(__name__)
@@ -79,9 +77,6 @@ class ErrorHandler(Cog):
await self.handle_api_error(ctx, e.original)
elif isinstance(e.original, LockedResourceError):
await ctx.send(f"{e.original} Please wait for it to finish and try again later.")
- elif isinstance(e.original, BrandingError):
- await ctx.send(embed=self._get_error_embed(random.choice(ERROR_REPLIES), str(e.original)))
- return
elif isinstance(e.original, InvalidInfractedUser):
await ctx.send(f"Cannot infract that user. {e.original.reason}")
else:
@@ -239,10 +234,12 @@ class ErrorHandler(Cog):
elif isinstance(e, errors.BadUnionArgument):
embed = self._get_error_embed("Bad argument", f"{e}\n{e.errors[-1]}")
await ctx.send(embed=embed)
+ await prepared_help_command
self.bot.stats.incr("errors.bad_union_argument")
elif isinstance(e, errors.ArgumentParsingError):
embed = self._get_error_embed("Argument parsing error", str(e))
await ctx.send(embed=embed)
+ prepared_help_command.close()
self.bot.stats.incr("errors.argument_parsing_error")
else:
embed = self._get_error_embed(
@@ -277,7 +274,7 @@ class ErrorHandler(Cog):
await ctx.send(
"Sorry, it looks like I don't have the permissions or roles I need to do that."
)
- elif isinstance(e, (InWhitelistCheckFailure, errors.NoPrivateMessage)):
+ elif isinstance(e, (ContextCheckFailure, errors.NoPrivateMessage)):
ctx.bot.stats.incr("errors.wrong_channel_or_dm_error")
await ctx.send(e)
diff --git a/bot/exts/backend/logging.py b/bot/exts/backend/logging.py
index 94fa2b139..823f14ea4 100644
--- a/bot/exts/backend/logging.py
+++ b/bot/exts/backend/logging.py
@@ -29,7 +29,7 @@ class Logging(Cog):
url="https://github.com/python-discord/bot",
icon_url=(
"https://raw.githubusercontent.com/"
- "python-discord/branding/master/logos/logo_circle/logo_circle_large.png"
+ "python-discord/branding/main/logos/logo_circle/logo_circle_large.png"
)
)
diff --git a/bot/exts/filters/antispam.py b/bot/exts/filters/antispam.py
index af8528a68..7555e25a2 100644
--- a/bot/exts/filters/antispam.py
+++ b/bot/exts/filters/antispam.py
@@ -3,7 +3,7 @@ import logging
from collections.abc import Mapping
from dataclasses import dataclass, field
from datetime import datetime, timedelta
-from operator import itemgetter
+from operator import attrgetter, itemgetter
from typing import Dict, Iterable, List, Set
from discord import Colour, Member, Message, NotFound, Object, TextChannel
@@ -18,6 +18,7 @@ from bot.constants import (
)
from bot.converters import Duration
from bot.exts.moderation.modlog import ModLog
+from bot.utils import lock, scheduling
from bot.utils.messages import format_user, send_attachments
@@ -114,7 +115,7 @@ class AntiSpam(Cog):
self.message_deletion_queue = dict()
- self.bot.loop.create_task(self.alert_on_validation_error())
+ self.bot.loop.create_task(self.alert_on_validation_error(), name="AntiSpam.alert_on_validation_error")
@property
def mod_log(self) -> ModLog:
@@ -191,7 +192,10 @@ class AntiSpam(Cog):
if channel.id not in self.message_deletion_queue:
log.trace(f"Creating queue for channel `{channel.id}`")
self.message_deletion_queue[message.channel.id] = DeletionContext(channel)
- self.bot.loop.create_task(self._process_deletion_context(message.channel.id))
+ scheduling.create_task(
+ self._process_deletion_context(message.channel.id),
+ name=f"AntiSpam._process_deletion_context({message.channel.id})"
+ )
# Add the relevant messages of this trigger to the Deletion Context
await self.message_deletion_queue[message.channel.id].add(
@@ -201,16 +205,15 @@ class AntiSpam(Cog):
)
for member in members:
-
- # Fire it off as a background task to ensure
- # that the sleep doesn't block further tasks
- self.bot.loop.create_task(
- self.punish(message, member, full_reason)
+ scheduling.create_task(
+ self.punish(message, member, full_reason),
+ name=f"AntiSpam.punish(message={message.id}, member={member.id}, rule={rule_name})"
)
await self.maybe_delete_messages(channel, relevant_messages)
break
+ @lock.lock_arg("antispam.punish", "member", attrgetter("id"))
async def punish(self, msg: Message, member: Member, reason: str) -> None:
"""Punishes the given member for triggering an antispam rule."""
if not any(role.id == self.muted_role.id for role in member.roles):
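The new `lock.lock_arg` decorator keys a mutex on the punished member's id, so duplicate punishments for one member queue up instead of racing. A hedged sketch of what such a decorator might look like (the helper below is illustrative, not the bot's actual `bot.utils.lock` implementation):

```python
import asyncio
import inspect
from collections import defaultdict
from functools import wraps

_locks = defaultdict(asyncio.Lock)

def lock_arg(namespace: str, arg_name: str, key_func=lambda value: value):
    """Serialise concurrent calls whose `arg_name` argument maps to the same key."""
    def decorator(func):
        signature = inspect.signature(func)

        @wraps(func)
        async def wrapper(*args, **kwargs):
            bound = signature.bind(*args, **kwargs)
            key = (namespace, key_func(bound.arguments[arg_name]))
            # Same key (e.g. same member id) -> sequential; different keys run concurrently.
            async with _locks[key]:
                return await func(*args, **kwargs)
        return wrapper
    return decorator
```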
diff --git a/bot/exts/filters/filtering.py b/bot/exts/filters/filtering.py
index 3527bf8bb..464732453 100644
--- a/bot/exts/filters/filtering.py
+++ b/bot/exts/filters/filtering.py
@@ -2,10 +2,11 @@ import asyncio
import logging
import re
from datetime import datetime, timedelta
-from typing import Any, Dict, List, Mapping, NamedTuple, Optional, Union
+from typing import Any, Dict, List, Mapping, NamedTuple, Optional, Tuple, Union
import dateutil
import discord.errors
+import regex
from async_rediscache import RedisCache
from dateutil.relativedelta import relativedelta
from discord import Colour, HTTPException, Member, Message, NotFound, TextChannel
@@ -34,7 +35,11 @@ CODE_BLOCK_RE = re.compile(
EVERYONE_PING_RE = re.compile(rf"@everyone|<@&{Guild.id}>|@here")
SPOILER_RE = re.compile(r"(\|\|.+?\|\|)", re.DOTALL)
URL_RE = re.compile(r"(https?://[^\s]+)", flags=re.IGNORECASE)
-ZALGO_RE = re.compile(r"[\u0300-\u036F\u0489]")
+
+# Exclude variation selectors from zalgo because they're actually invisible.
+VARIATION_SELECTORS = r"\uFE00-\uFE0F\U000E0100-\U000E01EF"
+INVISIBLE_RE = regex.compile(rf"[{VARIATION_SELECTORS}\p{{UNASSIGNED}}\p{{FORMAT}}\p{{CONTROL}}--\s]", regex.V1)
+ZALGO_RE = regex.compile(rf"[\p{{NONSPACING MARK}}\p{{ENCLOSING MARK}}--[{VARIATION_SELECTORS}]]", regex.V1)
# Other constants.
DAYS_BETWEEN_ALERTS = 3
@@ -137,6 +142,10 @@ class Filtering(Cog):
"""Fetch items from the filter_list_cache."""
return self.bot.filter_list_cache[f"{list_type.upper()}.{allowed}"].keys()
+ def _get_filterlist_value(self, list_type: str, value: Any, *, allowed: bool) -> dict:
+ """Fetch one specific value from filter_list_cache."""
+ return self.bot.filter_list_cache[f"{list_type.upper()}.{allowed}"][value]
+
@staticmethod
def _expand_spoilers(text: str) -> str:
"""Return a string containing all interpretations of a spoilered message."""
@@ -174,6 +183,7 @@ class Filtering(Cog):
def get_name_matches(self, name: str) -> List[re.Match]:
"""Check bad words from passed string (name). Return list of matches."""
+ name = self.clean_input(name)
matches = []
watchlist_patterns = self._get_filterlist_items('filter_token', allowed=False)
for pattern in watchlist_patterns:
@@ -236,7 +246,13 @@ class Filtering(Cog):
# We also do not need to worry about filters that take the full message,
# since all we have is an arbitrary string.
if _filter["enabled"] and _filter["content_only"]:
- match = await _filter["function"](result)
+ filter_result = await _filter["function"](result)
+ reason = None
+
+ if isinstance(filter_result, tuple):
+ match, reason = filter_result
+ else:
+ match = filter_result
if match:
# If this is a filter (not a watchlist), we set the variable so we know
@@ -245,7 +261,7 @@ class Filtering(Cog):
filter_triggered = True
stats = self._add_stats(filter_name, match, result)
- await self._send_log(filter_name, _filter, msg, stats, is_eval=True)
+ await self._send_log(filter_name, _filter, msg, stats, reason, is_eval=True)
break # We don't want multiple filters to trigger
@@ -267,9 +283,17 @@ class Filtering(Cog):
# Does the filter only need the message content or the full message?
if _filter["content_only"]:
- match = await _filter["function"](msg.content)
+ payload = msg.content
+ else:
+ payload = msg
+
+ result = await _filter["function"](payload)
+ reason = None
+
+ if isinstance(result, tuple):
+ match, reason = result
else:
- match = await _filter["function"](msg)
+ match = result
if match:
is_private = msg.channel.type is discord.ChannelType.private
@@ -316,7 +340,7 @@ class Filtering(Cog):
log.trace(f"Offensive message {msg.id} will be deleted on {delete_date}")
stats = self._add_stats(filter_name, match, msg.content)
- await self._send_log(filter_name, _filter, msg, stats)
+ await self._send_log(filter_name, _filter, msg, stats, reason)
break # We don't want multiple filters to trigger
@@ -326,6 +350,7 @@ class Filtering(Cog):
_filter: Dict[str, Any],
msg: discord.Message,
stats: Stats,
+ reason: Optional[str] = None,
*,
is_eval: bool = False,
) -> None:
@@ -339,6 +364,7 @@ class Filtering(Cog):
ping_everyone = Filter.ping_everyone and _filter.get("ping_everyone", True)
eval_msg = "using !eval " if is_eval else ""
+ footer = f"Reason: {reason}" if reason else None
message = (
f"The {filter_name} {_filter['type']} was triggered by {format_user(msg.author)} "
f"{channel_str} {eval_msg}with [the following message]({msg.jump_url}):\n\n"
@@ -357,6 +383,7 @@ class Filtering(Cog):
channel_id=Channels.mod_alerts,
ping_everyone=ping_everyone,
additional_embeds=stats.additional_embeds,
+ footer=footer,
)
def _add_stats(self, name: str, match: FilterMatch, content: str) -> Stats:
@@ -381,13 +408,14 @@ class Filtering(Cog):
if name == "filter_invites" and match is not True:
additional_embeds = []
for _, data in match.items():
+ reason = f"Reason: {data['reason']} | " if data.get('reason') else ""
embed = discord.Embed(description=(
f"**Members:**\n{data['members']}\n"
f"**Active:**\n{data['active']}"
))
embed.set_author(name=data["name"])
embed.set_thumbnail(url=data["icon"])
- embed.set_footer(text=f"Guild ID: {data['id']}")
+ embed.set_footer(text=f"{reason}Guild ID: {data['id']}")
additional_embeds.append(embed)
elif name == "watch_rich_embeds":
@@ -411,39 +439,49 @@ class Filtering(Cog):
and not msg.author.bot # Author not a bot
)
- async def _has_watch_regex_match(self, text: str) -> Union[bool, re.Match]:
+ async def _has_watch_regex_match(self, text: str) -> Tuple[Union[bool, re.Match], Optional[str]]:
"""
Return True if `text` matches any regex from `word_watchlist` or `token_watchlist` configs.
`word_watchlist`'s patterns are placed between word boundaries while `token_watchlist` is
matched as-is. Spoilers are expanded, if any, and URLs are ignored.
+ The second return value is the reason stored in the database for the matched blacklist entry (can be None).
"""
if SPOILER_RE.search(text):
text = self._expand_spoilers(text)
+ text = self.clean_input(text)
+
# Make sure it's not a URL
if URL_RE.search(text):
- return False
+ return False, None
watchlist_patterns = self._get_filterlist_items('filter_token', allowed=False)
for pattern in watchlist_patterns:
match = re.search(pattern, text, flags=re.IGNORECASE)
if match:
- return match
+ return match, self._get_filterlist_value('filter_token', pattern, allowed=False)['comment']
+
+ return False, None
- async def _has_urls(self, text: str) -> bool:
- """Returns True if the text contains one of the blacklisted URLs from the config file."""
+ async def _has_urls(self, text: str) -> Tuple[bool, Optional[str]]:
+ """
+ Returns True if the text contains one of the blacklisted URLs from the config file.
+
+ The second return value is the reason the URL was blacklisted (can be None).
+ """
+ text = self.clean_input(text)
if not URL_RE.search(text):
- return False
+ return False, None
text = text.lower()
domain_blacklist = self._get_filterlist_items("domain_name", allowed=False)
for url in domain_blacklist:
if url.lower() in text:
- return True
+ return True, self._get_filterlist_value("domain_name", url, allowed=False)["comment"]
- return False
+ return False, None
@staticmethod
async def _has_zalgo(text: str) -> bool:
@@ -463,6 +501,8 @@ class Filtering(Cog):
Attempts to catch some of common ways to try to cheat the system.
"""
+ text = self.clean_input(text)
+
# Remove backslashes to prevent escape character aroundfuckery like
# discord\.gg/gdudes-pony-farm
text = text.replace("\\", "")
@@ -500,6 +540,10 @@ class Filtering(Cog):
)
if invite_not_allowed:
+ reason = None
+ if guild_id in guild_invite_blacklist:
+ reason = self._get_filterlist_value("guild_invite", guild_id, allowed=False)["comment"]
+
guild_icon_hash = guild["icon"]
guild_icon = (
"https://cdn.discordapp.com/icons/"
@@ -511,7 +555,8 @@ class Filtering(Cog):
"id": guild['id'],
"icon": guild_icon,
"members": response["approximate_member_count"],
- "active": response["approximate_presence_count"]
+ "active": response["approximate_presence_count"],
+ "reason": reason
}
return invite_data if invite_data else False
@@ -594,6 +639,15 @@ class Filtering(Cog):
await self.bot.api_client.delete(f'bot/offensive-messages/{msg["id"]}')
log.info(f"Deleted the offensive message with id {msg['id']}.")
+ @staticmethod
+ def clean_input(string: str) -> str:
+ """Remove zalgo and invisible characters from `string`."""
+ # For future consideration: remove characters in the Mc, Sk, and Lm categories too.
+ # Can be normalised with form C to merge char + combining char into a single char to avoid
+ # removing legit diacritics, but this would open up a way to bypass filters.
+ no_zalgo = ZALGO_RE.sub("", string)
+ return INVISIBLE_RE.sub("", no_zalgo)
+
def setup(bot: Bot) -> None:
"""Load the Filtering cog."""
diff --git a/bot/exts/filters/webhook_remover.py b/bot/exts/filters/webhook_remover.py
index 08fe94055..f11fc8912 100644
--- a/bot/exts/filters/webhook_remover.py
+++ b/bot/exts/filters/webhook_remover.py
@@ -14,7 +14,7 @@ WEBHOOK_URL_RE = re.compile(r"((?:https?://)?discord(?:app)?\.com/api/webhooks/\
ALERT_MESSAGE_TEMPLATE = (
"{user}, looks like you posted a Discord webhook URL. Therefore, your "
"message has been removed. Your webhook may have been **compromised** so "
- "please re-create the webhook **immediately**. If you believe this was "
+ "please re-create the webhook **immediately**. If you believe this was a "
"mistake, please let us know."
)
diff --git a/bot/exts/fun/off_topic_names.py b/bot/exts/fun/off_topic_names.py
index 7fc93b88c..845b8175c 100644
--- a/bot/exts/fun/off_topic_names.py
+++ b/bot/exts/fun/off_topic_names.py
@@ -139,10 +139,20 @@ class OffTopicNames(Cog):
@has_any_role(*MODERATION_ROLES)
async def search_command(self, ctx: Context, *, query: OffTopicName) -> None:
"""Search for an off-topic name."""
- result = await self.bot.api_client.get('bot/off-topic-channel-names')
- in_matches = {name for name in result if query in name}
- close_matches = difflib.get_close_matches(query, result, n=10, cutoff=0.70)
- lines = sorted(f"• {name}" for name in in_matches.union(close_matches))
+ query = OffTopicName.translate_name(query, from_unicode=False).lower()
+
+ # Map normalized names to returned names for search purposes
+ result = {
+ OffTopicName.translate_name(name, from_unicode=False).lower(): name
+ for name in await self.bot.api_client.get('bot/off-topic-channel-names')
+ }
+
+ # Search normalized keys
+ in_matches = {name for name in result.keys() if query in name}
+ close_matches = difflib.get_close_matches(query, result.keys(), n=10, cutoff=0.70)
+
+ # Send Results
+ lines = sorted(f"• {result[name]}" for name in in_matches.union(close_matches))
embed = Embed(
title="Query results",
colour=Colour.blue()
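Searching over normalised keys while reporting the stored names keeps matching insensitive to the decorative characters used in off-topic names. A condensed sketch with a stand-in `translate` in place of `OffTopicName.translate_name`:

```python
import difflib

def translate(name: str) -> str:
    # Stand-in for OffTopicName.translate_name(name, from_unicode=False).lower().
    return name.lower()

names = ["Black-Forest-Cake", "Ba-Na-Nas", "Chateaux"]
index = {translate(name): name for name in names}  # normalised -> original

query = translate("CAKE")
in_matches = {key for key in index if query in key}
close_matches = difflib.get_close_matches(query, index.keys(), n=10, cutoff=0.70)

print(sorted(index[key] for key in in_matches.union(close_matches)))
```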
diff --git a/bot/exts/help_channels/_caches.py b/bot/exts/help_channels/_caches.py
index 4cea385b7..c5e4ee917 100644
--- a/bot/exts/help_channels/_caches.py
+++ b/bot/exts/help_channels/_caches.py
@@ -8,12 +8,19 @@ claim_times = RedisCache(namespace="HelpChannels.claim_times")
# RedisCache[discord.TextChannel.id, t.Union[discord.User.id, discord.Member.id]]
claimants = RedisCache(namespace="HelpChannels.help_channel_claimants")
+# Stores the timestamp of the last message from the claimant of a help channel
+# RedisCache[discord.TextChannel.id, UtcPosixTimestamp]
+claimant_last_message_times = RedisCache(namespace="HelpChannels.claimant_last_message_times")
+
+# This cache maps a help channel to the timestamp of the last non-claimant message.
+# This cache being empty for a given help channel indicates the question is unanswered.
+# RedisCache[discord.TextChannel.id, UtcPosixTimestamp]
+non_claimant_last_message_times = RedisCache(namespace="HelpChannels.non_claimant_last_message_times")
+
# This cache maps a help channel to original question message in same channel.
# RedisCache[discord.TextChannel.id, discord.Message.id]
question_messages = RedisCache(namespace="HelpChannels.question_messages")
-# This cache maps a help channel to whether it has had any
-# activity other than the original claimant. True being no other
-# activity and False being other activity.
-# RedisCache[discord.TextChannel.id, bool]
-unanswered = RedisCache(namespace="HelpChannels.unanswered")
+# This cache keeps track of the dynamic message ID for
+# the continuously updated message in the #How-to-get-help channel.
+dynamic_message = RedisCache(namespace="HelpChannels.dynamic_message")
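These caches hold plain POSIX timestamps rather than datetime objects. A brief usage sketch, mirroring the `set`/`get` calls made elsewhere in this diff (it assumes the Redis session is configured at startup, as the bot does):

```python
import arrow
from async_rediscache import RedisCache

claimant_last_message_times = RedisCache(namespace="HelpChannels.claimant_last_message_times")

async def touch(channel_id: int) -> None:
    # Store "now" as a float POSIX timestamp.
    await claimant_last_message_times.set(channel_id, arrow.utcnow().timestamp())

async def last_seen(channel_id: int):
    # Rehydrate the stored timestamp into an Arrow, or None if unset.
    timestamp = await claimant_last_message_times.get(channel_id)
    return arrow.Arrow.utcfromtimestamp(timestamp) if timestamp is not None else None
```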
diff --git a/bot/exts/help_channels/_channel.py b/bot/exts/help_channels/_channel.py
index 224214b00..0846b28c8 100644
--- a/bot/exts/help_channels/_channel.py
+++ b/bot/exts/help_channels/_channel.py
@@ -1,8 +1,11 @@
import logging
import typing as t
-from datetime import datetime, timedelta
+from datetime import timedelta
+from enum import Enum
+import arrow
import discord
+from arrow import Arrow
import bot
from bot import constants
@@ -15,6 +18,17 @@ MAX_CHANNELS_PER_CATEGORY = 50
EXCLUDED_CHANNELS = (constants.Channels.cooldown,)
+class ClosingReason(Enum):
+ """All possible closing reasons for help channels."""
+
+ COMMAND = "command"
+ LATEST_MESSAGE = "auto.latest_message"
+ CLAIMANT_TIMEOUT = "auto.claimant_timeout"
+ OTHER_TIMEOUT = "auto.other_timeout"
+ DELETED = "auto.deleted"
+ CLEANUP = "auto.cleanup"
+
+
def get_category_channels(category: discord.CategoryChannel) -> t.Iterable[discord.TextChannel]:
"""Yield the text channels of the `category` in an unsorted manner."""
log.trace(f"Getting text channels in the category '{category}' ({category.id}).")
@@ -25,23 +39,69 @@ def get_category_channels(category: discord.CategoryChannel) -> t.Iterable[disco
yield channel
-async def get_idle_time(channel: discord.TextChannel) -> t.Optional[int]:
+async def get_closing_time(channel: discord.TextChannel, init_done: bool) -> t.Tuple[Arrow, ClosingReason]:
"""
- Return the time elapsed, in seconds, since the last message sent in the `channel`.
+ Return the time at which the given help `channel` should be closed along with the reason.
- Return None if the channel has no messages.
- """
- log.trace(f"Getting the idle time for #{channel} ({channel.id}).")
-
- msg = await _message.get_last_message(channel)
- if not msg:
- log.debug(f"No idle time available; #{channel} ({channel.id}) has no messages.")
- return None
+ `init_done` is True if the cog has finished loading and False otherwise.
- idle_time = (datetime.utcnow() - msg.created_at).seconds
+ The time is calculated as follows:
- log.trace(f"#{channel} ({channel.id}) has been idle for {idle_time} seconds.")
- return idle_time
+ * If `init_done` is False or the cached time for the claimant's last message is unavailable,
+ add the configured `idle_minutes_claimant` to the time the most recent message was sent.
+ * If the help session is empty (see `is_empty`), do the above but with `deleted_idle_minutes`.
+ * If either of the above is attempted but the channel is completely empty, close the channel
+ immediately.
+ * Otherwise, retrieve the times of the claimant's and non-claimant's last messages from the
+ cache. Add the configured `idle_minutes_claimant` and `idle_minutes_others`, respectively, and
+ choose the time which is furthest in the future.
+ """
+ log.trace(f"Getting the closing time for #{channel} ({channel.id}).")
+
+ is_empty = await _message.is_empty(channel)
+ if is_empty:
+ idle_minutes_claimant = constants.HelpChannels.deleted_idle_minutes
+ else:
+ idle_minutes_claimant = constants.HelpChannels.idle_minutes_claimant
+
+ claimant_time = await _caches.claimant_last_message_times.get(channel.id)
+
+ # The current session lacks messages, the cog is still starting, or the cache is empty.
+ if is_empty or not init_done or claimant_time is None:
+ msg = await _message.get_last_message(channel)
+ if not msg:
+ log.debug(f"No idle time available; #{channel} ({channel.id}) has no messages, closing now.")
+ return Arrow.min, ClosingReason.DELETED
+
+ # Use the greatest offset to avoid the possibility of prematurely closing the channel.
+ time = Arrow.fromdatetime(msg.created_at) + timedelta(minutes=idle_minutes_claimant)
+ reason = ClosingReason.DELETED if is_empty else ClosingReason.LATEST_MESSAGE
+ return time, reason
+
+ claimant_time = Arrow.utcfromtimestamp(claimant_time)
+ others_time = await _caches.non_claimant_last_message_times.get(channel.id)
+
+ if others_time:
+ others_time = Arrow.utcfromtimestamp(others_time)
+ else:
+ # The help session hasn't received any answers (messages from non-claimants) yet.
+ # Set to min value so it isn't considered when calculating the closing time.
+ others_time = Arrow.min
+
+ # Offset the cached times by the configured values.
+ others_time += timedelta(minutes=constants.HelpChannels.idle_minutes_others)
+ claimant_time += timedelta(minutes=idle_minutes_claimant)
+
+ # Use the time which is the furthest into the future.
+ if claimant_time >= others_time:
+ closing_time = claimant_time
+ reason = ClosingReason.CLAIMANT_TIMEOUT
+ else:
+ closing_time = others_time
+ reason = ClosingReason.OTHER_TIMEOUT
+
+ log.trace(f"#{channel} ({channel.id}) should be closed at {closing_time} due to {reason}.")
+ return closing_time, reason
async def get_in_use_time(channel_id: int) -> t.Optional[timedelta]:
@@ -50,8 +110,8 @@ async def get_in_use_time(channel_id: int) -> t.Optional[timedelta]:
claimed_timestamp = await _caches.claim_times.get(channel_id)
if claimed_timestamp:
- claimed = datetime.utcfromtimestamp(claimed_timestamp)
- return datetime.utcnow() - claimed
+ claimed = Arrow.utcfromtimestamp(claimed_timestamp)
+ return arrow.utcnow() - claimed
def is_excluded_channel(channel: discord.abc.GuildChannel) -> bool:
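In the common case, two deadlines compete and the later one wins. A toy calculation with assumed config values (30 idle minutes for the claimant, 10 for everyone else):

```python
from datetime import timedelta

import arrow

idle_minutes_claimant = 30  # assumed config value
idle_minutes_others = 10    # assumed config value

claimant_time = arrow.utcnow() - timedelta(minutes=5)  # last claimant message
others_time = arrow.utcnow() - timedelta(minutes=2)    # last non-claimant message

claimant_close = claimant_time + timedelta(minutes=idle_minutes_claimant)
others_close = others_time + timedelta(minutes=idle_minutes_others)

# The channel stays open until the later of the two deadlines.
closing_time = max(claimant_close, others_close)
print(closing_time, "CLAIMANT_TIMEOUT" if closing_time == claimant_close else "OTHER_TIMEOUT")
```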
diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py
index 0995c8a79..262b18e16 100644
--- a/bot/exts/help_channels/_cog.py
+++ b/bot/exts/help_channels/_cog.py
@@ -2,9 +2,10 @@ import asyncio
import logging
import random
import typing as t
-from datetime import datetime, timezone
+from datetime import timedelta
from operator import attrgetter
+import arrow
import discord
import discord.abc
from discord.ext import commands
@@ -20,6 +21,7 @@ NAMESPACE = "help"
HELP_CHANNEL_TOPIC = """
This is a Python help channel. You can claim your own help channel in the Python Help: Available category.
"""
+AVAILABLE_HELP_CHANNELS = "**Currently available help channel(s):** {available}"
class HelpChannels(commands.Cog):
@@ -43,7 +45,9 @@ class HelpChannels(commands.Cog):
In Use Category
* Contains all channels which are occupied by someone needing help
- * Channel moves to dormant category after `constants.HelpChannels.idle_minutes` of being idle
+ * Channel moves to dormant category after
+ - `constants.HelpChannels.idle_minutes_others` minutes since the last non-claimant message, or
+ - `constants.HelpChannels.idle_minutes_claimant` minutes since the last claimant message.
* Command can prematurely mark a channel as dormant
* Channel claimant is allowed to use the command
* Allowed roles for the command are configurable with `constants.HelpChannels.cmd_whitelist`
@@ -54,7 +58,7 @@ class HelpChannels(commands.Cog):
* Contains channels which aren't in use
* Channels are used to refill the Available category
- Help channels are named after the chemical elements in `bot/resources/elements.json`.
+ Help channels are named after the foods in `bot/resources/foods.json`.
"""
def __init__(self, bot: Bot):
@@ -70,7 +74,10 @@ class HelpChannels(commands.Cog):
self.channel_queue: asyncio.Queue[discord.TextChannel] = None
self.name_queue: t.Deque[str] = None
- self.last_notification: t.Optional[datetime] = None
+ self.last_notification: t.Optional[arrow.Arrow] = None
+
+ self.dynamic_message: t.Optional[int] = None
+ self.available_help_channels: t.Set[discord.TextChannel] = set()
# Asyncio stuff
self.queue_tasks: t.List[asyncio.Task] = []
@@ -102,17 +109,26 @@ class HelpChannels(commands.Cog):
await _cooldown.revoke_send_permissions(message.author, self.scheduler)
await _message.pin(message)
+ try:
+ await _message.dm_on_open(message)
+ except Exception as e:
+ log.warning("Error occurred while sending DM:", exc_info=e)
# Add user with channel for dormant check.
await _caches.claimants.set(message.channel.id, message.author.id)
self.bot.stats.incr("help.claimed")
- # Must use a timezone-aware datetime to ensure a correct POSIX timestamp.
- timestamp = datetime.now(timezone.utc).timestamp()
+ # datetime.timestamp() would assume it's local, despite d.py giving a (naïve) UTC time.
+ timestamp = arrow.Arrow.fromdatetime(message.created_at).timestamp()
+
await _caches.claim_times.set(message.channel.id, timestamp)
+ await _caches.claimant_last_message_times.set(message.channel.id, timestamp)
+ # Delete to indicate that the help session has yet to receive an answer.
+ await _caches.non_claimant_last_message_times.delete(message.channel.id)
- await _caches.unanswered.set(message.channel.id, True)
+ # Removing the help channel from the dynamic message, and editing/sending that message.
+ self.available_help_channels.remove(message.channel)
# Not awaited because it may indefinitely hold the lock while waiting for a channel.
scheduling.create_task(self.move_to_available(), name=f"help_claim_{message.id}")
@@ -183,7 +199,7 @@ class HelpChannels(commands.Cog):
# Don't use a discord.py check because the check needs to fail silently.
if await self.close_check(ctx):
log.info(f"Close command invoked by {ctx.author} in #{ctx.channel}.")
- await self.unclaim_channel(ctx.channel, is_auto=False)
+ await self.unclaim_channel(ctx.channel, closed_on=_channel.ClosingReason.COMMAND)
async def get_available_candidate(self) -> discord.TextChannel:
"""
@@ -229,7 +245,11 @@ class HelpChannels(commands.Cog):
elif missing < 0:
log.trace(f"Moving {abs(missing)} superfluous available channels over to the Dormant category.")
for channel in channels[:abs(missing)]:
- await self.unclaim_channel(channel)
+ await self.unclaim_channel(channel, closed_on=_channel.ClosingReason.CLEANUP)
+
+ # Getting channels that need to be included in the dynamic message.
+ await self.update_available_help_channels()
+ log.trace("Dynamic available help message updated.")
async def init_categories(self) -> None:
"""Get the help category objects. Remove the cog if retrieval fails."""
@@ -275,6 +295,10 @@ class HelpChannels(commands.Cog):
# This may confuse users. So would potentially long delays for the cog to become ready.
self.close_command.enabled = True
+ # Acquiring the dynamic message ID, if it exists within the cache.
+ log.trace("Attempting to fetch How-to-get-help dynamic message ID.")
+ self.dynamic_message = await _caches.dynamic_message.get("message_id")
+
await self.init_available()
_stats.report_counts()
@@ -289,26 +313,23 @@ class HelpChannels(commands.Cog):
"""
log.trace(f"Handling in-use channel #{channel} ({channel.id}).")
- if not await _message.is_empty(channel):
- idle_seconds = constants.HelpChannels.idle_minutes * 60
- else:
- idle_seconds = constants.HelpChannels.deleted_idle_minutes * 60
-
- time_elapsed = await _channel.get_idle_time(channel)
+ closing_time, closed_on = await _channel.get_closing_time(channel, self.init_task.done())
- if time_elapsed is None or time_elapsed >= idle_seconds:
+ # Closing time is in the past.
+ # Add 1 second due to POSIX timestamps being lower resolution than datetime objects.
+ if closing_time < (arrow.utcnow() + timedelta(seconds=1)):
log.info(
- f"#{channel} ({channel.id}) is idle longer than {idle_seconds} seconds "
- f"and will be made dormant."
+ f"#{channel} ({channel.id}) is idle past {closing_time} "
+ f"and will be made dormant. Reason: {closed_on.value}"
)
- await self.unclaim_channel(channel)
+ await self.unclaim_channel(channel, closed_on=closed_on)
else:
# Cancel the existing task, if any.
if has_task:
self.scheduler.cancel(channel.id)
- delay = idle_seconds - time_elapsed
+ delay = (closing_time - arrow.utcnow()).seconds
log.info(
f"#{channel} ({channel.id}) is still active; "
f"scheduling it to be moved after {delay} seconds."
@@ -332,6 +353,10 @@ class HelpChannels(commands.Cog):
category_id=constants.Categories.help_available,
)
+ # Adding the help channel to the dynamic message, and editing/sending that message.
+ self.available_help_channels.add(channel)
+ await self.update_available_help_channels()
+
_stats.report_counts()
async def move_to_dormant(self, channel: discord.TextChannel) -> None:
@@ -352,7 +377,7 @@ class HelpChannels(commands.Cog):
_stats.report_counts()
@lock.lock_arg(f"{NAMESPACE}.unclaim", "channel")
- async def unclaim_channel(self, channel: discord.TextChannel, *, is_auto: bool = True) -> None:
+ async def unclaim_channel(self, channel: discord.TextChannel, *, closed_on: _channel.ClosingReason) -> None:
"""
Unclaim an in-use help `channel` to make it dormant.
@@ -360,7 +385,7 @@ class HelpChannels(commands.Cog):
Remove the cooldown role from the channel claimant if they have no other channels claimed.
Cancel the scheduled cooldown role removal task.
- Set `is_auto` to True if the channel was automatically closed or False if manually closed.
+ `closed_on` is the reason that the channel was closed. See _channel.ClosingReason for possible values.
"""
claimant_id = await _caches.claimants.get(channel.id)
_unclaim_channel = self._unclaim_channel
@@ -371,9 +396,14 @@ class HelpChannels(commands.Cog):
decorator = lock.lock_arg(f"{NAMESPACE}.unclaim", "claimant_id", wait=True)
_unclaim_channel = decorator(_unclaim_channel)
- return await _unclaim_channel(channel, claimant_id, is_auto)
+ return await _unclaim_channel(channel, claimant_id, closed_on)
- async def _unclaim_channel(self, channel: discord.TextChannel, claimant_id: int, is_auto: bool) -> None:
+ async def _unclaim_channel(
+ self,
+ channel: discord.TextChannel,
+ claimant_id: int,
+ closed_on: _channel.ClosingReason
+ ) -> None:
"""Actual implementation of `unclaim_channel`. See that for full documentation."""
await _caches.claimants.delete(channel.id)
@@ -389,12 +419,12 @@ class HelpChannels(commands.Cog):
await _cooldown.remove_cooldown_role(claimant)
await _message.unpin(channel)
- await _stats.report_complete_session(channel.id, is_auto)
+ await _stats.report_complete_session(channel.id, closed_on)
await self.move_to_dormant(channel)
# Cancel the task that makes the channel dormant only if called by the close command.
# In other cases, the task is either already done or not-existent.
- if not is_auto:
+ if closed_on == _channel.ClosingReason.COMMAND:
self.scheduler.cancel(channel.id)
async def move_to_in_use(self, channel: discord.TextChannel) -> None:
@@ -406,7 +436,7 @@ class HelpChannels(commands.Cog):
category_id=constants.Categories.help_in_use,
)
- timeout = constants.HelpChannels.idle_minutes * 60
+ timeout = constants.HelpChannels.idle_minutes_claimant * 60
log.trace(f"Scheduling #{channel} ({channel.id}) to become dormant in {timeout} sec.")
self.scheduler.schedule_later(timeout, channel.id, self.move_idle_channel(channel))
@@ -424,7 +454,7 @@ class HelpChannels(commands.Cog):
if not _channel.is_excluded_channel(message.channel):
await self.claim_channel(message)
else:
- await _message.check_for_answer(message)
+ await _message.update_message_caches(message)
@commands.Cog.listener()
async def on_message_delete(self, msg: discord.Message) -> None:
@@ -461,3 +491,34 @@ class HelpChannels(commands.Cog):
self.queue_tasks.remove(task)
return channel
+
+ async def update_available_help_channels(self) -> None:
+ """Updates the dynamic message within #how-to-get-help for available help channels."""
+ if not self.available_help_channels:
+ self.available_help_channels = set(
+ c for c in self.available_category.channels if not _channel.is_excluded_channel(c)
+ )
+
+ available_channels = AVAILABLE_HELP_CHANNELS.format(
+ available=", ".join(
+ c.mention for c in sorted(self.available_help_channels, key=attrgetter("position"))
+ ) or None
+ )
+
+ if self.dynamic_message is not None:
+ try:
+ log.trace("Help channels have changed, dynamic message has been edited.")
+ await self.bot.http.edit_message(
+ constants.Channels.how_to_get_help, self.dynamic_message, content=available_channels
+ )
+ except discord.NotFound:
+ pass
+ else:
+ return
+
+ log.trace("Dynamic message could not be edited or found. Creating a new one.")
+ new_dynamic_message = await self.bot.http.send_message(
+ constants.Channels.how_to_get_help, available_channels
+ )
+ self.dynamic_message = new_dynamic_message["id"]
+ await _caches.dynamic_message.set("message_id", self.dynamic_message)
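`update_available_help_channels` is an edit-or-create flow for a single persistent status message: try to edit the cached message id, and fall back to sending (and re-caching) a fresh message when the edit fails. Schematically, with a hypothetical `api` object standing in for `bot.http`:

```python
class NotFound(Exception):
    """Stand-in for discord.NotFound."""

async def update_status(api, channel_id: int, cache: dict, content: str) -> None:
    message_id = cache.get("message_id")
    if message_id is not None:
        try:
            await api.edit_message(channel_id, message_id, content=content)
            return  # Edit succeeded; nothing else to do.
        except NotFound:
            pass  # The message was deleted; fall through and recreate it.

    created = await api.send_message(channel_id, content)
    cache["message_id"] = created["id"]  # Remember the new message for next time.
```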
diff --git a/bot/exts/help_channels/_message.py b/bot/exts/help_channels/_message.py
index 2bbd4bdd6..afd698ffe 100644
--- a/bot/exts/help_channels/_message.py
+++ b/bot/exts/help_channels/_message.py
@@ -1,8 +1,10 @@
import logging
+import textwrap
import typing as t
-from datetime import datetime
+import arrow
import discord
+from arrow import Arrow
import bot
from bot import constants
@@ -27,7 +29,7 @@ For more tips, check out our guide on **[asking good questions]({ASKING_GUIDE_UR
AVAILABLE_TITLE = "Available help channel"
-AVAILABLE_FOOTER = f"Closes after {constants.HelpChannels.idle_minutes} minutes of inactivity or when you send !close."
+AVAILABLE_FOOTER = "Closes after a period of inactivity, or when you send !close."
DORMANT_MSG = f"""
This help channel has been marked as **dormant**, and has been moved into the **Help: Dormant** \
@@ -41,25 +43,27 @@ through our guide for **[asking a good question]({ASKING_GUIDE_URL})**.
"""
-async def check_for_answer(message: discord.Message) -> None:
- """Checks for whether new content in a help channel comes from non-claimants."""
+async def update_message_caches(message: discord.Message) -> None:
+ """Checks the source of new content in a help channel and updates the appropriate cache."""
channel = message.channel
# Confirm the channel is an in use help channel
if is_in_category(channel, constants.Categories.help_in_use):
- log.trace(f"Checking if #{channel} ({channel.id}) has been answered.")
+ log.trace(f"Checking if #{channel} ({channel.id}) has had a reply.")
- # Check if there is an entry in unanswered
- if await _caches.unanswered.contains(channel.id):
- claimant_id = await _caches.claimants.get(channel.id)
- if not claimant_id:
- # The mapping for this channel doesn't exist, we can't do anything.
- return
+ claimant_id = await _caches.claimants.get(channel.id)
+ if not claimant_id:
+ # The mapping for this channel doesn't exist, we can't do anything.
+ return
- # Check the message did not come from the claimant
- if claimant_id != message.author.id:
- # Mark the channel as answered
- await _caches.unanswered.set(channel.id, False)
+ # datetime.timestamp() would assume it's local, despite d.py giving a (naïve) UTC time.
+ timestamp = Arrow.fromdatetime(message.created_at).timestamp()
+
+ # Overwrite the appropriate last message cache depending on the author of the message
+ if message.author.id == claimant_id:
+ await _caches.claimant_last_message_times.set(channel.id, timestamp)
+ else:
+ await _caches.non_claimant_last_message_times.set(channel.id, timestamp)
async def get_last_message(channel: discord.TextChannel) -> t.Optional[discord.Message]:
@@ -92,12 +96,44 @@ async def is_empty(channel: discord.TextChannel) -> bool:
return False
-async def notify(channel: discord.TextChannel, last_notification: t.Optional[datetime]) -> t.Optional[datetime]:
+async def dm_on_open(message: discord.Message) -> None:
+ """
+ DM the claimant with a link to the claimed channel's first message, with a 100-character preview of the message.
+
+ Does nothing if the user has DMs disabled.
+ """
+ embed = discord.Embed(
+ title="Help channel opened",
+ description=f"You claimed {message.channel.mention}.",
+ colour=bot.constants.Colours.bright_green,
+ timestamp=message.created_at,
+ )
+
+ embed.set_thumbnail(url=constants.Icons.green_questionmark)
+ formatted_message = textwrap.shorten(message.content, width=100, placeholder="...")
+ if formatted_message:
+ embed.add_field(name="Your message", value=formatted_message, inline=False)
+ embed.add_field(
+ name="Conversation",
+ value=f"[Jump to message!]({message.jump_url})",
+ inline=False,
+ )
+
+ try:
+ await message.author.send(embed=embed)
+ log.trace(f"Sent DM to {message.author.id} after claiming help channel.")
+ except discord.errors.Forbidden:
+ log.trace(
+ f"Ignoring to send DM to {message.author.id} after claiming help channel: DMs disabled."
+ )
+
+
+async def notify(channel: discord.TextChannel, last_notification: t.Optional[Arrow]) -> t.Optional[Arrow]:
"""
Send a message in `channel` notifying about a lack of available help channels.
- If a notification was sent, return the `datetime` at which the message was sent. Otherwise,
- return None.
+ If a notification was sent, return the time at which the message was sent.
+ Otherwise, return None.
Configuration:
@@ -111,7 +147,7 @@ async def notify(channel: discord.TextChannel, last_notification: t.Optional[dat
log.trace("Notifying about lack of channels.")
if last_notification:
- elapsed = (datetime.utcnow() - last_notification).seconds
+ elapsed = (arrow.utcnow() - last_notification).seconds
minimum_interval = constants.HelpChannels.notify_minutes * 60
should_send = elapsed >= minimum_interval
else:
@@ -134,7 +170,7 @@ async def notify(channel: discord.TextChannel, last_notification: t.Optional[dat
allowed_mentions=discord.AllowedMentions(everyone=False, roles=allowed_roles)
)
- return message.created_at
+ return Arrow.fromdatetime(message.created_at)
except Exception:
# Handle it here cause this feature isn't critical for the functionality of the system.
log.exception("Failed to send notification about lack of dormant channels!")
diff --git a/bot/exts/help_channels/_name.py b/bot/exts/help_channels/_name.py
index 728234b1e..061f855ae 100644
--- a/bot/exts/help_channels/_name.py
+++ b/bot/exts/help_channels/_name.py
@@ -14,11 +14,11 @@ log = logging.getLogger(__name__)
def create_name_queue(*categories: discord.CategoryChannel) -> deque:
"""
- Return a queue of element names to use for creating new channels.
+ Return a queue of food names to use for creating new channels.
Skip names that are already in use by channels in `categories`.
"""
- log.trace("Creating the chemical element name queue.")
+ log.trace("Creating the food name queue.")
used_names = _get_used_names(*categories)
@@ -31,7 +31,7 @@ def create_name_queue(*categories: discord.CategoryChannel) -> deque:
def _get_names() -> t.List[str]:
"""
- Return a truncated list of prefixed element names.
+ Return a truncated list of prefixed food names.
The amount of names is configured with `HelpChannels.max_total_channels`.
The prefix is configured with `HelpChannels.name_prefix`.
@@ -39,10 +39,10 @@ def _get_names() -> t.List[str]:
count = constants.HelpChannels.max_total_channels
prefix = constants.HelpChannels.name_prefix
- log.trace(f"Getting the first {count} element names from JSON.")
+ log.trace(f"Getting the first {count} food names from JSON.")
- with Path("bot/resources/elements.json").open(encoding="utf-8") as elements_file:
- all_names = json.load(elements_file)
+ with Path("bot/resources/foods.json").open(encoding="utf-8") as foods_file:
+ all_names = json.load(foods_file)
if prefix:
return [prefix + name for name in all_names[:count]]
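The queue construction boils down to: load the JSON list, truncate, prefix, and skip names already in use. A self-contained sketch under the assumption that the resource file is a flat JSON list of strings:

```python
import json
from collections import deque
from pathlib import Path

def create_name_queue(used_names: set, count: int, prefix: str) -> deque:
    # Assumes a flat JSON list of strings, like bot/resources/foods.json.
    all_names = json.loads(Path("bot/resources/foods.json").read_text(encoding="utf-8"))
    prefixed = (prefix + name for name in all_names[:count])
    # Skip names already taken by existing channels.
    return deque(name for name in prefixed if name not in used_names)
```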
diff --git a/bot/exts/help_channels/_stats.py b/bot/exts/help_channels/_stats.py
index b8778e7d9..eb34e75e1 100644
--- a/bot/exts/help_channels/_stats.py
+++ b/bot/exts/help_channels/_stats.py
@@ -22,21 +22,20 @@ def report_counts() -> None:
log.warning(f"Couldn't find category {name!r} to track channel count stats.")
-async def report_complete_session(channel_id: int, is_auto: bool) -> None:
+async def report_complete_session(channel_id: int, closed_on: _channel.ClosingReason) -> None:
"""
Report stats for a completed help session channel `channel_id`.
- Set `is_auto` to True if the channel was automatically closed or False if manually closed.
+ `closed_on` is the reason why the channel was closed. See `_channel.ClosingReason` for possible reasons.
"""
- caller = "auto" if is_auto else "command"
- bot.instance.stats.incr(f"help.dormant_calls.{caller}")
+ bot.instance.stats.incr(f"help.dormant_calls.{closed_on.value}")
in_use_time = await _channel.get_in_use_time(channel_id)
if in_use_time:
bot.instance.stats.timing("help.in_use_time", in_use_time)
- unanswered = await _caches.unanswered.get(channel_id)
- if unanswered:
+ non_claimant_last_message_time = await _caches.non_claimant_last_message_times.get(channel_id)
+ if non_claimant_last_message_time is None:
bot.instance.stats.incr("help.sessions.unanswered")
- elif unanswered is not None:
+ else:
bot.instance.stats.incr("help.sessions.answered")
diff --git a/bot/exts/info/codeblock/_parsing.py b/bot/exts/info/codeblock/_parsing.py
index e35fbca22..73fd11b94 100644
--- a/bot/exts/info/codeblock/_parsing.py
+++ b/bot/exts/info/codeblock/_parsing.py
@@ -103,6 +103,9 @@ def _is_python_code(content: str) -> bool:
"""Return True if `content` is valid Python consisting of more than just expressions."""
log.trace("Checking if content is Python code.")
try:
+ # Remove null bytes because they cause ast.parse to raise a ValueError.
+ content = content.replace("\x00", "")
+
# Attempt to parse the message into an AST node.
# Invalid Python code will raise a SyntaxError.
tree = ast.parse(content)
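Null bytes are rejected before parsing even begins, and historically with ValueError rather than SyntaxError, which is why they are stripped first. A short demonstration (newer CPython releases may raise SyntaxError here instead, hence the double catch):

```python
import ast

source = "print('hi')\x00"
try:
    ast.parse(source)
except (ValueError, SyntaxError):
    # "source code string cannot contain null bytes"
    print(ast.dump(ast.parse(source.replace("\x00", ""))))
```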
diff --git a/bot/exts/info/doc.py b/bot/exts/info/doc.py
deleted file mode 100644
index 9b5bd6504..000000000
--- a/bot/exts/info/doc.py
+++ /dev/null
@@ -1,485 +0,0 @@
-import asyncio
-import functools
-import logging
-import re
-import textwrap
-from contextlib import suppress
-from types import SimpleNamespace
-from typing import Optional, Tuple
-
-import discord
-from bs4 import BeautifulSoup
-from bs4.element import PageElement, Tag
-from discord.errors import NotFound
-from discord.ext import commands
-from markdownify import MarkdownConverter
-from requests import ConnectTimeout, ConnectionError, HTTPError
-from sphinx.ext import intersphinx
-from urllib3.exceptions import ProtocolError
-
-from bot.bot import Bot
-from bot.constants import MODERATION_ROLES, RedirectOutput
-from bot.converters import ValidPythonIdentifier, ValidURL
-from bot.pagination import LinePaginator
-from bot.utils.cache import AsyncCache
-from bot.utils.messages import wait_for_deletion
-
-
-log = logging.getLogger(__name__)
-logging.getLogger('urllib3').setLevel(logging.WARNING)
-
-# Since Intersphinx is intended to be used with Sphinx,
-# we need to mock its configuration.
-SPHINX_MOCK_APP = SimpleNamespace(
- config=SimpleNamespace(
- intersphinx_timeout=3,
- tls_verify=True,
- user_agent="python3:python-discord/bot:1.0.0"
- )
-)
-
-NO_OVERRIDE_GROUPS = (
- "2to3fixer",
- "token",
- "label",
- "pdbcommand",
- "term",
-)
-NO_OVERRIDE_PACKAGES = (
- "python",
-)
-
-SEARCH_END_TAG_ATTRS = (
- "data",
- "function",
- "class",
- "exception",
- "seealso",
- "section",
- "rubric",
- "sphinxsidebar",
-)
-UNWANTED_SIGNATURE_SYMBOLS_RE = re.compile(r"\[source]|\\\\|¶")
-WHITESPACE_AFTER_NEWLINES_RE = re.compile(r"(?<=\n\n)(\s+)")
-
-FAILED_REQUEST_RETRY_AMOUNT = 3
-NOT_FOUND_DELETE_DELAY = RedirectOutput.delete_delay
-
-symbol_cache = AsyncCache()
-
-
-class DocMarkdownConverter(MarkdownConverter):
- """Subclass markdownify's MarkdownCoverter to provide custom conversion methods."""
-
- def convert_code(self, el: PageElement, text: str) -> str:
- """Undo `markdownify`s underscore escaping."""
- return f"`{text}`".replace('\\', '')
-
- def convert_pre(self, el: PageElement, text: str) -> str:
- """Wrap any codeblocks in `py` for syntax highlighting."""
- code = ''.join(el.strings)
- return f"```py\n{code}```"
-
-
-def markdownify(html: str) -> DocMarkdownConverter:
- """Create a DocMarkdownConverter object from the input html."""
- return DocMarkdownConverter(bullets='•').convert(html)
-
-
-class InventoryURL(commands.Converter):
- """
- Represents an Intersphinx inventory URL.
-
- This converter checks whether intersphinx accepts the given inventory URL, and raises
- `BadArgument` if that is not the case.
-
- Otherwise, it simply passes through the given URL.
- """
-
- @staticmethod
- async def convert(ctx: commands.Context, url: str) -> str:
- """Convert url to Intersphinx inventory URL."""
- try:
- intersphinx.fetch_inventory(SPHINX_MOCK_APP, '', url)
- except AttributeError:
- raise commands.BadArgument(f"Failed to fetch Intersphinx inventory from URL `{url}`.")
- except ConnectionError:
- if url.startswith('https'):
- raise commands.BadArgument(
- f"Cannot establish a connection to `{url}`. Does it support HTTPS?"
- )
- raise commands.BadArgument(f"Cannot connect to host with URL `{url}`.")
- except ValueError:
- raise commands.BadArgument(
- f"Failed to read Intersphinx inventory from URL `{url}`. "
- "Are you sure that it's a valid inventory file?"
- )
- return url
-
-
-class Doc(commands.Cog):
- """A set of commands for querying & displaying documentation."""
-
- def __init__(self, bot: Bot):
- self.base_urls = {}
- self.bot = bot
- self.inventories = {}
- self.renamed_symbols = set()
-
- self.bot.loop.create_task(self.init_refresh_inventory())
-
- async def init_refresh_inventory(self) -> None:
- """Refresh documentation inventory on cog initialization."""
- await self.bot.wait_until_guild_available()
- await self.refresh_inventory()
-
- async def update_single(
- self, package_name: str, base_url: str, inventory_url: str
- ) -> None:
- """
- Rebuild the inventory for a single package.
-
- Where:
- * `package_name` is the package name to use, appears in the log
- * `base_url` is the root documentation URL for the specified package, used to build
- absolute paths that link to specific symbols
- * `inventory_url` is the absolute URL to the intersphinx inventory, fetched by running
- `intersphinx.fetch_inventory` in an executor on the bot's event loop
- """
- self.base_urls[package_name] = base_url
-
- package = await self._fetch_inventory(inventory_url)
- if not package:
- return None
-
- for group, value in package.items():
- for symbol, (package_name, _version, relative_doc_url, _) in value.items():
- absolute_doc_url = base_url + relative_doc_url
-
- if symbol in self.inventories:
- group_name = group.split(":")[1]
- symbol_base_url = self.inventories[symbol].split("/", 3)[2]
- if (
- group_name in NO_OVERRIDE_GROUPS
- or any(package in symbol_base_url for package in NO_OVERRIDE_PACKAGES)
- ):
-
- symbol = f"{group_name}.{symbol}"
- # If renamed `symbol` already exists, add library name in front to differentiate between them.
- if symbol in self.renamed_symbols:
- # Split `package_name` because of packages like Pillow that have spaces in them.
- symbol = f"{package_name.split()[0]}.{symbol}"
-
- self.inventories[symbol] = absolute_doc_url
- self.renamed_symbols.add(symbol)
- continue
-
- self.inventories[symbol] = absolute_doc_url
-
- log.trace(f"Fetched inventory for {package_name}.")
-
- async def refresh_inventory(self) -> None:
- """Refresh internal documentation inventory."""
- log.debug("Refreshing documentation inventory...")
-
- # Clear the old base URLS and inventories to ensure
- # that we start from a fresh local dataset.
- # Also, reset the cache used for fetching documentation.
- self.base_urls.clear()
- self.inventories.clear()
- self.renamed_symbols.clear()
- symbol_cache.clear()
-
- # Run all coroutines concurrently - since each of them performs a HTTP
- # request, this speeds up fetching the inventory data heavily.
- coros = [
- self.update_single(
- package["package"], package["base_url"], package["inventory_url"]
- ) for package in await self.bot.api_client.get('bot/documentation-links')
- ]
- await asyncio.gather(*coros)
-
- async def get_symbol_html(self, symbol: str) -> Optional[Tuple[list, str]]:
- """
- Given a Python symbol, return its signature and description.
-
- The first tuple element is the signature of the given symbol as a markup-free string, and
- the second tuple element is the description of the given symbol with HTML markup included.
-
- If the given symbol is a module, returns a tuple `(None, str)`
- else if the symbol could not be found, returns `None`.
- """
- url = self.inventories.get(symbol)
- if url is None:
- return None
-
- async with self.bot.http_session.get(url) as response:
- html = await response.text(encoding='utf-8')
-
- # Find the signature header and parse the relevant parts.
- symbol_id = url.split('#')[-1]
- soup = BeautifulSoup(html, 'lxml')
- symbol_heading = soup.find(id=symbol_id)
- search_html = str(soup)
-
- if symbol_heading is None:
- return None
-
- if symbol_id == f"module-{symbol}":
- # Get page content from the module headerlink to the
- # first tag that has its class in `SEARCH_END_TAG_ATTRS`
- start_tag = symbol_heading.find("a", attrs={"class": "headerlink"})
- if start_tag is None:
- return [], ""
-
- end_tag = start_tag.find_next(self._match_end_tag)
- if end_tag is None:
- return [], ""
-
- description_start_index = search_html.find(str(start_tag.parent)) + len(str(start_tag.parent))
- description_end_index = search_html.find(str(end_tag))
- description = search_html[description_start_index:description_end_index]
- signatures = None
-
- else:
- signatures = []
- description = str(symbol_heading.find_next_sibling("dd"))
- description_pos = search_html.find(description)
- # Get text of up to 3 signatures, remove unwanted symbols
- for element in [symbol_heading] + symbol_heading.find_next_siblings("dt", limit=2):
- signature = UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element.text)
- if signature and search_html.find(str(element)) < description_pos:
- signatures.append(signature)
-
- return signatures, description.replace('¶', '')
-
- @symbol_cache(arg_offset=1)
- async def get_symbol_embed(self, symbol: str) -> Optional[discord.Embed]:
- """
- Attempt to scrape and fetch the data for the given `symbol`, and build an embed from its contents.
-
- If the symbol is known, an Embed with documentation about it is returned.
- """
- scraped_html = await self.get_symbol_html(symbol)
- if scraped_html is None:
- return None
-
- signatures = scraped_html[0]
- permalink = self.inventories[symbol]
- description = markdownify(scraped_html[1])
-
- # Truncate the description of the embed to the last occurrence
- # of a double newline (interpreted as a paragraph) before index 1000.
- if len(description) > 1000:
- shortened = description[:1000]
- description_cutoff = shortened.rfind('\n\n', 100)
- if description_cutoff == -1:
- # Search the shortened version for cutoff points in decreasing desirability,
- # cutoff at 1000 if none are found.
- for string in (". ", ", ", ",", " "):
- description_cutoff = shortened.rfind(string)
- if description_cutoff != -1:
- break
- else:
- description_cutoff = 1000
- description = description[:description_cutoff]
-
- # If there is an incomplete code block, cut it out
- if description.count("```") % 2:
- codeblock_start = description.rfind('```py')
- description = description[:codeblock_start].rstrip()
- description += f"... [read more]({permalink})"
-
- description = WHITESPACE_AFTER_NEWLINES_RE.sub('', description)
- if signatures is None:
- # If symbol is a module, don't show signature.
- embed_description = description
-
- elif not signatures:
- # It's some "meta-page", for example:
- # https://docs.djangoproject.com/en/dev/ref/views/#module-django.views
- embed_description = "This appears to be a generic page not tied to a specific symbol."
-
- else:
- embed_description = "".join(f"```py\n{textwrap.shorten(signature, 500)}```" for signature in signatures)
- embed_description += f"\n{description}"
-
- embed = discord.Embed(
- title=f'`{symbol}`',
- url=permalink,
- description=embed_description
- )
- # Show all symbols with the same name that were renamed in the footer.
- embed.set_footer(
- text=", ".join(renamed for renamed in self.renamed_symbols - {symbol} if renamed.endswith(f".{symbol}"))
- )
- return embed
-
- @commands.group(name='docs', aliases=('doc', 'd'), invoke_without_command=True)
- async def docs_group(self, ctx: commands.Context, symbol: commands.clean_content = None) -> None:
- """Lookup documentation for Python symbols."""
- await self.get_command(ctx, symbol)
-
- @docs_group.command(name='get', aliases=('g',))
- async def get_command(self, ctx: commands.Context, symbol: commands.clean_content = None) -> None:
- """
- Return a documentation embed for a given symbol.
-
- If no symbol is given, return a list of all available inventories.
-
- Examples:
- !docs
- !docs aiohttp
- !docs aiohttp.ClientSession
- !docs get aiohttp.ClientSession
- """
- if symbol is None:
- inventory_embed = discord.Embed(
- title=f"All inventories (`{len(self.base_urls)}` total)",
- colour=discord.Colour.blue()
- )
-
- lines = sorted(f"• [`{name}`]({url})" for name, url in self.base_urls.items())
- if self.base_urls:
- await LinePaginator.paginate(lines, ctx, inventory_embed, max_size=400, empty=False)
-
- else:
- inventory_embed.description = "Hmmm, seems like there's nothing here yet."
- await ctx.send(embed=inventory_embed)
-
- else:
- # Fetching documentation for a symbol (at least for the first time, since
- # caching is used) takes quite some time, so let's send typing to indicate
- # that we got the command, but are still working on it.
- async with ctx.typing():
- doc_embed = await self.get_symbol_embed(symbol)
-
- if doc_embed is None:
- error_embed = discord.Embed(
- description=f"Sorry, I could not find any documentation for `{symbol}`.",
- colour=discord.Colour.red()
- )
- error_message = await ctx.send(embed=error_embed)
- with suppress(NotFound):
- await error_message.delete(delay=NOT_FOUND_DELETE_DELAY)
- await ctx.message.delete(delay=NOT_FOUND_DELETE_DELAY)
- else:
- msg = await ctx.send(embed=doc_embed)
- await wait_for_deletion(msg, (ctx.author.id,))
-
- @docs_group.command(name='set', aliases=('s',))
- @commands.has_any_role(*MODERATION_ROLES)
- async def set_command(
- self, ctx: commands.Context, package_name: ValidPythonIdentifier,
- base_url: ValidURL, inventory_url: InventoryURL
- ) -> None:
- """
- Adds a new documentation metadata object to the site's database.
-
- The database will update the object, should an existing item with the specified `package_name` already exist.
-
- Example:
- !docs set \
- python \
- https://docs.python.org/3/ \
- https://docs.python.org/3/objects.inv
- """
- body = {
- 'package': package_name,
- 'base_url': base_url,
- 'inventory_url': inventory_url
- }
- await self.bot.api_client.post('bot/documentation-links', json=body)
-
- log.info(
- f"User @{ctx.author} ({ctx.author.id}) added a new documentation package:\n"
- f"Package name: {package_name}\n"
- f"Base url: {base_url}\n"
- f"Inventory URL: {inventory_url}"
- )
-
- # Rebuilding the inventory can take some time, so lets send out a
- # typing event to show that the Bot is still working.
- async with ctx.typing():
- await self.refresh_inventory()
- await ctx.send(f"Added package `{package_name}` to database and refreshed inventory.")
-
- @docs_group.command(name='delete', aliases=('remove', 'rm', 'd'))
- @commands.has_any_role(*MODERATION_ROLES)
- async def delete_command(self, ctx: commands.Context, package_name: ValidPythonIdentifier) -> None:
- """
- Removes the specified package from the database.
-
- Examples:
- !docs delete aiohttp
- """
- await self.bot.api_client.delete(f'bot/documentation-links/{package_name}')
-
- async with ctx.typing():
- # Rebuild the inventory to ensure that everything
- # that was from this package is properly deleted.
- await self.refresh_inventory()
- await ctx.send(f"Successfully deleted `{package_name}` and refreshed inventory.")
-
- @docs_group.command(name="refresh", aliases=("rfsh", "r"))
- @commands.has_any_role(*MODERATION_ROLES)
- async def refresh_command(self, ctx: commands.Context) -> None:
- """Refresh inventories and send differences to channel."""
- old_inventories = set(self.base_urls)
- with ctx.typing():
- await self.refresh_inventory()
- # Get differences of added and removed inventories
- added = ', '.join(inv for inv in self.base_urls if inv not in old_inventories)
- if added:
- added = f"+ {added}"
-
- removed = ', '.join(inv for inv in old_inventories if inv not in self.base_urls)
- if removed:
- removed = f"- {removed}"
-
- embed = discord.Embed(
- title="Inventories refreshed",
- description=f"```diff\n{added}\n{removed}```" if added or removed else ""
- )
- await ctx.send(embed=embed)
-
- async def _fetch_inventory(self, inventory_url: str) -> Optional[dict]:
- """Get and return inventory from `inventory_url`. If fetching fails, return None."""
- fetch_func = functools.partial(intersphinx.fetch_inventory, SPHINX_MOCK_APP, '', inventory_url)
- for retry in range(1, FAILED_REQUEST_RETRY_AMOUNT+1):
- try:
- package = await self.bot.loop.run_in_executor(None, fetch_func)
- except ConnectTimeout:
- log.error(
- f"Fetching of inventory {inventory_url} timed out,"
- f" trying again. ({retry}/{FAILED_REQUEST_RETRY_AMOUNT})"
- )
- except ProtocolError:
- log.error(
- f"Connection lost while fetching inventory {inventory_url},"
- f" trying again. ({retry}/{FAILED_REQUEST_RETRY_AMOUNT})"
- )
- except HTTPError as e:
- log.error(f"Fetching of inventory {inventory_url} failed with status code {e.response.status_code}.")
- return None
- except ConnectionError:
- log.error(f"Couldn't establish connection to inventory {inventory_url}.")
- return None
- else:
- return package
- log.error(f"Fetching of inventory {inventory_url} failed.")
- return None
-
- @staticmethod
- def _match_end_tag(tag: Tag) -> bool:
- """Matches `tag` if its class value is in `SEARCH_END_TAG_ATTRS` or the tag is table."""
- for attr in SEARCH_END_TAG_ATTRS:
- if attr in tag.get("class", ()):
- return True
-
- return tag.name == "table"
-
-
-def setup(bot: Bot) -> None:
- """Load the Doc cog."""
- bot.add_cog(Doc(bot))
diff --git a/bot/exts/info/doc/__init__.py b/bot/exts/info/doc/__init__.py
new file mode 100644
index 000000000..38a8975c0
--- /dev/null
+++ b/bot/exts/info/doc/__init__.py
@@ -0,0 +1,16 @@
+from bot.bot import Bot
+from ._redis_cache import DocRedisCache
+
+MAX_SIGNATURE_AMOUNT = 3
+PRIORITY_PACKAGES = (
+ "python",
+)
+NAMESPACE = "doc"
+
+doc_cache = DocRedisCache(namespace=NAMESPACE)
+
+
+def setup(bot: Bot) -> None:
+ """Load the Doc cog."""
+ from ._cog import DocCog
+ bot.add_cog(DocCog(bot))
diff --git a/bot/exts/info/doc/_batch_parser.py b/bot/exts/info/doc/_batch_parser.py
new file mode 100644
index 000000000..369bb462c
--- /dev/null
+++ b/bot/exts/info/doc/_batch_parser.py
@@ -0,0 +1,186 @@
+from __future__ import annotations
+
+import asyncio
+import collections
+import logging
+from collections import defaultdict
+from contextlib import suppress
+from operator import attrgetter
+from typing import Deque, Dict, List, NamedTuple, Optional, Union
+
+import discord
+from bs4 import BeautifulSoup
+
+import bot
+from bot.constants import Channels
+from bot.utils import scheduling
+from . import _cog, doc_cache
+from ._parsing import get_symbol_markdown
+
+log = logging.getLogger(__name__)
+
+
+class StaleInventoryNotifier:
+ """Handle sending notifications about stale inventories through `DocItem`s to dev log."""
+
+ def __init__(self):
+ self._init_task = bot.instance.loop.create_task(
+ self._init_channel(),
+ name="StaleInventoryNotifier channel init"
+ )
+ self._warned_urls = set()
+
+ async def _init_channel(self) -> None:
+ """Wait for guild and get channel."""
+ await bot.instance.wait_until_guild_available()
+ self._dev_log = bot.instance.get_channel(Channels.dev_log)
+
+ async def send_warning(self, doc_item: _cog.DocItem) -> None:
+ """Send a warning to dev log if one wasn't already sent for `item`'s url."""
+ if doc_item.url not in self._warned_urls:
+ self._warned_urls.add(doc_item.url)
+ await self._init_task
+ embed = discord.Embed(
+ description=f"Doc item `{doc_item.symbol_id=}` present in loaded documentation inventories "
+ f"not found on [site]({doc_item.url}), inventories may need to be refreshed."
+ )
+ await self._dev_log.send(embed=embed)
+
+
+class QueueItem(NamedTuple):
+ """Contains a `DocItem` and the `BeautifulSoup` object needed to parse it."""
+
+ doc_item: _cog.DocItem
+ soup: BeautifulSoup
+
+ def __eq__(self, other: Union[QueueItem, _cog.DocItem]):
+ if isinstance(other, _cog.DocItem):
+ return self.doc_item == other
+ return NamedTuple.__eq__(self, other)
+
+
+class ParseResultFuture(asyncio.Future):
+ """
+ Future with metadata for the parser class.
+
+ `user_requested` is set by the parser when a Future is requested by a user and moved to the front,
+ allowing the futures to only be waited for when clearing if they were user requested.
+ """
+
+ def __init__(self):
+ super().__init__()
+ self.user_requested = False
+
+
+class BatchParser:
+ """
+ Get the Markdown of all symbols on a page and send them to redis when a symbol is requested.
+
+ DocItems are added through the `add_item` method which adds them to the `_page_doc_items` dict.
+ `get_markdown` is used to fetch the Markdown; when this is used for the first time on a page,
+ all of the symbols are queued to be parsed to avoid multiple web requests to the same page.
+ """
+
+ def __init__(self):
+ self._queue: Deque[QueueItem] = collections.deque()
+ self._page_doc_items: Dict[str, List[_cog.DocItem]] = defaultdict(list)
+ self._item_futures: Dict[_cog.DocItem, ParseResultFuture] = defaultdict(ParseResultFuture)
+ self._parse_task = None
+
+ self.stale_inventory_notifier = StaleInventoryNotifier()
+
+ async def get_markdown(self, doc_item: _cog.DocItem) -> Optional[str]:
+ """
+ Get the result Markdown of `doc_item`.
+
+ If no symbols were fetched from `doc_item`'s page before,
+ the HTML has to be fetched and then all items from the page are put into the parse queue.
+
+ Not safe to run while `self.clear` is running.
+ """
+ if doc_item not in self._item_futures and doc_item not in self._queue:
+ self._item_futures[doc_item].user_requested = True
+
+ async with bot.instance.http_session.get(doc_item.url) as response:
+ soup = await bot.instance.loop.run_in_executor(
+ None,
+ BeautifulSoup,
+ await response.text(encoding="utf8"),
+ "lxml",
+ )
+
+ self._queue.extendleft(QueueItem(item, soup) for item in self._page_doc_items[doc_item.url])
+ log.debug(f"Added items from {doc_item.url} to the parse queue.")
+
+ if self._parse_task is None:
+ self._parse_task = scheduling.create_task(self._parse_queue(), name="Queue parse")
+ else:
+ self._item_futures[doc_item].user_requested = True
+ with suppress(ValueError):
+ # If the item is not in the queue then the item is already parsed or is being parsed
+ self._move_to_front(doc_item)
+ return await self._item_futures[doc_item]
+
+ async def _parse_queue(self) -> None:
+ """
+ Parse all items from the queue, setting their result Markdown on the futures and sending them to redis.
+
+ The coroutine will run as long as the queue is not empty, resetting `self._parse_task` to None when finished.
+ """
+ log.trace("Starting queue parsing.")
+ try:
+ while self._queue:
+ item, soup = self._queue.pop()
+ markdown = None
+
+ if (future := self._item_futures[item]).done():
+ # Some items are present in the inventories multiple times under different symbol names;
+ # if we already parsed an equal item, we can just skip it.
+ continue
+
+ try:
+ markdown = await bot.instance.loop.run_in_executor(None, get_symbol_markdown, soup, item)
+ if markdown is not None:
+ await doc_cache.set(item, markdown)
+ else:
+ # Don't wait for this coro as the parsing doesn't depend on anything it does.
+ scheduling.create_task(
+ self.stale_inventory_notifier.send_warning(item), name="Stale inventory warning"
+ )
+ except Exception:
+ log.exception(f"Unexpected error when handling {item}")
+ future.set_result(markdown)
+ del self._item_futures[item]
+ await asyncio.sleep(0.1)
+ finally:
+ self._parse_task = None
+ log.trace("Finished parsing queue.")
+
+ def _move_to_front(self, item: Union[QueueItem, _cog.DocItem]) -> None:
+ """Move `item` to the front of the parse queue."""
+ # The parse queue stores soups along with the doc symbols in QueueItem objects;
+ # when moving a DocItem, we have to get the associated QueueItem first and then move it.
+ item_index = self._queue.index(item)
+ queue_item = self._queue[item_index]
+ del self._queue[item_index]
+
+ self._queue.append(queue_item)
+ log.trace(f"Moved {item} to the front of the queue.")
+
+ def add_item(self, doc_item: _cog.DocItem) -> None:
+ """Map a DocItem to its page so that the symbol will be parsed once the page is requested."""
+ self._page_doc_items[doc_item.url].append(doc_item)
+
+ async def clear(self) -> None:
+ """
+ Clear all internal symbol data.
+
+ Wait for all user-requested symbols to be parsed before clearing the parser.
+ """
+ for future in filter(attrgetter("user_requested"), self._item_futures.values()):
+ await future
+ if self._parse_task is not None:
+ self._parse_task.cancel()
+ self._queue.clear()
+ self._page_doc_items.clear()
+ self._item_futures.clear()
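
To make the queueing scheme above concrete, here is a minimal, self-contained sketch of the same idea: a deque of queued symbols plus one `asyncio.Future` per symbol, so every requester of a page shares a single parse pass. The stubbed `parse` coroutine and all names here are illustrative, not the cog's actual code.

```python
import asyncio
import collections

# Stand-in for the real HTML-to-Markdown parsing step.
async def parse(symbol: str) -> str:
    await asyncio.sleep(0.01)  # simulate parsing work
    return f"markdown for {symbol}"

class MiniBatchParser:
    """One future per symbol; the whole page is queued on the first request."""

    def __init__(self):
        self._queue = collections.deque()
        self._futures = {}  # symbol -> asyncio.Future
        self._task = None

    async def get(self, symbol: str, page_symbols: list) -> str:
        if symbol not in self._futures:
            # First request for this page: queue every symbol on it at once,
            # so a single fetch serves all later requests.
            loop = asyncio.get_running_loop()
            for queued in page_symbols:
                self._futures[queued] = loop.create_future()
                self._queue.appendleft(queued)
            if self._task is None:
                self._task = asyncio.create_task(self._parse_queue())
        return await self._futures[symbol]

    async def _parse_queue(self) -> None:
        try:
            while self._queue:
                symbol = self._queue.pop()
                self._futures[symbol].set_result(await parse(symbol))
        finally:
            self._task = None

async def main() -> None:
    parser = MiniBatchParser()
    page = ["int", "str", "list"]
    print(await asyncio.gather(*(parser.get(name, page) for name in page)))

asyncio.run(main())
```
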
diff --git a/bot/exts/info/doc/_cog.py b/bot/exts/info/doc/_cog.py
new file mode 100644
index 000000000..2a8016fb8
--- /dev/null
+++ b/bot/exts/info/doc/_cog.py
@@ -0,0 +1,442 @@
+from __future__ import annotations
+
+import asyncio
+import logging
+import sys
+import textwrap
+from collections import defaultdict
+from contextlib import suppress
+from types import SimpleNamespace
+from typing import Dict, NamedTuple, Optional, Tuple, Union
+
+import aiohttp
+import discord
+from discord.ext import commands
+
+from bot.bot import Bot
+from bot.constants import MODERATION_ROLES, RedirectOutput
+from bot.converters import Inventory, PackageName, ValidURL, allowed_strings
+from bot.pagination import LinePaginator
+from bot.utils.lock import SharedEvent, lock
+from bot.utils.messages import send_denial, wait_for_deletion
+from bot.utils.scheduling import Scheduler
+from . import NAMESPACE, PRIORITY_PACKAGES, _batch_parser, doc_cache
+from ._inventory_parser import InventoryDict, fetch_inventory
+
+log = logging.getLogger(__name__)
+
+# Symbols with a group contained here will get the group prefixed on duplicates.
+FORCE_PREFIX_GROUPS = (
+ "2to3fixer",
+ "token",
+ "label",
+ "pdbcommand",
+ "term",
+)
+NOT_FOUND_DELETE_DELAY = RedirectOutput.delete_delay
+# Delay to wait before trying to reach a rescheduled inventory again, in minutes
+FETCH_RESCHEDULE_DELAY = SimpleNamespace(first=2, repeated=5)
+
+COMMAND_LOCK_SINGLETON = "inventory refresh"
+
+
+class DocItem(NamedTuple):
+ """Holds inventory symbol information."""
+
+ package: str # Name of the package the symbol is from
+ group: str # Intersphinx "role" of the symbol, for example `label` or `method`
+ base_url: str # Absolute path to which the relative path resolves, same for all items with the same package
+ relative_url_path: str # Relative path to the page where the symbol is located
+ symbol_id: str # Fragment id used to locate the symbol on the page
+
+ @property
+ def url(self) -> str:
+ """Return the absolute url to the symbol."""
+ return self.base_url + self.relative_url_path
+
+
+class DocCog(commands.Cog):
+ """A set of commands for querying & displaying documentation."""
+
+ def __init__(self, bot: Bot):
+ # Contains URLs to documentation home pages.
+ # Used to calculate inventory diffs on refreshes and to display all currently stored inventories.
+ self.base_urls = {}
+ self.bot = bot
+ self.doc_symbols: Dict[str, DocItem] = {} # Maps symbol names to objects containing their metadata.
+ self.item_fetcher = _batch_parser.BatchParser()
+ # Maps a conflicting symbol name to a list of the new, disambiguated names created from conflicts with the name.
+ self.renamed_symbols = defaultdict(list)
+
+ self.inventory_scheduler = Scheduler(self.__class__.__name__)
+
+ self.refresh_event = asyncio.Event()
+ self.refresh_event.set()
+ self.symbol_get_event = SharedEvent()
+
+ self.init_refresh_task = self.bot.loop.create_task(
+ self.init_refresh_inventory(),
+ name="Doc inventory init"
+ )
+
+ @lock(NAMESPACE, COMMAND_LOCK_SINGLETON, raise_error=True)
+ async def init_refresh_inventory(self) -> None:
+ """Refresh documentation inventory on cog initialization."""
+ await self.bot.wait_until_guild_available()
+ await self.refresh_inventories()
+
+ def update_single(self, package_name: str, base_url: str, inventory: InventoryDict) -> None:
+ """
+ Build the inventory for a single package.
+
+ Where:
+ * `package_name` is the package name to use in logs and when qualifying symbols
+ * `base_url` is the root documentation URL for the specified package, used to build
+ absolute paths that link to specific symbols
+ * `inventory` is the content of an intersphinx inventory.
+ """
+ self.base_urls[package_name] = base_url
+
+ for group, items in inventory.items():
+ for symbol_name, relative_doc_url in items:
+
+ # e.g. get 'class' from 'py:class'
+ group_name = group.split(":")[1]
+ symbol_name = self.ensure_unique_symbol_name(
+ package_name,
+ group_name,
+ symbol_name,
+ )
+
+ relative_url_path, _, symbol_id = relative_doc_url.partition("#")
+ # Intern fields that have shared content so we're not storing unique strings for every object
+ doc_item = DocItem(
+ package_name,
+ sys.intern(group_name),
+ base_url,
+ sys.intern(relative_url_path),
+ symbol_id,
+ )
+ self.doc_symbols[symbol_name] = doc_item
+ self.item_fetcher.add_item(doc_item)
+
+ log.trace(f"Fetched inventory for {package_name}.")
+
+ async def update_or_reschedule_inventory(
+ self,
+ api_package_name: str,
+ base_url: str,
+ inventory_url: str,
+ ) -> None:
+ """
+ Update the cog's inventories, or reschedule this method to execute again if the remote inventory is unreachable.
+
+ The first attempt is rescheduled to execute in `FETCH_RESCHEDULE_DELAY.first` minutes, and subsequent attempts
+ in `FETCH_RESCHEDULE_DELAY.repeated` minutes.
+ """
+ package = await fetch_inventory(inventory_url)
+
+ if not package:
+ if api_package_name in self.inventory_scheduler:
+ self.inventory_scheduler.cancel(api_package_name)
+ delay = FETCH_RESCHEDULE_DELAY.repeated
+ else:
+ delay = FETCH_RESCHEDULE_DELAY.first
+ log.info(f"Failed to fetch inventory; attempting again in {delay} minutes.")
+ self.inventory_scheduler.schedule_later(
+ delay*60,
+ api_package_name,
+ self.update_or_reschedule_inventory(api_package_name, base_url, inventory_url),
+ )
+ else:
+ self.update_single(api_package_name, base_url, package)
+
+ def ensure_unique_symbol_name(self, package_name: str, group_name: str, symbol_name: str) -> str:
+ """
+ Ensure `symbol_name` doesn't overwrite another symbol in `doc_symbols`.
+
+ For conflicts, rename either the current symbol or the existing symbol with which it conflicts.
+ Store the new name in `renamed_symbols` and return the name to use for the symbol.
+
+ If the existing symbol was renamed or there was no conflict, the returned name is equivalent to `symbol_name`.
+ """
+ if (item := self.doc_symbols.get(symbol_name)) is None:
+ return symbol_name # There's no conflict so it's fine to simply use the given symbol name.
+
+ def rename(prefix: str, *, rename_extant: bool = False) -> str:
+ new_name = f"{prefix}.{symbol_name}"
+ if new_name in self.doc_symbols:
+ # If there's still a conflict, qualify the name further.
+ if rename_extant:
+ new_name = f"{item.package}.{item.group}.{symbol_name}"
+ else:
+ new_name = f"{package_name}.{group_name}.{symbol_name}"
+
+ self.renamed_symbols[symbol_name].append(new_name)
+
+ if rename_extant:
+ # Instead of renaming the current symbol, rename the symbol with which it conflicts.
+ self.doc_symbols[new_name] = self.doc_symbols[symbol_name]
+ return symbol_name
+ else:
+ return new_name
+
+ # Certain groups are added as prefixes to disambiguate the symbols.
+ if group_name in FORCE_PREFIX_GROUPS:
+ return rename(group_name)
+
+ # The existing symbol with which the current symbol conflicts should have a group prefix.
+ # It currently doesn't have the group prefix because it's only added once there's a conflict.
+ elif item.group in FORCE_PREFIX_GROUPS:
+ return rename(item.group, rename_extant=True)
+
+ elif package_name in PRIORITY_PACKAGES:
+ return rename(item.package, rename_extant=True)
+
+ # If we can't specially handle the symbol through its group or package,
+ # fall back to prepending its package name to the front.
+ else:
+ return rename(package_name)
+
+ async def refresh_inventories(self) -> None:
+ """Refresh internal documentation inventories."""
+ self.refresh_event.clear()
+ await self.symbol_get_event.wait()
+ log.debug("Refreshing documentation inventory...")
+ self.inventory_scheduler.cancel_all()
+
+ self.base_urls.clear()
+ self.doc_symbols.clear()
+ self.renamed_symbols.clear()
+ await self.item_fetcher.clear()
+
+ coros = [
+ self.update_or_reschedule_inventory(
+ package["package"], package["base_url"], package["inventory_url"]
+ ) for package in await self.bot.api_client.get("bot/documentation-links")
+ ]
+ await asyncio.gather(*coros)
+ log.debug("Finished inventory refresh.")
+ self.refresh_event.set()
+
+ def get_symbol_item(self, symbol_name: str) -> Tuple[str, Optional[DocItem]]:
+ """
+ Get the `DocItem` and the symbol name used to fetch it from the `doc_symbols` dict.
+
+ If the doc item is not found directly from the passed in name and the name contains a space,
+ the lookup is attempted again using the first word of the name.
+ """
+ doc_item = self.doc_symbols.get(symbol_name)
+ if doc_item is None and " " in symbol_name:
+ symbol_name = symbol_name.split(" ", maxsplit=1)[0]
+ doc_item = self.doc_symbols.get(symbol_name)
+
+ return symbol_name, doc_item
+
+ async def get_symbol_markdown(self, doc_item: DocItem) -> str:
+ """
+ Get the Markdown from the symbol `doc_item` refers to.
+
+ First a redis lookup is attempted; if that fails, the `item_fetcher`
+ is used to fetch the page and parse the HTML from it into Markdown.
+ """
+ markdown = await doc_cache.get(doc_item)
+
+ if markdown is None:
+ log.debug(f"Redis cache miss with {doc_item}.")
+ try:
+ markdown = await self.item_fetcher.get_markdown(doc_item)
+
+ except aiohttp.ClientError as e:
+ log.warning(f"A network error has occurred when requesting parsing of {doc_item}.", exc_info=e)
+ return "Unable to parse the requested symbol due to a network error."
+
+ except Exception:
+ log.exception(f"An unexpected error has occurred when requesting parsing of {doc_item}.")
+ return "Unable to parse the requested symbol due to an error."
+
+ if markdown is None:
+ return "Unable to parse the requested symbol."
+ return markdown
+
+ async def create_symbol_embed(self, symbol_name: str) -> Optional[discord.Embed]:
+ """
+ Attempt to scrape and fetch the data for the given `symbol_name`, and build an embed from its contents.
+
+ If the symbol is known, an Embed with documentation about it is returned.
+
+ First check the DocRedisCache before querying the cog's `BatchParser`.
+ """
+ log.trace(f"Building embed for symbol `{symbol_name}`")
+ if not self.refresh_event.is_set():
+ log.debug("Waiting for inventories to be refreshed before processing item.")
+ await self.refresh_event.wait()
+ # Ensure a refresh can't run in case of a context switch until the with block is exited
+ with self.symbol_get_event:
+ symbol_name, doc_item = self.get_symbol_item(symbol_name)
+ if doc_item is None:
+ log.debug("Symbol does not exist.")
+ return None
+
+ self.bot.stats.incr(f"doc_fetches.{doc_item.package}")
+
+ # Show all symbols with the same name that were renamed in the footer,
+ # with a max of 200 chars.
+ if symbol_name in self.renamed_symbols:
+ renamed_symbols = ", ".join(self.renamed_symbols[symbol_name])
+ footer_text = textwrap.shorten("Similar names: " + renamed_symbols, 200, placeholder=" ...")
+ else:
+ footer_text = ""
+
+ embed = discord.Embed(
+ title=discord.utils.escape_markdown(symbol_name),
+ url=f"{doc_item.url}#{doc_item.symbol_id}",
+ description=await self.get_symbol_markdown(doc_item)
+ )
+ embed.set_footer(text=footer_text)
+ return embed
+
+ @commands.group(name="docs", aliases=("doc", "d"), invoke_without_command=True)
+ async def docs_group(self, ctx: commands.Context, *, symbol_name: Optional[str]) -> None:
+ """Look up documentation for Python symbols."""
+ await self.get_command(ctx, symbol_name=symbol_name)
+
+ @docs_group.command(name="getdoc", aliases=("g",))
+ async def get_command(self, ctx: commands.Context, *, symbol_name: Optional[str]) -> None:
+ """
+ Return a documentation embed for a given symbol.
+
+ If no symbol is given, return a list of all available inventories.
+
+ Examples:
+ !docs
+ !docs aiohttp
+ !docs aiohttp.ClientSession
+ !docs getdoc aiohttp.ClientSession
+ """
+ if not symbol_name:
+ inventory_embed = discord.Embed(
+ title=f"All inventories (`{len(self.base_urls)}` total)",
+ colour=discord.Colour.blue()
+ )
+
+ lines = sorted(f"• [`{name}`]({url})" for name, url in self.base_urls.items())
+ if self.base_urls:
+ await LinePaginator.paginate(lines, ctx, inventory_embed, max_size=400, empty=False)
+
+ else:
+ inventory_embed.description = "Hmmm, seems like there's nothing here yet."
+ await ctx.send(embed=inventory_embed)
+
+ else:
+ symbol = symbol_name.strip("`")
+ async with ctx.typing():
+ doc_embed = await self.create_symbol_embed(symbol)
+
+ if doc_embed is None:
+ error_message = await send_denial(ctx, "No documentation found for the requested symbol.")
+ await wait_for_deletion(error_message, (ctx.author.id,), timeout=NOT_FOUND_DELETE_DELAY)
+ with suppress(discord.NotFound):
+ await ctx.message.delete()
+ with suppress(discord.NotFound):
+ await error_message.delete()
+ else:
+ msg = await ctx.send(embed=doc_embed)
+ await wait_for_deletion(msg, (ctx.author.id,))
+
+ @docs_group.command(name="setdoc", aliases=("s",))
+ @commands.has_any_role(*MODERATION_ROLES)
+ @lock(NAMESPACE, COMMAND_LOCK_SINGLETON, raise_error=True)
+ async def set_command(
+ self,
+ ctx: commands.Context,
+ package_name: PackageName,
+ base_url: ValidURL,
+ inventory: Inventory,
+ ) -> None:
+ """
+ Adds a new documentation metadata object to the site's database.
+
+ The database will update the object, should an existing item with the specified `package_name` already exist.
+
+ Example:
+ !docs setdoc \
+ python \
+ https://docs.python.org/3/ \
+ https://docs.python.org/3/objects.inv
+ """
+ if not base_url.endswith("/"):
+ raise commands.BadArgument("The base url must end with a slash.")
+ inventory_url, inventory_dict = inventory
+ body = {
+ "package": package_name,
+ "base_url": base_url,
+ "inventory_url": inventory_url
+ }
+ await self.bot.api_client.post("bot/documentation-links", json=body)
+
+ log.info(
+ f"User @{ctx.author} ({ctx.author.id}) added a new documentation package:\n"
+ + "\n".join(f"{key}: {value}" for key, value in body.items())
+ )
+
+ self.update_single(package_name, base_url, inventory_dict)
+ await ctx.send(f"Added the package `{package_name}` to the database and updated the inventories.")
+
+ @docs_group.command(name="deletedoc", aliases=("removedoc", "rm", "d"))
+ @commands.has_any_role(*MODERATION_ROLES)
+ @lock(NAMESPACE, COMMAND_LOCK_SINGLETON, raise_error=True)
+ async def delete_command(self, ctx: commands.Context, package_name: PackageName) -> None:
+ """
+ Removes the specified package from the database.
+
+ Example:
+ !docs deletedoc aiohttp
+ """
+ await self.bot.api_client.delete(f"bot/documentation-links/{package_name}")
+
+ async with ctx.typing():
+ await self.refresh_inventories()
+ await doc_cache.delete(package_name)
+ await ctx.send(f"Successfully deleted `{package_name}` and refreshed the inventories.")
+
+ @docs_group.command(name="refreshdoc", aliases=("rfsh", "r"))
+ @commands.has_any_role(*MODERATION_ROLES)
+ @lock(NAMESPACE, COMMAND_LOCK_SINGLETON, raise_error=True)
+ async def refresh_command(self, ctx: commands.Context) -> None:
+ """Refresh inventories and show the difference."""
+ old_inventories = set(self.base_urls)
+ with ctx.typing():
+ await self.refresh_inventories()
+ new_inventories = set(self.base_urls)
+
+ if added := ", ".join(new_inventories - old_inventories):
+ added = "+ " + added
+
+ if removed := ", ".join(old_inventories - new_inventories):
+ removed = "- " + removed
+
+ embed = discord.Embed(
+ title="Inventories refreshed",
+ description=f"```diff\n{added}\n{removed}```" if added or removed else ""
+ )
+ await ctx.send(embed=embed)
+
+ @docs_group.command(name="cleardoccache", aliases=("deletedoccache",))
+ @commands.has_any_role(*MODERATION_ROLES)
+ async def clear_cache_command(
+ self,
+ ctx: commands.Context,
+ package_name: Union[PackageName, allowed_strings("*")] # noqa: F722
+ ) -> None:
+ """Clear the persistent redis cache for `package`."""
+ if await doc_cache.delete(package_name):
+ await ctx.send(f"Successfully cleared the cache for `{package_name}`.")
+ else:
+ await ctx.send("No keys matching the package found.")
+
+ def cog_unload(self) -> None:
+ """Clear scheduled inventories, queued symbols and cleanup task on cog unload."""
+ self.inventory_scheduler.cancel_all()
+ self.init_refresh_task.cancel()
+ asyncio.create_task(self.item_fetcher.clear(), name="DocCog.item_fetcher unload clear")
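
One detail worth illustrating from `update_single` above: interning the group and relative URL path with `sys.intern` means the thousands of `DocItem`s built per inventory share one string object per distinct value instead of each holding its own copy. A standalone sketch of the effect, with illustrative values:

```python
import sys

prefix = "py:"

# Interned: every equal string maps to one shared object.
interned = [sys.intern(prefix + "class") for _ in range(3)]
assert interned[0] is interned[1] is interned[2]

# Built at runtime without interning: equal values, but (typically) distinct
# objects, so many DocItems would each carry their own copy.
plain = [prefix + "class" for _ in range(3)]
print(plain[0] == interned[0])  # True: same value
print(plain[0] is plain[1])     # False: separate objects
```
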
diff --git a/bot/exts/info/doc/_html.py b/bot/exts/info/doc/_html.py
new file mode 100644
index 000000000..94efd81b7
--- /dev/null
+++ b/bot/exts/info/doc/_html.py
@@ -0,0 +1,136 @@
+import logging
+import re
+from functools import partial
+from typing import Callable, Container, Iterable, List, Union
+
+from bs4 import BeautifulSoup
+from bs4.element import NavigableString, PageElement, SoupStrainer, Tag
+
+from . import MAX_SIGNATURE_AMOUNT
+
+log = logging.getLogger(__name__)
+
+_UNWANTED_SIGNATURE_SYMBOLS_RE = re.compile(r"\[source]|\\\\|¶")
+_SEARCH_END_TAG_ATTRS = (
+ "data",
+ "function",
+ "class",
+ "exception",
+ "seealso",
+ "section",
+ "rubric",
+ "sphinxsidebar",
+)
+
+
+class Strainer(SoupStrainer):
+ """Subclass of SoupStrainer to allow matching of both `Tag`s and `NavigableString`s."""
+
+ def __init__(self, *, include_strings: bool, **kwargs):
+ self.include_strings = include_strings
+ passed_text = kwargs.pop("text", None)
+ if passed_text is not None:
+ log.warning("`text` is not a supported kwarg in the custom strainer.")
+ super().__init__(**kwargs)
+
+ Markup = Union[PageElement, List["Markup"]]
+
+ def search(self, markup: Markup) -> Union[PageElement, str]:
+ """Extend default SoupStrainer behaviour to allow matching both `Tag`s` and `NavigableString`s."""
+ if isinstance(markup, str):
+ # Let everything through the text filter if we're including strings and tags.
+ if not self.name and not self.attrs and self.include_strings:
+ return markup
+ else:
+ return super().search(markup)
+
+
+def _find_elements_until_tag(
+ start_element: PageElement,
+ end_tag_filter: Union[Container[str], Callable[[Tag], bool]],
+ *,
+ func: Callable,
+ include_strings: bool = False,
+ limit: int = None,
+) -> List[Union[Tag, NavigableString]]:
+ """
+ Get all elements up to `limit` or until a tag matching `end_tag_filter` is found.
+
+ `end_tag_filter` can be either a container of string names to check against,
+ or a filtering callable that's applied to tags.
+
+ When `include_strings` is True, `NavigableString`s from the document will be included in the result alongside `Tag`s.
+
+ `func` takes in a BeautifulSoup unbound method for finding multiple elements, such as `BeautifulSoup.find_all`.
+ Its results are iterated over, and all elements up to the matching tag or the limit are added to the returned list.
+ """
+ use_container_filter = not callable(end_tag_filter)
+ elements = []
+
+ for element in func(start_element, name=Strainer(include_strings=include_strings), limit=limit):
+ if isinstance(element, Tag):
+ if use_container_filter:
+ if element.name in end_tag_filter:
+ break
+ elif end_tag_filter(element):
+ break
+ elements.append(element)
+
+ return elements
+
+
+_find_next_children_until_tag = partial(_find_elements_until_tag, func=partial(BeautifulSoup.find_all, recursive=False))
+_find_recursive_children_until_tag = partial(_find_elements_until_tag, func=BeautifulSoup.find_all)
+_find_next_siblings_until_tag = partial(_find_elements_until_tag, func=BeautifulSoup.find_next_siblings)
+_find_previous_siblings_until_tag = partial(_find_elements_until_tag, func=BeautifulSoup.find_previous_siblings)
+
+
+def _class_filter_factory(class_names: Iterable[str]) -> Callable[[Tag], bool]:
+ """Create callable that returns True when the passed in tag's class is in `class_names` or when it's a table."""
+ def match_tag(tag: Tag) -> bool:
+ for attr in class_names:
+ if attr in tag.get("class", ()):
+ return True
+ return tag.name == "table"
+
+ return match_tag
+
+
+def get_general_description(start_element: Tag) -> List[Union[Tag, NavigableString]]:
+ """
+ Get page content up to a table or a tag with its class in `_SEARCH_END_TAG_ATTRS`.
+
+ An attempt is made to find a headerlink tag, to avoid repeating the symbol information in the description.
+ If one is found, it's used as the tag to start the search from instead of the `start_element`.
+ """
+ child_tags = _find_recursive_children_until_tag(start_element, _class_filter_factory(["section"]), limit=100)
+ header = next(filter(_class_filter_factory(["headerlink"]), child_tags), None)
+ start_tag = header.parent if header is not None else start_element
+ return _find_next_siblings_until_tag(start_tag, _class_filter_factory(_SEARCH_END_TAG_ATTRS), include_strings=True)
+
+
+def get_dd_description(symbol: PageElement) -> List[Union[Tag, NavigableString]]:
+ """Get the contents of the next dd tag, up to a dt or a dl tag."""
+ description_tag = symbol.find_next("dd")
+ return _find_next_children_until_tag(description_tag, ("dt", "dl"), include_strings=True)
+
+
+def get_signatures(start_signature: PageElement) -> List[str]:
+ """
+ Collect up to `MAX_SIGNATURE_AMOUNT` signatures from dt tags around the `start_signature` dt tag.
+
+ First the signatures under the `start_signature` are included;
+ if fewer than 2 are found, tags above the start signature are added to the result if any are present.
+ """
+ signatures = []
+ for element in (
+ *reversed(_find_previous_siblings_until_tag(start_signature, ("dd",), limit=2)),
+ start_signature,
+ *_find_next_siblings_until_tag(start_signature, ("dd",), limit=2),
+ )[-MAX_SIGNATURE_AMOUNT:]:
+ signature = _UNWANTED_SIGNATURE_SYMBOLS_RE.sub("", element.text)
+
+ if signature:
+ signatures.append(signature)
+
+ return signatures
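
The `_find_*_until_tag` helpers above are all partial applications of one generic walker. A simplified demonstration of the same "collect siblings until an end tag" idea with stock BeautifulSoup, without the custom `Strainer` (so only tags are collected; the HTML fragment is made up):

```python
from bs4 import BeautifulSoup

html = """
<dl>
  <dt id="demo.func">demo.func(x)</dt>
  <dd><p>First paragraph.</p><p>Second paragraph.</p></dd>
  <dt id="demo.other">demo.other()</dt>
</dl>
"""
soup = BeautifulSoup(html, "html.parser")

# Collect the dt's following siblings until a tag named in the end filter,
# mirroring _find_next_siblings_until_tag with a container filter.
start = soup.find(id="demo.func")
collected = []
for element in start.find_next_siblings():
    if element.name in ("dt",):  # end-tag filter: stop at the next definition
        break
    collected.append(element)

print([el.name for el in collected])  # ['dd']
```
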
diff --git a/bot/exts/info/doc/_inventory_parser.py b/bot/exts/info/doc/_inventory_parser.py
new file mode 100644
index 000000000..80d5841a0
--- /dev/null
+++ b/bot/exts/info/doc/_inventory_parser.py
@@ -0,0 +1,126 @@
+import logging
+import re
+import zlib
+from collections import defaultdict
+from typing import AsyncIterator, DefaultDict, List, Optional, Tuple
+
+import aiohttp
+
+import bot
+
+log = logging.getLogger(__name__)
+
+FAILED_REQUEST_ATTEMPTS = 3
+_V2_LINE_RE = re.compile(r'(?x)(.+?)\s+(\S*:\S*)\s+(-?\d+)\s+?(\S*)\s+(.*)')
+
+InventoryDict = DefaultDict[str, List[Tuple[str, str]]]
+
+
+class ZlibStreamReader:
+ """Class used for decoding zlib data of a stream line by line."""
+
+ READ_CHUNK_SIZE = 16 * 1024
+
+ def __init__(self, stream: aiohttp.StreamReader) -> None:
+ self.stream = stream
+
+ async def _read_compressed_chunks(self) -> AsyncIterator[bytes]:
+ """Read zlib data in `READ_CHUNK_SIZE` sized chunks and decompress."""
+ decompressor = zlib.decompressobj()
+ async for chunk in self.stream.iter_chunked(self.READ_CHUNK_SIZE):
+ yield decompressor.decompress(chunk)
+
+ yield decompressor.flush()
+
+ async def __aiter__(self) -> AsyncIterator[str]:
+ """Yield lines of decompressed text."""
+ buf = b''
+ async for chunk in self._read_compressed_chunks():
+ buf += chunk
+ pos = buf.find(b'\n')
+ while pos != -1:
+ yield buf[:pos].decode()
+ buf = buf[pos + 1:]
+ pos = buf.find(b'\n')
+
+
+async def _load_v1(stream: aiohttp.StreamReader) -> InventoryDict:
+ invdata = defaultdict(list)
+
+ async for line in stream:
+ name, type_, location = line.decode().rstrip().split(maxsplit=2)
+ # version 1 did not add anchors to the location
+ if type_ == "mod":
+ type_ = "py:module"
+ location += "#module-" + name
+ else:
+ type_ = "py:" + type_
+ location += "#" + name
+ invdata[type_].append((name, location))
+ return invdata
+
+
+async def _load_v2(stream: aiohttp.StreamReader) -> InventoryDict:
+ invdata = defaultdict(list)
+
+ async for line in ZlibStreamReader(stream):
+ m = _V2_LINE_RE.match(line.rstrip())
+ name, type_, _prio, location, _dispname = m.groups() # ignore the parsed items we don't need
+ if location.endswith("$"):
+ location = location[:-1] + name
+
+ invdata[type_].append((name, location))
+ return invdata
+
+
+async def _fetch_inventory(url: str) -> InventoryDict:
+ """Fetch, parse and return an intersphinx inventory file from an url."""
+ timeout = aiohttp.ClientTimeout(sock_connect=5, sock_read=5)
+ async with bot.instance.http_session.get(url, timeout=timeout, raise_for_status=True) as response:
+ stream = response.content
+
+ inventory_header = (await stream.readline()).decode().rstrip()
+ inventory_version = int(inventory_header[-1:])
+ await stream.readline() # skip project name
+ await stream.readline() # skip project version
+
+ if inventory_version == 1:
+ return await _load_v1(stream)
+
+ elif inventory_version == 2:
+ if b"zlib" not in await stream.readline():
+ raise ValueError(f"Invalid inventory file at url {url}.")
+ return await _load_v2(stream)
+
+ raise ValueError(f"Invalid inventory file at url {url}.")
+
+
+async def fetch_inventory(url: str) -> Optional[InventoryDict]:
+ """
+ Get an inventory dict from `url`, retrying `FAILED_REQUEST_ATTEMPTS` times on errors.
+
+ `url` should point at a valid sphinx objects.inv inventory file, which will be parsed into the
+ inventory dict in the format of {"domain:role": [("symbol_name", "relative_url_to_symbol"), ...], ...}
+ """
+ for attempt in range(1, FAILED_REQUEST_ATTEMPTS+1):
+ try:
+ inventory = await _fetch_inventory(url)
+ except aiohttp.ClientConnectorError:
+ log.warning(
+ f"Failed to connect to inventory url at {url}; "
+ f"trying again ({attempt}/{FAILED_REQUEST_ATTEMPTS})."
+ )
+ except aiohttp.ClientError:
+ log.error(
+ f"Failed to get inventory from {url}; "
+ f"trying again ({attempt}/{FAILED_REQUEST_ATTEMPTS})."
+ )
+ except Exception:
+ log.exception(
+ f"An unexpected error has occurred during fetching of {url}; "
+ f"trying again ({attempt}/{FAILED_REQUEST_ATTEMPTS})."
+ )
+ else:
+ return inventory
+
+ return None
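
The v2 inventory format packs five whitespace-separated fields per line, which `_V2_LINE_RE` above picks apart. A quick check against a representative line (the entry itself is made up, but follows the documented objects.inv layout):

```python
import re

_V2_LINE_RE = re.compile(r'(?x)(.+?)\s+(\S*:\S*)\s+(-?\d+)\s+?(\S*)\s+(.*)')

# Fields: name, domain:role, priority, location (with "$" shorthand), display name.
line = "asyncio.sleep py:function 1 library/asyncio-task.html#$ -"
name, type_, _prio, location, _dispname = _V2_LINE_RE.match(line).groups()

# A trailing "$" means the anchor equals the symbol name.
if location.endswith("$"):
    location = location[:-1] + name

print(type_, location)
# py:function library/asyncio-task.html#asyncio.sleep
```
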
diff --git a/bot/exts/info/doc/_markdown.py b/bot/exts/info/doc/_markdown.py
new file mode 100644
index 000000000..1b7d8232b
--- /dev/null
+++ b/bot/exts/info/doc/_markdown.py
@@ -0,0 +1,58 @@
+from urllib.parse import urljoin
+
+from bs4.element import PageElement
+from markdownify import MarkdownConverter
+
+
+class DocMarkdownConverter(MarkdownConverter):
+ """Subclass markdownify's MarkdownCoverter to provide custom conversion methods."""
+
+ def __init__(self, *, page_url: str, **options):
+ super().__init__(**options)
+ self.page_url = page_url
+
+ def convert_li(self, el: PageElement, text: str, convert_as_inline: bool) -> str:
+ """Fix markdownify's erroneous indexing in ol tags."""
+ parent = el.parent
+ if parent is not None and parent.name == "ol":
+ li_tags = parent.find_all("li")
+ bullet = f"{li_tags.index(el)+1}."
+ else:
+ depth = -1
+ while el:
+ if el.name == "ul":
+ depth += 1
+ el = el.parent
+ bullets = self.options["bullets"]
+ bullet = bullets[depth % len(bullets)]
+ return f"{bullet} {text}\n"
+
+ def convert_hn(self, _n: int, el: PageElement, text: str, convert_as_inline: bool) -> str:
+ """Convert h tags to bold text with ** instead of adding #."""
+ if convert_as_inline:
+ return text
+ return f"**{text}**\n\n"
+
+ def convert_code(self, el: PageElement, text: str, convert_as_inline: bool) -> str:
+ """Undo `markdownify`s underscore escaping."""
+ return f"`{text}`".replace("\\", "")
+
+ def convert_pre(self, el: PageElement, text: str, convert_as_inline: bool) -> str:
+ """Wrap any codeblocks in `py` for syntax highlighting."""
+ code = "".join(el.strings)
+ return f"```py\n{code}```"
+
+ def convert_a(self, el: PageElement, text: str, convert_as_inline: bool) -> str:
+ """Resolve relative URLs to `self.page_url`."""
+ el["href"] = urljoin(self.page_url, el["href"])
+ return super().convert_a(el, text, convert_as_inline)
+
+ def convert_p(self, el: PageElement, text: str, convert_as_inline: bool) -> str:
+ """Include only one newline instead of two when the parent is a li tag."""
+ if convert_as_inline:
+ return text
+
+ parent = el.parent
+ if parent is not None and parent.name == "li":
+ return f"{text}\n"
+ return super().convert_p(el, text, convert_as_inline)
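
The subclass above works by overriding markdownify's per-tag `convert_*` hooks; the hook signatures (including the `convert_as_inline` parameter) follow whichever markdownify version the project pins, so treat this as a sketch against that same API. A cut-down converter showing the link-resolution override in isolation:

```python
from urllib.parse import urljoin

from markdownify import MarkdownConverter

class LinkResolvingConverter(MarkdownConverter):
    """Resolve relative hrefs against a base page URL, as convert_a does above."""

    def __init__(self, *, page_url, **options):
        super().__init__(**options)
        self.page_url = page_url

    def convert_a(self, el, text, convert_as_inline):
        # `el` is a bs4 Tag; rewrite its href in place before delegating.
        el["href"] = urljoin(self.page_url, el["href"])
        return super().convert_a(el, text, convert_as_inline)

converter = LinkResolvingConverter(page_url="https://docs.python.org/3/library/functions.html")
print(converter.convert('<p>See <a href="#open">open()</a>.</p>'))
# Roughly: See [open()](https://docs.python.org/3/library/functions.html#open).
```
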
diff --git a/bot/exts/info/doc/_parsing.py b/bot/exts/info/doc/_parsing.py
new file mode 100644
index 000000000..bf840b96f
--- /dev/null
+++ b/bot/exts/info/doc/_parsing.py
@@ -0,0 +1,256 @@
+from __future__ import annotations
+
+import logging
+import re
+import string
+import textwrap
+from collections import namedtuple
+from typing import Collection, Iterable, Iterator, List, Optional, TYPE_CHECKING, Union
+
+from bs4 import BeautifulSoup
+from bs4.element import NavigableString, Tag
+
+from bot.utils.helpers import find_nth_occurrence
+from . import MAX_SIGNATURE_AMOUNT
+from ._html import get_dd_description, get_general_description, get_signatures
+from ._markdown import DocMarkdownConverter
+if TYPE_CHECKING:
+ from ._cog import DocItem
+
+log = logging.getLogger(__name__)
+
+_WHITESPACE_AFTER_NEWLINES_RE = re.compile(r"(?<=\n\n)(\s+)")
+_PARAMETERS_RE = re.compile(r"\((.+)\)")
+
+_NO_SIGNATURE_GROUPS = {
+ "attribute",
+ "envvar",
+ "setting",
+ "tempaltefilter",
+ "templatetag",
+ "term",
+}
+_EMBED_CODE_BLOCK_LINE_LENGTH = 61
+# MAX_SIGNATURE_AMOUNT code block wrapped lines with py syntax highlight
+_MAX_SIGNATURES_LENGTH = (_EMBED_CODE_BLOCK_LINE_LENGTH + 8) * MAX_SIGNATURE_AMOUNT
+# Maximum embed description length - signatures on top
+_MAX_DESCRIPTION_LENGTH = 2048 - _MAX_SIGNATURES_LENGTH
+_TRUNCATE_STRIP_CHARACTERS = "!?:;." + string.whitespace
+
+BracketPair = namedtuple("BracketPair", ["opening_bracket", "closing_bracket"])
+_BRACKET_PAIRS = {
+ "{": BracketPair("{", "}"),
+ "(": BracketPair("(", ")"),
+ "[": BracketPair("[", "]"),
+ "<": BracketPair("<", ">"),
+}
+
+
+def _split_parameters(parameters_string: str) -> Iterator[str]:
+ """
+ Split parameters of a signature into individual parameter strings on commas.
+
+ Long string literals are not accounted for.
+ """
+ last_split = 0
+ depth = 0
+ current_search: Optional[BracketPair] = None
+
+ enumerated_string = enumerate(parameters_string)
+ for index, character in enumerated_string:
+ if character in {"'", '"'}:
+ # Skip everything inside of strings, regardless of the depth.
+ quote_character = character # The closing quote must equal the opening quote.
+ preceding_backslashes = 0
+ for _, character in enumerated_string:
+ # If an odd number of backslashes precedes the quote, it was escaped.
+ if character == quote_character and not preceding_backslashes % 2:
+ break
+ if character == "\\":
+ preceding_backslashes += 1
+ else:
+ preceding_backslashes = 0
+
+ elif current_search is None:
+ if (current_search := _BRACKET_PAIRS.get(character)) is not None:
+ depth = 1
+ elif character == ",":
+ yield parameters_string[last_split:index]
+ last_split = index + 1
+
+ else:
+ if character == current_search.opening_bracket:
+ depth += 1
+
+ elif character == current_search.closing_bracket:
+ depth -= 1
+ if depth == 0:
+ current_search = None
+
+ yield parameters_string[last_split:]
+
+
+def _truncate_signatures(signatures: Collection[str]) -> Union[List[str], Collection[str]]:
+ """
+ Truncate passed signatures to not exceed `_MAX_SIGNATURES_LENGTH`.
+
+ If the signatures need to be truncated, parameters are collapsed until they fit within the limit.
+ Individual signatures can consist of max 1, 2, ..., `MAX_SIGNATURE_AMOUNT` lines of text,
+ inversely proportional to the number of signatures.
+ A maximum of `MAX_SIGNATURE_AMOUNT` signatures is assumed to be passed.
+ """
+ if sum(len(signature) for signature in signatures) <= _MAX_SIGNATURES_LENGTH:
+ # Total length of signatures is under the length limit; no truncation needed.
+ return signatures
+
+ max_signature_length = _EMBED_CODE_BLOCK_LINE_LENGTH * (MAX_SIGNATURE_AMOUNT + 1 - len(signatures))
+ formatted_signatures = []
+ for signature in signatures:
+ signature = signature.strip()
+ if len(signature) > max_signature_length:
+ if (parameters_match := _PARAMETERS_RE.search(signature)) is None:
+ # The signature has no parameters or the regex failed; perform a simple truncation of the text.
+ formatted_signatures.append(textwrap.shorten(signature, max_signature_length, placeholder="..."))
+ continue
+
+ truncated_signature = []
+ parameters_string = parameters_match[1]
+ running_length = len(signature) - len(parameters_string)
+ for parameter in _split_parameters(parameters_string):
+ # Check if including this parameter would still be within the maximum length.
+ if (len(parameter) + running_length) <= max_signature_length - 5: # account for comma and placeholder
+ truncated_signature.append(parameter)
+ running_length += len(parameter) + 1
+ else:
+ # There's no more room for this parameter. Truncate the parameter list and put it in the signature.
+ truncated_signature.append(" ...")
+ formatted_signatures.append(signature.replace(parameters_string, ",".join(truncated_signature)))
+ break
+ else:
+ # The current signature is under the length limit; no truncation needed.
+ formatted_signatures.append(signature)
+
+ return formatted_signatures
+
+
+def _get_truncated_description(
+ elements: Iterable[Union[Tag, NavigableString]],
+ markdown_converter: DocMarkdownConverter,
+ max_length: int,
+ max_lines: int,
+) -> str:
+ """
+ Truncate the Markdown from `elements` to at most `max_length` rendered characters or `max_lines` newlines.
+
+ `max_length` limits the length of the rendered characters in the string,
+ with the real string length limited to `_MAX_DESCRIPTION_LENGTH` to accommodate discord length limits.
+ """
+ result = ""
+ markdown_element_ends = [] # Stores indices into `result` which point to the end boundary of each Markdown element.
+ rendered_length = 0
+
+ tag_end_index = 0
+ for element in elements:
+ is_tag = isinstance(element, Tag)
+ element_length = len(element.text) if is_tag else len(element)
+
+ if rendered_length + element_length < max_length:
+ if is_tag:
+ element_markdown = markdown_converter.process_tag(element, convert_as_inline=False)
+ else:
+ element_markdown = markdown_converter.process_text(element)
+
+ rendered_length += element_length
+ tag_end_index += len(element_markdown)
+
+ if not element_markdown.isspace():
+ markdown_element_ends.append(tag_end_index)
+ result += element_markdown
+ else:
+ break
+
+ if not markdown_element_ends:
+ return ""
+
+ # Determine the "hard" truncation index. Account for the ellipsis placeholder for the max length.
+ newline_truncate_index = find_nth_occurrence(result, "\n", max_lines)
+ if newline_truncate_index is not None and newline_truncate_index < _MAX_DESCRIPTION_LENGTH - 3:
+ # Truncate based on maximum lines if there are more than the maximum number of lines.
+ truncate_index = newline_truncate_index
+ else:
+ # There are fewer than the maximum number of lines; truncate based on the max char length.
+ truncate_index = _MAX_DESCRIPTION_LENGTH - 3
+
+ # Nothing needs to be truncated if the last element ends before the truncation index.
+ if truncate_index >= markdown_element_ends[-1]:
+ return result
+
+ # Determine the actual truncation index.
+ possible_truncation_indices = [cut for cut in markdown_element_ends if cut < truncate_index]
+ if not possible_truncation_indices:
+ # In case there is no Markdown element ending before the truncation index, try to find a good cutoff point.
+ force_truncated = result[:truncate_index]
+ # If there is an incomplete codeblock, cut it out.
+ if force_truncated.count("```") % 2:
+ force_truncated = force_truncated[:force_truncated.rfind("```")]
+ # Search for substrings to truncate at, with decreasing desirability.
+ for string_ in ("\n\n", "\n", ". ", ", ", ",", " "):
+ cutoff = force_truncated.rfind(string_)
+
+ if cutoff != -1:
+ truncated_result = force_truncated[:cutoff]
+ break
+ else:
+ truncated_result = force_truncated
+
+ else:
+ # Truncate at the last Markdown element that comes before the truncation index.
+ markdown_truncate_index = possible_truncation_indices[-1]
+ truncated_result = result[:markdown_truncate_index]
+
+ return truncated_result.strip(_TRUNCATE_STRIP_CHARACTERS) + "..."
+
+
+def _create_markdown(signatures: Optional[List[str]], description: Iterable[Tag], url: str) -> str:
+ """
+ Create a Markdown string with the signatures at the top, and the converted html description below them.
+
+ The signatures are wrapped in python codeblocks, separated from the description by a newline.
+ The resulting Markdown string has at most 750 rendered characters for the description, with signatures at the start.
+ """
+ description = _get_truncated_description(
+ description,
+ markdown_converter=DocMarkdownConverter(bullets="•", page_url=url),
+ max_length=750,
+ max_lines=13
+ )
+ description = _WHITESPACE_AFTER_NEWLINES_RE.sub("", description)
+ if signatures is not None:
+ signature = "".join(f"```py\n{signature}```" for signature in _truncate_signatures(signatures))
+ return f"{signature}\n{description}"
+ else:
+ return description
+
+
+def get_symbol_markdown(soup: BeautifulSoup, symbol_data: DocItem) -> Optional[str]:
+ """
+ Return parsed Markdown of the passed item using the passed in soup, truncated to fit within a discord message.
+
+ The method of parsing and what information gets included depends on the symbol's group.
+ """
+ symbol_heading = soup.find(id=symbol_data.symbol_id)
+ if symbol_heading is None:
+ return None
+ signature = None
+ # Modules, doc pages and labels don't point to description list tags but to tags like divs,
+ # so no special parsing can be done; we only try to include what's under them.
+ if symbol_heading.name != "dt":
+ description = get_general_description(symbol_heading)
+
+ elif symbol_data.group in _NO_SIGNATURE_GROUPS:
+ description = get_dd_description(symbol_heading)
+
+ else:
+ signature = get_signatures(symbol_heading)
+ description = get_dd_description(symbol_heading)
+ return _create_markdown(signature, description, symbol_data.url).replace("¶", "").strip()
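
`_split_parameters` is, in effect, a comma splitter that respects bracket nesting and string literals, so only top-level commas produce a split. Assuming the module added above is importable as `bot.exts.info.doc._parsing`, a quick illustration:

```python
# Assumes the bot package from this diff is on the path.
from bot.exts.info.doc._parsing import _split_parameters

params = "x, y=(1, 2), *args, sep=', ', mapping={'a': [1, 2]}"
print(list(_split_parameters(params)))
# ['x', ' y=(1, 2)', ' *args', " sep=', '", " mapping={'a': [1, 2]}"]
```
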
diff --git a/bot/exts/info/doc/_redis_cache.py b/bot/exts/info/doc/_redis_cache.py
new file mode 100644
index 000000000..ad764816f
--- /dev/null
+++ b/bot/exts/info/doc/_redis_cache.py
@@ -0,0 +1,70 @@
+from __future__ import annotations
+
+import datetime
+from typing import Optional, TYPE_CHECKING
+
+from async_rediscache.types.base import RedisObject, namespace_lock
+if TYPE_CHECKING:
+ from ._cog import DocItem
+
+WEEK_SECONDS = datetime.timedelta(weeks=1).total_seconds()
+
+
+class DocRedisCache(RedisObject):
+ """Interface for redis functionality needed by the Doc cog."""
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self._set_expires = set()
+
+ @namespace_lock
+ async def set(self, item: DocItem, value: str) -> None:
+ """
+ Set the Markdown `value` for the symbol `item`.
+
+ All keys from a single page are stored together, expiring a week after the first set.
+ """
+ url_key = remove_suffix(item.relative_url_path, ".html")
+ redis_key = f"{self.namespace}:{item.package}:{url_key}"
+ needs_expire = False
+
+ with await self._get_pool_connection() as connection:
+ if redis_key not in self._set_expires:
+ # An expire is only set if the key didn't exist before.
+ # If this is the first time setting values for this key, check if it exists and add it to
+ # `_set_expires` to prevent redundant checks for subsequent uses with items from the same page.
+ self._set_expires.add(redis_key)
+ needs_expire = not await connection.exists(redis_key)
+
+ await connection.hset(redis_key, item.symbol_id, value)
+ if needs_expire:
+ await connection.expire(redis_key, WEEK_SECONDS)
+
+ @namespace_lock
+ async def get(self, item: DocItem) -> Optional[str]:
+ """Return the Markdown content of the symbol `item` if it exists."""
+ url_key = remove_suffix(item.relative_url_path, ".html")
+
+ with await self._get_pool_connection() as connection:
+ return await connection.hget(f"{self.namespace}:{item.package}:{url_key}", item.symbol_id, encoding="utf8")
+
+ @namespace_lock
+ async def delete(self, package: str) -> bool:
+ """Remove all values for `package`; return True if at least one key was deleted, False otherwise."""
+ with await self._get_pool_connection() as connection:
+ package_keys = [
+ package_key async for package_key in connection.iscan(match=f"{self.namespace}:{package}:*")
+ ]
+ if package_keys:
+ await connection.delete(*package_keys)
+ return True
+ return False
+
+
+def remove_suffix(string: str, suffix: str) -> str:
+ """Remove `suffix` from end of `string`."""
+ # TODO replace usages with str.removesuffix on 3.9
+ if string.endswith(suffix):
+ return string[:-len(suffix)]
+ else:
+ return string
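
The cache above stores one redis hash per documentation page, with symbol IDs as hash fields, and lets the whole hash expire a week after the first write. A standalone sketch of the key layout (example package and paths; requires Python 3.9+ for `str.removesuffix`):

```python
namespace = "doc"          # DocRedisCache namespace
package = "python"         # example package
relative_url_path = "library/functions.html"  # example page
symbol_id = "print"        # example symbol on that page

# Key layout: one hash per page, fields keyed by symbol id.
url_key = relative_url_path.removesuffix(".html")  # what remove_suffix above does
redis_key = f"{namespace}:{package}:{url_key}"
print(redis_key)  # doc:python:library/functions

# Lifecycle, in plain redis commands:
#   HSET   doc:python:library/functions print "<markdown>"
#   EXPIRE doc:python:library/functions 604800   (one week, set only on first write)
# Later symbols from the same page land in the same hash and keep the original TTL.
```
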
diff --git a/bot/exts/info/information.py b/bot/exts/info/information.py
index 256be2161..226e4992e 100644
--- a/bot/exts/info/information.py
+++ b/bot/exts/info/information.py
@@ -6,7 +6,7 @@ from collections import defaultdict
from typing import Any, DefaultDict, Dict, Mapping, Optional, Tuple, Union
import fuzzywuzzy
-from discord import Colour, Embed, Guild, Message, Role
+from discord import AllowedMentions, Colour, Embed, Guild, Message, Role
from discord.ext.commands import BucketType, Cog, Context, Paginator, command, group, has_any_role
from bot import constants
@@ -17,7 +17,7 @@ from bot.decorators import in_whitelist
from bot.pagination import LinePaginator
from bot.utils.channel import is_mod_channel, is_staff_channel
from bot.utils.checks import cooldown_with_role_bypass, has_no_roles_check, in_whitelist_check
-from bot.utils.time import time_since
+from bot.utils.time import humanize_delta, time_since
log = logging.getLogger(__name__)
@@ -52,7 +52,7 @@ class Information(Cog):
)
return {role.name.title(): len(role.members) for role in roles}
- def get_extended_server_info(self) -> str:
+ def get_extended_server_info(self, ctx: Context) -> str:
"""Return additional server info only visible in moderation channels."""
talentpool_info = ""
if cog := self.bot.get_cog("Talentpool"):
@@ -64,9 +64,10 @@ class Information(Cog):
defcon_info = ""
if cog := self.bot.get_cog("Defcon"):
- defcon_status = "Enabled" if cog.enabled else "Disabled"
- defcon_days = cog.days.days if cog.enabled else "-"
- defcon_info = f"Defcon status: {defcon_status}\nDefcon days: {defcon_days}\n"
+ threshold = humanize_delta(cog.threshold) if cog.threshold else "-"
+ defcon_info = f"Defcon threshold: {threshold}\n"
+
+ verification = f"Verification level: {ctx.guild.verification_level.name}\n"
python_general = self.bot.get_channel(constants.Channels.python_general)
@@ -74,6 +75,7 @@ class Information(Cog):
{talentpool_info}\
{bb_info}\
{defcon_info}\
+ {verification}\
{python_general.mention} cooldown: {python_general.slowmode_delay}s
""")
@@ -198,11 +200,11 @@ class Information(Cog):
# Additional info if ran in moderation channels
if is_mod_channel(ctx.channel):
- embed.add_field(name="Moderation:", value=self.get_extended_server_info())
+ embed.add_field(name="Moderation:", value=self.get_extended_server_info(ctx))
await ctx.send(embed=embed)
- @command(name="user", aliases=["user_info", "member", "member_info"])
+ @command(name="user", aliases=["user_info", "member", "member_info", "u"])
async def user_info(self, ctx: Context, user: FetchedMember = None) -> None:
"""Returns info about a user."""
if user is None:
@@ -285,7 +287,7 @@ class Information(Cog):
embed.add_field(name=field_name, value=field_content, inline=False)
embed.set_thumbnail(url=user.avatar_url_as(static_format="png"))
- embed.colour = user.top_role.colour if roles else Colour.blurple()
+ embed.colour = user.colour if user.colour != Colour.default() else Colour.blurple()
return embed
@@ -448,9 +450,9 @@ class Information(Cog):
def add_content(title: str, content: str) -> None:
paginator.add_line(f'== {title} ==\n')
- # replace backticks as it breaks out of code blocks. Spaces seemed to be the most reasonable solution.
- # we hope it's not close to 2000
- paginator.add_line(content.replace('```', '`` `'))
+ # Replace backticks as it breaks out of code blocks.
+ # An invisible character seemed to be the most reasonable solution. We hope it's not close to 2000.
+ paginator.add_line(content.replace('`', '`\u200b'))
paginator.close_page()
if message.content:
@@ -469,7 +471,7 @@ class Information(Cog):
add_content(title, transformer(item))
for page in paginator.pages:
- await ctx.send(page)
+ await ctx.send(page, allowed_mentions=AllowedMentions.none())
@raw.command()
async def json(self, ctx: Context, message: Message) -> None:
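
The raw-message change above replaces backtick mangling with a zero-width space after every backtick, which keeps the text visually intact in Discord while making it impossible to terminate the surrounding code block. A quick demonstration:

```python
content = "text with ```python\nprint('hi')\n``` inside"

escaped = content.replace('`', '`\u200b')
# Every backtick is followed by U+200B, so "```" can no longer close a code
# block, but the text still renders as backticks in Discord.
print(escaped)
print(len(escaped) - len(content))  # 6 zero-width characters added
```
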
diff --git a/bot/exts/info/pypi.py b/bot/exts/info/pypi.py
index 3e326e8bb..2e42e7d6b 100644
--- a/bot/exts/info/pypi.py
+++ b/bot/exts/info/pypi.py
@@ -1,19 +1,23 @@
import itertools
import logging
import random
+import re
from discord import Embed
from discord.ext.commands import Cog, Context, command
from discord.utils import escape_markdown
from bot.bot import Bot
-from bot.constants import Colours, NEGATIVE_REPLIES
+from bot.constants import Colours, NEGATIVE_REPLIES, RedirectOutput
URL = "https://pypi.org/pypi/{package}/json"
-FIELDS = ("author", "requires_python", "summary", "license")
PYPI_ICON = "https://cdn.discordapp.com/emojis/766274397257334814.png"
+
PYPI_COLOURS = itertools.cycle((Colours.yellow, Colours.blue, Colours.white))
+ILLEGAL_CHARACTERS = re.compile(r"[^-_.a-zA-Z0-9]+")
+INVALID_INPUT_DELETE_DELAY = RedirectOutput.delete_delay
+
log = logging.getLogger(__name__)
@@ -26,43 +30,47 @@ class PyPi(Cog):
@command(name="pypi", aliases=("package", "pack"))
async def get_package_info(self, ctx: Context, package: str) -> None:
"""Provide information about a specific package from PyPI."""
- embed = Embed(
- title=random.choice(NEGATIVE_REPLIES),
- colour=Colours.soft_red
- )
+ embed = Embed(title=random.choice(NEGATIVE_REPLIES), colour=Colours.soft_red)
embed.set_thumbnail(url=PYPI_ICON)
- async with self.bot.http_session.get(URL.format(package=package)) as response:
- if response.status == 404:
- embed.description = "Package could not be found."
+ error = True
+
+ if characters := re.search(ILLEGAL_CHARACTERS, package):
+ embed.description = f"Illegal character(s) passed into command: '{escape_markdown(characters.group(0))}'"
+
+ else:
+ async with self.bot.http_session.get(URL.format(package=package)) as response:
+ if response.status == 404:
+ embed.description = "Package could not be found."
+
+ elif response.status == 200 and response.content_type == "application/json":
+ response_json = await response.json()
+ info = response_json["info"]
- elif response.status == 200 and response.content_type == "application/json":
- response_json = await response.json()
- info = response_json["info"]
+ embed.title = f"{info['name']} v{info['version']}"
- embed.title = f"{info['name']} v{info['version']}"
- embed.url = info['package_url']
- embed.colour = next(PYPI_COLOURS)
+ embed.url = info["package_url"]
+ embed.colour = next(PYPI_COLOURS)
- for field in FIELDS:
- field_data = info[field]
+ summary = escape_markdown(info["summary"])
- # Field could be completely empty, in some cases can be a string with whitespaces, or None.
- if field_data and not field_data.isspace():
- if '\n' in field_data and field == "license":
- field_data = field_data.split('\n')[0]
+ # Summary could be completely empty, or just whitespace.
+ if summary and not summary.isspace():
+ embed.description = summary
+ else:
+ embed.description = "No summary provided."
- embed.add_field(
- name=field.replace("_", " ").title(),
- value=escape_markdown(field_data),
- inline=False,
- )
+ error = False
- else:
- embed.description = "There was an error when fetching your PyPi package."
- log.trace(f"Error when fetching PyPi package: {response.status}.")
+ else:
+ embed.description = "There was an error when fetching your PyPi package."
+ log.trace(f"Error when fetching PyPi package: {response.status}.")
- await ctx.send(embed=embed)
+ if error:
+ await ctx.send(embed=embed, delete_after=INVALID_INPUT_DELETE_DELAY)
+ await ctx.message.delete(delay=INVALID_INPUT_DELETE_DELAY)
+ else:
+ await ctx.send(embed=embed)
def setup(bot: Bot) -> None:
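
`ILLEGAL_CHARACTERS` above matches any run of characters outside the set valid in a PyPI project name (letters, digits, `-`, `_`, `.`), so a single `search` call flags bad input before any request is made:

```python
import re

ILLEGAL_CHARACTERS = re.compile(r"[^-_.a-zA-Z0-9]+")

for package in ("requests", "typing-extensions", "bad name!", "../../etc"):
    match = ILLEGAL_CHARACTERS.search(package)
    print(package, "->", f"rejected ({match.group(0)!r})" if match else "ok")
# requests -> ok
# typing-extensions -> ok
# bad name! -> rejected (' ')
# ../../etc -> rejected ('/')
```
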
diff --git a/bot/exts/info/source.py b/bot/exts/info/source.py
index 7b41352d4..ef07c77a1 100644
--- a/bot/exts/info/source.py
+++ b/bot/exts/info/source.py
@@ -14,9 +14,10 @@ SourceType = Union[commands.HelpCommand, commands.Command, commands.Cog, str, co
class SourceConverter(commands.Converter):
"""Convert an argument into a help command, tag, command, or cog."""
- async def convert(self, ctx: commands.Context, argument: str) -> SourceType:
+ @staticmethod
+ async def convert(ctx: commands.Context, argument: str) -> SourceType:
"""Convert argument into source object."""
- if argument.lower().startswith("help"):
+ if argument.lower() == "help":
return ctx.bot.help_command
cog = ctx.bot.get_cog(argument)
@@ -68,7 +69,8 @@ class BotSource(commands.Cog):
Raise BadArgument if `source_item` is a dynamically-created object (e.g. via internal eval).
"""
if isinstance(source_item, commands.Command):
- src = source_item.callback.__code__
+ source_item = inspect.unwrap(source_item.callback)
+ src = source_item.__code__
filename = src.co_filename
elif isinstance(source_item, str):
tags_cog = self.bot.get_cog("Tags")
@@ -97,7 +99,7 @@ class BotSource(commands.Cog):
else:
file_location = Path(filename).relative_to(Path.cwd()).as_posix()
- url = f"{URLs.github_bot_repo}/blob/master/{file_location}{lines_extension}"
+ url = f"{URLs.github_bot_repo}/blob/main/{file_location}{lines_extension}"
return url, file_location, first_line_no or None
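
The source command fix above matters for decorated callbacks: `functools.wraps` records the original function on the wrapper as `__wrapped__`, and `inspect.unwrap` follows that chain back so the linked file and line numbers point at the real definition. A minimal reproduction:

```python
import functools
import inspect

def my_decorator(func):
    @functools.wraps(func)  # records func as wrapper.__wrapped__
    async def wrapper(*args, **kwargs):
        return await func(*args, **kwargs)
    return wrapper

@my_decorator
async def command_callback():
    """The function whose source we actually want to link."""

print(command_callback.__code__.co_name)                  # wrapper
print(inspect.unwrap(command_callback).__code__.co_name)  # command_callback
```
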
diff --git a/bot/exts/info/tags.py b/bot/exts/info/tags.py
index 00b4d1a78..bb91a8563 100644
--- a/bot/exts/info/tags.py
+++ b/bot/exts/info/tags.py
@@ -189,7 +189,7 @@ class Tags(Cog):
If a tag is not specified, display a paginated embed of all tags.
Tags are on cooldowns on a per-tag, per-channel basis. If a tag is on cooldown, display
- nothing and return False.
+ nothing and return True.
"""
def _command_on_cooldown(tag_name: str) -> bool:
"""
@@ -217,7 +217,7 @@ class Tags(Cog):
f"{ctx.author} tried to get the '{tag_name}' tag, but the tag is on cooldown. "
f"Cooldown ends in {time_left:.1f} seconds."
)
- return False
+ return True
if tag_name is not None:
temp_founds = self._get_tag(tag_name)
@@ -285,7 +285,8 @@ class Tags(Cog):
"""
Get a specified tag, or a list of all tags if no tag is specified.
- Returns False if a tag is on cooldown, or if no matches are found.
+ Returns True if something can be sent, or if the tag is on cooldown.
+ Returns False if no matches are found.
"""
return await self.display_tag(ctx, tag_name)
diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py
index caa6fb917..dfb1afd19 100644
--- a/bot/exts/moderation/defcon.py
+++ b/bot/exts/moderation/defcon.py
@@ -1,17 +1,25 @@
-from __future__ import annotations
-
+import asyncio
import logging
+import traceback
from collections import namedtuple
-from datetime import datetime, timedelta
+from datetime import datetime
from enum import Enum
+from typing import Optional, Union
-from discord import Colour, Embed, Member
+from aioredis import RedisError
+from async_rediscache import RedisCache
+from dateutil.relativedelta import relativedelta
+from discord import Colour, Embed, Member, User
+from discord.ext import tasks
from discord.ext.commands import Cog, Context, group, has_any_role
from bot.bot import Bot
from bot.constants import Channels, Colours, Emojis, Event, Icons, MODERATION_ROLES, Roles
+from bot.converters import DurationDelta, Expiry
from bot.exts.moderation.modlog import ModLog
from bot.utils.messages import format_user
+from bot.utils.scheduling import Scheduler
+from bot.utils.time import humanize_delta, parse_duration_string, relativedelta_to_timedelta
log = logging.getLogger(__name__)
@@ -28,71 +36,81 @@ will be resolved soon. In the meantime, please feel free to peruse the resources
BASE_CHANNEL_TOPIC = "Python Discord Defense Mechanism"
+SECONDS_IN_DAY = 86400
+
class Action(Enum):
"""Defcon Action."""
- ActionInfo = namedtuple('LogInfoDetails', ['icon', 'color', 'template'])
+ ActionInfo = namedtuple('LogInfoDetails', ['icon', 'emoji', 'color', 'template'])
- ENABLED = ActionInfo(Icons.defcon_enabled, Colours.soft_green, "**Days:** {days}\n\n")
- DISABLED = ActionInfo(Icons.defcon_disabled, Colours.soft_red, "")
- UPDATED = ActionInfo(Icons.defcon_updated, Colour.blurple(), "**Days:** {days}\n\n")
+ SERVER_OPEN = ActionInfo(Icons.defcon_unshutdown, Emojis.defcon_unshutdown, Colours.soft_green, "")
+ SERVER_SHUTDOWN = ActionInfo(Icons.defcon_shutdown, Emojis.defcon_shutdown, Colours.soft_red, "")
+ DURATION_UPDATE = ActionInfo(
+ Icons.defcon_update, Emojis.defcon_update, Colour.blurple(), "**Threshold:** {threshold}\n\n"
+ )
class Defcon(Cog):
"""Time-sensitive server defense mechanisms."""
- days = None # type: timedelta
- enabled = False # type: bool
+ # RedisCache[str, str]
+ # The cache's keys are "threshold" and "expiry".
+ # The caches' values are strings formatted as valid input to the DurationDelta converter, or empty when off.
+ defcon_settings = RedisCache()
def __init__(self, bot: Bot):
self.bot = bot
self.channel = None
- self.days = timedelta(days=0)
+ self.threshold = relativedelta(days=0)
+ self.expiry = None
+
+ self.scheduler = Scheduler(self.__class__.__name__)
- self.bot.loop.create_task(self.sync_settings())
+ self.bot.loop.create_task(self._sync_settings())
@property
def mod_log(self) -> ModLog:
"""Get currently loaded ModLog cog instance."""
return self.bot.get_cog("ModLog")
- async def sync_settings(self) -> None:
+ @defcon_settings.atomic_transaction
+ async def _sync_settings(self) -> None:
"""On cog load, try to synchronize DEFCON settings to the API."""
+ log.trace("Waiting for the guild to become available before syncing.")
await self.bot.wait_until_guild_available()
self.channel = await self.bot.fetch_channel(Channels.defcon)
- try:
- response = await self.bot.api_client.get('bot/bot-settings/defcon')
- data = response['data']
+ log.trace("Syncing settings.")
- except Exception: # Yikes!
+ try:
+ settings = await self.defcon_settings.to_dict()
+ self.threshold = parse_duration_string(settings["threshold"]) if settings.get("threshold") else None
+ self.expiry = datetime.fromisoformat(settings["expiry"]) if settings.get("expiry") else None
+ except RedisError:
log.exception("Unable to get DEFCON settings!")
- await self.bot.get_channel(Channels.dev_log).send(
- f"<@&{Roles.admins}> **WARNING**: Unable to get DEFCON settings!"
+ await self.channel.send(
+ f"<@&{Roles.moderators}> <@&{Roles.devops}> **WARNING**: Unable to get DEFCON settings!"
+ f"\n\n```{traceback.format_exc()}```"
)
else:
- if data["enabled"]:
- self.enabled = True
- self.days = timedelta(days=data["days"])
- log.info(f"DEFCON enabled: {self.days.days} days")
+ if self.expiry:
+ self.scheduler.schedule_at(self.expiry, 0, self._remove_threshold())
- else:
- self.enabled = False
- self.days = timedelta(days=0)
- log.info("DEFCON disabled")
+ self._update_notifier()
+ log.info(f"DEFCON synchronized: {humanize_delta(self.threshold) if self.threshold else '-'}")
- await self.update_channel_topic()
+ self._update_channel_topic()
@Cog.listener()
async def on_member_join(self, member: Member) -> None:
- """If DEFCON is enabled, check newly joining users to see if they meet the account age threshold."""
- if self.enabled and self.days.days > 0:
+ """Check newly joining users to see if they meet the account age threshold."""
+ if self.threshold:
now = datetime.utcnow()
- if now - member.created_at < self.days:
- log.info(f"Rejecting user {member}: Account is too new and DEFCON is enabled")
+ if now - member.created_at < relativedelta_to_timedelta(self.threshold):
+ log.info(f"Rejecting user {member}: Account is too new")
message_sent = False
@@ -124,134 +142,163 @@ class Defcon(Cog):
"""Check the DEFCON status or run a subcommand."""
await ctx.send_help(ctx.command)
- async def _defcon_action(self, ctx: Context, days: int, action: Action) -> None:
- """Providing a structured way to do an defcon action."""
- try:
- response = await self.bot.api_client.get('bot/bot-settings/defcon')
- data = response['data']
-
- if "enable_date" in data and action is Action.DISABLED:
- enabled = datetime.fromisoformat(data["enable_date"])
-
- delta = datetime.now() - enabled
-
- self.bot.stats.timing("defcon.enabled", delta)
- except Exception:
- pass
-
- error = None
- try:
- await self.bot.api_client.put(
- 'bot/bot-settings/defcon',
- json={
- 'name': 'defcon',
- 'data': {
- # TODO: retrieve old days count
- 'days': days,
- 'enabled': action is not Action.DISABLED,
- 'enable_date': datetime.now().isoformat()
- }
- }
- )
- except Exception as err:
- log.exception("Unable to update DEFCON settings.")
- error = err
- finally:
- await ctx.send(self.build_defcon_msg(action, error))
- await self.send_defcon_log(action, ctx.author, error)
-
- self.bot.stats.gauge("defcon.threshold", days)
-
- @defcon_group.command(name='enable', aliases=('on', 'e'), root_aliases=("defon",))
- @has_any_role(*MODERATION_ROLES)
- async def enable_command(self, ctx: Context) -> None:
- """
- Enable DEFCON mode. Useful in a pinch, but be sure you know what you're doing!
-
- Currently, this just adds an account age requirement. Use !defcon days <int> to set how old an account must be,
- in days.
- """
- self.enabled = True
- await self._defcon_action(ctx, days=0, action=Action.ENABLED)
- await self.update_channel_topic()
-
- @defcon_group.command(name='disable', aliases=('off', 'd'), root_aliases=("defoff",))
- @has_any_role(*MODERATION_ROLES)
- async def disable_command(self, ctx: Context) -> None:
- """Disable DEFCON mode. Useful in a pinch, but be sure you know what you're doing!"""
- self.enabled = False
- await self._defcon_action(ctx, days=0, action=Action.DISABLED)
- await self.update_channel_topic()
-
- @defcon_group.command(name='status', aliases=('s',))
+ @defcon_group.command(aliases=('s',))
@has_any_role(*MODERATION_ROLES)
- async def status_command(self, ctx: Context) -> None:
+ async def status(self, ctx: Context) -> None:
"""Check the current status of DEFCON mode."""
embed = Embed(
colour=Colour.blurple(), title="DEFCON Status",
- description=f"**Enabled:** {self.enabled}\n"
- f"**Days:** {self.days.days}"
+ description=f"""
+ **Threshold:** {humanize_delta(self.threshold) if self.threshold else "-"}
+ **Expires in:** {humanize_delta(relativedelta(self.expiry, datetime.utcnow())) if self.expiry else "-"}
+ **Verification level:** {ctx.guild.verification_level.name}
+ """
)
await ctx.send(embed=embed)
- @defcon_group.command(name='days')
+ @defcon_group.command(name="threshold", aliases=('t', 'd'))
@has_any_role(*MODERATION_ROLES)
- async def days_command(self, ctx: Context, days: int) -> None:
- """Set how old an account must be to join the server, in days, with DEFCON mode enabled."""
- self.days = timedelta(days=days)
- self.enabled = True
- await self._defcon_action(ctx, days=days, action=Action.UPDATED)
- await self.update_channel_topic()
-
- async def update_channel_topic(self) -> None:
+ async def threshold_command(
+ self, ctx: Context, threshold: Union[DurationDelta, int], expiry: Optional[Expiry] = None
+ ) -> None:
+ """
+ Set how old an account must be to join the server.
+
+ The threshold is the minimum required account age. Can accept either a duration string or a number of days.
+ Set it to 0 to have no threshold.
+ The expiry allows the threshold to be removed automatically after a designated time. If no expiry is
+ specified, the cog will send hourly reminders to remove the threshold.
+ """
+ if isinstance(threshold, int):
+ threshold = relativedelta(days=threshold)
+ await self._update_threshold(ctx.author, threshold=threshold, expiry=expiry)
+
+ @defcon_group.command()
+ @has_any_role(Roles.admins)
+ async def shutdown(self, ctx: Context) -> None:
+ """Shut down the server by setting send permissions of everyone to False."""
+ role = ctx.guild.default_role
+ permissions = role.permissions
+
+ permissions.update(send_messages=False, add_reactions=False, connect=False)
+ await role.edit(reason="DEFCON shutdown", permissions=permissions)
+ await ctx.send(f"{Action.SERVER_SHUTDOWN.value.emoji} Server shut down.")
+
+ @defcon_group.command()
+ @has_any_role(Roles.admins)
+ async def unshutdown(self, ctx: Context) -> None:
+ """Open up the server again by setting send permissions of everyone to None."""
+ role = ctx.guild.default_role
+ permissions = role.permissions
+
+ permissions.update(send_messages=True, add_reactions=True, connect=True)
+ await role.edit(reason="DEFCON unshutdown", permissions=permissions)
+ await ctx.send(f"{Action.SERVER_OPEN.value.emoji} Server reopened.")
+
+ def _update_channel_topic(self) -> None:
"""Update the #defcon channel topic with the current DEFCON status."""
- if self.enabled:
- day_str = "days" if self.days.days > 1 else "day"
- new_topic = f"{BASE_CHANNEL_TOPIC}\n(Status: Enabled, Threshold: {self.days.days} {day_str})"
- else:
- new_topic = f"{BASE_CHANNEL_TOPIC}\n(Status: Disabled)"
+ new_topic = f"{BASE_CHANNEL_TOPIC}\n(Threshold: {humanize_delta(self.threshold) if self.threshold else '-'})"
self.mod_log.ignore(Event.guild_channel_update, Channels.defcon)
- await self.channel.edit(topic=new_topic)
-
- def build_defcon_msg(self, action: Action, e: Exception = None) -> str:
- """Build in-channel response string for DEFCON action."""
- if action is Action.ENABLED:
- msg = f"{Emojis.defcon_enabled} DEFCON enabled.\n\n"
- elif action is Action.DISABLED:
- msg = f"{Emojis.defcon_disabled} DEFCON disabled.\n\n"
- elif action is Action.UPDATED:
- msg = (
- f"{Emojis.defcon_updated} DEFCON days updated; accounts must be {self.days.days} "
- f"day{'s' if self.days.days > 1 else ''} old to join the server.\n\n"
+ asyncio.create_task(self.channel.edit(topic=new_topic))
+
+ @defcon_settings.atomic_transaction
+ async def _update_threshold(self, author: User, threshold: relativedelta, expiry: Optional[Expiry] = None) -> None:
+ """Update the new threshold in the cog, cache, defcon channel, and logs, and additionally schedule expiry."""
+ self.threshold = threshold
+ if threshold == relativedelta(days=0): # If the threshold is 0, we don't need to schedule anything
+ expiry = None
+ self.expiry = expiry
+
+ # Either way, we cancel the old task.
+ self.scheduler.cancel_all()
+ if self.expiry is not None:
+ self.scheduler.schedule_at(expiry, 0, self._remove_threshold())
+
+ self._update_notifier()
+
+ # Make sure to handle the critical part of the update before writing to Redis.
+ error = ""
+ try:
+ await self.defcon_settings.update(
+ {
+ 'threshold': Defcon._stringify_relativedelta(self.threshold) if self.threshold else "",
+ 'expiry': expiry.isoformat() if expiry else 0
+ }
)
+ except RedisError:
+ error = ", but failed to write to cache"
+
+ action = Action.DURATION_UPDATE
- if e:
- msg += (
- "**There was a problem updating the site** - This setting may be reverted when the bot restarts.\n\n"
- f"```py\n{e}\n```"
+ expiry_message = ""
+ if expiry:
+ expiry_message = f" for the next {humanize_delta(relativedelta(expiry, datetime.utcnow()), max_units=2)}"
+
+ if self.threshold:
+ channel_message = (
+ f"updated; accounts must be {humanize_delta(self.threshold)} "
+ f"old to join the server{expiry_message}"
)
+ else:
+ channel_message = "removed"
+
+ await self.channel.send(
+ f"{action.value.emoji} DEFCON threshold {channel_message}{error}."
+ )
+ await self._send_defcon_log(action, author)
+ self._update_channel_topic()
+
+ self._log_threshold_stat(threshold)
- return msg
+ async def _remove_threshold(self) -> None:
+ """Resets the threshold back to 0."""
+ await self._update_threshold(self.bot.user, relativedelta(days=0))
- async def send_defcon_log(self, action: Action, actor: Member, e: Exception = None) -> None:
+ @staticmethod
+ def _stringify_relativedelta(delta: relativedelta) -> str:
+ """Convert a relativedelta object to a duration string."""
+ units = [("years", "y"), ("months", "m"), ("days", "d"), ("hours", "h"), ("minutes", "m"), ("seconds", "s")]
+ return "".join(f"{getattr(delta, unit)}{symbol}" for unit, symbol in units if getattr(delta, unit)) or "0s"
+
+ def _log_threshold_stat(self, threshold: relativedelta) -> None:
+ """Adds the threshold to the bot stats in days."""
+ threshold_days = relativedelta_to_timedelta(threshold).total_seconds() / SECONDS_IN_DAY
+ self.bot.stats.gauge("defcon.threshold", threshold_days)
+
+ async def _send_defcon_log(self, action: Action, actor: User) -> None:
"""Send log message for DEFCON action."""
info = action.value
log_msg: str = (
f"**Staffer:** {actor.mention} {actor} (`{actor.id}`)\n"
- f"{info.template.format(days=self.days.days)}"
+ f"{info.template.format(threshold=(humanize_delta(self.threshold) if self.threshold else '-'))}"
)
status_msg = f"DEFCON {action.name.lower()}"
- if e:
- log_msg += (
- "**There was a problem updating the site** - This setting may be reverted when the bot restarts.\n\n"
- f"```py\n{e}\n```"
- )
-
await self.mod_log.send_log_message(info.icon, info.color, status_msg, log_msg)
+ def _update_notifier(self) -> None:
+ """Start or stop the notifier according to the DEFCON status."""
+ if self.threshold and self.expiry is None and not self.defcon_notifier.is_running():
+ log.info("DEFCON notifier started.")
+ self.defcon_notifier.start()
+
+ elif (not self.threshold or self.expiry is not None) and self.defcon_notifier.is_running():
+ log.info("DEFCON notifier stopped.")
+ self.defcon_notifier.cancel()
+
+ @tasks.loop(hours=1)
+ async def defcon_notifier(self) -> None:
+ """Routinely notify moderators that DEFCON is active."""
+ await self.channel.send(f"Defcon is on and is set to {humanize_delta(self.threshold)}.")
+
+ def cog_unload(self) -> None:
+ """Cancel the notifer and threshold removal tasks when the cog unloads."""
+ log.trace("Cog unload: canceling defcon notifier task.")
+ self.defcon_notifier.cancel()
+ self.scheduler.cancel_all()
+
def setup(bot: Bot) -> None:
"""Load the Defcon cog."""
diff --git a/bot/exts/moderation/dm_relay.py b/bot/exts/moderation/dm_relay.py
index 6d081741c..1d2206e27 100644
--- a/bot/exts/moderation/dm_relay.py
+++ b/bot/exts/moderation/dm_relay.py
@@ -1,132 +1,72 @@
import logging
-from typing import Optional
import discord
-from async_rediscache import RedisCache
-from discord import Color
-from discord.ext import commands
-from discord.ext.commands import Cog
+from discord.ext.commands import Cog, Context, command, has_any_role
-from bot import constants
from bot.bot import Bot
-from bot.converters import UserMentionOrID
-from bot.utils.checks import in_whitelist_check
-from bot.utils.messages import send_attachments
-from bot.utils.webhooks import send_webhook
+from bot.constants import Emojis, MODERATION_ROLES
+from bot.utils.services import send_to_paste_service
log = logging.getLogger(__name__)
class DMRelay(Cog):
- """Relay direct messages to and from the bot."""
-
- # RedisCache[str, t.Union[discord.User.id, discord.Member.id]]
- dm_cache = RedisCache()
+ """Inspect messages sent to the bot."""
def __init__(self, bot: Bot):
self.bot = bot
- self.webhook_id = constants.Webhooks.dm_log
- self.webhook = None
- self.bot.loop.create_task(self.fetch_webhook())
-
- @commands.command(aliases=("reply",))
- async def send_dm(self, ctx: commands.Context, member: Optional[UserMentionOrID], *, message: str) -> None:
- """
- Allows you to send a DM to a user from the bot.
-
- If `member` is not provided, it will send to the last user who DM'd the bot.
-
- This feature should be used extremely sparingly. Use ModMail if you need to have a serious
- conversation with a user. This is just for responding to extraordinary DMs, having a little
- fun with users, and telling people they are DMing the wrong bot.
-
- NOTE: This feature will be removed if it is overused.
- """
- if not member:
- user_id = await self.dm_cache.get("last_user")
- member = ctx.guild.get_member(user_id) if user_id else None
-
- # If we still don't have a Member at this point, give up
- if not member:
- log.debug("This bot has never gotten a DM, or the RedisCache has been cleared.")
- await ctx.message.add_reaction("❌")
+
+ @command(aliases=("relay", "dr"))
+ async def dmrelay(self, ctx: Context, user: discord.User, limit: int = 100) -> None:
+ """Relays the direct message history between the bot and given user."""
+ log.trace(f"Relaying DMs with {user.name} ({user.id})")
+
+ if user.bot:
+ await ctx.send(f"{Emojis.cross_mark} No direct message history with bots.")
return
- if member.id == self.bot.user.id:
- log.debug("Not sending message to bot user")
- return await ctx.send("🚫 I can't send messages to myself!")
-
- try:
- await member.send(message)
- except discord.errors.Forbidden:
- log.debug("User has disabled DMs.")
- await ctx.message.add_reaction("❌")
- else:
- await ctx.message.add_reaction("✅")
- self.bot.stats.incr("dm_relay.dm_sent")
-
- async def fetch_webhook(self) -> None:
- """Fetches the webhook object, so we can post to it."""
- await self.bot.wait_until_guild_available()
-
- try:
- self.webhook = await self.bot.fetch_webhook(self.webhook_id)
- except discord.HTTPException:
- log.exception(f"Failed to fetch webhook with id `{self.webhook_id}`")
-
- @Cog.listener()
- async def on_message(self, message: discord.Message) -> None:
- """Relays the message's content and attachments to the dm_log channel."""
- # Only relay DMs from humans
- if message.author.bot or message.guild or self.webhook is None:
+ output = ""
+ async for msg in user.history(limit=limit, oldest_first=True):
+ created_at = msg.created_at.strftime(r"%Y-%m-%d %H:%M")
+
+ # Metadata (author, created_at, id)
+ output += f"{msg.author} [{created_at}] ({msg.id}): "
+
+ # Content
+ if msg.content:
+ output += msg.content + "\n"
+
+ # Embeds
+ if (embeds := len(msg.embeds)) > 0:
+ output += f"<{embeds} embed{'s' if embeds > 1 else ''}>\n"
+
+ # Attachments
+ attachments = "\n".join(a.url for a in msg.attachments)
+ if attachments:
+ output += attachments + "\n"
+
+ if not output:
+ await ctx.send(f"{Emojis.cross_mark} No direct message history with {user.mention}.")
+ return
+
+ metadata = (
+ f"User: {user} ({user.id})\n"
+ f"Channel ID: {user.dm_channel.id}\n\n"
+ )
+
+ paste_link = await send_to_paste_service(metadata + output, extension="txt")
+
+ if paste_link is None:
+ await ctx.send(f"{Emojis.cross_mark} Failed to upload output to hastebin.")
return
- if message.clean_content:
- await send_webhook(
- webhook=self.webhook,
- content=message.clean_content,
- username=f"{message.author.display_name} ({message.author.id})",
- avatar_url=message.author.avatar_url
- )
- await self.dm_cache.set("last_user", message.author.id)
- self.bot.stats.incr("dm_relay.dm_received")
-
- # Handle any attachments
- if message.attachments:
- try:
- await send_attachments(
- message,
- self.webhook,
- username=f"{message.author.display_name} ({message.author.id})"
- )
- except (discord.errors.Forbidden, discord.errors.NotFound):
- e = discord.Embed(
- description=":x: **This message contained an attachment, but it could not be retrieved**",
- color=Color.red()
- )
- await send_webhook(
- webhook=self.webhook,
- embed=e,
- username=f"{message.author.display_name} ({message.author.id})",
- avatar_url=message.author.avatar_url
- )
- except discord.HTTPException:
- log.exception("Failed to send an attachment to the webhook")
-
- async def cog_check(self, ctx: commands.Context) -> bool:
+ await ctx.send(paste_link)
+
+ async def cog_check(self, ctx: Context) -> bool:
"""Only allow moderators to invoke the commands in this cog."""
- checks = [
- await commands.has_any_role(*constants.MODERATION_ROLES).predicate(ctx),
- in_whitelist_check(
- ctx,
- channels=[constants.Channels.dm_log],
- redirect=None,
- fail_silently=True,
- )
- ]
- return all(checks)
+ return await has_any_role(*MODERATION_ROLES).predicate(ctx)
def setup(bot: Bot) -> None:
- """Load the DMRelay cog."""
+ """Load the DMRelay cog."""
bot.add_cog(DMRelay(bot))
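
The rewritten cog uploads a transcript instead of relaying messages live, and the None check above implies the contract of send_to_paste_service: the paste URL string on success, None on failure. A usage sketch under that assumption (the wrapper name is illustrative):

    from bot.utils.services import send_to_paste_service

    async def upload_transcript(ctx, text: str) -> None:
        link = await send_to_paste_service(text, extension="txt")
        if link is None:
            await ctx.send("Failed to upload the transcript.")
        else:
            await ctx.send(link)
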
diff --git a/bot/exts/moderation/infraction/_scheduler.py b/bot/exts/moderation/infraction/_scheduler.py
index a73f2e8da..988fb7220 100644
--- a/bot/exts/moderation/infraction/_scheduler.py
+++ b/bot/exts/moderation/infraction/_scheduler.py
@@ -173,6 +173,8 @@ class InfractionScheduler:
total = len(infractions)
end_msg = f" (#{id_} ; {total} infraction{ngettext('', 's', total)} total)"
+ purge = infraction.get("purge", "")
+
# Execute the necessary actions to apply the infraction on Discord.
if action_coro:
log.trace(f"Awaiting the infraction #{id_} application action coroutine.")
@@ -210,7 +212,7 @@ class InfractionScheduler:
log.error(f"Deletion of {infr_type} infraction #{id_} failed with error code {e.status}.")
infr_message = ""
else:
- infr_message = f" **{' '.join(infr_type.split('_'))}** to {user.mention}{expiry_msg}{end_msg}"
+ infr_message = f" **{purge}{' '.join(infr_type.split('_'))}** to {user.mention}{expiry_msg}{end_msg}"
# Send a confirmation message to the invoking context.
log.trace(f"Sending infraction #{id_} confirmation message.")
@@ -234,7 +236,7 @@ class InfractionScheduler:
footer=f"ID {infraction['id']}"
)
- log.info(f"Applied {infr_type} infraction #{id_} to {user}.")
+ log.info(f"Applied {purge}{infr_type} infraction #{id_} to {user}.")
return not failed
async def pardon_infraction(
diff --git a/bot/exts/moderation/infraction/_utils.py b/bot/exts/moderation/infraction/_utils.py
index e766c1e5c..a98b4828b 100644
--- a/bot/exts/moderation/infraction/_utils.py
+++ b/bot/exts/moderation/infraction/_utils.py
@@ -22,7 +22,6 @@ INFRACTION_ICONS = {
"voice_ban": (Icons.voice_state_red, Icons.voice_state_green),
}
RULES_URL = "https://pythondiscord.com/pages/rules"
-APPEALABLE_INFRACTIONS = ("ban", "mute", "voice_ban")
# Type aliases
UserObject = t.Union[discord.Member, discord.User]
@@ -31,8 +30,12 @@ Infraction = t.Dict[str, t.Union[str, int, bool]]
APPEAL_EMAIL = "[email protected]"
-INFRACTION_TITLE = f"Please review our rules over at {RULES_URL}"
-INFRACTION_APPEAL_FOOTER = f"To appeal this infraction, send an e-mail to {APPEAL_EMAIL}"
+INFRACTION_TITLE = "Please review our rules"
+INFRACTION_APPEAL_EMAIL_FOOTER = f"To appeal this infraction, send an e-mail to {APPEAL_EMAIL}"
+INFRACTION_APPEAL_MODMAIL_FOOTER = (
+ 'If you would like to discuss or appeal this infraction, '
+ 'send a message to the ModMail bot'
+)
INFRACTION_AUTHOR_NAME = "Infraction information"
INFRACTION_DESCRIPTION_TEMPLATE = (
@@ -71,13 +74,13 @@ async def post_user(ctx: Context, user: UserSnowflake) -> t.Optional[dict]:
async def post_infraction(
- ctx: Context,
- user: UserSnowflake,
- infr_type: str,
- reason: str,
- expires_at: datetime = None,
- hidden: bool = False,
- active: bool = True
+ ctx: Context,
+ user: UserSnowflake,
+ infr_type: str,
+ reason: str,
+ expires_at: datetime = None,
+ hidden: bool = False,
+ active: bool = True
) -> t.Optional[dict]:
"""Posts an infraction to the API."""
if isinstance(user, (discord.Member, discord.User)) and user.bot:
@@ -150,11 +153,11 @@ async def get_active_infraction(
async def notify_infraction(
- user: UserObject,
- infr_type: str,
- expires_at: t.Optional[str] = None,
- reason: t.Optional[str] = None,
- icon_url: str = Icons.token_removed
+ user: UserObject,
+ infr_type: str,
+ expires_at: t.Optional[str] = None,
+ reason: t.Optional[str] = None,
+ icon_url: str = Icons.token_removed
) -> bool:
"""DM a user about their new infraction and return True if the DM is successful."""
log.trace(f"Sending {user} a DM about their {infr_type} infraction.")
@@ -178,17 +181,18 @@ async def notify_infraction(
embed.title = INFRACTION_TITLE
embed.url = RULES_URL
- if infr_type in APPEALABLE_INFRACTIONS:
- embed.set_footer(text=INFRACTION_APPEAL_FOOTER)
+ embed.set_footer(
+ text=INFRACTION_APPEAL_EMAIL_FOOTER if infr_type == 'Ban' else INFRACTION_APPEAL_MODMAIL_FOOTER
+ )
return await send_private_embed(user, embed)
async def notify_pardon(
- user: UserObject,
- title: str,
- content: str,
- icon_url: str = Icons.user_verified
+ user: UserObject,
+ title: str,
+ content: str,
+ icon_url: str = Icons.user_verified
) -> bool:
"""DM a user about their pardoned infraction and return True if the DM is successful."""
log.trace(f"Sending {user} a DM about their pardoned infraction.")
diff --git a/bot/exts/moderation/infraction/infractions.py b/bot/exts/moderation/infraction/infractions.py
index 7349d65f2..d89e80acc 100644
--- a/bot/exts/moderation/infraction/infractions.py
+++ b/bot/exts/moderation/infraction/infractions.py
@@ -126,7 +126,7 @@ class Infractions(InfractionScheduler, commands.Cog):
duration = await Duration().convert(ctx, "1h")
await self.apply_mute(ctx, user, reason, expires_at=duration)
- @command()
+ @command(aliases=("tban",))
async def tempban(
self,
ctx: Context,
@@ -198,7 +198,7 @@ class Infractions(InfractionScheduler, commands.Cog):
# endregion
# region: Temporary shadow infractions
- @command(hidden=True, aliases=["shadowtempban", "stempban"])
+ @command(hidden=True, aliases=["shadowtempban", "stempban", "stban"])
async def shadow_tempban(
self,
ctx: Context,
@@ -318,6 +318,8 @@ class Infractions(InfractionScheduler, commands.Cog):
if infraction is None:
return
+ infraction["purge"] = "purge " if purge_days else ""
+
self.mod_log.ignore(Event.member_remove, user.id)
if reason:
diff --git a/bot/exts/moderation/infraction/superstarify.py b/bot/exts/moderation/infraction/superstarify.py
index ffc470c54..704dddf9c 100644
--- a/bot/exts/moderation/infraction/superstarify.py
+++ b/bot/exts/moderation/infraction/superstarify.py
@@ -104,7 +104,7 @@ class Superstarify(InfractionScheduler, Cog):
await self.reapply_infraction(infraction, action)
- @command(name="superstarify", aliases=("force_nick", "star", "starify"))
+ @command(name="superstarify", aliases=("force_nick", "star", "starify", "superstar"))
async def superstarify(
self,
ctx: Context,
@@ -183,7 +183,7 @@ class Superstarify(InfractionScheduler, Cog):
)
await ctx.send(embed=embed)
- @command(name="unsuperstarify", aliases=("release_nick", "unstar", "unstarify"))
+ @command(name="unsuperstarify", aliases=("release_nick", "unstar", "unstarify", "unsuperstar"))
async def unsuperstarify(self, ctx: Context, member: Member) -> None:
"""Remove the superstarify infraction and allow the user to change their nickname."""
await self.pardon_infraction(ctx, "superstar", member)
diff --git a/bot/exts/moderation/slowmode.py b/bot/exts/moderation/slowmode.py
index c449752e1..d8baff76a 100644
--- a/bot/exts/moderation/slowmode.py
+++ b/bot/exts/moderation/slowmode.py
@@ -1,5 +1,4 @@
import logging
-from datetime import datetime
from typing import Optional
from dateutil.relativedelta import relativedelta
@@ -54,8 +53,7 @@ class Slowmode(Cog):
# Convert `dateutil.relativedelta.relativedelta` to `datetime.timedelta`
# Must do this to get the delta in a particular unit of time
- utcnow = datetime.utcnow()
- slowmode_delay = (utcnow + delay - utcnow).total_seconds()
+ slowmode_delay = time.relativedelta_to_timedelta(delay).total_seconds()
humanized_delay = time.humanize_delta(delay)
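
The removed lines show the conversion trick that the new time.relativedelta_to_timedelta helper presumably centralises: anchor the relativedelta at a fixed instant and subtract that instant back out, so calendar-aware units resolve to a concrete timedelta. A sketch consistent with the deleted code:

    from datetime import datetime, timedelta
    from dateutil.relativedelta import relativedelta

    def relativedelta_to_timedelta(delta: relativedelta) -> timedelta:
        now = datetime.utcnow()
        # Anchoring at "now" lets months and years resolve to real lengths.
        return (now + delta) - now

    assert relativedelta_to_timedelta(relativedelta(hours=2)).total_seconds() == 7200.0
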
diff --git a/bot/exts/moderation/stream.py b/bot/exts/moderation/stream.py
new file mode 100644
index 000000000..12e195172
--- /dev/null
+++ b/bot/exts/moderation/stream.py
@@ -0,0 +1,179 @@
+import logging
+from datetime import timedelta, timezone
+
+import arrow
+import discord
+from arrow import Arrow
+from async_rediscache import RedisCache
+from discord.ext import commands
+
+from bot.bot import Bot
+from bot.constants import Colours, Emojis, Guild, Roles, STAFF_ROLES, VideoPermission
+from bot.converters import Expiry
+from bot.utils.scheduling import Scheduler
+from bot.utils.time import format_infraction_with_duration
+
+log = logging.getLogger(__name__)
+
+
+class Stream(commands.Cog):
+ """Grant and revoke streaming permissions from members."""
+
+ # Stores tasks to remove streaming permission
+ # RedisCache[discord.Member.id, UtcPosixTimestamp]
+ task_cache = RedisCache()
+
+ def __init__(self, bot: Bot):
+ self.bot = bot
+ self.scheduler = Scheduler(self.__class__.__name__)
+ self.reload_task = self.bot.loop.create_task(self._reload_tasks_from_redis())
+
+ def cog_unload(self) -> None:
+ """Cancel all scheduled tasks."""
+ self.reload_task.cancel()
+ self.reload_task.add_done_callback(lambda _: self.scheduler.cancel_all())
+
+ async def _revoke_streaming_permission(self, member: discord.Member) -> None:
+ """Remove the streaming permission from the given Member."""
+ await self.task_cache.delete(member.id)
+ await member.remove_roles(discord.Object(Roles.video), reason="Streaming access revoked")
+
+ async def _reload_tasks_from_redis(self) -> None:
+ """Reload outstanding tasks from redis on startup, delete the task if the member has since left the server."""
+ await self.bot.wait_until_guild_available()
+ items = await self.task_cache.items()
+ for key, value in items:
+ member = self.bot.get_guild(Guild.id).get_member(key)
+
+ if not member:
+ # Member isn't found in the cache
+ try:
+ member = await self.bot.get_guild(Guild.id).fetch_member(key)
+ except discord.errors.NotFound:
+ log.debug(
+ f"Member {key} left the guild before we could schedule "
+ "the revoking of their streaming permissions."
+ )
+ await self.task_cache.delete(key)
+ continue
+ except discord.HTTPException:
+ log.exception(f"Exception while trying to retrieve member {key} from Discord.")
+ continue
+
+ revoke_time = Arrow.utcfromtimestamp(value)
+ log.debug(f"Scheduling {member} ({member.id}) to have streaming permission revoked at {revoke_time}")
+ self.scheduler.schedule_at(
+ revoke_time,
+ key,
+ self._revoke_streaming_permission(member)
+ )
+
+ @commands.command(aliases=("streaming",))
+ @commands.has_any_role(*STAFF_ROLES)
+ async def stream(self, ctx: commands.Context, member: discord.Member, duration: Expiry = None) -> None:
+ """
+ Temporarily grant streaming permissions to a member for a given duration.
+
+ A unit of time should be appended to the duration.
+ Units (∗case-sensitive):
+ \u2003`y` - years
+ \u2003`m` - months∗
+ \u2003`w` - weeks
+ \u2003`d` - days
+ \u2003`h` - hours
+ \u2003`M` - minutes∗
+ \u2003`s` - seconds
+
+ Alternatively, an ISO 8601 timestamp can be provided for the duration.
+ """
+ log.trace(f"Attempting to give temporary streaming permission to {member} ({member.id}).")
+
+ if duration is None:
+ # Use default duration and convert back to datetime as Embed.timestamp doesn't support Arrow
+ duration = arrow.utcnow() + timedelta(minutes=VideoPermission.default_permission_duration)
+ duration = duration.datetime
+ elif duration.tzinfo is None:
+ # Make duration tz-aware.
+ # ISODateTime could already include tzinfo, this check is so it isn't overwritten.
+ duration = duration.replace(tzinfo=timezone.utc)
+
+ # Check if the member already has streaming permission
+ already_allowed = any(Roles.video == role.id for role in member.roles)
+ if already_allowed:
+ await ctx.send(f"{Emojis.cross_mark} {member.mention} can already stream.")
+ log.debug(f"{member} ({member.id}) already has permission to stream.")
+ return
+
+ # Schedule task to remove streaming permission from Member and add it to task cache
+ self.scheduler.schedule_at(duration, member.id, self._revoke_streaming_permission(member))
+ await self.task_cache.set(member.id, duration.timestamp())
+
+ await member.add_roles(discord.Object(Roles.video), reason="Temporary streaming access granted")
+
+ # Use embed as embed timestamps do timezone conversions.
+ embed = discord.Embed(
+ description=f"{Emojis.check_mark} {member.mention} can now stream.",
+ colour=Colours.soft_green
+ )
+ embed.set_footer(text=f"Streaming permission has been given to {member} until")
+ embed.timestamp = duration
+
+ # Mention in content as mentions in embeds don't ping
+ await ctx.send(content=member.mention, embed=embed)
+
+ # Convert here for nicer logging
+ revoke_time = format_infraction_with_duration(str(duration))
+ log.debug(f"Successfully gave {member} ({member.id}) permission to stream until {revoke_time}.")
+
+ @commands.command(aliases=("pstream",))
+ @commands.has_any_role(*STAFF_ROLES)
+ async def permanentstream(self, ctx: commands.Context, member: discord.Member) -> None:
+ """Permanently grants the given member the permission to stream."""
+ log.trace(f"Attempting to give permanent streaming permission to {member} ({member.id}).")
+
+ # Check if the member already has streaming permission
+ if any(Roles.video == role.id for role in member.roles):
+ if member.id in self.scheduler:
+ # Member has temp permission, so cancel the task to revoke later and delete from cache
+ self.scheduler.cancel(member.id)
+ await self.task_cache.delete(member.id)
+
+ await ctx.send(f"{Emojis.check_mark} Permanently granted {member.mention} the permission to stream.")
+ log.debug(
+ f"Successfully upgraded temporary streaming permission for {member} ({member.id}) to permanent."
+ )
+ return
+
+ await ctx.send(f"{Emojis.cross_mark} This member can already stream.")
+ log.debug(f"{member} ({member.id}) already had permanent streaming permission.")
+ return
+
+ await member.add_roles(discord.Object(Roles.video), reason="Permanent streaming access granted")
+ await ctx.send(f"{Emojis.check_mark} Permanently granted {member.mention} the permission to stream.")
+ log.debug(f"Successfully gave {member} ({member.id}) permanent streaming permission.")
+
+ @commands.command(aliases=("unstream", "rstream"))
+ @commands.has_any_role(*STAFF_ROLES)
+ async def revokestream(self, ctx: commands.Context, member: discord.Member) -> None:
+ """Revoke the permission to stream from the given member."""
+ log.trace(f"Attempting to remove streaming permission from {member} ({member.id}).")
+
+ # Check if the member already has streaming permission
+ if any(Roles.video == role.id for role in member.roles):
+ if member.id in self.scheduler:
+ # Member has temp permission, so cancel the task to revoke later and delete from cache
+ self.scheduler.cancel(member.id)
+ await self.task_cache.delete(member.id)
+ await self._revoke_streaming_permission(member)
+
+ await ctx.send(f"{Emojis.check_mark} Revoked the permission to stream from {member.mention}.")
+ log.debug(f"Successfully revoked streaming permission from {member} ({member.id}).")
+ return
+
+ await ctx.send(f"{Emojis.cross_mark} This member doesn't have video permissions to remove!")
+ log.debug(f"{member} ({member.id}) didn't have the streaming permission to remove!")
+
+
+def setup(bot: Bot) -> None:
+ """Loads the Stream cog."""
+ bot.add_cog(Stream(bot))
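
The new cog survives restarts because the cache keeps member.id mapped to a UTC POSIX timestamp, and _reload_tasks_from_redis rebuilds Scheduler entries from those floats via Arrow.utcfromtimestamp. A round-trip sketch of that persistence format:

    from datetime import datetime, timedelta, timezone

    from arrow import Arrow

    expiry = datetime.now(timezone.utc) + timedelta(hours=1)
    stored = expiry.timestamp()             # float seconds since the epoch, as cached
    restored = Arrow.utcfromtimestamp(stored)
    assert restored.datetime == expiry      # the tz-aware UTC instant comes back intact
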
diff --git a/bot/exts/moderation/watchchannels/_watchchannel.py b/bot/exts/moderation/watchchannels/_watchchannel.py
index f9fc12dc3..9f26c34f2 100644
--- a/bot/exts/moderation/watchchannels/_watchchannel.py
+++ b/bot/exts/moderation/watchchannels/_watchchannel.py
@@ -5,9 +5,8 @@ import textwrap
from abc import abstractmethod
from collections import defaultdict, deque
from dataclasses import dataclass
-from typing import Optional
+from typing import Any, Dict, Optional
-import dateutil.parser
import discord
from discord import Color, DMChannel, Embed, HTTPException, Message, errors
from discord.ext.commands import Cog, Context
@@ -20,7 +19,7 @@ from bot.exts.filters.webhook_remover import WEBHOOK_URL_RE
from bot.exts.moderation.modlog import ModLog
from bot.pagination import LinePaginator
from bot.utils import CogABCMeta, messages
-from bot.utils.time import time_since
+from bot.utils.time import get_time_delta
log = logging.getLogger(__name__)
@@ -47,7 +46,9 @@ class WatchChannel(metaclass=CogABCMeta):
webhook_id: int,
api_endpoint: str,
api_default_params: dict,
- logger: logging.Logger
+ logger: logging.Logger,
+ *,
+ disable_header: bool = False
) -> None:
self.bot = bot
@@ -66,6 +67,7 @@ class WatchChannel(metaclass=CogABCMeta):
self.channel = None
self.webhook = None
self.message_history = MessageHistory()
+ self.disable_header = disable_header
self._start = self.bot.loop.create_task(self.start_watchchannel())
@@ -133,7 +135,10 @@ class WatchChannel(metaclass=CogABCMeta):
if not await self.fetch_user_cache():
await self.modlog.send_log_message(
title=f"Warning: Failed to retrieve user cache for the {self.__class__.__name__} watch channel",
- text="Could not retrieve the list of watched users from the API and messages will not be relayed.",
+ text=(
+ "Could not retrieve the list of watched users from the API. "
+ "Messages will not be relayed, and reviews not rescheduled."
+ ),
ping_everyone=True,
icon_url=Icons.token_removed,
colour=Color.red()
@@ -267,6 +272,9 @@ class WatchChannel(metaclass=CogABCMeta):
async def send_header(self, msg: Message) -> None:
"""Sends a header embed with information about the relayed messages to the watch channel."""
+ if self.disable_header:
+ return
+
user_id = msg.author.id
guild = self.bot.get_guild(GuildConfig.id)
@@ -274,7 +282,7 @@ class WatchChannel(metaclass=CogABCMeta):
actor = actor.display_name if actor else self.watched_users[user_id]['actor']
inserted_at = self.watched_users[user_id]['inserted_at']
- time_delta = self._get_time_delta(inserted_at)
+ time_delta = get_time_delta(inserted_at)
reason = self.watched_users[user_id]['reason']
@@ -302,35 +310,61 @@ class WatchChannel(metaclass=CogABCMeta):
The optional kwarg `update_cache` specifies whether the cache should
be refreshed by polling the API.
"""
- if update_cache:
- if not await self.fetch_user_cache():
- await ctx.send(f":x: Failed to update {self.__class__.__name__} user cache, serving from cache")
- update_cache = False
+ watched_data = await self.prepare_watched_users_data(ctx, oldest_first, update_cache)
- lines = []
- for user_id, user_data in self.watched_users.items():
- inserted_at = user_data['inserted_at']
- time_delta = self._get_time_delta(inserted_at)
- lines.append(f"• <@{user_id}> (added {time_delta})")
+ if update_cache and not watched_data["updated"]:
+ await ctx.send(f":x: Failed to update {self.__class__.__name__} user cache, serving from cache")
- if oldest_first:
- lines.reverse()
-
- lines = lines or ("There's nothing here yet.",)
+ lines = watched_data["info"].values() or ("There's nothing here yet.",)
embed = Embed(
- title=f"{self.__class__.__name__} watched users ({'updated' if update_cache else 'cached'})",
+ title=watched_data["title"],
color=Color.blue()
)
await LinePaginator.paginate(lines, ctx, embed, empty=False)
- @staticmethod
- def _get_time_delta(time_string: str) -> str:
- """Returns the time in human-readable time delta format."""
- date_time = dateutil.parser.isoparse(time_string).replace(tzinfo=None)
- time_delta = time_since(date_time, precision="minutes", max_units=1)
+ async def prepare_watched_users_data(
+ self, ctx: Context, oldest_first: bool = False, update_cache: bool = True
+ ) -> Dict[str, Any]:
+ """
+ Prepare overview information of watched users to list.
+
+ The optional kwarg `oldest_first` orders the list by oldest entry.
+
+ The optional kwarg `update_cache` specifies whether the cache should
+ be refreshed by polling the API.
+
+ Returns a dictionary with a "title" key for the list's title, and an "info" key with
+ information about each user.
+
+ The dictionary additionally has an "updated" field which is true if a cache update was
+ requested and it succeeded.
+ """
+ list_data = {}
+ if update_cache:
+ if not await self.fetch_user_cache():
+ update_cache = False
+ list_data["updated"] = update_cache
+
+ watched_iter = self.watched_users.items()
+ if oldest_first:
+ watched_iter = reversed(watched_iter)
+
+ list_data["info"] = {}
+ for user_id, user_data in watched_iter:
+ member = ctx.guild.get_member(user_id)
+ line = f"• `{user_id}`"
+ if member:
+ line += f" ({member.name}#{member.discriminator})"
+ inserted_at = user_data['inserted_at']
+ line += f", added {get_time_delta(inserted_at)}"
+ if not member: # Cross off users who left the server.
+ line = f"~~{line}~~"
+ list_data["info"][user_id] = line
+
+ list_data["title"] = f"{self.__class__.__name__} watched users ({'updated' if update_cache else 'cached'})"
- return time_delta
+ return list_data
def _remove_user(self, user_id: int) -> None:
"""Removes a user from a watch channel."""
diff --git a/bot/exts/recruitment/__init__.py b/bot/exts/recruitment/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/bot/exts/recruitment/__init__.py
diff --git a/bot/exts/recruitment/talentpool/__init__.py b/bot/exts/recruitment/talentpool/__init__.py
new file mode 100644
index 000000000..52d27eb99
--- /dev/null
+++ b/bot/exts/recruitment/talentpool/__init__.py
@@ -0,0 +1,8 @@
+from bot.bot import Bot
+
+
+def setup(bot: Bot) -> None:
+ """Load the TalentPool cog."""
+ from bot.exts.recruitment.talentpool._cog import TalentPool
+
+ bot.add_cog(TalentPool(bot))
diff --git a/bot/exts/moderation/watchchannels/talentpool.py b/bot/exts/recruitment/talentpool/_cog.py
index dd3349c3a..72604be51 100644
--- a/bot/exts/moderation/watchchannels/talentpool.py
+++ b/bot/exts/recruitment/talentpool/_cog.py
@@ -1,19 +1,24 @@
import logging
import textwrap
from collections import ChainMap
+from io import StringIO
from typing import Union
+import discord
from discord import Color, Embed, Member, User
from discord.ext.commands import Cog, Context, group, has_any_role
from bot.api import ResponseCodeError
from bot.bot import Bot
-from bot.constants import Channels, Guild, MODERATION_ROLES, STAFF_ROLES, Webhooks
+from bot.constants import Channels, Emojis, Guild, MODERATION_ROLES, STAFF_ROLES, Webhooks
from bot.converters import FetchedMember
from bot.exts.moderation.watchchannels._watchchannel import WatchChannel
+from bot.exts.recruitment.talentpool._review import Reviewer
from bot.pagination import LinePaginator
from bot.utils import time
+REASON_MAX_CHARS = 1000
+
log = logging.getLogger(__name__)
@@ -28,8 +33,12 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):
api_endpoint='bot/nominations',
api_default_params={'active': 'true', 'ordering': '-inserted_at'},
logger=log,
+ disable_header=True,
)
+ self.reviewer = Reviewer(self.__class__.__name__, bot, self)
+ self.bot.loop.create_task(self.reviewer.reschedule_reviews())
+
@group(name='talentpool', aliases=('tp', 'talent', 'nomination', 'n'), invoke_without_command=True)
@has_any_role(*MODERATION_ROLES)
async def nomination_group(self, ctx: Context) -> None:
@@ -39,7 +48,10 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):
@nomination_group.command(name='watched', aliases=('all', 'list'), root_aliases=("nominees",))
@has_any_role(*MODERATION_ROLES)
async def watched_command(
- self, ctx: Context, oldest_first: bool = False, update_cache: bool = True
+ self,
+ ctx: Context,
+ oldest_first: bool = False,
+ update_cache: bool = True
) -> None:
"""
Shows the users that are currently being monitored in the talent pool.
@@ -51,6 +63,47 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):
"""
await self.list_watched_users(ctx, oldest_first=oldest_first, update_cache=update_cache)
+ async def list_watched_users(
+ self,
+ ctx: Context,
+ oldest_first: bool = False,
+ update_cache: bool = True
+ ) -> None:
+ """
+ Gives an overview of the nominated users list.
+
+ It lists each user's mention and name, how long ago they were nominated, and whether their
+ review was scheduled or already posted.
+
+ The optional kwarg `oldest_first` orders the list by oldest entry.
+
+ The optional kwarg `update_cache` specifies whether the cache should
+ be refreshed by polling the API.
+ """
+ # TODO Once the watch channel is removed, this can be done in a smarter way, without splitting and overriding
+ # the list_watched_users function.
+ watched_data = await self.prepare_watched_users_data(ctx, oldest_first, update_cache)
+
+ if update_cache and not watched_data["updated"]:
+ await ctx.send(f":x: Failed to update {self.__class__.__name__} user cache, serving from cache")
+
+ lines = []
+ for user_id, line in watched_data["info"].items():
+ if self.watched_users[user_id]['reviewed']:
+ line += " *(reviewed)*"
+ elif user_id in self.reviewer:
+ line += " *(scheduled)*"
+ lines.append(line)
+
+ if not lines:
+ lines = ("There's nothing here yet.",)
+
+ embed = Embed(
+ title=watched_data["title"],
+ color=Color.blue()
+ )
+ await LinePaginator.paginate(lines, ctx, embed, empty=False)
+
@nomination_group.command(name='oldest')
@has_any_role(*MODERATION_ROLES)
async def oldest_command(self, ctx: Context, update_cache: bool = True) -> None:
@@ -62,15 +115,39 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):
"""
await ctx.invoke(self.watched_command, oldest_first=True, update_cache=update_cache)
+ @nomination_group.command(name='forcewatch', aliases=('fw', 'forceadd', 'fa'), root_aliases=("forcenominate",))
+ @has_any_role(*MODERATION_ROLES)
+ async def force_watch_command(self, ctx: Context, user: FetchedMember, *, reason: str = '') -> None:
+ """
+ Adds the given `user` to the talent pool, from any channel.
+
+ A `reason` for adding the user to the talent pool is optional.
+ """
+ await self._watch_user(ctx, user, reason)
+
@nomination_group.command(name='watch', aliases=('w', 'add', 'a'), root_aliases=("nominate",))
@has_any_role(*STAFF_ROLES)
async def watch_command(self, ctx: Context, user: FetchedMember, *, reason: str = '') -> None:
"""
- Relay messages sent by the given `user` to the `#talent-pool` channel.
+ Adds the given `user` to the talent pool.
A `reason` for adding the user to the talent pool is optional.
- If given, it will be displayed in the header when relaying messages of this user to the channel.
+ This command can only be used in the `#nominations` channel.
"""
+ if ctx.channel.id != Channels.nominations:
+ if any(role.id in MODERATION_ROLES for role in ctx.author.roles):
+ await ctx.send(
+ f":x: Nominations should be run in the <#{Channels.nominations}> channel. "
+ "Use `!tp forcewatch` to override this check."
+ )
+ else:
+ await ctx.send(f":x: Nominations must be run in the <#{Channels.nominations}> channel")
+ return
+
+ await self._watch_user(ctx, user, reason)
+
+ async def _watch_user(self, ctx: Context, user: FetchedMember, reason: str) -> None:
+ """Adds the given user to the talent pool."""
if user.bot:
await ctx.send(f":x: I'm sorry {ctx.author}, I'm afraid I can't do that. I only watch humans.")
return
@@ -83,8 +160,8 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):
await ctx.send(f":x: Failed to update the user cache; can't add {user}")
return
- if user.id in self.watched_users:
- await ctx.send(f":x: {user} is already being watched in the talent pool")
+ if len(reason) > REASON_MAX_CHARS:
+ await ctx.send(f":x: Maxiumum allowed characters for the reason is {REASON_MAX_CHARS}.")
return
# Manual request with `raise_for_status` as False because we want the actual response
@@ -101,14 +178,20 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):
async with session.post(url, **kwargs) as resp:
response_data = await resp.json()
- if resp.status == 400 and response_data.get('user', False):
- await ctx.send(":x: The specified user can't be found in the database tables")
+ if resp.status == 400:
+ if response_data.get('user', False):
+ await ctx.send(":x: The specified user can't be found in the database tables")
+ elif response_data.get('actor', False):
+ await ctx.send(":x: You have already nominated this user")
+
return
else:
resp.raise_for_status()
self.watched_users[user.id] = response_data
- msg = f":white_check_mark: Messages sent by {user} will now be relayed to the talent pool channel"
+
+ if user.id not in self.reviewer:
+ self.reviewer.schedule_review(user.id)
history = await self.bot.api_client.get(
self.api_endpoint,
@@ -119,10 +202,9 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):
}
)
+ msg = f"✅ The nomination for {user} has been added to the talent pool"
if history:
- total = f"({len(history)} previous nominations in total)"
- start_reason = f"Watched: {textwrap.shorten(history[0]['reason'], width=500, placeholder='...')}"
- msg += f"\n\nUser's previous watch reasons {total}:```{start_reason}```"
+ msg += f"\n\n({len(history)} previous nominations in total)"
await ctx.send(msg)
@@ -163,6 +245,10 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):
Providing a `reason` is required.
"""
+ if len(reason) > REASON_MAX_CHARS:
+ await ctx.send(f":x: Maxiumum allowed characters for the end reason is {REASON_MAX_CHARS}.")
+ return
+
if await self.unwatch(user.id, reason):
await ctx.send(f":white_check_mark: Messages sent by {user} will no longer be relayed")
else:
@@ -176,33 +262,98 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):
@nomination_edit_group.command(name='reason')
@has_any_role(*MODERATION_ROLES)
- async def edit_reason_command(self, ctx: Context, nomination_id: int, *, reason: str) -> None:
- """
- Edits the reason/unnominate reason for the nomination with the given `id` depending on the status.
+ async def edit_reason_command(self, ctx: Context, nomination_id: int, actor: FetchedMember, *, reason: str) -> None:
+ """Edits the reason of a specific nominator in a specific active nomination."""
+ if len(reason) > REASON_MAX_CHARS:
+ await ctx.send(f":x: Maxiumum allowed characters for the reason is {REASON_MAX_CHARS}.")
+ return
- If the nomination is active, the reason for nominating the user will be edited;
- If the nomination is no longer active, the reason for ending the nomination will be edited instead.
- """
try:
nomination = await self.bot.api_client.get(f"{self.api_endpoint}/{nomination_id}")
except ResponseCodeError as e:
if e.response.status == 404:
- self.log.trace(f"Nomination API 404: Can't nomination with id {nomination_id}")
+ self.log.trace(f"Nomination API 404: Can't find a nomination with id {nomination_id}")
await ctx.send(f":x: Can't find a nomination with id `{nomination_id}`")
return
else:
raise
- field = "reason" if nomination["active"] else "end_reason"
+ if not nomination["active"]:
+ await ctx.send(":x: Can't edit the reason of an inactive nomination.")
+ return
+
+ if not any(entry["actor"] == actor.id for entry in nomination["entries"]):
+ await ctx.send(f":x: {actor} doesn't have an entry in this nomination.")
+ return
- self.log.trace(f"Changing {field} for nomination with id {nomination_id} to {reason}")
+ self.log.trace(f"Changing reason for nomination with id {nomination_id} of actor {actor} to {repr(reason)}")
await self.bot.api_client.patch(
f"{self.api_endpoint}/{nomination_id}",
- json={field: reason}
+ json={"actor": actor.id, "reason": reason}
+ )
+ await self.fetch_user_cache() # Update cache
+ await ctx.send(":white_check_mark: Successfully updated nomination reason.")
+
+ @nomination_edit_group.command(name='end_reason')
+ @has_any_role(*MODERATION_ROLES)
+ async def edit_end_reason_command(self, ctx: Context, nomination_id: int, *, reason: str) -> None:
+ """Edits the unnominate reason for the nomination with the given `id`."""
+ if len(reason) > REASON_MAX_CHARS:
+ await ctx.send(f":x: Maxiumum allowed characters for the end reason is {REASON_MAX_CHARS}.")
+ return
+
+ try:
+ nomination = await self.bot.api_client.get(f"{self.api_endpoint}/{nomination_id}")
+ except ResponseCodeError as e:
+ if e.response.status == 404:
+ self.log.trace(f"Nomination API 404: Can't find a nomination with id {nomination_id}")
+ await ctx.send(f":x: Can't find a nomination with id `{nomination_id}`")
+ return
+ else:
+ raise
+
+ if nomination["active"]:
+ await ctx.send(":x: Can't edit the end reason of an active nomination.")
+ return
+
+ self.log.trace(f"Changing end reason for nomination with id {nomination_id} to {repr(reason)}")
+
+ await self.bot.api_client.patch(
+ f"{self.api_endpoint}/{nomination_id}",
+ json={"end_reason": reason}
)
await self.fetch_user_cache() # Update cache.
- await ctx.send(f":white_check_mark: Updated the {field} of the nomination!")
+ await ctx.send(":white_check_mark: Updated the end reason of the nomination!")
+
+ @nomination_group.command(aliases=('mr',))
+ @has_any_role(*MODERATION_ROLES)
+ async def mark_reviewed(self, ctx: Context, user_id: int) -> None:
+ """Mark a user's nomination as reviewed and cancel the review task."""
+ if not await self.reviewer.mark_reviewed(ctx, user_id):
+ return
+ await ctx.send(f"{Emojis.check_mark} The user with ID `{user_id}` was marked as reviewed.")
+
+ @nomination_group.command(aliases=('gr',))
+ @has_any_role(*MODERATION_ROLES)
+ async def get_review(self, ctx: Context, user_id: int) -> None:
+ """Get the user's review as a markdown file."""
+ review = (await self.reviewer.make_review(user_id))[0]
+ if review:
+ file = discord.File(StringIO(review), f"{user_id}_review.md")
+ await ctx.send(file=file)
+ else:
+ await ctx.send(f"There doesn't appear to be an active nomination for {user_id}")
+
+ @nomination_group.command(aliases=('review',))
+ @has_any_role(*MODERATION_ROLES)
+ async def post_review(self, ctx: Context, user_id: int) -> None:
+ """Post the automatic review for the user ahead of time."""
+ if not await self.reviewer.mark_reviewed(ctx, user_id):
+ return
+
+ await self.reviewer.post_review(user_id, update_database=False)
+ await ctx.message.add_reaction(Emojis.check_mark)
@Cog.listener()
async def on_member_ban(self, guild: Guild, user: Union[User, Member]) -> None:
@@ -232,19 +383,28 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):
)
self._remove_user(user_id)
+ self.reviewer.cancel(user_id)
+
return True
def _nomination_to_string(self, nomination_object: dict) -> str:
"""Creates a string representation of a nomination."""
guild = self.bot.get_guild(Guild.id)
+ entries = []
+ for site_entry in nomination_object["entries"]:
+ actor_id = site_entry["actor"]
+ actor = guild.get_member(actor_id)
+
+ reason = site_entry["reason"] or "*None*"
+ created = time.format_infraction(site_entry["inserted_at"])
+ entries.append(
+ f"Actor: {actor.mention if actor else actor_id}\nCreated: {created}\nReason: {reason}"
+ )
- actor_id = nomination_object["actor"]
- actor = guild.get_member(actor_id)
+ entries_string = "\n\n".join(entries)
active = nomination_object["active"]
- reason = nomination_object["reason"] or "*None*"
-
start_date = time.format_infraction(nomination_object["inserted_at"])
if active:
lines = textwrap.dedent(
@@ -252,9 +412,9 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):
===============
Status: **Active**
Date: {start_date}
- Actor: {actor.mention if actor else actor_id}
- Reason: {reason}
Nomination ID: `{nomination_object["id"]}`
+
+ {entries_string}
===============
"""
)
@@ -265,19 +425,19 @@ class TalentPool(WatchChannel, Cog, name="Talentpool"):
===============
Status: Inactive
Date: {start_date}
- Actor: {actor.mention if actor else actor_id}
- Reason: {reason}
+ Nomination ID: `{nomination_object["id"]}`
+
+ {entries_string}
End date: {end_date}
Unwatch reason: {nomination_object["end_reason"]}
- Nomination ID: `{nomination_object["id"]}`
===============
"""
)
return lines.strip()
-
-def setup(bot: Bot) -> None:
- """Load the TalentPool cog."""
- bot.add_cog(TalentPool(bot))
+ def cog_unload(self) -> None:
+ """Cancels all review tasks on cog unload."""
+ super().cog_unload()
+ self.reviewer.cancel_all()
diff --git a/bot/exts/recruitment/talentpool/_review.py b/bot/exts/recruitment/talentpool/_review.py
new file mode 100644
index 000000000..11aa3b62b
--- /dev/null
+++ b/bot/exts/recruitment/talentpool/_review.py
@@ -0,0 +1,335 @@
+import asyncio
+import logging
+import random
+import textwrap
+import typing
+from collections import Counter
+from datetime import datetime, timedelta
+from typing import List, Optional, Union
+
+from dateutil.parser import isoparse
+from dateutil.relativedelta import relativedelta
+from discord import Emoji, Member, Message, TextChannel
+from discord.ext.commands import Context
+
+from bot.api import ResponseCodeError
+from bot.bot import Bot
+from bot.constants import Channels, Guild, Roles
+from bot.utils.scheduling import Scheduler
+from bot.utils.time import get_time_delta, humanize_delta, time_since
+
+if typing.TYPE_CHECKING:
+ from bot.exts.recruitment.talentpool._cog import TalentPool
+
+log = logging.getLogger(__name__)
+
+# Maximum number of days before an automatic review is posted.
+MAX_DAYS_IN_POOL = 30
+
+# Maximum number of characters allowed in a message.
+MAX_MESSAGE_SIZE = 2000
+
+
+class Reviewer:
+ """Schedules, formats, and publishes reviews of helper nominees."""
+
+ def __init__(self, name: str, bot: Bot, pool: 'TalentPool'):
+ self.bot = bot
+ self._pool = pool
+ self._review_scheduler = Scheduler(name)
+
+ def __contains__(self, user_id: int) -> bool:
+ """Return True if the user with ID user_id is scheduled for review, False otherwise."""
+ return user_id in self._review_scheduler
+
+ async def reschedule_reviews(self) -> None:
+ """Reschedule all active nominations to be reviewed at the appropriate time."""
+ log.trace("Rescheduling reviews")
+ await self.bot.wait_until_guild_available()
+ # TODO Once the watch channel is removed, this can be done in a smarter way, e.g create a sync function.
+ await self._pool.fetch_user_cache()
+
+ for user_id, user_data in self._pool.watched_users.items():
+ if not user_data["reviewed"]:
+ self.schedule_review(user_id)
+
+ def schedule_review(self, user_id: int) -> None:
+ """Schedules a single user for review."""
+ log.trace(f"Scheduling review of user with ID {user_id}")
+
+ user_data = self._pool.watched_users[user_id]
+ inserted_at = isoparse(user_data['inserted_at']).replace(tzinfo=None)
+ review_at = inserted_at + timedelta(days=MAX_DAYS_IN_POOL)
+
+ # If it's over a day overdue, it's probably an old nomination and shouldn't be automatically reviewed.
+ if datetime.utcnow() - review_at < timedelta(days=1):
+ self._review_scheduler.schedule_at(review_at, user_id, self.post_review(user_id, update_database=True))
+
+ async def post_review(self, user_id: int, update_database: bool) -> None:
+ """Format the review of a user and post it to the nomination voting channel."""
+ review, seen_emoji = await self.make_review(user_id)
+ if not review:
+ return
+
+ guild = self.bot.get_guild(Guild.id)
+ channel = guild.get_channel(Channels.nomination_voting)
+
+ log.trace(f"Posting the review of {user_id}")
+ message = (await self._bulk_send(channel, review))[-1]
+ if seen_emoji:
+ for reaction in (seen_emoji, "\N{THUMBS UP SIGN}", "\N{THUMBS DOWN SIGN}"):
+ await message.add_reaction(reaction)
+
+ if update_database:
+ nomination = self._pool.watched_users[user_id]
+ await self.bot.api_client.patch(f"{self._pool.api_endpoint}/{nomination['id']}", json={"reviewed": True})
+
+ async def make_review(self, user_id: int) -> typing.Tuple[str, Optional[Emoji]]:
+ """Format a generic review of a user and return it with the seen emoji."""
+ log.trace(f"Formatting the review of {user_id}")
+
+ nomination = self._pool.watched_users[user_id]
+ if not nomination:
+ log.trace(f"There doesn't appear to be an active nomination for {user_id}")
+ return "", None
+
+ guild = self.bot.get_guild(Guild.id)
+ member = guild.get_member(user_id)
+
+ if not member:
+ return (
+ f"I tried to review the user with ID `{user_id}`, but they don't appear to be on the server :pensive:"
+ ), None
+
+ opening = f"<@&{Roles.moderators}> <@&{Roles.admins}>\n{member.mention} ({member}) for Helper!"
+
+ current_nominations = "\n\n".join(
+ f"**<@{entry['actor']}>:** {entry['reason'] or '*no reason given*'}" for entry in nomination['entries']
+ )
+ current_nominations = f"**Nominated by:**\n{current_nominations}"
+
+ review_body = await self._construct_review_body(member)
+
+ seen_emoji = self._random_ducky(guild)
+ vote_request = (
+ "*Refer to their nomination and infraction histories for further details*.\n"
+ f"*Please react {seen_emoji} if you've seen this post."
+ " Then react :+1: for approval, or :-1: for disapproval*."
+ )
+
+ review = "\n\n".join((opening, current_nominations, review_body, vote_request))
+ return review, seen_emoji
+
+ async def _construct_review_body(self, member: Member) -> str:
+ """Formats the body of the nomination, with details of activity, infractions, and previous nominations."""
+ activity = await self._activity_review(member)
+ infractions = await self._infractions_review(member)
+ prev_nominations = await self._previous_nominations_review(member)
+
+ body = f"{activity}\n\n{infractions}"
+ if prev_nominations:
+ body += f"\n\n{prev_nominations}"
+ return body
+
+ async def _activity_review(self, member: Member) -> str:
+ """
+ Format the activity of the nominee.
+
+ Adds details on how long they've been on the server, their total message count,
+ and the channels they're the most active in.
+ """
+ log.trace(f"Fetching the metricity data for {member.id}'s review")
+ try:
+ user_activity = await self.bot.api_client.get(f"bot/users/{member.id}/metricity_review_data")
+ except ResponseCodeError as e:
+ if e.status == 404:
+ log.trace(f"The user {member.id} seems to have no activity logged in Metricity.")
+ messages = "no"
+ channels = ""
+ else:
+ log.trace(f"An unexpected error occured while fetching information of user {member.id}.")
+ raise
+ else:
+ log.trace(f"Activity found for {member.id}, formatting review.")
+ messages = user_activity["total_messages"]
+            # Making this part flexible to the number of expected and returned channels.
+ first_channel = user_activity["top_channel_activity"][0]
+ channels = f", with {first_channel[1]} messages in {first_channel[0]}"
+
+ if len(user_activity["top_channel_activity"]) > 1:
+ channels += ", " + ", ".join(
+ f"{count} in {channel}" for channel, count in user_activity["top_channel_activity"][1: -1]
+ )
+ last_channel = user_activity["top_channel_activity"][-1]
+ channels += f", and {last_channel[1]} in {last_channel[0]}"
+
+ time_on_server = humanize_delta(relativedelta(datetime.utcnow(), member.joined_at), max_units=2)
+ review = (
+ f"{member.name} has been on the server for **{time_on_server}**"
+ f" and has **{messages} messages**{channels}."
+ )
+
+ return review
+
+ async def _infractions_review(self, member: Member) -> str:
+ """
+ Formats the review of the nominee's infractions, if any.
+
+        The infractions are listed by type and count, along with how long ago the last one was issued.
+ """
+ log.trace(f"Fetching the infraction data for {member.id}'s review")
+ infraction_list = await self.bot.api_client.get(
+ 'bot/infractions/expanded',
+ params={'user__id': str(member.id), 'ordering': '-inserted_at'}
+ )
+
+ log.trace(f"{len(infraction_list)} infractions found for {member.id}, formatting review.")
+ if not infraction_list:
+ return "They have no infractions."
+
+ # Count the amount of each type of infraction.
+ infr_stats = list(Counter(infr["type"] for infr in infraction_list).items())
+
+ # Format into a sentence.
+ if len(infr_stats) == 1:
+ infr_type, count = infr_stats[0]
+ infractions = f"{count} {self._format_infr_name(infr_type, count)}"
+ else: # We already made sure they have infractions.
+ infractions = ", ".join(
+ f"{count} {self._format_infr_name(infr_type, count)}"
+ for infr_type, count in infr_stats[:-1]
+ )
+ last_infr, last_count = infr_stats[-1]
+ infractions += f", and {last_count} {self._format_infr_name(last_infr, last_count)}"
+
+ infractions = f"**{infractions}**"
+
+ # Show when the last one was issued.
+ if len(infraction_list) == 1:
+ infractions += ", issued "
+ else:
+ infractions += ", with the last infraction issued "
+
+ # Infractions were ordered by time since insertion descending.
+ infractions += get_time_delta(infraction_list[0]['inserted_at'])
+
+ return f"They have {infractions}."
+
+ @staticmethod
+ def _format_infr_name(infr_type: str, count: int) -> str:
+ """
+ Format the infraction type in a way readable in a sentence.
+
+        Underscores are replaced with spaces, and an *attempt* is made to use the appropriate plural form where necessary.
+ This function by no means covers all rules of grammar.
+ """
+ formatted = infr_type.replace("_", " ")
+ if count > 1:
+ if infr_type.endswith(('ch', 'sh')):
+ formatted += "e"
+ formatted += "s"
+
+ return formatted
+
+ async def _previous_nominations_review(self, member: Member) -> Optional[str]:
+ """
+ Formats the review of the nominee's previous nominations.
+
+        The number of previous nominations and unnominations is shown, as well as the reason the last one ended.
+ """
+ log.trace(f"Fetching the nomination history data for {member.id}'s review")
+ history = await self.bot.api_client.get(
+ self._pool.api_endpoint,
+ params={
+ "user__id": str(member.id),
+ "active": "false",
+ "ordering": "-inserted_at"
+ }
+ )
+
+ log.trace(f"{len(history)} previous nominations found for {member.id}, formatting review.")
+ if not history:
+ return
+
+ num_entries = sum(len(nomination["entries"]) for nomination in history)
+
+ nomination_times = f"{num_entries} times" if num_entries > 1 else "once"
+ rejection_times = f"{len(history)} times" if len(history) > 1 else "once"
+ end_time = time_since(isoparse(history[0]['ended_at']).replace(tzinfo=None), max_units=2)
+
+ review = (
+ f"They were nominated **{nomination_times}** before"
+ f", but their nomination was called off **{rejection_times}**."
+ f"\nThe last one ended {end_time} with the reason: {history[0]['end_reason']}"
+ )
+
+ return review
+
+ @staticmethod
+ def _random_ducky(guild: Guild) -> Union[Emoji, str]:
+ """Picks a random ducky emoji to be used to mark the vote as seen. If no duckies found returns :eyes:."""
+ duckies = [emoji for emoji in guild.emojis if emoji.name.startswith("ducky")]
+ if not duckies:
+ return ":eyes:"
+ return random.choice(duckies)
+
+ @staticmethod
+ async def _bulk_send(channel: TextChannel, text: str) -> List[Message]:
+ """
+        Split the text into several messages if necessary, and post them to the channel.
+
+ Returns the resulting message objects.
+ """
+ messages = textwrap.wrap(text, width=MAX_MESSAGE_SIZE, replace_whitespace=False)
+ log.trace(f"The provided string will be sent to the channel {channel.id} as {len(messages)} messages.")
+
+ results = []
+ for message in messages:
+ await asyncio.sleep(1)
+ results.append(await channel.send(message))
+
+ return results
+
+ async def mark_reviewed(self, ctx: Context, user_id: int) -> bool:
+ """
+ Mark an active nomination as reviewed, updating the database and canceling the review task.
+
+ Returns True if the user was successfully marked as reviewed, False otherwise.
+ """
+ log.trace(f"Updating user {user_id} as reviewed")
+ await self._pool.fetch_user_cache()
+ if user_id not in self._pool.watched_users:
+ log.trace(f"Can't find a nominated user with id {user_id}")
+ await ctx.send(f":x: Can't find a currently nominated user with id `{user_id}`")
+ return False
+
+ nomination = self._pool.watched_users[user_id]
+ if nomination["reviewed"]:
+ await ctx.send(":x: This nomination was already reviewed, but here's a cookie :cookie:")
+ return False
+
+ await self.bot.api_client.patch(f"{self._pool.api_endpoint}/{nomination['id']}", json={"reviewed": True})
+ if user_id in self._review_scheduler:
+ self._review_scheduler.cancel(user_id)
+
+ return True
+
+ def cancel(self, user_id: int) -> None:
+ """
+ Cancels the review of the nominee with ID `user_id`.
+
+ It's important to note that this applies only until reschedule_reviews is called again.
+ To permanently cancel someone's review, either remove them from the pool, or use mark_reviewed.
+ """
+ log.trace(f"Canceling the review of user {user_id}.")
+ self._review_scheduler.cancel(user_id)
+
+ def cancel_all(self) -> None:
+ """
+ Cancels all reviews.
+
+ It's important to note that this applies only until reschedule_reviews is called again.
+ To permanently cancel someone's review, either remove them from the pool, or use mark_reviewed.
+ """
+ log.trace("Canceling all reviews.")
+ self._review_scheduler.cancel_all()
diff --git a/bot/exts/utils/clean.py b/bot/exts/utils/clean.py
index 8acaf9131..cb662e852 100644
--- a/bot/exts/utils/clean.py
+++ b/bot/exts/utils/clean.py
@@ -3,7 +3,7 @@ import random
import re
from typing import Iterable, Optional
-from discord import Colour, Embed, Message, TextChannel, User
+from discord import Colour, Embed, Message, TextChannel, User, errors
from discord.ext import commands
from discord.ext.commands import Cog, Context, group, has_any_role
@@ -115,7 +115,11 @@ class Clean(Cog):
# Delete the invocation first
self.mod_log.ignore(Event.message_delete, ctx.message.id)
- await ctx.message.delete()
+ try:
+ await ctx.message.delete()
+ except errors.NotFound:
+ # Invocation message has already been deleted
+ log.info("Tried to delete invocation message, but it was already deleted.")
messages = []
message_ids = []
diff --git a/bot/exts/utils/internal.py b/bot/exts/utils/internal.py
index a7ab43f37..6f2da3131 100644
--- a/bot/exts/utils/internal.py
+++ b/bot/exts/utils/internal.py
@@ -240,12 +240,12 @@ async def func(): # (None,) -> Any
stats_embed = discord.Embed(
title="WebSocket statistics",
- description=f"Receiving {per_s:0.2f} event per second.",
+ description=f"Receiving {per_s:0.2f} events per second.",
color=discord.Color.blurple()
)
for event_type, count in self.socket_events.most_common(25):
- stats_embed.add_field(name=event_type, value=count, inline=False)
+ stats_embed.add_field(name=event_type, value=f"{count:,}", inline=True)
await ctx.send(embed=stats_embed)
diff --git a/bot/exts/utils/snekbox.py b/bot/exts/utils/snekbox.py
index 9f480c067..da95240bb 100644
--- a/bot/exts/utils/snekbox.py
+++ b/bot/exts/utils/snekbox.py
@@ -13,7 +13,7 @@ from discord.ext.commands import Cog, Context, command, guild_only
from bot.bot import Bot
from bot.constants import Categories, Channels, Roles, URLs
-from bot.decorators import in_whitelist
+from bot.decorators import not_in_blacklist
from bot.utils import send_to_paste_service
from bot.utils.messages import wait_for_deletion
@@ -38,9 +38,9 @@ RAW_CODE_REGEX = re.compile(
MAX_PASTE_LEN = 10000
-# `!eval` command whitelists
-EVAL_CHANNELS = (Channels.bot_commands, Channels.esoteric)
-EVAL_CATEGORIES = (Categories.help_available, Categories.help_in_use, Categories.voice)
+# `!eval` command whitelists and blacklists.
+NO_EVAL_CHANNELS = (Channels.python_general,)
+NO_EVAL_CATEGORIES = ()
EVAL_ROLES = (Roles.helpers, Roles.moderators, Roles.admins, Roles.owners, Roles.python_community, Roles.partners)
SIGKILL = 9
@@ -280,7 +280,7 @@ class Snekbox(Cog):
@command(name="eval", aliases=("e",))
@guild_only()
- @in_whitelist(channels=EVAL_CHANNELS, categories=EVAL_CATEGORIES, roles=EVAL_ROLES)
+ @not_in_blacklist(channels=NO_EVAL_CHANNELS, categories=NO_EVAL_CATEGORIES, override_roles=EVAL_ROLES)
async def eval_command(self, ctx: Context, *, code: str = None) -> None:
"""
Run Python code and get the results.
diff --git a/bot/exts/utils/utils.py b/bot/exts/utils/utils.py
index eb92dfca7..8d9d27c64 100644
--- a/bot/exts/utils/utils.py
+++ b/bot/exts/utils/utils.py
@@ -9,11 +9,12 @@ from discord.ext.commands import BadArgument, Cog, Context, clean_content, comma
from discord.utils import snowflake_time
from bot.bot import Bot
-from bot.constants import Channels, MODERATION_ROLES, STAFF_ROLES
+from bot.constants import Channels, MODERATION_ROLES, Roles, STAFF_ROLES
from bot.converters import Snowflake
from bot.decorators import in_whitelist
from bot.pagination import LinePaginator
from bot.utils import messages
+from bot.utils.checks import has_no_roles_check
from bot.utils.time import time_since
log = logging.getLogger(__name__)
@@ -156,21 +157,35 @@ class Utils(Cog):
@command(aliases=("snf", "snfl", "sf"))
@in_whitelist(channels=(Channels.bot_commands,), roles=STAFF_ROLES)
- async def snowflake(self, ctx: Context, snowflake: Snowflake) -> None:
+ async def snowflake(self, ctx: Context, *snowflakes: Snowflake) -> None:
"""Get Discord snowflake creation time."""
- created_at = snowflake_time(snowflake)
- embed = Embed(
- description=f"**Created at {created_at}** ({time_since(created_at, max_units=3)}).",
- colour=Colour.blue()
- )
+ if len(snowflakes) > 1 and await has_no_roles_check(ctx, *STAFF_ROLES):
+ raise BadArgument("Cannot process more than one snowflake in one invocation.")
+
+ if not snowflakes:
+ raise BadArgument("At least one snowflake must be provided.")
+
+ embed = Embed(colour=Colour.blue())
embed.set_author(
- name=f"Snowflake: {snowflake}",
+ name=f"Snowflake{'s'[:len(snowflakes)^1]}", # Deals with pluralisation
icon_url="https://github.com/twitter/twemoji/blob/master/assets/72x72/2744.png?raw=true"
)
- await ctx.send(embed=embed)
+
+ lines = []
+ for snowflake in snowflakes:
+ created_at = snowflake_time(snowflake)
+ lines.append(f"**{snowflake}**\nCreated at {created_at} ({time_since(created_at, max_units=3)}).")
+
+ await LinePaginator.paginate(
+ lines,
+ ctx=ctx,
+ embed=embed,
+ max_lines=5,
+ max_size=1000
+ )
@command(aliases=("poll",))
- @has_any_role(*MODERATION_ROLES)
+ @has_any_role(*MODERATION_ROLES, Roles.project_leads, Roles.domain_leads)
async def vote(self, ctx: Context, title: clean_content(fix_channel_mentions=True), *options: str) -> None:
"""
Build a quick voting poll with matching reactions with the provided options.
diff --git a/bot/pagination.py b/bot/pagination.py
index 3b16cc9ff..c5c84afd9 100644
--- a/bot/pagination.py
+++ b/bot/pagination.py
@@ -2,14 +2,14 @@ import asyncio
import logging
import typing as t
from contextlib import suppress
+from functools import partial
import discord
-from discord import Member
from discord.abc import User
from discord.ext.commands import Context, Paginator
from bot import constants
-from bot.constants import MODERATION_ROLES
+from bot.utils import messages
FIRST_EMOJI = "\u23EE" # [:track_previous:]
LEFT_EMOJI = "\u2B05" # [:arrow_left:]
@@ -220,29 +220,6 @@ class LinePaginator(Paginator):
>>> embed.set_author(name="Some Operation", url=url, icon_url=icon)
>>> await LinePaginator.paginate([line for line in lines], ctx, embed)
"""
- def event_check(reaction_: discord.Reaction, user_: discord.Member) -> bool:
- """Make sure that this reaction is what we want to operate on."""
- no_restrictions = (
- # The reaction was by a whitelisted user
- user_.id == restrict_to_user.id
- # The reaction was by a moderator
- or isinstance(user_, Member) and any(role.id in MODERATION_ROLES for role in user_.roles)
- )
-
- return (
- # Conditions for a successful pagination:
- all((
- # Reaction is on this message
- reaction_.message.id == message.id,
- # Reaction is one of the pagination emotes
- str(reaction_.emoji) in PAGINATION_EMOJI,
- # Reaction was not made by the Bot
- user_.id != ctx.bot.user.id,
- # There were no restrictions
- no_restrictions
- ))
- )
-
paginator = cls(prefix=prefix, suffix=suffix, max_size=max_size, max_lines=max_lines,
scale_to_size=scale_to_size)
current_page = 0
@@ -303,9 +280,16 @@ class LinePaginator(Paginator):
log.trace(f"Adding reaction: {repr(emoji)}")
await message.add_reaction(emoji)
+ check = partial(
+ messages.reaction_check,
+ message_id=message.id,
+ allowed_emoji=PAGINATION_EMOJI,
+ allowed_users=(restrict_to_user.id,),
+ )
+
while True:
try:
- reaction, user = await ctx.bot.wait_for("reaction_add", timeout=timeout, check=event_check)
+ reaction, user = await ctx.bot.wait_for("reaction_add", timeout=timeout, check=check)
log.trace(f"Got reaction: {reaction}")
except asyncio.TimeoutError:
log.debug("Timed out waiting for a reaction")
diff --git a/bot/resources/elements.json b/bot/resources/elements.json
deleted file mode 100644
index a3ac5b99f..000000000
--- a/bot/resources/elements.json
+++ /dev/null
@@ -1,119 +0,0 @@
-[
- "hydrogen",
- "helium",
- "lithium",
- "beryllium",
- "boron",
- "carbon",
- "nitrogen",
- "oxygen",
- "fluorine",
- "neon",
- "sodium",
- "magnesium",
- "aluminium",
- "silicon",
- "phosphorus",
- "sulfur",
- "chlorine",
- "argon",
- "potassium",
- "calcium",
- "scandium",
- "titanium",
- "vanadium",
- "chromium",
- "manganese",
- "iron",
- "cobalt",
- "nickel",
- "copper",
- "zinc",
- "gallium",
- "germanium",
- "arsenic",
- "bromine",
- "krypton",
- "rubidium",
- "strontium",
- "yttrium",
- "zirconium",
- "niobium",
- "molybdenum",
- "technetium",
- "ruthenium",
- "rhodium",
- "palladium",
- "silver",
- "cadmium",
- "indium",
- "tin",
- "antimony",
- "tellurium",
- "iodine",
- "xenon",
- "caesium",
- "barium",
- "lanthanum",
- "cerium",
- "praseodymium",
- "neodymium",
- "promethium",
- "samarium",
- "europium",
- "gadolinium",
- "terbium",
- "dysprosium",
- "holmium",
- "erbium",
- "thulium",
- "ytterbium",
- "lutetium",
- "hafnium",
- "tantalum",
- "tungsten",
- "rhenium",
- "osmium",
- "iridium",
- "platinum",
- "gold",
- "mercury",
- "thallium",
- "lead",
- "bismuth",
- "polonium",
- "astatine",
- "radon",
- "francium",
- "radium",
- "actinium",
- "thorium",
- "protactinium",
- "uranium",
- "neptunium",
- "plutonium",
- "americium",
- "curium",
- "berkelium",
- "californium",
- "einsteinium",
- "fermium",
- "mendelevium",
- "nobelium",
- "lawrencium",
- "rutherfordium",
- "dubnium",
- "seaborgium",
- "bohrium",
- "hassium",
- "meitnerium",
- "darmstadtium",
- "roentgenium",
- "copernicium",
- "nihonium",
- "flerovium",
- "moscovium",
- "livermorium",
- "tennessine",
- "oganesson"
-]
diff --git a/bot/resources/foods.json b/bot/resources/foods.json
new file mode 100644
index 000000000..61d9ea98f
--- /dev/null
+++ b/bot/resources/foods.json
@@ -0,0 +1,52 @@
+[
+ "apple",
+ "avocado",
+ "bagel",
+ "banana",
+ "bread",
+ "broccoli",
+ "burrito",
+ "cake",
+ "candy",
+ "carrot",
+ "cheese",
+ "cherries",
+ "chestnut",
+ "chili",
+ "chocolate",
+ "coconut",
+ "coffee",
+ "cookie",
+ "corn",
+ "croissant",
+ "cupcake",
+ "donut",
+ "dumpling",
+ "falafel",
+ "grapes",
+ "honey",
+ "kiwi",
+ "lemon",
+ "lollipop",
+ "mango",
+ "mushroom",
+ "orange",
+ "pancakes",
+ "peanut",
+ "pear",
+ "pie",
+ "pineapple",
+ "popcorn",
+ "potato",
+ "pretzel",
+ "ramen",
+ "rice",
+ "salad",
+ "spaghetti",
+ "stew",
+ "strawberry",
+ "sushi",
+ "taco",
+ "tomato",
+ "watermelon"
+]
diff --git a/bot/resources/stars.json b/bot/resources/stars.json
index c0b253120..5ecad0213 100644
--- a/bot/resources/stars.json
+++ b/bot/resources/stars.json
@@ -17,7 +17,7 @@
"Bruce Springsteen",
"Bruno Mars",
"Bryan Adams",
- "Celine Dion",
+ "Céline Dion",
"Cher",
"Christina Aguilera",
"David Bowie",
diff --git a/bot/resources/tags/comparison.md b/bot/resources/tags/comparison.md
new file mode 100644
index 000000000..12844bd2f
--- /dev/null
+++ b/bot/resources/tags/comparison.md
@@ -0,0 +1,12 @@
+**Assignment vs. Comparison**
+
+The assignment operator (`=`) is used to assign variables.
+```python
+x = 5
+print(x) # Prints 5
+```
+The equality operator (`==`) is used to compare values.
+```python
+if x == 5:
+ print("The value of x is 5")
+```
diff --git a/bot/resources/tags/customchecks.md b/bot/resources/tags/customchecks.md
new file mode 100644
index 000000000..23ff7a66f
--- /dev/null
+++ b/bot/resources/tags/customchecks.md
@@ -0,0 +1,21 @@
+**Custom Command Checks in discord.py**
+
+Often you may find the need to use checks that don't exist by default in discord.py. Fortunately, discord.py provides `discord.ext.commands.check`, which allows you to create your own checks like this:
+```py
+from discord.ext.commands import check, Context
+
+def in_any_channel(*channels):
+ async def predicate(ctx: Context):
+ return ctx.channel.id in channels
+ return check(predicate)
+```
+This check tests whether the command was invoked in one of the given channels. The inner function, named `predicate` here, performs the actual check, and your check logic goes inside it. It must be an async function, and it always receives a single `commands.Context` argument to base the check on. It should return a boolean value: `True` if the check passed, `False` if it failed.
+
+The check can now be used like any other command check, as a decorator on a command:
+```py
[email protected](name="ping")
+@in_any_channel(728343273562701984)
+async def ping(ctx: Context):
+ ...
+```
+This locks the `ping` command so it can only be used in the channel with ID `728343273562701984`. If the check fails, it raises a `CheckFailure` exception, which can be handled in your error handler.
diff --git a/bot/resources/tags/customhelp.md b/bot/resources/tags/customhelp.md
new file mode 100644
index 000000000..6f0b17642
--- /dev/null
+++ b/bot/resources/tags/customhelp.md
@@ -0,0 +1,3 @@
+**Custom help commands in discord.py**
+
+To learn more about how to create custom help commands in discord.py by subclassing the help command, please see [this tutorial](https://gist.github.com/InterStella0/b78488fb28cadf279dfd3164b9f0cf96#embed-minimalhelpcommand) by Stella#2000.
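+
+As a brief illustration, a minimal sketch (assumed for this example, not taken from the tutorial) of subclassing `commands.MinimalHelpCommand` might look like this:
+```py
+from discord.ext import commands
+
+class MyHelp(commands.MinimalHelpCommand):
+    async def send_pages(self):
+        # Send each rendered help page to wherever the help command was invoked.
+        destination = self.get_destination()
+        for page in self.paginator.pages:
+            await destination.send(page)
+
+bot = commands.Bot(command_prefix="!")
+bot.help_command = MyHelp()
+```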
diff --git a/bot/resources/tags/empty-json.md b/bot/resources/tags/empty-json.md
new file mode 100644
index 000000000..935544bb7
--- /dev/null
+++ b/bot/resources/tags/empty-json.md
@@ -0,0 +1,11 @@
+When using JSON, you might run into the following error:
+```
+JSONDecodeError: Expecting value: line 1 column 1 (char 0)
+```
+This error could have appeared because you just created the JSON file and there is nothing in it at the moment.
+
+Whilst having empty data is no problem, the file itself must never be completely empty.
+
+You most likely wanted to structure your JSON as a dictionary. To do this, edit your empty JSON file so that it instead contains `{}`.
+
+Different data types are also supported. If you wish to read more on these, please refer to [this article](https://www.tutorialspoint.com/json/json_data_types.htm).
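+
+As a minimal sketch (the `data.json` filename is just an example), guarding against this error in Python could look like:
+```py
+import json
+
+try:
+    with open("data.json") as file:
+        data = json.load(file)
+except json.JSONDecodeError:
+    # The file is empty or malformed, so fall back to an empty dict.
+    data = {}
+```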
diff --git a/bot/resources/tags/inline.md b/bot/resources/tags/inline.md
index a6a7c35d6..4ece74ef7 100644
--- a/bot/resources/tags/inline.md
+++ b/bot/resources/tags/inline.md
@@ -1,16 +1,7 @@
**Inline codeblocks**
-In addition to multi-line codeblocks, discord has support for inline codeblocks as well. These are small codeblocks that are usually a single line, that can fit between non-codeblocks on the same line.
+Inline codeblocks look `like this`. To create them you surround text with single backticks, so \`hello\` would become `hello`.
-The following is an example of how it's done:
+Note that backticks are not quotes; see [this answer](https://superuser.com/questions/254076/how-do-i-type-the-tick-and-backtick-characters-on-windows/254077#254077) if you are struggling to find the backtick key.
-The \`\_\_init\_\_\` method customizes the newly created instance.
-
-And results in the following:
-
-The `__init__` method customizes the newly created instance.
-
-**Note:**
-• These are **backticks** not quotes
-• Avoid using them for multiple lines
-• Useful for negating formatting you don't want
+To learn how to make multiline codeblocks, see the `!codeblock` tag.
diff --git a/bot/resources/tags/intents.md b/bot/resources/tags/intents.md
new file mode 100644
index 000000000..464caf0ba
--- /dev/null
+++ b/bot/resources/tags/intents.md
@@ -0,0 +1,19 @@
+**Using intents in discord.py**
+
+Intents are a feature of Discord that tells the gateway exactly which events to send your bot. By default, discord.py has all intents enabled except for the `Members` and `Presences` intents, which are needed for events such as `on_member_join` and for getting members' statuses.
+
+To enable one of these intents, you need to first go to the [Discord developer portal](https://discord.com/developers/applications), then to the bot page of your bot's application. Scroll down to the `Privileged Gateway Intents` section, then enable the intents that you need.
+
+Next, in your bot you need to set the intents you want to connect with in the bot's constructor using the `intents` keyword argument, like this:
+
+```py
+from discord import Intents
+from discord.ext import commands
+
+intents = Intents.default()
+intents.members = True
+
+bot = commands.Bot(command_prefix="!", intents=intents)
+```
+
+For more info about using intents, see the [discord.py docs on intents](https://discordpy.readthedocs.io/en/latest/intents.html), and for general information about them, see the [Discord developer documentation on intents](https://discord.com/developers/docs/topics/gateway#gateway-intents).
diff --git a/bot/resources/tags/off-topic.md b/bot/resources/tags/off-topic.md
index c7f98a813..6a864a1d5 100644
--- a/bot/resources/tags/off-topic.md
+++ b/bot/resources/tags/off-topic.md
@@ -6,3 +6,5 @@ There are three off-topic channels:
• <#463035268514185226>
Their names change randomly every 24 hours, but you can always find them under the `OFF-TOPIC/GENERAL` category in the channel list.
+
+Please read our [off-topic etiquette](https://pythondiscord.com/pages/resources/guides/off-topic-etiquette/) before participating in conversations.
diff --git a/bot/resources/tags/pep8.md b/bot/resources/tags/pep8.md
index cab4c4db8..57b176122 100644
--- a/bot/resources/tags/pep8.md
+++ b/bot/resources/tags/pep8.md
@@ -1,3 +1,5 @@
-**PEP 8** is the official style guide for Python. It includes comprehensive guidelines for code formatting, variable naming, and making your code easy to read. Professional Python developers are usually required to follow the guidelines, and will often use code-linters like `flake8` to verify that the code they\'re writing complies with the style guide.
+**PEP 8** is the official style guide for Python. It includes comprehensive guidelines for code formatting, variable naming, and making your code easy to read. Professional Python developers are usually required to follow the guidelines, and will often use code-linters like flake8 to verify that the code they're writing complies with the style guide.
-You can find the PEP 8 document [here](https://www.python.org/dev/peps/pep-0008).
+More information:
+• [PEP 8 document](https://www.python.org/dev/peps/pep-0008)
+• [Our PEP 8 song!](https://www.youtube.com/watch?v=hgI0p1zf31k) :notes:
diff --git a/bot/resources/tags/ytdl.md b/bot/resources/tags/ytdl.md
index e34ecff44..df28024a0 100644
--- a/bot/resources/tags/ytdl.md
+++ b/bot/resources/tags/ytdl.md
@@ -1,12 +1,12 @@
Per [PyDis' Rule 5](https://pythondiscord.com/pages/rules), we are unable to assist with questions related to youtube-dl, commonly used by Discord bots to stream audio, as its use violates YouTube's Terms of Service.
-For reference, this usage is covered by the following clauses in [YouTube's TOS](https://www.youtube.com/static?template=terms), as of 2019-07-22:
+For reference, this usage is covered by the following clauses in [YouTube's TOS](https://www.youtube.com/static?gl=GB&template=terms), as of 2021-03-17:
```
-The following restrictions apply to your use of the Service. You are not allowed to:
+The following restrictions apply to your use of the Service. You are not allowed to:
-1. access, reproduce, download, distribute, transmit, broadcast, display, sell, license, alter, modify or otherwise use any part of the Service or any Content except: (a) as specifically permitted by the Service; (b) with prior written permission from YouTube and, if applicable, the respective rights holders; or (c) as permitted by applicable law;
+1. access, reproduce, download, distribute, transmit, broadcast, display, sell, license, alter, modify or otherwise use any part of the Service or any Content except: (a) as specifically permitted by the Service; (b) with prior written permission from YouTube and, if applicable, the respective rights holders; or (c) as permitted by applicable law;
-3. access the Service using any automated means (such as robots, botnets or scrapers) except: (a) in the case of public search engines, in accordance with YouTube’s robots.txt file; (b) with YouTube’s prior written permission; or (c) as permitted by applicable law;
+3. access the Service using any automated means (such as robots, botnets or scrapers) except: (a) in the case of public search engines, in accordance with YouTube’s robots.txt file; (b) with YouTube’s prior written permission; or (c) as permitted by applicable law;
9. use the Service to view or listen to Content other than for personal, non-commercial use (for example, you may not publicly screen videos or stream music from the Service)
```
diff --git a/bot/utils/checks.py b/bot/utils/checks.py
index 460a937d8..3d0c8a50c 100644
--- a/bot/utils/checks.py
+++ b/bot/utils/checks.py
@@ -20,8 +20,8 @@ from bot import constants
log = logging.getLogger(__name__)
-class InWhitelistCheckFailure(CheckFailure):
- """Raised when the `in_whitelist` check fails."""
+class ContextCheckFailure(CheckFailure):
+ """Raised when a context-specific check fails."""
def __init__(self, redirect_channel: Optional[int]) -> None:
self.redirect_channel = redirect_channel
@@ -36,6 +36,10 @@ class InWhitelistCheckFailure(CheckFailure):
super().__init__(error_message)
+class InWhitelistCheckFailure(ContextCheckFailure):
+ """Raised when the `in_whitelist` check fails."""
+
+
def in_whitelist_check(
ctx: Context,
channels: Container[int] = (),
diff --git a/bot/utils/function.py b/bot/utils/function.py
index 3ab32fe3c..9bc44e753 100644
--- a/bot/utils/function.py
+++ b/bot/utils/function.py
@@ -1,14 +1,23 @@
"""Utilities for interaction with functions."""
+import functools
import inspect
+import logging
+import types
import typing as t
+log = logging.getLogger(__name__)
+
Argument = t.Union[int, str]
BoundArgs = t.OrderedDict[str, t.Any]
Decorator = t.Callable[[t.Callable], t.Callable]
ArgValGetter = t.Callable[[BoundArgs], t.Any]
+class GlobalNameConflictError(Exception):
+ """Raised when there's a conflict between the globals used to resolve annotations of wrapped and its wrapper."""
+
+
def get_arg_value(name_or_pos: Argument, arguments: BoundArgs) -> t.Any:
"""
Return a value from `arguments` based on a name or position.
@@ -73,3 +82,66 @@ def get_bound_args(func: t.Callable, args: t.Tuple, kwargs: t.Dict[str, t.Any])
bound_args.apply_defaults()
return bound_args.arguments
+
+
+def update_wrapper_globals(
+ wrapper: types.FunctionType,
+ wrapped: types.FunctionType,
+ *,
+ ignored_conflict_names: t.Set[str] = frozenset(),
+) -> types.FunctionType:
+ """
+ Update globals of `wrapper` with the globals from `wrapped`.
+
+    To resolve the values of forwardrefs in command annotations, discord.py uses the
+    __globals__ attribute of the function. This breaks with decorators that replace
+    the function, because the wrapper has its own globals.
+
+    This function creates a new function functionally identical to `wrapper`, with its
+    globals replaced by a merge of `wrapped`'s globals and `wrapper`'s globals.
+
+    An exception is raised if `wrapper` and `wrapped` share a global name that is used by
+    `wrapped`'s typehints and is not in `ignored_conflict_names`, as this can cause
+    incorrect objects to be used by discord.py's converters.
+ """
+ annotation_global_names = (
+ ann.split(".", maxsplit=1)[0] for ann in wrapped.__annotations__.values() if isinstance(ann, str)
+ )
+ # Conflicting globals from both functions' modules that are also used in the wrapper and in wrapped's annotations.
+ shared_globals = set(wrapper.__code__.co_names) & set(annotation_global_names)
+ shared_globals &= set(wrapped.__globals__) & set(wrapper.__globals__) - ignored_conflict_names
+ if shared_globals:
+ raise GlobalNameConflictError(
+ f"wrapper and the wrapped function share the following "
+ f"global names used by annotations: {', '.join(shared_globals)}. Resolve the conflicts or add "
+ f"the name to the `ignored_conflict_names` set to suppress this error if this is intentional."
+ )
+
+ new_globals = wrapper.__globals__.copy()
+ new_globals.update((k, v) for k, v in wrapped.__globals__.items() if k not in wrapper.__code__.co_names)
+ return types.FunctionType(
+ code=wrapper.__code__,
+ globals=new_globals,
+ name=wrapper.__name__,
+ argdefs=wrapper.__defaults__,
+ closure=wrapper.__closure__,
+ )
+
+
+def command_wraps(
+ wrapped: types.FunctionType,
+ assigned: t.Sequence[str] = functools.WRAPPER_ASSIGNMENTS,
+ updated: t.Sequence[str] = functools.WRAPPER_UPDATES,
+ *,
+ ignored_conflict_names: t.Set[str] = frozenset(),
+) -> t.Callable[[types.FunctionType], types.FunctionType]:
+ """Update the decorated function to look like `wrapped` and update globals for discordpy forwardref evaluation."""
+ def decorator(wrapper: types.FunctionType) -> types.FunctionType:
+ return functools.update_wrapper(
+ update_wrapper_globals(wrapper, wrapped, ignored_conflict_names=ignored_conflict_names),
+ wrapped,
+ assigned,
+ updated,
+ )
+
+ return decorator
diff --git a/bot/utils/lock.py b/bot/utils/lock.py
index e44776340..ec6f92cd4 100644
--- a/bot/utils/lock.py
+++ b/bot/utils/lock.py
@@ -1,13 +1,15 @@
import asyncio
import inspect
import logging
+import types
from collections import defaultdict
-from functools import partial, wraps
+from functools import partial
from typing import Any, Awaitable, Callable, Hashable, Union
from weakref import WeakValueDictionary
from bot.errors import LockedResourceError
from bot.utils import function
+from bot.utils.function import command_wraps
log = logging.getLogger(__name__)
__lock_dicts = defaultdict(WeakValueDictionary)
@@ -17,6 +19,35 @@ _IdCallable = Callable[[function.BoundArgs], _IdCallableReturn]
ResourceId = Union[Hashable, _IdCallable]
+class SharedEvent:
+ """
+ Context manager managing an internal event exposed through the wait coro.
+
+ While any code is executing in this context manager, the underlying event will not be set;
+ when all of the holders finish the event will be set.
+ """
+
+ def __init__(self):
+ self._active_count = 0
+ self._event = asyncio.Event()
+ self._event.set()
+
+ def __enter__(self):
+ """Increment the count of the active holders and clear the internal event."""
+ self._active_count += 1
+ self._event.clear()
+
+ def __exit__(self, _exc_type, _exc_val, _exc_tb): # noqa: ANN001
+ """Decrement the count of the active holders; if 0 is reached set the internal event."""
+ self._active_count -= 1
+ if not self._active_count:
+ self._event.set()
+
+ async def wait(self) -> None:
+ """Wait for all active holders to exit."""
+ await self._event.wait()
+
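+# Hypothetical usage sketch (illustration only, not part of this module):
+#
+#   _event = SharedEvent()
+#
+#   with _event:            # hold the event open while doing work
+#       ...
+#
+#   await _event.wait()     # elsewhere: resume once no holders remain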
+
def lock(
namespace: Hashable,
resource_id: ResourceId,
@@ -41,10 +72,10 @@ def lock(
If decorating a command, this decorator must go before (below) the `command` decorator.
"""
- def decorator(func: Callable) -> Callable:
+ def decorator(func: types.FunctionType) -> types.FunctionType:
name = func.__name__
- @wraps(func)
+ @command_wraps(func)
async def wrapper(*args, **kwargs) -> Any:
log.trace(f"{name}: mutually exclusive decorator called")
diff --git a/bot/utils/messages.py b/bot/utils/messages.py
index 077dd9569..2beead6af 100644
--- a/bot/utils/messages.py
+++ b/bot/utils/messages.py
@@ -3,6 +3,7 @@ import contextlib
import logging
import random
import re
+from functools import partial
from io import BytesIO
from typing import List, Optional, Sequence, Union
@@ -12,24 +13,66 @@ from discord.ext.commands import Context
import bot
from bot.constants import Emojis, MODERATION_ROLES, NEGATIVE_REPLIES
+from bot.utils import scheduling
log = logging.getLogger(__name__)
+def reaction_check(
+ reaction: discord.Reaction,
+ user: discord.abc.User,
+ *,
+ message_id: int,
+ allowed_emoji: Sequence[str],
+ allowed_users: Sequence[int],
+ allow_mods: bool = True,
+) -> bool:
+ """
+ Check if a reaction's emoji and author are allowed and the message is `message_id`.
+
+ If the user is not allowed, remove the reaction. Ignore reactions made by the bot.
+ If `allow_mods` is True, allow users with moderator roles even if they're not in `allowed_users`.
+ """
+ right_reaction = (
+ user != bot.instance.user
+ and reaction.message.id == message_id
+ and str(reaction.emoji) in allowed_emoji
+ )
+ if not right_reaction:
+ return False
+
+ is_moderator = (
+ allow_mods
+ and any(role.id in MODERATION_ROLES for role in getattr(user, "roles", []))
+ )
+
+ if user.id in allowed_users or is_moderator:
+ log.trace(f"Allowed reaction {reaction} by {user} on {reaction.message.id}.")
+ return True
+ else:
+ log.trace(f"Removing reaction {reaction} by {user} on {reaction.message.id}: disallowed user.")
+ scheduling.create_task(
+ reaction.message.remove_reaction(reaction.emoji, user),
+            HTTPException,  # Suppress the HTTPException if removing the reaction fails
+ name=f"remove_reaction-{reaction}-{reaction.message.id}-{user}"
+ )
+ return False
+
+
async def wait_for_deletion(
message: discord.Message,
- user_ids: Sequence[discord.abc.Snowflake],
+ user_ids: Sequence[int],
deletion_emojis: Sequence[str] = (Emojis.trashcan,),
timeout: float = 60 * 5,
attach_emojis: bool = True,
- allow_moderation_roles: bool = True
+ allow_mods: bool = True
) -> None:
"""
Wait for up to `timeout` seconds for a reaction by any of the specified `user_ids` to delete the message.
An `attach_emojis` bool may be specified to determine whether to attach the given
`deletion_emojis` to the message in the given `context`.
- An `allow_moderation_roles` bool may also be specified to allow anyone with a role in `MODERATION_ROLES` to delete
+ An `allow_mods` bool may also be specified to allow anyone with a role in `MODERATION_ROLES` to delete
the message.
"""
if message.guild is None:
@@ -43,16 +86,13 @@ async def wait_for_deletion(
log.trace(f"Aborting wait_for_deletion: message {message.id} deleted prematurely.")
return
- def check(reaction: discord.Reaction, user: discord.Member) -> bool:
- """Check that the deletion emoji is reacted by the appropriate user."""
- return (
- reaction.message.id == message.id
- and str(reaction.emoji) in deletion_emojis
- and (
- user.id in user_ids
- or allow_moderation_roles and any(role.id in MODERATION_ROLES for role in user.roles)
- )
- )
+ check = partial(
+ reaction_check,
+ message_id=message.id,
+ allowed_emoji=deletion_emojis,
+ allowed_users=user_ids,
+ allow_mods=allow_mods,
+ )
with contextlib.suppress(asyncio.TimeoutError):
await bot.instance.wait_for('reaction_add', check=check, timeout=timeout)
@@ -141,14 +181,14 @@ def sub_clyde(username: Optional[str]) -> Optional[str]:
return username # Empty string or None
-async def send_denial(ctx: Context, reason: str) -> None:
+async def send_denial(ctx: Context, reason: str) -> discord.Message:
"""Send an embed denying the user with the given reason."""
embed = discord.Embed()
embed.colour = discord.Colour.red()
embed.title = random.choice(NEGATIVE_REPLIES)
embed.description = reason
- await ctx.send(embed=embed)
+ return await ctx.send(embed=embed)
def format_user(user: discord.abc.User) -> str:
diff --git a/bot/utils/scheduling.py b/bot/utils/scheduling.py
index 4dd036e4f..2dc485f24 100644
--- a/bot/utils/scheduling.py
+++ b/bot/utils/scheduling.py
@@ -59,14 +59,18 @@ class Scheduler:
def schedule_at(self, time: datetime, task_id: t.Hashable, coroutine: t.Coroutine) -> None:
"""
- Schedule `coroutine` to be executed at the given naïve UTC `time`.
+ Schedule `coroutine` to be executed at the given `time`.
+
+ If `time` is timezone aware, then use that timezone to calculate now() when subtracting.
+ If `time` is naïve, then use UTC.
If `time` is in the past, schedule `coroutine` immediately.
If a task with `task_id` already exists, close `coroutine` instead of scheduling it. This
prevents unawaited coroutine warnings. Don't pass a coroutine that'll be re-used elsewhere.
"""
- delay = (time - datetime.utcnow()).total_seconds()
+ now_datetime = datetime.now(time.tzinfo) if time.tzinfo else datetime.utcnow()
+ delay = (time - now_datetime).total_seconds()
if delay > 0:
coroutine = self._await_later(delay, task_id, coroutine)
@@ -157,18 +161,18 @@ class Scheduler:
self._log.error(f"Error in task #{task_id} {id(done_task)}!", exc_info=exception)
-def create_task(*args, **kwargs) -> asyncio.Task:
+def create_task(coro: t.Awaitable, *suppressed_exceptions: t.Type[Exception], **kwargs) -> asyncio.Task:
"""Wrapper for `asyncio.create_task` which logs exceptions raised in the task."""
- task = asyncio.create_task(*args, **kwargs)
- task.add_done_callback(_log_task_exception)
+ task = asyncio.create_task(coro, **kwargs)
+ task.add_done_callback(partial(_log_task_exception, suppressed_exceptions=suppressed_exceptions))
return task
-def _log_task_exception(task: asyncio.Task) -> None:
+def _log_task_exception(task: asyncio.Task, *, suppressed_exceptions: t.Tuple[t.Type[Exception], ...]) -> None:
"""Retrieve and log the exception raised in `task` if one exists."""
with contextlib.suppress(asyncio.CancelledError):
exception = task.exception()
# Log the exception if one exists.
- if exception:
+ if exception and not isinstance(exception, suppressed_exceptions):
log = logging.getLogger(__name__)
log.error(f"Error in task {task.get_name()} {id(task)}!", exc_info=exception)
diff --git a/bot/utils/services.py b/bot/utils/services.py
index 5949c9e48..db9c93d0f 100644
--- a/bot/utils/services.py
+++ b/bot/utils/services.py
@@ -47,7 +47,14 @@ async def send_to_paste_service(contents: str, *, extension: str = "") -> Option
continue
elif "key" in response_json:
log.info(f"Successfully uploaded contents to paste service behind key {response_json['key']}.")
- return URLs.paste_service.format(key=response_json['key']) + extension
+
+ paste_link = URLs.paste_service.format(key=response_json['key']) + extension
+
+ if extension == '.py':
+ return paste_link
+
+ return paste_link + "?noredirect"
+
log.warning(
f"Got unexpected JSON response from paste service: {response_json}\n"
f"trying again ({attempt}/{FAILED_REQUEST_ATTEMPTS})."
diff --git a/bot/utils/time.py b/bot/utils/time.py
index 47e49904b..466f0adc2 100644
--- a/bot/utils/time.py
+++ b/bot/utils/time.py
@@ -1,5 +1,6 @@
import asyncio
import datetime
+import re
from typing import Optional
import dateutil.parser
@@ -8,6 +9,16 @@ from dateutil.relativedelta import relativedelta
RFC1123_FORMAT = "%a, %d %b %Y %H:%M:%S GMT"
INFRACTION_FORMAT = "%Y-%m-%d %H:%M"
+_DURATION_REGEX = re.compile(
+ r"((?P<years>\d+?) ?(years|year|Y|y) ?)?"
+ r"((?P<months>\d+?) ?(months|month|m) ?)?"
+ r"((?P<weeks>\d+?) ?(weeks|week|W|w) ?)?"
+ r"((?P<days>\d+?) ?(days|day|D|d) ?)?"
+ r"((?P<hours>\d+?) ?(hours|hour|H|h) ?)?"
+ r"((?P<minutes>\d+?) ?(minutes|minute|M) ?)?"
+ r"((?P<seconds>\d+?) ?(seconds|second|S|s))?"
+)
+
def _stringify_time_unit(value: int, unit: str) -> str:
"""
@@ -74,6 +85,45 @@ def humanize_delta(delta: relativedelta, precision: str = "seconds", max_units:
return humanized
+def get_time_delta(time_string: str) -> str:
+ """Returns the time in human-readable time delta format."""
+ date_time = dateutil.parser.isoparse(time_string).replace(tzinfo=None)
+ time_delta = time_since(date_time, precision="minutes", max_units=1)
+
+ return time_delta
+
+
+def parse_duration_string(duration: str) -> Optional[relativedelta]:
+ """
+ Converts a `duration` string to a relativedelta object.
+
+ The function supports the following symbols for each unit of time:
+ - years: `Y`, `y`, `year`, `years`
+ - months: `m`, `month`, `months`
+ - weeks: `w`, `W`, `week`, `weeks`
+ - days: `d`, `D`, `day`, `days`
+ - hours: `H`, `h`, `hour`, `hours`
+ - minutes: `M`, `minute`, `minutes`
+ - seconds: `S`, `s`, `second`, `seconds`
+ The units need to be provided in descending order of magnitude.
+    If the string cannot be parsed into a relativedelta, None is returned.
+ """
+ match = _DURATION_REGEX.fullmatch(duration)
+ if not match:
+ return None
+
+ duration_dict = {unit: int(amount) for unit, amount in match.groupdict(default=0).items()}
+ delta = relativedelta(**duration_dict)
+
+ return delta
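+
+# Hypothetical usage sketch (illustration only, not part of this module):
+#   parse_duration_string("2h 30M")  # relativedelta(hours=+2, minutes=+30)
+#   parse_duration_string("1y 2m")   # relativedelta(years=+1, months=+2)
+#   parse_duration_string("2h 1y")   # None, since units must be in descending order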
+
+
+def relativedelta_to_timedelta(delta: relativedelta) -> datetime.timedelta:
+ """Converts a relativedelta object to a timedelta object."""
+ utcnow = datetime.datetime.utcnow()
+ return utcnow + delta - utcnow
+
+
def time_since(past_datetime: datetime.datetime, precision: str = "seconds", max_units: int = 6) -> str:
"""
Takes a datetime and returns a human-readable string that describes how long ago that datetime was.
diff --git a/config-default.yml b/config-default.yml
index 89f5718fd..dba354117 100644
--- a/config-default.yml
+++ b/config-default.yml
@@ -49,9 +49,9 @@ style:
badge_verified_bot: "<:verified_bot:811645219220750347>"
bot: "<:bot:812712599464443914>"
- defcon_disabled: "<:defcondisabled:470326273952972810>"
- defcon_enabled: "<:defconenabled:470326274213150730>"
- defcon_updated: "<:defconsettingsupdated:470326274082996224>"
+ defcon_shutdown: "<:defcondisabled:470326273952972810>"
+ defcon_unshutdown: "<:defconenabled:470326274213150730>"
+ defcon_update: "<:defconsettingsupdated:470326274082996224>"
failmail: "<:failmail:633660039931887616>"
@@ -85,13 +85,14 @@ style:
crown_red: "https://cdn.discordapp.com/emojis/469964154879344640.png"
defcon_denied: "https://cdn.discordapp.com/emojis/472475292078964738.png"
- defcon_disabled: "https://cdn.discordapp.com/emojis/470326273952972810.png"
- defcon_enabled: "https://cdn.discordapp.com/emojis/470326274213150730.png"
- defcon_updated: "https://cdn.discordapp.com/emojis/472472638342561793.png"
+ defcon_shutdown: "https://cdn.discordapp.com/emojis/470326273952972810.png"
+ defcon_unshutdown: "https://cdn.discordapp.com/emojis/470326274213150730.png"
+ defcon_update: "https://cdn.discordapp.com/emojis/472472638342561793.png"
filtering: "https://cdn.discordapp.com/emojis/472472638594482195.png"
- green_checkmark: "https://raw.githubusercontent.com/python-discord/branding/master/icons/checkmark/green-checkmark-dist.png"
+ green_checkmark: "https://raw.githubusercontent.com/python-discord/branding/main/icons/checkmark/green-checkmark-dist.png"
+ green_questionmark: "https://raw.githubusercontent.com/python-discord/branding/main/icons/checkmark/green-question-mark-dist.png"
guild_update: "https://cdn.discordapp.com/emojis/469954765141442561.png"
hash_blurple: "https://cdn.discordapp.com/emojis/469950142942806017.png"
@@ -140,6 +141,7 @@ guild:
help_dormant: 691405908919451718
help_in_use: 696958401460043776
logs: &LOGS 468520609152892958
+ moderators: &MODS_CATEGORY 749736277464842262
modmail: &MODMAIL 714494672835444826
voice: 356013253765234688
@@ -151,7 +153,6 @@ guild:
python_events: &PYEVENTS_CHANNEL 729674110270963822
python_news: &PYNEWS_CHANNEL 704372456592506880
reddit: &REDDIT_CHANNEL 458224812528238616
- user_event_announcements: &USER_EVENT_A 592000283102674944
# Development
dev_contrib: &DEV_CONTRIB 635950537262759947
@@ -164,13 +165,13 @@ guild:
# Python Help: Available
cooldown: 720603994149486673
+ how_to_get_help: 704250143020417084
# Topical
discord_py: 343944376055103488
# Logs
attachment_log: &ATTACH_LOG 649243850006855680
- dm_log: 653713721625018428
message_log: &MESSAGE_LOG 467752170159079424
mod_log: &MOD_LOG 282638479504965634
user_log: 528976905546760203
@@ -194,13 +195,12 @@ guild:
helpers: &HELPERS 385474242440986624
incidents: 714214212200562749
incidents_archive: 720668923636351037
- mods: &MODS 305126844661760000
mod_alerts: 473092532147060736
- mod_meta: &MOD_META 775412552795947058
- mod_spam: &MOD_SPAM 620607373828030464
- mod_tools: &MOD_TOOLS 775413915391098921
+ nominations: 822920136150745168
+ nomination_voting: 822853512709931008
organisation: &ORGANISATION 551789653284356126
staff_lounge: &STAFF_LOUNGE 464905259261755392
+ staff_info: &STAFF_INFO 396684402404622347
# Staff announcement channels
admin_announcements: &ADMIN_ANNOUNCEMENTS 749736155569848370
@@ -225,16 +225,13 @@ guild:
talent_pool: &TALENT_POOL 534321732593647616
moderation_categories:
+ - *MODS_CATEGORY
- *MODMAIL
- *LOGS
moderation_channels:
- *ADMINS
- *ADMIN_SPAM
- - *MOD_META
- - *MOD_TOOLS
- - *MODS
- - *MOD_SPAM
# Modlog cog ignores events which occur in these channels
modlog_blacklist:
@@ -262,14 +259,20 @@ guild:
# Staff
admins: &ADMINS_ROLE 267628507062992896
core_developers: 587606783669829632
+ devops: 409416496733880320
+ domain_leads: 807415650778742785
helpers: &HELPERS_ROLE 267630620367257601
moderators: &MODS_ROLE 267629731250176001
owners: &OWNERS_ROLE 267627879762755584
+ project_leads: 815701647526330398
# Code Jam
jammers: 737249140966162473
team_leaders: 737250302834638889
+ # Streaming
+ video: 764245844798079016
+
moderation_roles:
- *ADMINS_ROLE
- *MODS_ROLE
@@ -284,7 +287,6 @@ guild:
webhooks:
big_brother: 569133704568373283
dev_log: 680501655111729222
- dm_log: 654567640664244225
duck_pond: 637821475327311927
incidents_archive: 720671599790915702
python_news: &PYNEWS_WEBHOOK 704381182279942324
@@ -321,7 +323,6 @@ filter:
- *MOD_LOG
- *STAFF_LOUNGE
- *TALENT_POOL
- - *USER_EVENT_A
role_whitelist:
- *ADMINS_ROLE
@@ -339,6 +340,8 @@ keys:
urls:
# PyDis site vars
+ connect_max_retries: 3
+ connect_cooldown: 5
site: &DOMAIN "pythondiscord.com"
site_api: &API "pydis-api.default.svc.cluster.local"
site_api_schema: "http://"
@@ -356,8 +359,8 @@ urls:
discord_api: &DISCORD_API "https://discordapp.com/api/v7/"
discord_invite_api: !JOIN [*DISCORD_API, "invites"]
- # Misc URLs
- bot_avatar: "https://raw.githubusercontent.com/discord-python/branding/master/logos/logo_circle/logo_circle.png"
+    # Misc URLs
+ bot_avatar: "https://raw.githubusercontent.com/python-discord/branding/main/logos/logo_circle/logo_circle.png"
github_bot_repo: "https://github.com/python-discord/bot"
@@ -464,8 +467,12 @@ help_channels:
cmd_whitelist:
- *HELPERS_ROLE
- # Allowed duration of inactivity before making a channel dormant
- idle_minutes: 30
+ # Allowed duration of inactivity by claimant before making a channel dormant
+ idle_minutes_claimant: 30
+
+ # Allowed duration of inactivity by others before making a channel dormant
+    # `idle_minutes_claimant` must also be met before a channel is closed
+ idle_minutes_others: 10
# Allowed duration of inactivity when channel is empty (due to deleted messages)
# before message making a channel dormant
@@ -476,7 +483,7 @@ help_channels:
# Maximum number of channels across all 3 categories
# Note Discord has a hard limit of 50 channels per category, so this shouldn't be > 50
- max_total_channels: 32
+ max_total_channels: 42
# Prefix for help channel names
name_prefix: 'help-'
@@ -508,12 +515,12 @@ duck_pond:
- *PYEVENTS_CHANNEL
- *MAILING_LISTS
- *REDDIT_CHANNEL
- - *USER_EVENT_A
- *DUCK_POND
- *CHANGE_LOG
- *STAFF_ANNOUNCEMENTS
- *MOD_ANNOUNCEMENTS
- *ADMIN_ANNOUNCEMENTS
+ - *STAFF_INFO
python_news:
@@ -541,3 +548,7 @@ branding:
config:
required_keys: ['bot.token']
+
+
+video_permission:
+ default_permission_duration: 5 # Default duration for stream command in minutes
diff --git a/docker-compose.yml b/docker-compose.yml
index 0002d1d56..8afdd6ef1 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -57,8 +57,7 @@ services:
- web
- redis
- snekbox
+ env_file:
+ - .env
environment:
- BOT_TOKEN: ${BOT_TOKEN}
BOT_API_KEY: badbot13m0n8f570f942013fc818f234916ca531
- REDDIT_CLIENT_ID: ${REDDIT_CLIENT_ID}
- REDDIT_SECRET: ${REDDIT_SECRET}
diff --git a/tests/bot/exts/info/doc/__init__.py b/tests/bot/exts/info/doc/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/tests/bot/exts/info/doc/__init__.py
diff --git a/tests/bot/exts/info/doc/test_parsing.py b/tests/bot/exts/info/doc/test_parsing.py
new file mode 100644
index 000000000..1663d8491
--- /dev/null
+++ b/tests/bot/exts/info/doc/test_parsing.py
@@ -0,0 +1,66 @@
+from unittest import TestCase
+
+from bot.exts.info.doc import _parsing as parsing
+
+
+class SignatureSplitter(TestCase):
+
+ def test_basic_split(self):
+ test_cases = (
+ ("0,0,0", ["0", "0", "0"]),
+ ("0,a=0,a=0", ["0", "a=0", "a=0"]),
+ )
+ self._run_tests(test_cases)
+
+ def test_commas_ignored_in_brackets(self):
+ test_cases = (
+ ("0,[0,0],0,[0,0],0", ["0", "[0,0]", "0", "[0,0]", "0"]),
+ ("(0,),0,(0,(0,),0),0", ["(0,)", "0", "(0,(0,),0)", "0"]),
+ )
+ self._run_tests(test_cases)
+
+ def test_mixed_brackets(self):
+        test_cases = (
+ ("[0,{0},0],0,{0:0},0", ["[0,{0},0]", "0", "{0:0}", "0"]),
+ ("([0],0,0),0,(0,0),0", ["([0],0,0)", "0", "(0,0)", "0"]),
+ ("([(0,),(0,)],0),0", ["([(0,),(0,)],0)", "0"]),
+ )
+        self._run_tests(test_cases)
+
+ def test_string_contents_ignored(self):
+ test_cases = (
+ ("'0,0',0,',',0", ["'0,0'", "0", "','", "0"]),
+ ("0,[']',0],0", ["0", "[']',0]", "0"]),
+ ("{0,0,'}}',0,'{'},0", ["{0,0,'}}',0,'{'}", "0"]),
+ )
+ self._run_tests(test_cases)
+
+ def test_mixed_quotes(self):
+ test_cases = (
+ ("\"0',0',\",'0,0',0", ["\"0',0',\"", "'0,0'", "0"]),
+ ("\",',\",'\",',0", ["\",',\"", "'\",'", "0"]),
+ )
+ self._run_tests(test_cases)
+
+ def test_quote_escaped(self):
+ test_cases = (
+ (r"'\',','\\',0", [r"'\','", r"'\\'", "0"]),
+ (r"'0\',0\\\'\\',0", [r"'0\',0\\\'\\'", "0"]),
+ )
+ self._run_tests(test_cases)
+
+ def test_real_signatures(self):
+ test_cases = (
+ ("start, stop[, step]", ["start", " stop[, step]"]),
+ ("object=b'', encoding='utf-8', errors='strict'", ["object=b''", " encoding='utf-8'", " errors='strict'"]),
+ (
+ "typename, field_names, *, rename=False, defaults=None, module=None",
+ ["typename", " field_names", " *", " rename=False", " defaults=None", " module=None"]
+ ),
+ )
+ self._run_tests(test_cases)
+
+ def _run_tests(self, test_cases):
+ for input_string, expected_output in test_cases:
+ with self.subTest(input_string=input_string):
+ self.assertEqual(list(parsing._split_parameters(input_string)), expected_output)
diff --git a/tests/bot/exts/info/test_information.py b/tests/bot/exts/info/test_information.py
index 80731c9f0..a996ce477 100644
--- a/tests/bot/exts/info/test_information.py
+++ b/tests/bot/exts/info/test_information.py
@@ -283,6 +283,7 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
user = helpers.MockMember()
user.nick = None
user.__str__ = unittest.mock.Mock(return_value="Mr. Hemlock")
+ user.colour = 0
embed = await self.cog.create_user_embed(ctx, user)
@@ -298,6 +299,7 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
user = helpers.MockMember()
user.nick = "Cat lover"
user.__str__ = unittest.mock.Mock(return_value="Mr. Hemlock")
+ user.colour = 0
embed = await self.cog.create_user_embed(ctx, user)
@@ -311,10 +313,9 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
"""Created `!user` embeds should not contain mention of the @everyone-role."""
ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=1))
admins_role = helpers.MockRole(name='Admins')
- admins_role.colour = 100
# A `MockMember` has the @Everyone role by default; we add the Admins to that.
- user = helpers.MockMember(roles=[admins_role], top_role=admins_role)
+ user = helpers.MockMember(roles=[admins_role], colour=100)
embed = await self.cog.create_user_embed(ctx, user)
@@ -332,12 +333,11 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=50))
moderators_role = helpers.MockRole(name='Moderators')
- moderators_role.colour = 100
infraction_counts.return_value = ("Infractions", "expanded infractions info")
nomination_counts.return_value = ("Nominations", "nomination info")
- user = helpers.MockMember(id=314, roles=[moderators_role], top_role=moderators_role)
+ user = helpers.MockMember(id=314, roles=[moderators_role], colour=100)
embed = await self.cog.create_user_embed(ctx, user)
infraction_counts.assert_called_once_with(user)
@@ -367,11 +367,10 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
ctx = helpers.MockContext(channel=helpers.MockTextChannel(id=100))
moderators_role = helpers.MockRole(name='Moderators')
- moderators_role.colour = 100
infraction_counts.return_value = ("Infractions", "basic infractions info")
- user = helpers.MockMember(id=314, roles=[moderators_role], top_role=moderators_role)
+ user = helpers.MockMember(id=314, roles=[moderators_role], colour=100)
embed = await self.cog.create_user_embed(ctx, user)
infraction_counts.assert_called_once_with(user)
@@ -407,12 +406,11 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
ctx = helpers.MockContext()
moderators_role = helpers.MockRole(name='Moderators')
- moderators_role.colour = 100
- user = helpers.MockMember(id=314, roles=[moderators_role], top_role=moderators_role)
+ user = helpers.MockMember(id=314, roles=[moderators_role], colour=100)
embed = await self.cog.create_user_embed(ctx, user)
- self.assertEqual(embed.colour, discord.Colour(moderators_role.colour))
+ self.assertEqual(embed.colour, discord.Colour(100))
@unittest.mock.patch(
f"{COG_PATH}.basic_user_infraction_counts",
@@ -422,7 +420,7 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
"""The embed should be created with a blurple colour if the user has no assigned roles."""
ctx = helpers.MockContext()
- user = helpers.MockMember(id=217)
+ user = helpers.MockMember(id=217, colour=discord.Colour.default())
embed = await self.cog.create_user_embed(ctx, user)
self.assertEqual(embed.colour, discord.Colour.blurple())
@@ -435,7 +433,7 @@ class UserEmbedTests(unittest.IsolatedAsyncioTestCase):
"""The embed thumbnail should be set to the user's avatar in `png` format."""
ctx = helpers.MockContext()
- user = helpers.MockMember(id=217)
+ user = helpers.MockMember(id=217, colour=0)
user.avatar_url_as.return_value = "avatar url"
embed = await self.cog.create_user_embed(ctx, user)
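
These test edits track a refactor in the cog: the mocks now pass colour= directly instead of wiring up top_role, because the embed colour is read from Member.colour, which discord.py already resolves from the member's top coloured role. A sketch of the selection logic the assertions imply (not the cog's literal code):

    import discord

    def user_embed_colour(user: discord.Member) -> discord.Colour:
        # A member with no coloured role has the default colour (value 0);
        # the embed then falls back to blurple, otherwise the member's own
        # colour is used as-is (Embed accepts either an int or a Colour).
        if user.colour == discord.Colour.default():
            return discord.Colour.blurple()
        return user.colour
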
diff --git a/tests/bot/exts/moderation/infraction/test_infractions.py b/tests/bot/exts/moderation/infraction/test_infractions.py
index 86c2617ea..08f39cd50 100644
--- a/tests/bot/exts/moderation/infraction/test_infractions.py
+++ b/tests/bot/exts/moderation/infraction/test_infractions.py
@@ -39,7 +39,7 @@ class TruncationTests(unittest.IsolatedAsyncioTestCase):
delete_message_days=0
)
self.cog.apply_infraction.assert_awaited_once_with(
- self.ctx, {"foo": "bar"}, self.target, self.ctx.guild.ban.return_value
+ self.ctx, {"foo": "bar", "purge": ""}, self.target, self.ctx.guild.ban.return_value
)
@patch("bot.exts.moderation.infraction._utils.post_infraction")
diff --git a/tests/bot/exts/moderation/infraction/test_utils.py b/tests/bot/exts/moderation/infraction/test_utils.py
index 5b62463e0..ee9ff650c 100644
--- a/tests/bot/exts/moderation/infraction/test_utils.py
+++ b/tests/bot/exts/moderation/infraction/test_utils.py
@@ -146,7 +146,7 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase):
name=utils.INFRACTION_AUTHOR_NAME,
url=utils.RULES_URL,
icon_url=Icons.token_removed
- ).set_footer(text=utils.INFRACTION_APPEAL_FOOTER),
+ ).set_footer(text=utils.INFRACTION_APPEAL_MODMAIL_FOOTER),
"send_result": True
},
{
@@ -164,9 +164,11 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase):
name=utils.INFRACTION_AUTHOR_NAME,
url=utils.RULES_URL,
icon_url=Icons.token_removed
- ),
+ ).set_footer(text=utils.INFRACTION_APPEAL_MODMAIL_FOOTER),
"send_result": False
},
+ # Note that this test case asserts that the DM that *would* get sent to the user is formatted
+ # correctly, even though that message is deliberately never sent.
{
"args": (self.user, "note", None, None, Icons.defcon_denied),
"expected_output": Embed(
@@ -182,7 +184,7 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase):
name=utils.INFRACTION_AUTHOR_NAME,
url=utils.RULES_URL,
icon_url=Icons.defcon_denied
- ),
+ ).set_footer(text=utils.INFRACTION_APPEAL_MODMAIL_FOOTER),
"send_result": False
},
{
@@ -200,7 +202,7 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase):
name=utils.INFRACTION_AUTHOR_NAME,
url=utils.RULES_URL,
icon_url=Icons.defcon_denied
- ).set_footer(text=utils.INFRACTION_APPEAL_FOOTER),
+ ).set_footer(text=utils.INFRACTION_APPEAL_MODMAIL_FOOTER),
"send_result": False
},
{
@@ -218,7 +220,7 @@ class ModerationUtilsTests(unittest.IsolatedAsyncioTestCase):
name=utils.INFRACTION_AUTHOR_NAME,
url=utils.RULES_URL,
icon_url=Icons.defcon_denied
- ).set_footer(text=utils.INFRACTION_APPEAL_FOOTER),
+ ).set_footer(text=utils.INFRACTION_APPEAL_MODMAIL_FOOTER),
"send_result": True
}
]
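
Beyond renaming INFRACTION_APPEAL_FOOTER to INFRACTION_APPEAL_MODMAIL_FOOTER, the updated expectations attach the footer to every DM embed, including the ones that are deliberately never sent. A compressed sketch of the construction the test data mirrors (the footer wording and colour here are placeholders, not the real constants):

    from discord import Embed

    # Assumed wording; the tests only compare against the constant itself.
    INFRACTION_APPEAL_MODMAIL_FOOTER = "To appeal this infraction, send a direct message to ModMail."

    def infraction_dm_embed(description: str, author_name: str, rules_url: str, icon_url: str) -> Embed:
        embed = Embed(description=description)
        embed.set_author(name=author_name, url=rules_url, icon_url=icon_url)
        # The modmail appeal footer is now set unconditionally.
        embed.set_footer(text=INFRACTION_APPEAL_MODMAIL_FOOTER)
        return embed
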
diff --git a/tests/bot/test_converters.py b/tests/bot/test_converters.py
index c42111f3f..4af84dde5 100644
--- a/tests/bot/test_converters.py
+++ b/tests/bot/test_converters.py
@@ -10,9 +10,9 @@ from bot.converters import (
Duration,
HushDurationConverter,
ISODateTime,
+ PackageName,
TagContentConverter,
TagNameConverter,
- ValidPythonIdentifier,
)
@@ -78,24 +78,23 @@ class ConverterTests(unittest.IsolatedAsyncioTestCase):
with self.assertRaisesRegex(BadArgument, re.escape(exception_message)):
await TagNameConverter.convert(self.context, invalid_name)
- async def test_valid_python_identifier_for_valid(self):
- """ValidPythonIdentifier returns valid identifiers unchanged."""
- test_values = ('foo', 'lemon')
+ async def test_package_name_for_valid(self):
+ """PackageName returns valid package names unchanged."""
+ test_values = ('foo', 'le_mon', 'num83r')
for name in test_values:
with self.subTest(identifier=name):
- conversion = await ValidPythonIdentifier.convert(self.context, name)
+ conversion = await PackageName.convert(self.context, name)
self.assertEqual(name, conversion)
- async def test_valid_python_identifier_for_invalid(self):
- """ValidPythonIdentifier raises the proper exception for invalid identifiers."""
- test_values = ('nested.stuff', '#####')
+ async def test_package_name_for_invalid(self):
+ """PackageName raises the proper exception for invalid package names."""
+ test_values = ('text_with_a_dot.', 'UpperCaseName', 'dashed-name')
for name in test_values:
with self.subTest(identifier=name):
- exception_message = f'`{name}` is not a valid Python identifier'
- with self.assertRaisesRegex(BadArgument, re.escape(exception_message)):
- await ValidPythonIdentifier.convert(self.context, name)
+ with self.assertRaises(BadArgument):
+ await PackageName.convert(self.context, name)
async def test_duration_converter_for_valid(self):
"""Duration returns the correct `datetime` for valid duration strings."""
diff --git a/tests/bot/utils/test_services.py b/tests/bot/utils/test_services.py
index 1b48f6560..3b71022db 100644
--- a/tests/bot/utils/test_services.py
+++ b/tests/bot/utils/test_services.py
@@ -30,9 +30,9 @@ class PasteTests(unittest.IsolatedAsyncioTestCase):
"""Url with specified extension is returned on successful requests."""
key = "paste_key"
test_cases = (
- (f"https://paste_service.com/{key}.txt", "txt"),
+ (f"https://paste_service.com/{key}.txt?noredirect", "txt"),
(f"https://paste_service.com/{key}.py", "py"),
- (f"https://paste_service.com/{key}", ""),
+ (f"https://paste_service.com/{key}?noredirect", ""),
)
response = MagicMock(
json=AsyncMock(return_value={"key": key})