41 files changed, 931 insertions, 604 deletions
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
new file mode 100644
index 000000000..706ab462f
--- /dev/null
+++ b/.github/workflows/build.yml
@@ -0,0 +1,57 @@
+name: Build
+
+on:
+  workflow_run:
+    workflows: ["Lint & Test"]
+    branches:
+      - master
+    types:
+      - completed
+
+jobs:
+  build:
+    if: github.event.workflow_run.conclusion == 'success'
+    name: Build & Push
+    runs-on: ubuntu-latest
+
+    steps:
+      # Create a commit SHA-based tag for the container repositories
+      - name: Create SHA Container Tag
+        id: sha_tag
+        run: |
+          tag=$(cut -c 1-7 <<< $GITHUB_SHA)
+          echo "::set-output name=tag::$tag"
+
+      - name: Checkout code
+        uses: actions/checkout@v2
+
+      # The current version (v2) of Docker's build-push action uses
+      # buildx, which comes with BuildKit features that help us speed
+      # up our builds using additional cache features. Buildx also
+      # has a lot of other features that are not as relevant to us.
+      #
+      # See https://github.com/docker/build-push-action
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v1
+
+      - name: Login to Github Container Registry
+        uses: docker/login-action@v1
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GHCR_TOKEN }}
+
+      # Build and push the container to the GitHub Container
+      # Repository. The container will be tagged as "latest"
+      # and with the short SHA of the commit.
+      - name: Build and push
+        uses: docker/build-push-action@v2
+        with:
+          context: .
+          file: ./Dockerfile
+          push: true
+          cache-from: type=registry,ref=ghcr.io/python-discord/bot:latest
+          cache-to: type=inline
+          tags: |
+            ghcr.io/python-discord/bot:latest
+            ghcr.io/python-discord/bot:${{ steps.sha_tag.outputs.tag }}
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
deleted file mode 100644
index 8760b35ec..000000000
--- a/.github/workflows/codeql-analysis.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-name: "Code scanning - action"
-
-on:
-  push:
-  pull_request:
-  schedule:
-    - cron: '0 12 * * *'
-
-jobs:
-  CodeQL-Build:
-
-    runs-on: ubuntu-latest
-
-    steps:
-    - name: Checkout repository
-      uses: actions/checkout@v2
-      with:
-        fetch-depth: 2
-
-    - run: git checkout HEAD^2
-      if: ${{ github.event_name == 'pull_request' }}
-
-    - name: Initialize CodeQL
-      uses: github/codeql-action/init@v1
-      with:
-        languages: python
-
-    - name: Autobuild
-      uses: github/codeql-action/autobuild@v1
-
-    - name: Perform CodeQL Analysis
-      uses: github/codeql-action/analyze@v1
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
new file mode 100644
index 000000000..90555a8ee
--- /dev/null
+++ b/.github/workflows/deploy.yml
@@ -0,0 +1,39 @@
+name: Deploy
+
+on:
+  workflow_run:
+    workflows: ["Build"]
+    branches:
+      - master
+    types:
+      - completed
+
+jobs:
+  build:
+    if: github.event.workflow_run.conclusion == 'success'
+    name: Build & Push
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Create SHA Container Tag
+        id: sha_tag
+        run: |
+          tag=$(cut -c 1-7 <<< $GITHUB_SHA)
+          echo "::set-output name=tag::$tag"
+
+      - name: Checkout code
+        uses: actions/checkout@v2
+
+      - name: Authenticate with Kubernetes
+        uses: azure/k8s-set-context@v1
+        with:
+          method: kubeconfig
+          kubeconfig: ${{ secrets.KUBECONFIG }}
+
+      - name: Deploy to Kubernetes
+        uses: Azure/k8s-deploy@v1
+        with:
+          manifests: |
+            deployment.yaml
+          images: 'ghcr.io/python-discord/bot:${{ steps.sha_tag.outputs.tag }}'
+          kubectl-version: 'latest'
diff --git a/.github/workflows/lint-test.yml b/.github/workflows/lint-test.yml
new file mode 100644
index 000000000..5444fc3de
--- /dev/null
+++ b/.github/workflows/lint-test.yml
@@ -0,0 +1,115 @@
+name: Lint & Test
+
+on:
+  push:
+    branches:
+      - master
+  pull_request:
+
+
+jobs:
+  lint-test:
+    runs-on: ubuntu-latest
+    env:
+      # Dummy values for required bot environment variables
+      BOT_API_KEY: foo
+      BOT_SENTRY_DSN: blah
+      BOT_TOKEN: bar
+      REDDIT_CLIENT_ID: spam
+      REDDIT_SECRET: ham
+      REDIS_PASSWORD: ''
+
+      # Configure pip to cache dependencies and do a user install
+      PIP_NO_CACHE_DIR: false
+      PIP_USER: 1
+
+      # Hide the graphical elements from pipenv's output
+      PIPENV_HIDE_EMOJIS: 1
+      PIPENV_NOSPIN: 1
+
+      # Make sure pipenv does not try reuse an environment it's running in
+      PIPENV_IGNORE_VIRTUALENVS: 1
+
+      # Specify explicit paths for python dependencies and the pre-commit
+      # environment so we know which directories to cache
+      PYTHONUSERBASE: ${{ github.workspace }}/.cache/py-user-base
+      PRE_COMMIT_HOME: ${{ github.workspace }}/.cache/pre-commit-cache
+
+    steps:
+      - name: Add custom PYTHONUSERBASE to PATH
+        run: echo '${{ env.PYTHONUSERBASE }}/bin/' >> $GITHUB_PATH
+
+      - name: Checkout repository
+        uses: actions/checkout@v2
+
+      - name: Setup python
+        id: python
+        uses: actions/setup-python@v2
+        with:
+          python-version: '3.8'
+
+      # This step caches our Python dependencies. To make sure we
+      # only restore a cache when the dependencies, the python version,
+      # the runner operating system, and the dependency location haven't
+      # changed, we create a cache key that is a composite of those states.
+      #
+      # Only when the context is exactly the same, we will restore the cache.
+      - name: Python Dependency Caching
+        uses: actions/cache@v2
+        id: python_cache
+        with:
+          path: ${{ env.PYTHONUSERBASE }}
+          key: "python-0-${{ runner.os }}-${{ env.PYTHONUSERBASE }}-\
+            ${{ steps.python.outputs.python-version }}-\
+            ${{ hashFiles('./Pipfile', './Pipfile.lock') }}"
+
+      # Install our dependencies if we did not restore a dependency cache
+      - name: Install dependencies using pipenv
+        if: steps.python_cache.outputs.cache-hit != 'true'
+        run: |
+          pip install pipenv
+          pipenv install --dev --deploy --system
+
+      # This step caches our pre-commit environment. To make sure we
+      # do create a new environment when our pre-commit setup changes,
+      # we create a cache key based on relevant factors.
+      - name: Pre-commit Environment Caching
+        uses: actions/cache@v2
+        with:
+          path: ${{ env.PRE_COMMIT_HOME }}
+          key: "precommit-0-${{ runner.os }}-${{ env.PRE_COMMIT_HOME }}-\
+            ${{ steps.python.outputs.python-version }}-\
+            ${{ hashFiles('./.pre-commit-config.yaml') }}"
+
+      # We will not run `flake8` here, as we will use a separate flake8
+      # action. As pre-commit does not support user installs, we set
+      # PIP_USER=0 to not do a user install.
+      - name: Run pre-commit hooks
+        run: export PIP_USER=0; SKIP=flake8 pre-commit run --all-files
+
+      # Run flake8 and have it format the linting errors in the format of
+      # the GitHub Workflow command to register error annotations. This
+      # means that our flake8 output is automatically added as an error
+      # annotation to both the run result and in the "Files" tab of a
+      # pull request.
+      #
+      # Format used:
+      # ::error file={filename},line={line},col={col}::{message}
+      - name: Run flake8
+        run: "flake8 \
+          --format='::error file=%(path)s,line=%(row)d,col=%(col)d::\
+          [flake8] %(code)s: %(text)s'"
+
+      # We run `coverage` using the `python` command so we can suppress
+      # irrelevant warnings in our CI output.
+ - name: Run tests and generate coverage report + run: | + python -Wignore -m coverage run -m unittest + coverage report -m + + # This step will publish the coverage reports coveralls.io and + # print a "job" link in the output of the GitHub Action + - name: Publish coverage report to coveralls.io + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: coveralls diff --git a/.gitignore b/.gitignore index 2074887ad..9186dbe06 100644 --- a/.gitignore +++ b/.gitignore @@ -111,6 +111,7 @@ ENV/ # Logfiles log.* *.log.* +!log.py # Custom user configuration config.yml @@ -26,6 +26,7 @@ requests = "~=2.22" sentry-sdk = "~=0.14" sphinx = "~=2.2" statsd = "~=3.3" +emoji = "~=0.6" [dev-packages] coverage = "~=5.0" @@ -39,7 +40,7 @@ flake8-tidy-imports = "~=4.0" flake8-todo = "~=0.7" pep8-naming = "~=0.9" pre-commit = "~=2.1" -unittest-xml-reporting = "~=3.0" +coveralls = "~=2.1" [requires] python_version = "3.8" @@ -48,8 +49,8 @@ python_version = "3.8" start = "python -m bot" lint = "pre-commit run --all-files" precommit = "pre-commit install" -build = "docker build -t pythondiscord/bot:latest -f Dockerfile ." -push = "docker push pythondiscord/bot:latest" +build = "docker build -t ghcr.io/python-discord/bot:latest -f Dockerfile ." +push = "docker push ghcr.io/python-discord/bot:latest" test = "coverage run -m unittest" html = "coverage html" report = "coverage report" diff --git a/Pipfile.lock b/Pipfile.lock index becd85c55..541db1627 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "073fd0c51749aafa188fdbe96c5b90dd157cb1d23bdd144801fb0d0a369ffa88" + "sha256": "3ccb368599709d2970f839fc3721cfeebcd5a2700fed7231b2ce38a080828325" }, "pipfile-spec": 6, "requires": { @@ -34,21 +34,22 @@ }, "aiohttp": { "hashes": [ - "sha256:1e984191d1ec186881ffaed4581092ba04f7c61582a177b187d3a2f07ed9719e", - "sha256:259ab809ff0727d0e834ac5e8a283dc5e3e0ecc30c4d80b3cd17a4139ce1f326", - "sha256:2f4d1a4fdce595c947162333353d4a44952a724fba9ca3205a3df99a33d1307a", - "sha256:32e5f3b7e511aa850829fbe5aa32eb455e5534eaa4b1ce93231d00e2f76e5654", - "sha256:344c780466b73095a72c616fac5ea9c4665add7fc129f285fbdbca3cccf4612a", - "sha256:460bd4237d2dbecc3b5ed57e122992f60188afe46e7319116da5eb8a9dfedba4", - "sha256:4c6efd824d44ae697814a2a85604d8e992b875462c6655da161ff18fd4f29f17", - "sha256:50aaad128e6ac62e7bf7bd1f0c0a24bc968a0c0590a726d5a955af193544bcec", - "sha256:6206a135d072f88da3e71cc501c59d5abffa9d0bb43269a6dcd28d66bfafdbdd", - "sha256:65f31b622af739a802ca6fd1a3076fd0ae523f8485c52924a89561ba10c49b48", - "sha256:ae55bac364c405caa23a4f2d6cfecc6a0daada500274ffca4a9230e7129eac59", - "sha256:b778ce0c909a2653741cb4b1ac7015b5c130ab9c897611df43ae6a58523cb965" + "sha256:1a4160579ffbc1b69e88cb6ca8bb0fbd4947dfcbf9fb1e2a4fc4c7a4a986c1fe", + "sha256:206c0ccfcea46e1bddc91162449c20c72f308aebdcef4977420ef329c8fcc599", + "sha256:2ad493de47a8f926386fa6d256832de3095ba285f325db917c7deae0b54a9fc8", + "sha256:319b490a5e2beaf06891f6711856ea10591cfe84fe9f3e71a721aa8f20a0872a", + "sha256:470e4c90da36b601676fe50c49a60d34eb8c6593780930b1aa4eea6f508dfa37", + "sha256:60f4caa3b7f7a477f66ccdd158e06901e1d235d572283906276e3803f6b098f5", + "sha256:66d64486172b032db19ea8522328b19cfb78a3e1e5b62ab6a0567f93f073dea0", + "sha256:687461cd974722110d1763b45c5db4d2cdee8d50f57b00c43c7590d1dd77fc5c", + "sha256:698cd7bc3c7d1b82bb728bae835724a486a8c376647aec336aa21a60113c3645", + "sha256:797456399ffeef73172945708810f3277f794965eb6ec9bd3a0c007c0476be98", + "sha256:a885432d3cabc1287bcf88ea94e1826d3aec57fd5da4a586afae4591b061d40d", + 
"sha256:c506853ba52e516b264b106321c424d03f3ddef2813246432fa9d1cefd361c81", + "sha256:fb83326d8295e8840e4ba774edf346e87eca78ba8a89c55d2690352842c15ba5" ], "index": "pypi", - "version": "==3.6.2" + "version": "==3.6.3" }, "aioping": { "hashes": [ @@ -68,11 +69,11 @@ }, "aiormq": { "hashes": [ - "sha256:106695a836f19c1af6c46b58e8aac80e00f86c5b3287a3c6483a1ee369cc95c9", - "sha256:9f6dbf6155fe2b7a3d24bf68de97fb812db0fac0a54e96bc1af14ea95078ba7f" + "sha256:8218dd9f7198d6e7935855468326bbacf0089f926c70baa8dd92944cb2496573", + "sha256:e584dac13a242589aaf42470fd3006cb0dc5aed6506cbd20357c7ec8bbe4a89e" ], "markers": "python_version >= '3.6'", - "version": "==3.2.3" + "version": "==3.3.1" }, "alabaster": { "hashes": [ @@ -103,35 +104,35 @@ }, "attrs": { "hashes": [ - "sha256:26b54ddbbb9ee1d34d5d3668dd37d6cf74990ab23c828c2888dccdceee395594", - "sha256:fce7fc47dfc976152e82d53ff92fa0407700c21acd20886a13777a0d20e655dc" + "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", + "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==20.2.0" + "version": "==20.3.0" }, "babel": { "hashes": [ - "sha256:1aac2ae2d0d8ea368fa90906567f5c08463d98ade155c0c4bfedd6a0f7160e38", - "sha256:d670ea0b10f8b723672d3a6abeb87b565b244da220d76b4dba1b66269ec152d4" + "sha256:9d35c22fcc79893c3ecc85ac4a56cde1ecf3f19c540bba0922308a6c06ca6fa5", + "sha256:da031ab54472314f210b0adcff1588ee5d1d1d0ba4dbd07b94dba82bde791e05" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==2.8.0" + "version": "==2.9.0" }, "beautifulsoup4": { "hashes": [ - "sha256:1edf5e39f3a5bc6e38b235b369128416c7239b34f692acccececb040233032a1", - "sha256:5dfe44f8fddc89ac5453f02659d3ab1668f2c0d9684839f0785037e8c6d9ac8d", - "sha256:645d833a828722357038299b7f6879940c11dddd95b900fe5387c258b72bb883" + "sha256:4c98143716ef1cb40bf7f39a8e3eec8f8b009509e74904ba3a7b315431577e35", + "sha256:84729e322ad1d5b4d25f805bfa05b902dd96450f43842c4e99067d5e1369eb25", + "sha256:fff47e031e34ec82bf17e00da8f592fe7de69aeea38be00523c04623c04fb666" ], "index": "pypi", - "version": "==4.9.2" + "version": "==4.9.3" }, "certifi": { "hashes": [ - "sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3", - "sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41" + "sha256:1f422849db327d534e3d0c5f02a263458c3955ec0aae4ff09b95f195c59f4edd", + "sha256:f05def092c44fbf25834a51509ef6e631dc19765ab8a57b4e7ab85531f0a9cf4" ], - "version": "==2020.6.20" + "version": "==2020.11.8" }, "cffi": { "hashes": [ @@ -183,11 +184,11 @@ }, "colorama": { "hashes": [ - "sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff", - "sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1" + "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b", + "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2" ], "markers": "sys_platform == 'win32'", - "version": "==0.4.3" + "version": "==0.4.4" }, "coloredlogs": { "hashes": [ @@ -207,11 +208,11 @@ }, "discord.py": { "hashes": [ - "sha256:3acb61fde0d862ed346a191d69c46021e6063673f63963bc984ae09a685ab211", - "sha256:e71089886aa157341644bdecad63a72ff56b44406b1a6467b66db31c8e5a5a15" + "sha256:2367359e31f6527f8a936751fc20b09d7495dd6a76b28c8fb13d4ca6c55b7563", + "sha256:def00dc50cf36d21346d71bc89f0cad8f18f9a3522978dc18c7796287d47de8b" ], "index": "pypi", - "version": "==1.5.0" + "version": "==1.5.1" }, 
"docutils": { "hashes": [ @@ -221,12 +222,19 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==0.16" }, + "emoji": { + "hashes": [ + "sha256:e42da4f8d648f8ef10691bc246f682a1ec6b18373abfd9be10ec0b398823bd11" + ], + "index": "pypi", + "version": "==0.6.0" + }, "fakeredis": { "hashes": [ - "sha256:7ea0866ba5edb40fe2e9b1722535df0c7e6b91d518aa5f50d96c2fff3ea7f4c2", - "sha256:aad8836ffe0319ffbba66dcf872ac6e7e32d1f19790e31296ba58445efb0a5c7" + "sha256:8070b7fce16f828beaef2c757a4354af91698685d5232404f1aeeb233529c7a5", + "sha256:f8c8ea764d7b6fd801e7f5486e3edd32ca991d506186f1923a01fc072e33c271" ], - "version": "==1.4.3" + "version": "==1.4.4" }, "feedparser": { "hashes": [ @@ -331,40 +339,46 @@ }, "lxml": { "hashes": [ - "sha256:05a444b207901a68a6526948c7cc8f9fe6d6f24c70781488e32fd74ff5996e3f", - "sha256:08fc93257dcfe9542c0a6883a25ba4971d78297f63d7a5a26ffa34861ca78730", - "sha256:107781b213cf7201ec3806555657ccda67b1fccc4261fb889ef7fc56976db81f", - "sha256:121b665b04083a1e85ff1f5243d4a93aa1aaba281bc12ea334d5a187278ceaf1", - "sha256:1fa21263c3aba2b76fd7c45713d4428dbcc7644d73dcf0650e9d344e433741b3", - "sha256:2b30aa2bcff8e958cd85d907d5109820b01ac511eae5b460803430a7404e34d7", - "sha256:4b4a111bcf4b9c948e020fd207f915c24a6de3f1adc7682a2d92660eb4e84f1a", - "sha256:5591c4164755778e29e69b86e425880f852464a21c7bb53c7ea453bbe2633bbe", - "sha256:59daa84aef650b11bccd18f99f64bfe44b9f14a08a28259959d33676554065a1", - "sha256:5a9c8d11aa2c8f8b6043d845927a51eb9102eb558e3f936df494e96393f5fd3e", - "sha256:5dd20538a60c4cc9a077d3b715bb42307239fcd25ef1ca7286775f95e9e9a46d", - "sha256:74f48ec98430e06c1fa8949b49ebdd8d27ceb9df8d3d1c92e1fdc2773f003f20", - "sha256:786aad2aa20de3dbff21aab86b2fb6a7be68064cbbc0219bde414d3a30aa47ae", - "sha256:7ad7906e098ccd30d8f7068030a0b16668ab8aa5cda6fcd5146d8d20cbaa71b5", - "sha256:80a38b188d20c0524fe8959c8ce770a8fdf0e617c6912d23fc97c68301bb9aba", - "sha256:8f0ec6b9b3832e0bd1d57af41f9238ea7709bbd7271f639024f2fc9d3bb01293", - "sha256:92282c83547a9add85ad658143c76a64a8d339028926d7dc1998ca029c88ea6a", - "sha256:94150231f1e90c9595ccc80d7d2006c61f90a5995db82bccbca7944fd457f0f6", - "sha256:9dc9006dcc47e00a8a6a029eb035c8f696ad38e40a27d073a003d7d1443f5d88", - "sha256:a76979f728dd845655026ab991df25d26379a1a8fc1e9e68e25c7eda43004bed", - "sha256:aa8eba3db3d8761db161003e2d0586608092e217151d7458206e243be5a43843", - "sha256:bea760a63ce9bba566c23f726d72b3c0250e2fa2569909e2d83cda1534c79443", - "sha256:c3f511a3c58676147c277eff0224c061dd5a6a8e1373572ac817ac6324f1b1e0", - "sha256:c9d317efde4bafbc1561509bfa8a23c5cab66c44d49ab5b63ff690f5159b2304", - "sha256:cc411ad324a4486b142c41d9b2b6a722c534096963688d879ea6fa8a35028258", - "sha256:cdc13a1682b2a6241080745b1953719e7fe0850b40a5c71ca574f090a1391df6", - "sha256:cfd7c5dd3c35c19cec59c63df9571c67c6d6e5c92e0fe63517920e97f61106d1", - "sha256:e1cacf4796b20865789083252186ce9dc6cc59eca0c2e79cca332bdff24ac481", - "sha256:e70d4e467e243455492f5de463b72151cc400710ac03a0678206a5f27e79ddef", - "sha256:ecc930ae559ea8a43377e8b60ca6f8d61ac532fc57efb915d899de4a67928efd", - "sha256:f161af26f596131b63b236372e4ce40f3167c1b5b5d459b29d2514bd8c9dc9ee" - ], - "index": "pypi", - "version": "==4.5.2" + "sha256:098fb713b31050463751dcc694878e1d39f316b86366fb9fe3fbbe5396ac9fab", + "sha256:0e89f5d422988c65e6936e4ec0fe54d6f73f3128c80eb7ecc3b87f595523607b", + "sha256:189ad47203e846a7a4951c17694d845b6ade7917c47c64b29b86526eefc3adf5", + "sha256:1d87936cb5801c557f3e981c9c193861264c01209cb3ad0964a16310ca1b3301", + 
"sha256:211b3bcf5da70c2d4b84d09232534ad1d78320762e2c59dedc73bf01cb1fc45b", + "sha256:2358809cc64394617f2719147a58ae26dac9e21bae772b45cfb80baa26bfca5d", + "sha256:23c83112b4dada0b75789d73f949dbb4e8f29a0a3511647024a398ebd023347b", + "sha256:24e811118aab6abe3ce23ff0d7d38932329c513f9cef849d3ee88b0f848f2aa9", + "sha256:2d5896ddf5389560257bbe89317ca7bcb4e54a02b53a3e572e1ce4226512b51b", + "sha256:2d6571c48328be4304aee031d2d5046cbc8aed5740c654575613c5a4f5a11311", + "sha256:2e311a10f3e85250910a615fe194839a04a0f6bc4e8e5bb5cac221344e3a7891", + "sha256:302160eb6e9764168e01d8c9ec6becddeb87776e81d3fcb0d97954dd51d48e0a", + "sha256:3a7a380bfecc551cfd67d6e8ad9faa91289173bdf12e9cfafbd2bdec0d7b1ec1", + "sha256:3d9b2b72eb0dbbdb0e276403873ecfae870599c83ba22cadff2db58541e72856", + "sha256:475325e037fdf068e0c2140b818518cf6bc4aa72435c407a798b2db9f8e90810", + "sha256:4b7572145054330c8e324a72d808c8c8fbe12be33368db28c39a255ad5f7fb51", + "sha256:4fff34721b628cce9eb4538cf9a73d02e0f3da4f35a515773cce6f5fe413b360", + "sha256:56eff8c6fb7bc4bcca395fdff494c52712b7a57486e4fbde34c31bb9da4c6cc4", + "sha256:573b2f5496c7e9f4985de70b9bbb4719ffd293d5565513e04ac20e42e6e5583f", + "sha256:7ecaef52fd9b9535ae5f01a1dd2651f6608e4ec9dc136fc4dfe7ebe3c3ddb230", + "sha256:803a80d72d1f693aa448566be46ffd70882d1ad8fc689a2e22afe63035eb998a", + "sha256:8862d1c2c020cb7a03b421a9a7b4fe046a208db30994fc8ff68c627a7915987f", + "sha256:9b06690224258db5cd39a84e993882a6874676f5de582da57f3df3a82ead9174", + "sha256:a71400b90b3599eb7bf241f947932e18a066907bf84617d80817998cee81e4bf", + "sha256:bb252f802f91f59767dcc559744e91efa9df532240a502befd874b54571417bd", + "sha256:be1ebf9cc25ab5399501c9046a7dcdaa9e911802ed0e12b7d620cd4bbf0518b3", + "sha256:be7c65e34d1b50ab7093b90427cbc488260e4b3a38ef2435d65b62e9fa3d798a", + "sha256:c0dac835c1a22621ffa5e5f999d57359c790c52bbd1c687fe514ae6924f65ef5", + "sha256:c152b2e93b639d1f36ec5a8ca24cde4a8eefb2b6b83668fcd8e83a67badcb367", + "sha256:d182eada8ea0de61a45a526aa0ae4bcd222f9673424e65315c35820291ff299c", + "sha256:d18331ea905a41ae71596502bd4c9a2998902328bbabd29e3d0f5f8569fabad1", + "sha256:d20d32cbb31d731def4b1502294ca2ee99f9249b63bc80e03e67e8f8e126dea8", + "sha256:d4ad7fd3269281cb471ad6c7bafca372e69789540d16e3755dd717e9e5c9d82f", + "sha256:d6f8c23f65a4bfe4300b85f1f40f6c32569822d08901db3b6454ab785d9117cc", + "sha256:d84d741c6e35c9f3e7406cb7c4c2e08474c2a6441d59322a00dcae65aac6315d", + "sha256:e65c221b2115a91035b55a593b6eb94aa1206fa3ab374f47c6dc10d364583ff9", + "sha256:f98b6f256be6cec8dd308a8563976ddaff0bdc18b730720f6f4bee927ffe926f" + ], + "index": "pypi", + "version": "==4.6.1" }, "markdownify": { "hashes": [ @@ -415,11 +429,11 @@ }, "more-itertools": { "hashes": [ - "sha256:6f83822ae94818eae2612063a5101a7311e68ae8002005b5e05f03fd74a86a20", - "sha256:9b30f12df9393f0d28af9210ff8efe48d10c94f73e5daf886f10c4b0b0b4f03c" + "sha256:8e1a2a43b2f2727425f2b5839587ae37093f19153dc26c0927d1048ff6557330", + "sha256:b3a9005928e5bed54076e6e549c792b306fddfe72b2d1d22dd63d42d5d3899cf" ], "index": "pypi", - "version": "==8.5.0" + "version": "==8.6.0" }, "multidict": { "hashes": [ @@ -510,11 +524,11 @@ }, "pygments": { "hashes": [ - "sha256:307543fe65c0947b126e83dd5a61bd8acbd84abec11f43caebaf5534cbc17998", - "sha256:926c3f319eda178d1bd90851e4317e6d8cdb5e292a3386aac9bd75eca29cf9c7" + "sha256:381985fcc551eb9d37c52088a32914e00517e57f4a21609f48141ba08e193fa0", + "sha256:88a0bbcd659fcb9573703957c6b9cff9fab7295e6e76db54c9d00ae42df32773" ], "markers": "python_version >= '3.5'", - "version": "==2.7.1" + "version": "==2.7.2" }, "pyparsing": { "hashes": [ @@ 
-534,23 +548,25 @@ }, "pytz": { "hashes": [ - "sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed", - "sha256:c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048" + "sha256:3e6b7dd2d1e0a59084bcee14a17af60c5c562cdc16d828e8eba2e683d3a7e268", + "sha256:5c55e189b682d420be27c6995ba6edce0c0a77dd67bfbe2ae6607134d5851ffd" ], - "version": "==2020.1" + "version": "==2020.4" }, "pyyaml": { "hashes": [ - "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", + "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", + "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a", + "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76", - "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", + "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e", "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648", - "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", + "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", + "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f", - "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2", + "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee", - "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", - "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", + "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2", "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a" ], "index": "pypi", @@ -566,19 +582,19 @@ }, "requests": { "hashes": [ - "sha256:b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b", - "sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898" + "sha256:7f1a0b932f4a60a1a65caa4263921bb7d9ee911957e0ae4a23a6dd08185ad5f8", + "sha256:e786fa28d8c9154e6a4de5d46a1d921b8749f8b74e28bde23768e5e16eece998" ], "index": "pypi", - "version": "==2.24.0" + "version": "==2.25.0" }, "sentry-sdk": { "hashes": [ - "sha256:c9c0fa1412bad87104c4eee8dd36c7bbf60b0d92ae917ab519094779b22e6d9a", - "sha256:e159f7c919d19ae86e5a4ff370fccc45149fab461fbeb93fb5a735a0b33a9cb1" + "sha256:1052f0ed084e532f66cb3e4ba617960d820152aee8b93fc6c05bd53861768c1c", + "sha256:4c42910a55a6b1fe694d5e4790d5188d105d77b5a6346c1c64cbea8c06c0e8b7" ], "index": "pypi", - "version": "==0.17.8" + "version": "==0.19.4" }, "six": { "hashes": [ @@ -597,10 +613,10 @@ }, "sortedcontainers": { "hashes": [ - "sha256:4e73a757831fc3ca4de2859c422564239a31d8213d09a2a666e375807034d2ba", - "sha256:c633ebde8580f241f274c1f8994a665c0e54a17724fecd0cae2f079e09c36d3f" + "sha256:37257a32add0a3ee490bb170b599e93095eed89a55da91fa9f48753ea12fd73f", + "sha256:59cc937650cf60d677c16775597c89a960658a09cf7c1a668f86e1e4464b10a1" ], - "version": "==2.2.2" + "version": "==2.3.0" }, "soupsieve": { "hashes": [ @@ -676,34 +692,34 @@ }, "urllib3": { "hashes": [ - "sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a", - "sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461" + "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08", + "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473" 
], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", - "version": "==1.25.10" + "version": "==1.26.2" }, "yarl": { "hashes": [ - "sha256:04a54f126a0732af75e5edc9addeaa2113e2ca7c6fce8974a63549a70a25e50e", - "sha256:3cc860d72ed989f3b1f3abbd6ecf38e412de722fb38b8f1b1a086315cf0d69c5", - "sha256:5d84cc36981eb5a8533be79d6c43454c8e6a39ee3118ceaadbd3c029ab2ee580", - "sha256:5e447e7f3780f44f890360ea973418025e8c0cdcd7d6a1b221d952600fd945dc", - "sha256:61d3ea3c175fe45f1498af868879c6ffeb989d4143ac542163c45538ba5ec21b", - "sha256:67c5ea0970da882eaf9efcf65b66792557c526f8e55f752194eff8ec722c75c2", - "sha256:6f6898429ec3c4cfbef12907047136fd7b9e81a6ee9f105b45505e633427330a", - "sha256:7ce35944e8e61927a8f4eb78f5bc5d1e6da6d40eadd77e3f79d4e9399e263921", - "sha256:b7c199d2cbaf892ba0f91ed36d12ff41ecd0dde46cbf64ff4bfe997a3ebc925e", - "sha256:c15d71a640fb1f8e98a1423f9c64d7f1f6a3a168f803042eaf3a5b5022fde0c1", - "sha256:c22607421f49c0cb6ff3ed593a49b6a99c6ffdeaaa6c944cdda83c2393c8864d", - "sha256:c604998ab8115db802cc55cb1b91619b2831a6128a62ca7eea577fc8ea4d3131", - "sha256:d088ea9319e49273f25b1c96a3763bf19a882cff774d1792ae6fba34bd40550a", - "sha256:db9eb8307219d7e09b33bcb43287222ef35cbcf1586ba9472b0a4b833666ada1", - "sha256:e31fef4e7b68184545c3d68baec7074532e077bd1906b040ecfba659737df188", - "sha256:e32f0fb443afcfe7f01f95172b66f279938fbc6bdaebe294b0ff6747fb6db020", - "sha256:fcbe419805c9b20db9a51d33b942feddbf6e7fb468cb20686fd7089d4164c12a" + "sha256:040b237f58ff7d800e6e0fd89c8439b841f777dd99b4a9cca04d6935564b9409", + "sha256:17668ec6722b1b7a3a05cc0167659f6c95b436d25a36c2d52db0eca7d3f72593", + "sha256:3a584b28086bc93c888a6c2aa5c92ed1ae20932f078c46509a66dce9ea5533f2", + "sha256:4439be27e4eee76c7632c2427ca5e73703151b22cae23e64adb243a9c2f565d8", + "sha256:48e918b05850fffb070a496d2b5f97fc31d15d94ca33d3d08a4f86e26d4e7c5d", + "sha256:9102b59e8337f9874638fcfc9ac3734a0cfadb100e47d55c20d0dc6087fb4692", + "sha256:9b930776c0ae0c691776f4d2891ebc5362af86f152dd0da463a6614074cb1b02", + "sha256:b3b9ad80f8b68519cc3372a6ca85ae02cc5a8807723ac366b53c0f089db19e4a", + "sha256:bc2f976c0e918659f723401c4f834deb8a8e7798a71be4382e024bcc3f7e23a8", + "sha256:c22c75b5f394f3d47105045ea551e08a3e804dc7e01b37800ca35b58f856c3d6", + "sha256:c52ce2883dc193824989a9b97a76ca86ecd1fa7955b14f87bf367a61b6232511", + "sha256:ce584af5de8830d8701b8979b18fcf450cef9a382b1a3c8ef189bedc408faf1e", + "sha256:da456eeec17fa8aa4594d9a9f27c0b1060b6a75f2419fe0c00609587b2695f4a", + "sha256:db6db0f45d2c63ddb1a9d18d1b9b22f308e52c83638c26b422d520a815c4b3fb", + "sha256:df89642981b94e7db5596818499c4b2219028f2a528c9c37cc1de45bf2fd3a3f", + "sha256:f18d68f2be6bf0e89f1521af2b1bb46e66ab0018faafa81d70f358153170a317", + "sha256:f379b7f83f23fe12823085cd6b906edc49df969eb99757f58ff382349a3303c6" ], "markers": "python_version >= '3.5'", - "version": "==1.6.0" + "version": "==1.5.1" } }, "develop": { @@ -716,11 +732,18 @@ }, "attrs": { "hashes": [ - "sha256:26b54ddbbb9ee1d34d5d3668dd37d6cf74990ab23c828c2888dccdceee395594", - "sha256:fce7fc47dfc976152e82d53ff92fa0407700c21acd20886a13777a0d20e655dc" + "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", + "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==20.2.0" + "version": "==20.3.0" + }, + "certifi": { + "hashes": [ + "sha256:1f422849db327d534e3d0c5f02a263458c3955ec0aae4ff09b95f195c59f4edd", + 
"sha256:f05def092c44fbf25834a51509ef6e631dc19765ab8a57b4e7ab85531f0a9cf4" + ], + "version": "==2020.11.8" }, "cfgv": { "hashes": [ @@ -730,6 +753,13 @@ "markers": "python_full_version >= '3.6.1'", "version": "==3.2.0" }, + "chardet": { + "hashes": [ + "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", + "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" + ], + "version": "==3.0.4" + }, "coverage": { "hashes": [ "sha256:0203acd33d2298e19b57451ebb0bed0ab0c602e5cf5a818591b4918b1f97d516", @@ -770,6 +800,14 @@ "index": "pypi", "version": "==5.3" }, + "coveralls": { + "hashes": [ + "sha256:2301a19500b06649d2ec4f2858f9c69638d7699a4c63027c5d53daba666147cc", + "sha256:b990ba1f7bc4288e63340be0433698c1efe8217f78c689d254c2540af3d38617" + ], + "index": "pypi", + "version": "==2.2.0" + }, "distlib": { "hashes": [ "sha256:8c09de2c67b3e7deef7184574fc060ab8a793e7adbb183d942c389c8b13c52fb", @@ -777,6 +815,12 @@ ], "version": "==0.3.1" }, + "docopt": { + "hashes": [ + "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491" + ], + "version": "==0.6.2" + }, "filelock": { "hashes": [ "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59", @@ -786,19 +830,19 @@ }, "flake8": { "hashes": [ - "sha256:15e351d19611c887e482fb960eae4d44845013cc142d42896e9862f775d8cf5c", - "sha256:f04b9fcbac03b0a3e58c0ab3a0ecc462e023a9faf046d57794184028123aa208" + "sha256:749dbbd6bfd0cf1318af27bf97a14e28e5ff548ef8e5b1566ccfb25a11e7c839", + "sha256:aadae8761ec651813c24be05c6f7b4680857ef6afaae4651a4eccaef97ce6c3b" ], "index": "pypi", - "version": "==3.8.3" + "version": "==3.8.4" }, "flake8-annotations": { "hashes": [ - "sha256:09fe1aa3f40cb8fef632a0ab3614050a7584bb884b6134e70cf1fc9eeee642fa", - "sha256:5bda552f074fd6e34276c7761756fa07d824ffac91ce9c0a8555eb2bc5b92d7a" + "sha256:0bcebb0792f1f96d617ded674dca7bf64181870bfe5dace353a1483551f8e5f1", + "sha256:bebd11a850f6987a943ce8cdff4159767e0f5f89b3c88aca64680c2175ee02df" ], "index": "pypi", - "version": "==2.4.0" + "version": "==2.4.1" }, "flake8-bugbear": { "hashes": [ @@ -856,11 +900,19 @@ }, "identify": { "hashes": [ - "sha256:7c22c384a2c9b32c5cc891d13f923f6b2653aa83e2d75d8f79be240d6c86c4f4", - "sha256:da683bfb7669fa749fc7731f378229e2dbf29a1d1337cbde04106f02236eb29d" + "sha256:5dd84ac64a9a115b8e0b27d1756b244b882ad264c3c423f42af8235a6e71ca12", + "sha256:c9504ba6a043ee2db0a9d69e43246bc138034895f6338d5aed1b41e4a73b1513" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.5.9" + }, + "idna": { + "hashes": [ + "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", + "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.5.5" + "version": "==2.10" }, "mccabe": { "hashes": [ @@ -886,11 +938,11 @@ }, "pre-commit": { "hashes": [ - "sha256:810aef2a2ba4f31eed1941fc270e72696a1ad5590b9751839c90807d0fff6b9a", - "sha256:c54fd3e574565fe128ecc5e7d2f91279772ddb03f8729645fa812fe809084a70" + "sha256:22e6aa3bd571debb01eb7d34483f11c01b65237be4eebbf30c3d4fb65762d315", + "sha256:905ebc9b534b991baec87e934431f2d0606ba27f2b90f7f652985f5a5b8b6ae6" ], "index": "pypi", - "version": "==2.7.1" + "version": "==2.8.2" }, "pycodestyle": { "hashes": [ @@ -918,21 +970,31 @@ }, "pyyaml": { "hashes": [ - "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", + 
"sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", + "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a", + "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76", - "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", + "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e", "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648", - "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", + "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", + "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f", - "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2", + "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee", - "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", - "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", + "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2", "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a" ], "index": "pypi", "version": "==5.3.1" }, + "requests": { + "hashes": [ + "sha256:7f1a0b932f4a60a1a65caa4263921bb7d9ee911957e0ae4a23a6dd08185ad5f8", + "sha256:e786fa28d8c9154e6a4de5d46a1d921b8749f8b74e28bde23768e5e16eece998" + ], + "index": "pypi", + "version": "==2.25.0" + }, "six": { "hashes": [ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", @@ -950,26 +1012,27 @@ }, "toml": { "hashes": [ - "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f", - "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88" + "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", + "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" ], - "version": "==0.10.1" + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==0.10.2" }, - "unittest-xml-reporting": { + "urllib3": { "hashes": [ - "sha256:7bf515ea8cb244255a25100cd29db611a73f8d3d0aaf672ed3266307e14cc1ca", - "sha256:984cebba69e889401bfe3adb9088ca376b3a1f923f0590d005126c1bffd1a695" + "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08", + "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473" ], - "index": "pypi", - "version": "==3.0.4" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", + "version": "==1.26.2" }, "virtualenv": { "hashes": [ - "sha256:43add625c53c596d38f971a465553f6318decc39d98512bc100fa1b1e839c8dc", - "sha256:e0305af10299a7fb0d69393d8f04cb2965dda9351140d11ac8db4e5e3970451b" + "sha256:b0011228208944ce71052987437d3843e05690b2f23d1c7da4263fde104c97a2", + "sha256:b8d6110f493af256a40d65e29846c69340a947669eec8ce784fcf3dd3af28380" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==20.0.31" + "version": "==20.1.0" } } } @@ -1,9 +1,10 @@ # Python Utility Bot -[](https://discord.gg/2B963hn) -[](https://dev.azure.com/python-discord/Python%20Discord/_build/latest?definitionId=1&branchName=master) -[](https://dev.azure.com/python-discord/Python%20Discord/_apis/build/status/Bot?branchName=master) 
-[](https://dev.azure.com/python-discord/Python%20Discord/_apis/build/status/Bot?branchName=master) +[![Discord][7]][8] +[![Lint & Test][1]][2] +[![Build][3]][4] +[![Deploy][5]][6] +[](https://coveralls.io/github/python-discord/bot) [](LICENSE) [](https://pythondiscord.com) @@ -11,3 +12,12 @@ This project is a Discord bot specifically for use with the Python Discord serve and other tools to help keep the server running like a well-oiled machine. Read the [Contributing Guide](https://pythondiscord.com/pages/contributing/bot/) on our website if you're interested in helping out. + +[1]: https://github.com/python-discord/bot/workflows/Lint%20&%20Test/badge.svg?branch=master +[2]: https://github.com/python-discord/bot/actions?query=workflow%3A%22Lint+%26+Test%22+branch%3Amaster +[3]: https://github.com/python-discord/bot/workflows/Build/badge.svg?branch=master +[4]: https://github.com/python-discord/bot/actions?query=workflow%3ABuild+branch%3Amaster +[5]: https://github.com/python-discord/bot/workflows/Deploy/badge.svg?branch=master +[6]: https://github.com/python-discord/bot/actions?query=workflow%3ADeploy+branch%3Amaster +[7]: https://img.shields.io/static/v1?label=Python%20Discord&logo=discord&message=%3E100k%20members&color=%237289DA&logoColor=white +[8]: https://discord.gg/2B963hn diff --git a/azure-pipelines.yml b/azure-pipelines.yml deleted file mode 100644 index 4500cb6e8..000000000 --- a/azure-pipelines.yml +++ /dev/null @@ -1,107 +0,0 @@ -# https://aka.ms/yaml - -variables: - PIP_NO_CACHE_DIR: false - PIP_USER: 1 - PIPENV_HIDE_EMOJIS: 1 - PIPENV_IGNORE_VIRTUALENVS: 1 - PIPENV_NOSPIN: 1 - PRE_COMMIT_HOME: $(Pipeline.Workspace)/pre-commit-cache - PYTHONUSERBASE: $(Pipeline.Workspace)/py-user-base - -jobs: - - job: test - displayName: 'Lint & Test' - pool: - vmImage: ubuntu-18.04 - - variables: - BOT_API_KEY: foo - BOT_SENTRY_DSN: blah - BOT_TOKEN: bar - REDDIT_CLIENT_ID: spam - REDDIT_SECRET: ham - WOLFRAM_API_KEY: baz - REDIS_PASSWORD: '' - - steps: - - task: UsePythonVersion@0 - displayName: 'Set Python version' - name: python - inputs: - versionSpec: '3.8.x' - addToPath: true - - - task: Cache@2 - displayName: 'Restore Python environment' - inputs: - key: python | $(Agent.OS) | "$(python.pythonLocation)" | 0 | ./Pipfile | ./Pipfile.lock - cacheHitVar: PY_ENV_RESTORED - path: $(PYTHONUSERBASE) - - - script: echo '##vso[task.prependpath]$(PYTHONUSERBASE)/bin' - displayName: 'Prepend PATH' - - - script: pip install pipenv - displayName: 'Install pipenv' - condition: and(succeeded(), ne(variables.PY_ENV_RESTORED, 'true')) - - - script: pipenv install --dev --deploy --system - displayName: 'Install project using pipenv' - condition: and(succeeded(), ne(variables.PY_ENV_RESTORED, 'true')) - - # Create an executable shell script which replaces the original pipenv binary. - # The shell script ignores the first argument and executes the rest of the args as a command. - # It makes the `pipenv run flake8` command in the pre-commit hook work by circumventing - # pipenv entirely, which is too dumb to know it should use the system interpreter rather than - # creating a new venv. 
- - script: | - printf '%s\n%s' '#!/bin/bash' '"${@:2}"' > $(python.pythonLocation)/bin/pipenv \ - && chmod +x $(python.pythonLocation)/bin/pipenv - displayName: 'Mock pipenv binary' - - - task: Cache@2 - displayName: 'Restore pre-commit environment' - inputs: - key: pre-commit | "$(python.pythonLocation)" | 0 | .pre-commit-config.yaml - path: $(PRE_COMMIT_HOME) - - # pre-commit's venv doesn't allow user installs - not that they're really needed anyway. - - script: export PIP_USER=0; pre-commit run --all-files - displayName: 'Run pre-commit hooks' - - - script: coverage run -m xmlrunner - displayName: Run tests - - - script: coverage report -m && coverage xml -o coverage.xml - displayName: Generate test coverage report - - - task: PublishCodeCoverageResults@1 - displayName: 'Publish Coverage Results' - condition: succeededOrFailed() - inputs: - codeCoverageTool: Cobertura - summaryFileLocation: coverage.xml - - - task: PublishTestResults@2 - condition: succeededOrFailed() - displayName: 'Publish Test Results' - inputs: - testResultsFiles: '**/TEST-*.xml' - testRunTitle: 'Bot Test Results' - - - job: build - displayName: 'Build & Push Container' - dependsOn: 'test' - condition: and(succeeded(), ne(variables['Build.Reason'], 'PullRequest'), eq(variables['Build.SourceBranch'], 'refs/heads/master')) - - steps: - - task: Docker@2 - displayName: 'Build & Push Container' - inputs: - containerRegistry: 'DockerHub' - repository: 'pythondiscord/bot' - command: 'buildAndPush' - Dockerfile: 'Dockerfile' - buildContext: '.' - tags: 'latest' diff --git a/bot/__init__.py b/bot/__init__.py index 4fce04532..8f880b8e6 100644 --- a/bot/__init__.py +++ b/bot/__init__.py @@ -1,78 +1,25 @@ import asyncio -import logging import os -import sys from functools import partial, partialmethod -from logging import Logger, handlers -from pathlib import Path +from typing import TYPE_CHECKING -import coloredlogs from discord.ext import commands +from bot import log from bot.command import Command -TRACE_LEVEL = logging.TRACE = 5 -logging.addLevelName(TRACE_LEVEL, "TRACE") - - -def monkeypatch_trace(self: logging.Logger, msg: str, *args, **kwargs) -> None: - """ - Log 'msg % args' with severity 'TRACE'. - - To pass exception information, use the keyword argument exc_info with - a true value, e.g. 
- - logger.trace("Houston, we have an %s", "interesting problem", exc_info=1) - """ - if self.isEnabledFor(TRACE_LEVEL): - self._log(TRACE_LEVEL, msg, args, **kwargs) - - -Logger.trace = monkeypatch_trace - -DEBUG_MODE = 'local' in os.environ.get("SITE_URL", "local") - -log_level = TRACE_LEVEL if DEBUG_MODE else logging.INFO -format_string = "%(asctime)s | %(name)s | %(levelname)s | %(message)s" -log_format = logging.Formatter(format_string) - -log_file = Path("logs", "bot.log") -log_file.parent.mkdir(exist_ok=True) -file_handler = handlers.RotatingFileHandler(log_file, maxBytes=5242880, backupCount=7, encoding="utf8") -file_handler.setFormatter(log_format) - -root_log = logging.getLogger() -root_log.setLevel(log_level) -root_log.addHandler(file_handler) - -if "COLOREDLOGS_LEVEL_STYLES" not in os.environ: - coloredlogs.DEFAULT_LEVEL_STYLES = { - **coloredlogs.DEFAULT_LEVEL_STYLES, - "trace": {"color": 246}, - "critical": {"background": "red"}, - "debug": coloredlogs.DEFAULT_LEVEL_STYLES["info"] - } - -if "COLOREDLOGS_LOG_FORMAT" not in os.environ: - coloredlogs.DEFAULT_LOG_FORMAT = format_string - -if "COLOREDLOGS_LOG_LEVEL" not in os.environ: - coloredlogs.DEFAULT_LOG_LEVEL = log_level - -coloredlogs.install(logger=root_log, stream=sys.stdout) - -logging.getLogger("discord").setLevel(logging.WARNING) -logging.getLogger("websockets").setLevel(logging.WARNING) -logging.getLogger("chardet").setLevel(logging.WARNING) -logging.getLogger("async_rediscache").setLevel(logging.WARNING) +if TYPE_CHECKING: + from bot.bot import Bot +log.setup() # On Windows, the selector event loop is required for aiodns. if os.name == "nt": asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) - # Monkey-patch discord.py decorators to use the Command subclass which supports root aliases. # Must be patched before any cogs are added. commands.command = partial(commands.command, cls=Command) commands.GroupMixin.command = partialmethod(commands.GroupMixin.command, cls=Command) + +instance: "Bot" = None # Global Bot instance. diff --git a/bot/__main__.py b/bot/__main__.py index 367be1300..257216fa7 100644 --- a/bot/__main__.py +++ b/bot/__main__.py @@ -1,76 +1,10 @@ -import asyncio -import logging - -import discord -import sentry_sdk -from async_rediscache import RedisSession -from discord.ext.commands import when_mentioned_or -from sentry_sdk.integrations.aiohttp import AioHttpIntegration -from sentry_sdk.integrations.logging import LoggingIntegration -from sentry_sdk.integrations.redis import RedisIntegration - +import bot from bot import constants from bot.bot import Bot -from bot.utils.extensions import EXTENSIONS - -# Set up Sentry. -sentry_logging = LoggingIntegration( - level=logging.DEBUG, - event_level=logging.WARNING -) - -sentry_sdk.init( - dsn=constants.Bot.sentry_dsn, - integrations=[ - sentry_logging, - AioHttpIntegration(), - RedisIntegration(), - ] -) - -# Create the redis session instance. -redis_session = RedisSession( - address=(constants.Redis.host, constants.Redis.port), - password=constants.Redis.password, - minsize=1, - maxsize=20, - use_fakeredis=constants.Redis.use_fakeredis, - global_namespace="bot", -) - -# Connect redis session to ensure it's connected before we try to access Redis -# from somewhere within the bot. We create the event loop in the same way -# discord.py normally does and pass it to the bot's __init__. -loop = asyncio.get_event_loop() -loop.run_until_complete(redis_session.connect()) - - -# Instantiate the bot. 
-allowed_roles = [discord.Object(id_) for id_ in constants.MODERATION_ROLES] -intents = discord.Intents().all() -intents.presences = False -intents.dm_typing = False -intents.dm_reactions = False -intents.invites = False -intents.webhooks = False -intents.integrations = False -bot = Bot( - redis_session=redis_session, - loop=loop, - command_prefix=when_mentioned_or(constants.Bot.prefix), - activity=discord.Game(name=f"Commands: {constants.Bot.prefix}help"), - case_insensitive=True, - max_messages=10_000, - allowed_mentions=discord.AllowedMentions(everyone=False, roles=allowed_roles), - intents=intents, -) - -# Load extensions. -extensions = set(EXTENSIONS) # Create a mutable copy. -if not constants.HelpChannels.enable: - extensions.remove("bot.exts.help_channels") +from bot.log import setup_sentry -for extension in extensions: - bot.load_extension(extension) +setup_sentry() -bot.run(constants.Bot.token) +bot.instance = Bot.create() +bot.instance.load_extensions() +bot.instance.run(constants.Bot.token) diff --git a/bot/bot.py b/bot/bot.py index fbd97dc18..cdb4e72a9 100644 --- a/bot/bot.py +++ b/bot/bot.py @@ -12,7 +12,7 @@ from async_rediscache import RedisSession from discord.ext import commands from sentry_sdk import push_scope -from bot import DEBUG_MODE, api, constants +from bot import api, constants from bot.async_stats import AsyncStatsClient log = logging.getLogger('bot') @@ -41,7 +41,7 @@ class Bot(commands.Bot): statsd_url = constants.Stats.statsd_host - if DEBUG_MODE: + if constants.DEBUG_MODE: # Since statsd is UDP, there are no errors for sending to a down port. # For this reason, setting the statsd host to 127.0.0.1 for development # will effectively disable stats. @@ -99,6 +99,43 @@ class Bot(commands.Bot): # Build the FilterList cache self.loop.create_task(self.cache_filter_list_data()) + @classmethod + def create(cls) -> "Bot": + """Create and return an instance of a Bot.""" + loop = asyncio.get_event_loop() + allowed_roles = [discord.Object(id_) for id_ in constants.MODERATION_ROLES] + + intents = discord.Intents().all() + intents.presences = False + intents.dm_typing = False + intents.dm_reactions = False + intents.invites = False + intents.webhooks = False + intents.integrations = False + + return cls( + redis_session=_create_redis_session(loop), + loop=loop, + command_prefix=commands.when_mentioned_or(constants.Bot.prefix), + activity=discord.Game(name=f"Commands: {constants.Bot.prefix}help"), + case_insensitive=True, + max_messages=10_000, + allowed_mentions=discord.AllowedMentions(everyone=False, roles=allowed_roles), + intents=intents, + ) + + def load_extensions(self) -> None: + """Load all enabled extensions.""" + # Must be done here to avoid a circular import. + from bot.utils.extensions import EXTENSIONS + + extensions = set(EXTENSIONS) # Create a mutable copy. + if not constants.HelpChannels.enable: + extensions.remove("bot.exts.help_channels") + + for extension in extensions: + self.load_extension(extension) + def add_cog(self, cog: commands.Cog) -> None: """Adds a "cog" to the bot and logs the operation.""" super().add_cog(cog) @@ -257,3 +294,22 @@ class Bot(commands.Bot): for alias in getattr(command, "root_aliases", ()): self.all_commands.pop(alias, None) + + +def _create_redis_session(loop: asyncio.AbstractEventLoop) -> RedisSession: + """ + Create and connect to a redis session. + + Ensure the connection is established before returning to prevent race conditions. + `loop` is the event loop on which to connect. The Bot should use this same event loop. 
+ """ + redis_session = RedisSession( + address=(constants.Redis.host, constants.Redis.port), + password=constants.Redis.password, + minsize=1, + maxsize=20, + use_fakeredis=constants.Redis.use_fakeredis, + global_namespace="bot", + ) + loop.run_until_complete(redis_session.connect()) + return redis_session diff --git a/bot/constants.py b/bot/constants.py index 4d41f4eb2..6bb6aacd2 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -248,6 +248,7 @@ class Colours(metaclass=YAMLGetter): soft_red: int soft_green: int soft_orange: int + bright_green: int class DuckPond(metaclass=YAMLGetter): @@ -354,6 +355,8 @@ class Icons(metaclass=YAMLGetter): voice_state_green: str voice_state_red: str + green_checkmark: str + class CleanMessages(metaclass=YAMLGetter): section = "bot" @@ -361,6 +364,7 @@ class CleanMessages(metaclass=YAMLGetter): message_limit: int + class Stats(metaclass=YAMLGetter): section = "bot" subsection = "stats" @@ -601,6 +605,7 @@ class VoiceGate(metaclass=YAMLGetter): minimum_messages: int bot_message_delete_delay: int minimum_activity_blocks: int + voice_ping_delete_delay: int class Event(Enum): @@ -630,7 +635,7 @@ class Event(Enum): # Debug mode -DEBUG_MODE = True if 'local' in os.environ.get("SITE_URL", "local") else False +DEBUG_MODE = 'local' in os.environ.get("SITE_URL", "local") # Paths BOT_DIR = os.path.dirname(__file__) diff --git a/bot/exts/backend/sync/_cog.py b/bot/exts/backend/sync/_cog.py index 6e85e2b7d..48d2b6f02 100644 --- a/bot/exts/backend/sync/_cog.py +++ b/bot/exts/backend/sync/_cog.py @@ -18,9 +18,6 @@ class Sync(Cog): def __init__(self, bot: Bot) -> None: self.bot = bot - self.role_syncer = _syncers.RoleSyncer(self.bot) - self.user_syncer = _syncers.UserSyncer(self.bot) - self.bot.loop.create_task(self.sync_guild()) async def sync_guild(self) -> None: @@ -31,7 +28,7 @@ class Sync(Cog): if guild is None: return - for syncer in (self.role_syncer, self.user_syncer): + for syncer in (_syncers.RoleSyncer, _syncers.UserSyncer): await syncer.sync(guild) async def patch_user(self, user_id: int, json: Dict[str, Any], ignore_404: bool = False) -> None: @@ -171,10 +168,10 @@ class Sync(Cog): @commands.has_permissions(administrator=True) async def sync_roles_command(self, ctx: Context) -> None: """Manually synchronise the guild's roles with the roles on the site.""" - await self.role_syncer.sync(ctx.guild, ctx) + await _syncers.RoleSyncer.sync(ctx.guild, ctx) @sync_group.command(name='users') @commands.has_permissions(administrator=True) async def sync_users_command(self, ctx: Context) -> None: """Manually synchronise the guild's users with the users on the site.""" - await self.user_syncer.sync(ctx.guild, ctx) + await _syncers.UserSyncer.sync(ctx.guild, ctx) diff --git a/bot/exts/backend/sync/_syncers.py b/bot/exts/backend/sync/_syncers.py index 38468c2b1..2eb9f9971 100644 --- a/bot/exts/backend/sync/_syncers.py +++ b/bot/exts/backend/sync/_syncers.py @@ -6,8 +6,8 @@ from collections import namedtuple from discord import Guild from discord.ext.commands import Context +import bot from bot.api import ResponseCodeError -from bot.bot import Bot log = logging.getLogger(__name__) @@ -17,57 +17,60 @@ _Role = namedtuple('Role', ('id', 'name', 'colour', 'permissions', 'position')) _Diff = namedtuple('Diff', ('created', 'updated', 'deleted')) +# Implementation of static abstract methods are not enforced if the subclass is never instantiated. +# However, methods are kept abstract to at least symbolise that they should be abstract. 
class Syncer(abc.ABC): """Base class for synchronising the database with objects in the Discord cache.""" - def __init__(self, bot: Bot) -> None: - self.bot = bot - + @staticmethod @property @abc.abstractmethod - def name(self) -> str: + def name() -> str: """The name of the syncer; used in output messages and logging.""" raise NotImplementedError # pragma: no cover + @staticmethod @abc.abstractmethod - async def _get_diff(self, guild: Guild) -> _Diff: + async def _get_diff(guild: Guild) -> _Diff: """Return the difference between the cache of `guild` and the database.""" raise NotImplementedError # pragma: no cover + @staticmethod @abc.abstractmethod - async def _sync(self, diff: _Diff) -> None: + async def _sync(diff: _Diff) -> None: """Perform the API calls for synchronisation.""" raise NotImplementedError # pragma: no cover - async def sync(self, guild: Guild, ctx: t.Optional[Context] = None) -> None: + @classmethod + async def sync(cls, guild: Guild, ctx: t.Optional[Context] = None) -> None: """ Synchronise the database with the cache of `guild`. If `ctx` is given, send a message with the results. """ - log.info(f"Starting {self.name} syncer.") + log.info(f"Starting {cls.name} syncer.") if ctx: - message = await ctx.send(f"📊 Synchronising {self.name}s.") + message = await ctx.send(f"📊 Synchronising {cls.name}s.") else: message = None - diff = await self._get_diff(guild) + diff = await cls._get_diff(guild) try: - await self._sync(diff) + await cls._sync(diff) except ResponseCodeError as e: - log.exception(f"{self.name} syncer failed!") + log.exception(f"{cls.name} syncer failed!") # Don't show response text because it's probably some really long HTML. results = f"status {e.status}\n```{e.response_json or 'See log output for details'}```" - content = f":x: Synchronisation of {self.name}s failed: {results}" + content = f":x: Synchronisation of {cls.name}s failed: {results}" else: diff_dict = diff._asdict() results = (f"{name} `{len(val)}`" for name, val in diff_dict.items() if val is not None) results = ", ".join(results) - log.info(f"{self.name} syncer finished: {results}.") - content = f":ok_hand: Synchronisation of {self.name}s complete: {results}" + log.info(f"{cls.name} syncer finished: {results}.") + content = f":ok_hand: Synchronisation of {cls.name}s complete: {results}" if message: await message.edit(content=content) @@ -78,10 +81,11 @@ class RoleSyncer(Syncer): name = "role" - async def _get_diff(self, guild: Guild) -> _Diff: + @staticmethod + async def _get_diff(guild: Guild) -> _Diff: """Return the difference of roles between the cache of `guild` and the database.""" log.trace("Getting the diff for roles.") - roles = await self.bot.api_client.get('bot/roles') + roles = await bot.instance.api_client.get('bot/roles') # Pack DB roles and guild roles into one common, hashable format. # They're hashable so that they're easily comparable with sets later. 
@@ -110,19 +114,20 @@ class RoleSyncer(Syncer): return _Diff(roles_to_create, roles_to_update, roles_to_delete) - async def _sync(self, diff: _Diff) -> None: + @staticmethod + async def _sync(diff: _Diff) -> None: """Synchronise the database with the role cache of `guild`.""" log.trace("Syncing created roles...") for role in diff.created: - await self.bot.api_client.post('bot/roles', json=role._asdict()) + await bot.instance.api_client.post('bot/roles', json=role._asdict()) log.trace("Syncing updated roles...") for role in diff.updated: - await self.bot.api_client.put(f'bot/roles/{role.id}', json=role._asdict()) + await bot.instance.api_client.put(f'bot/roles/{role.id}', json=role._asdict()) log.trace("Syncing deleted roles...") for role in diff.deleted: - await self.bot.api_client.delete(f'bot/roles/{role.id}') + await bot.instance.api_client.delete(f'bot/roles/{role.id}') class UserSyncer(Syncer): @@ -130,7 +135,8 @@ class UserSyncer(Syncer): name = "user" - async def _get_diff(self, guild: Guild) -> _Diff: + @staticmethod + async def _get_diff(guild: Guild) -> _Diff: """Return the difference of users between the cache of `guild` and the database.""" log.trace("Getting the diff for users.") @@ -138,7 +144,7 @@ class UserSyncer(Syncer): users_to_update = [] seen_guild_users = set() - async for db_user in self._get_users(): + async for db_user in UserSyncer._get_users(): # Store user fields which are to be updated. updated_fields = {} @@ -185,24 +191,26 @@ class UserSyncer(Syncer): return _Diff(users_to_create, users_to_update, None) - async def _get_users(self) -> t.AsyncIterable: + @staticmethod + async def _get_users() -> t.AsyncIterable: """GET users from database.""" query_params = { "page": 1 } while query_params["page"]: - res = await self.bot.api_client.get("bot/users", params=query_params) + res = await bot.instance.api_client.get("bot/users", params=query_params) for user in res["results"]: yield user query_params["page"] = res["next_page_no"] - async def _sync(self, diff: _Diff) -> None: + @staticmethod + async def _sync(diff: _Diff) -> None: """Synchronise the database with the user cache of `guild`.""" log.trace("Syncing created users...") if diff.created: - await self.bot.api_client.post("bot/users", json=diff.created) + await bot.instance.api_client.post("bot/users", json=diff.created) log.trace("Syncing updated users...") if diff.updated: - await self.bot.api_client.patch("bot/users/bulk_patch", json=diff.updated) + await bot.instance.api_client.patch("bot/users/bulk_patch", json=diff.updated) diff --git a/bot/exts/help_channels.py b/bot/exts/help_channels.py index 062d4fcfe..ced2f72ef 100644 --- a/bot/exts/help_channels.py +++ b/bot/exts/help_channels.py @@ -28,17 +28,21 @@ This is a Python help channel. You can claim your own help channel in the Python """ AVAILABLE_MSG = f""" -This help channel is now **available**, which means that you can claim it by simply typing your \ -question into it. Once claimed, the channel will move into the **Python Help: Occupied** category, \ -and will be yours until it has been inactive for {constants.HelpChannels.idle_minutes} minutes or \ -is closed manually with `!close`. When that happens, it will be set to **dormant** and moved into \ -the **Help: Dormant** category. - -Try to write the best question you can by providing a detailed description and telling us what \ -you've tried already. For more information on asking a good question, \ -check out our guide on **[asking good questions]({ASKING_GUIDE_URL})**. 
+**Send your question here to claim the channel** +This channel will be dedicated to answering your question only. Others will try to answer and help you solve the issue. + +**Keep in mind:** +• It's always ok to just ask your question. You don't need permission. +• Explain what you expect to happen and what actually happens. +• Include a code sample and error message, if you got any. + +For more tips, check out our guide on **[asking good questions]({ASKING_GUIDE_URL})**. """ +AVAILABLE_TITLE = "Available help channel" + +AVAILABLE_FOOTER = f"Closes after {constants.HelpChannels.idle_minutes} minutes of inactivity or when you send !close." + DORMANT_MSG = f""" This help channel has been marked as **dormant**, and has been moved into the **Help: Dormant** \ category at the bottom of the channel list. It is no longer possible to send messages in this \ @@ -380,16 +384,13 @@ class HelpChannels(commands.Cog): try: self.available_category = await channel_utils.try_get_channel( - constants.Categories.help_available, - self.bot + constants.Categories.help_available ) self.in_use_category = await channel_utils.try_get_channel( - constants.Categories.help_in_use, - self.bot + constants.Categories.help_in_use ) self.dormant_category = await channel_utils.try_get_channel( - constants.Categories.help_dormant, - self.bot + constants.Categories.help_dormant ) except discord.HTTPException: log.exception("Failed to get a category; cog will be removed") @@ -500,7 +501,7 @@ class HelpChannels(commands.Cog): options should be avoided, as it may interfere with the category move we perform. """ # Get a fresh copy of the category from the bot to avoid the cache mismatch issue we had. - category = await channel_utils.try_get_channel(category_id, self.bot) + category = await channel_utils.try_get_channel(category_id) payload = [{"id": c.id, "position": c.position} for c in category.channels] @@ -837,7 +838,12 @@ class HelpChannels(commands.Cog): channel_info = f"#{channel} ({channel.id})" log.trace(f"Sending available message in {channel_info}.") - embed = discord.Embed(description=AVAILABLE_MSG) + embed = discord.Embed( + color=constants.Colours.bright_green, + description=AVAILABLE_MSG, + ) + embed.set_author(name=AVAILABLE_TITLE, icon_url=constants.Icons.green_checkmark) + embed.set_footer(text=AVAILABLE_FOOTER) msg = await self.get_last_message(channel) if self.match_bot_embed(msg, DORMANT_MSG): diff --git a/bot/exts/info/codeblock/_cog.py b/bot/exts/info/codeblock/_cog.py index 1e0feab0d..9094d9d15 100644 --- a/bot/exts/info/codeblock/_cog.py +++ b/bot/exts/info/codeblock/_cog.py @@ -114,7 +114,7 @@ class CodeBlockCog(Cog, name="Code Block"): bot_message = await message.channel.send(f"Hey {message.author.mention}!", embed=embed) self.codeblock_message_ids[message.id] = bot_message.id - self.bot.loop.create_task(wait_for_deletion(bot_message, (message.author.id,), self.bot)) + self.bot.loop.create_task(wait_for_deletion(bot_message, (message.author.id,))) # Increase amount of codeblock correction in stats self.bot.stats.incr("codeblock_corrections") diff --git a/bot/exts/info/codeblock/_parsing.py b/bot/exts/info/codeblock/_parsing.py index 65a2272c8..e35fbca22 100644 --- a/bot/exts/info/codeblock/_parsing.py +++ b/bot/exts/info/codeblock/_parsing.py @@ -36,7 +36,7 @@ _RE_CODE_BLOCK = re.compile( (?P<tick>[{''.join(_TICKS)}]) # Put all ticks into a character class within a group. \2{{2}} # Match previous group 2 more times to ensure the same char. ) - (?P<lang>[^\W_]+\n)? 
# Optionally match a language specifier followed by a newline. + (?P<lang>[A-Za-z0-9\+\-\.]+\n)? # Optionally match a language specifier followed by a newline. (?P<code>.+?) # Match the actual code within the block. \1 # Match the same 3 ticks used at the start of the block. """, diff --git a/bot/exts/info/doc.py b/bot/exts/info/doc.py index 7ec8caa4b..9b5bd6504 100644 --- a/bot/exts/info/doc.py +++ b/bot/exts/info/doc.py @@ -365,7 +365,7 @@ class Doc(commands.Cog): await ctx.message.delete(delay=NOT_FOUND_DELETE_DELAY) else: msg = await ctx.send(embed=doc_embed) - await wait_for_deletion(msg, (ctx.author.id,), client=self.bot) + await wait_for_deletion(msg, (ctx.author.id,)) @docs_group.command(name='set', aliases=('s',)) @commands.has_any_role(*MODERATION_ROLES) diff --git a/bot/exts/info/help.py b/bot/exts/info/help.py index 599c5d5c0..461ff82fd 100644 --- a/bot/exts/info/help.py +++ b/bot/exts/info/help.py @@ -186,7 +186,7 @@ class CustomHelpCommand(HelpCommand): """Send help for a single command.""" embed = await self.command_formatting(command) message = await self.context.send(embed=embed) - await wait_for_deletion(message, (self.context.author.id,), self.context.bot) + await wait_for_deletion(message, (self.context.author.id,)) @staticmethod def get_commands_brief_details(commands_: List[Command], return_as_list: bool = False) -> Union[List[str], str]: @@ -225,7 +225,7 @@ class CustomHelpCommand(HelpCommand): embed.description += f"\n**Subcommands:**\n{command_details}" message = await self.context.send(embed=embed) - await wait_for_deletion(message, (self.context.author.id,), self.context.bot) + await wait_for_deletion(message, (self.context.author.id,)) async def send_cog_help(self, cog: Cog) -> None: """Send help for a cog.""" @@ -241,7 +241,7 @@ class CustomHelpCommand(HelpCommand): embed.description += f"\n\n**Commands:**\n{command_details}" message = await self.context.send(embed=embed) - await wait_for_deletion(message, (self.context.author.id,), self.context.bot) + await wait_for_deletion(message, (self.context.author.id,)) @staticmethod def _category_key(command: Command) -> str: diff --git a/bot/exts/info/tags.py b/bot/exts/info/tags.py index ae95ac1ef..8f15f932b 100644 --- a/bot/exts/info/tags.py +++ b/bot/exts/info/tags.py @@ -236,7 +236,6 @@ class Tags(Cog): await wait_for_deletion( await ctx.send(embed=Embed.from_dict(tag['embed'])), [ctx.author.id], - self.bot ) elif founds and len(tag_name) >= 3: await wait_for_deletion( @@ -247,7 +246,6 @@ class Tags(Cog): ) ), [ctx.author.id], - self.bot ) else: diff --git a/bot/exts/moderation/voice_gate.py b/bot/exts/moderation/voice_gate.py index 93d96693c..4d48d2c1b 100644 --- a/bot/exts/moderation/voice_gate.py +++ b/bot/exts/moderation/voice_gate.py @@ -4,8 +4,9 @@ from contextlib import suppress from datetime import datetime, timedelta import discord +from async_rediscache import RedisCache from dateutil import parser -from discord import Colour +from discord import Colour, Member, VoiceState from discord.ext.commands import Cog, Context, command from bot.api import ResponseCodeError @@ -17,6 +18,12 @@ from bot.utils.checks import InWhitelistCheckFailure log = logging.getLogger(__name__) +# Flag written to the cog's RedisCache as a value when the Member's (key) notification +# was already removed ~ this signals both that no further notifications should be sent, +# and that the notification does not need to be removed. The implementation relies on +# this being falsey! 
+NO_MSG = 0 + FAILED_MESSAGE = ( """You are not currently eligible to use voice inside Python Discord for the following reasons:\n\n{reasons}""" ) @@ -28,11 +35,22 @@ MESSAGE_FIELD_MAP = { "activity_blocks": f"have been active for fewer than {GateConf.minimum_activity_blocks} ten-minute blocks", } +VOICE_PING = ( + "Wondering why you can't talk in the voice channels? " + "Use the `!voiceverify` command in here to verify. " + "If you don't yet qualify, you'll be told why!" +) + class VoiceGate(Cog): """Voice channels verification management.""" - def __init__(self, bot: Bot): + # RedisCache[t.Union[discord.User.id, discord.Member.id], t.Union[discord.Message.id, int]] + # The cache's keys are the IDs of members who are verified or have joined a voice channel + # The cache's values are either the message ID of the ping message or 0 (NO_MSG) if no message is present + redis_cache = RedisCache() + + def __init__(self, bot: Bot) -> None: self.bot = bot @property @@ -40,6 +58,54 @@ class VoiceGate(Cog): """Get the currently loaded ModLog cog instance.""" return self.bot.get_cog("ModLog") + @redis_cache.atomic_transaction # Fully process each call until starting the next + async def _delete_ping(self, member_id: int) -> None: + """ + If `redis_cache` holds a message ID for `member_id`, delete the message. + + If the message was deleted, the value under the `member_id` key is then set to `NO_MSG`. + When `member_id` is not in the cache, or has a value of `NO_MSG` already, this function + does nothing. + """ + if message_id := await self.redis_cache.get(member_id): + log.trace(f"Removing voice gate reminder message for user: {member_id}") + with suppress(discord.NotFound): + await self.bot.http.delete_message(Channels.voice_gate, message_id) + await self.redis_cache.set(member_id, NO_MSG) + else: + log.trace(f"Voice gate reminder message for user {member_id} was already removed") + + @redis_cache.atomic_transaction + async def _ping_newcomer(self, member: discord.Member) -> bool: + """ + See if `member` should be sent a voice verification notification, and send it if so. + + Returns False if the notification was not sent. This happens when: + * The `member` has already received the notification + * The `member` is already voice-verified + + Otherwise, the notification message ID is stored in `redis_cache` and True is returned. + """ + if await self.redis_cache.contains(member.id): + log.trace("User already in cache. Ignore.") + return False + + log.trace("User not in cache and is in a voice channel.") + verified = any(Roles.voice_verified == role.id for role in member.roles) + if verified: + log.trace("User is verified, add to the cache and ignore.") + await self.redis_cache.set(member.id, NO_MSG) + return False + + log.trace("User is unverified. Send ping.") + await self.bot.wait_until_guild_available() + voice_verification_channel = self.bot.get_channel(Channels.voice_gate) + + message = await voice_verification_channel.send(f"Hello, {member.mention}! 
{VOICE_PING}") + await self.redis_cache.set(member.id, message.id) + + return True + @command(aliases=('voiceverify',)) @has_no_roles(Roles.voice_verified) @in_whitelist(channels=(Channels.voice_gate,), redirect=None) @@ -53,6 +119,8 @@ class VoiceGate(Cog): - You must not be actively banned from using our voice channels - You must have been active for over a certain number of 10-minute blocks """ + await self._delete_ping(ctx.author.id) # If user has received a ping in voice_verification, delete the message + try: data = await self.bot.api_client.get(f"bot/users/{ctx.author.id}/metricity_data") except ResponseCodeError as e: @@ -142,8 +210,12 @@ class VoiceGate(Cog): ctx = await self.bot.get_context(message) is_verify_command = ctx.command is not None and ctx.command.name == "voice_verify" - # When it's bot sent message, delete it after some time + # When it's a bot sent message, delete it after some time if message.author.bot: + # Comparing the message with the voice ping constant + if message.content.endswith(VOICE_PING): + log.trace("Message is the voice verification ping. Ignore.") + return with suppress(discord.NotFound): await message.delete(delay=GateConf.bot_message_delete_delay) return @@ -160,6 +232,28 @@ class VoiceGate(Cog): with suppress(discord.NotFound): await message.delete() + @Cog.listener() + async def on_voice_state_update(self, member: Member, before: VoiceState, after: VoiceState) -> None: + """Pings a user if they've never joined the voice chat before and aren't voice verified.""" + if member.bot: + log.trace("User is a bot. Ignore.") + return + + # member.voice will return None if the user is not in a voice channel + if member.voice is None: + log.trace("User not in a voice channel. Ignore.") + return + + # To avoid race conditions, checking if the user should receive a notification + # and sending it if appropriate is delegated to an atomic helper + notification_sent = await self._ping_newcomer(member) + + # Schedule the notification to be deleted after the configured delay, which is + # again delegated to an atomic helper + if notification_sent: + await asyncio.sleep(GateConf.voice_ping_delete_delay) + await self._delete_ping(member.id) + async def cog_command_error(self, ctx: Context, error: Exception) -> None: """Check for & ignore any InWhitelistCheckFailure.""" if isinstance(error, InWhitelistCheckFailure): diff --git a/bot/exts/utils/internal.py b/bot/exts/utils/internal.py index 1b4900f42..3521c8fd4 100644 --- a/bot/exts/utils/internal.py +++ b/bot/exts/utils/internal.py @@ -30,7 +30,7 @@ class Internal(Cog): self.ln = 0 self.stdout = StringIO() - self.interpreter = Interpreter(bot) + self.interpreter = Interpreter() self.socket_since = datetime.utcnow() self.socket_event_total = 0 @@ -195,7 +195,7 @@ async def func(): # (None,) -> Any truncate_index = newline_truncate_index if len(out) > truncate_index: - paste_link = await send_to_paste_service(self.bot.http_session, out, extension="py") + paste_link = await send_to_paste_service(out, extension="py") if paste_link is not None: paste_text = f"full contents at {paste_link}" else: diff --git a/bot/exts/utils/snekbox.py b/bot/exts/utils/snekbox.py index 41cb00541..9f480c067 100644 --- a/bot/exts/utils/snekbox.py +++ b/bot/exts/utils/snekbox.py @@ -70,7 +70,7 @@ class Snekbox(Cog): if len(output) > MAX_PASTE_LEN: log.info("Full output is too long to upload") return "too long to upload" - return await send_to_paste_service(self.bot.http_session, output, extension="txt") + return await 
send_to_paste_service(output, extension="txt") @staticmethod def prepare_input(code: str) -> str: @@ -219,7 +219,7 @@ class Snekbox(Cog): response = await ctx.send("Attempt to circumvent filter detected. Moderator team has been alerted.") else: response = await ctx.send(msg) - self.bot.loop.create_task(wait_for_deletion(response, (ctx.author.id,), ctx.bot)) + self.bot.loop.create_task(wait_for_deletion(response, (ctx.author.id,))) log.info(f"{ctx.author}'s job had a return code of {results['returncode']}") return response diff --git a/bot/interpreter.py b/bot/interpreter.py index 8b7268746..b58f7a6b0 100644 --- a/bot/interpreter.py +++ b/bot/interpreter.py @@ -4,7 +4,7 @@ from typing import Any from discord.ext.commands import Context -from bot.bot import Bot +import bot CODE_TEMPLATE = """ async def _func(): @@ -21,8 +21,8 @@ class Interpreter(InteractiveInterpreter): write_callable = None - def __init__(self, bot: Bot): - locals_ = {"bot": bot} + def __init__(self): + locals_ = {"bot": bot.instance} super().__init__(locals_) async def run(self, code: str, ctx: Context, io: StringIO, *args, **kwargs) -> Any: diff --git a/bot/log.py b/bot/log.py new file mode 100644 index 000000000..13141de40 --- /dev/null +++ b/bot/log.py @@ -0,0 +1,86 @@ +import logging +import os +import sys +from logging import Logger, handlers +from pathlib import Path + +import coloredlogs +import sentry_sdk +from sentry_sdk.integrations.aiohttp import AioHttpIntegration +from sentry_sdk.integrations.logging import LoggingIntegration +from sentry_sdk.integrations.redis import RedisIntegration + +from bot import constants + +TRACE_LEVEL = 5 + + +def setup() -> None: + """Set up loggers.""" + logging.TRACE = TRACE_LEVEL + logging.addLevelName(TRACE_LEVEL, "TRACE") + Logger.trace = _monkeypatch_trace + + log_level = TRACE_LEVEL if constants.DEBUG_MODE else logging.INFO + format_string = "%(asctime)s | %(name)s | %(levelname)s | %(message)s" + log_format = logging.Formatter(format_string) + + log_file = Path("logs", "bot.log") + log_file.parent.mkdir(exist_ok=True) + file_handler = handlers.RotatingFileHandler(log_file, maxBytes=5242880, backupCount=7, encoding="utf8") + file_handler.setFormatter(log_format) + + root_log = logging.getLogger() + root_log.setLevel(log_level) + root_log.addHandler(file_handler) + + if "COLOREDLOGS_LEVEL_STYLES" not in os.environ: + coloredlogs.DEFAULT_LEVEL_STYLES = { + **coloredlogs.DEFAULT_LEVEL_STYLES, + "trace": {"color": 246}, + "critical": {"background": "red"}, + "debug": coloredlogs.DEFAULT_LEVEL_STYLES["info"] + } + + if "COLOREDLOGS_LOG_FORMAT" not in os.environ: + coloredlogs.DEFAULT_LOG_FORMAT = format_string + + if "COLOREDLOGS_LOG_LEVEL" not in os.environ: + coloredlogs.DEFAULT_LOG_LEVEL = log_level + + coloredlogs.install(logger=root_log, stream=sys.stdout) + + logging.getLogger("discord").setLevel(logging.WARNING) + logging.getLogger("websockets").setLevel(logging.WARNING) + logging.getLogger("chardet").setLevel(logging.WARNING) + logging.getLogger("async_rediscache").setLevel(logging.WARNING) + + +def setup_sentry() -> None: + """Set up the Sentry logging integrations.""" + sentry_logging = LoggingIntegration( + level=logging.DEBUG, + event_level=logging.WARNING + ) + + sentry_sdk.init( + dsn=constants.Bot.sentry_dsn, + integrations=[ + sentry_logging, + AioHttpIntegration(), + RedisIntegration(), + ] + ) + + +def _monkeypatch_trace(self: logging.Logger, msg: str, *args, **kwargs) -> None: + """ + Log 'msg % args' with severity 'TRACE'. 
+ + To pass exception information, use the keyword argument exc_info with + a true value, e.g. + + logger.trace("Houston, we have an %s", "interesting problem", exc_info=1) + """ + if self.isEnabledFor(TRACE_LEVEL): + self._log(TRACE_LEVEL, msg, args, **kwargs) diff --git a/bot/resources/tags/guilds.md b/bot/resources/tags/guilds.md new file mode 100644 index 000000000..571abb99b --- /dev/null +++ b/bot/resources/tags/guilds.md @@ -0,0 +1,3 @@ +**Communities** + +The [communities page](https://pythondiscord.com/pages/resources/communities/) on our website contains a number of communities we have partnered with as well as a [curated list](https://github.com/mhxion/awesome-discord-communities) of other communities relating to programming and technology. diff --git a/bot/rules/discord_emojis.py b/bot/rules/discord_emojis.py index 6e47f0197..41faf7ee8 100644 --- a/bot/rules/discord_emojis.py +++ b/bot/rules/discord_emojis.py @@ -2,16 +2,17 @@ import re from typing import Dict, Iterable, List, Optional, Tuple from discord import Member, Message +from emoji import demojize -DISCORD_EMOJI_RE = re.compile(r"<:\w+:\d+>") +DISCORD_EMOJI_RE = re.compile(r"<:\w+:\d+>|:\w+:") CODE_BLOCK_RE = re.compile(r"```.*?```", flags=re.DOTALL) async def apply( last_message: Message, recent_messages: List[Message], config: Dict[str, int] ) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]: - """Detects total Discord emojis (excluding Unicode emojis) exceeding the limit sent by a single user.""" + """Detects total Discord emojis exceeding the limit sent by a single user.""" relevant_messages = tuple( msg for msg in recent_messages @@ -19,8 +20,9 @@ async def apply( ) # Get rid of code blocks in the message before searching for emojis. + # Convert Unicode emojis to :emoji: format to get their count. 
total_emojis = sum( - len(DISCORD_EMOJI_RE.findall(CODE_BLOCK_RE.sub("", msg.content))) + len(DISCORD_EMOJI_RE.findall(demojize(CODE_BLOCK_RE.sub("", msg.content)))) for msg in relevant_messages ) diff --git a/bot/utils/channel.py b/bot/utils/channel.py index 6bf70bfde..0c072184c 100644 --- a/bot/utils/channel.py +++ b/bot/utils/channel.py @@ -2,6 +2,7 @@ import logging import discord +import bot from bot import constants from bot.constants import Categories @@ -36,14 +37,14 @@ def is_in_category(channel: discord.TextChannel, category_id: int) -> bool: return getattr(channel, "category_id", None) == category_id -async def try_get_channel(channel_id: int, client: discord.Client) -> discord.abc.GuildChannel: +async def try_get_channel(channel_id: int) -> discord.abc.GuildChannel: """Attempt to get or fetch a channel and return it.""" log.trace(f"Getting the channel {channel_id}.") - channel = client.get_channel(channel_id) + channel = bot.instance.get_channel(channel_id) if not channel: log.debug(f"Channel {channel_id} is not in cache; fetching from API.") - channel = await client.fetch_channel(channel_id) + channel = await bot.instance.fetch_channel(channel_id) log.trace(f"Channel #{channel} ({channel_id}) retrieved.") return channel diff --git a/bot/utils/messages.py b/bot/utils/messages.py index b6c7cab50..42bde358d 100644 --- a/bot/utils/messages.py +++ b/bot/utils/messages.py @@ -10,6 +10,7 @@ import discord from discord.errors import HTTPException from discord.ext.commands import Context +import bot from bot.constants import Emojis, NEGATIVE_REPLIES log = logging.getLogger(__name__) @@ -18,7 +19,6 @@ log = logging.getLogger(__name__) async def wait_for_deletion( message: discord.Message, user_ids: Sequence[discord.abc.Snowflake], - client: discord.Client, deletion_emojis: Sequence[str] = (Emojis.trashcan,), timeout: float = 60 * 5, attach_emojis: bool = True, @@ -49,7 +49,7 @@ async def wait_for_deletion( ) with contextlib.suppress(asyncio.TimeoutError): - await client.wait_for('reaction_add', check=check, timeout=timeout) + await bot.instance.wait_for('reaction_add', check=check, timeout=timeout) await message.delete() diff --git a/bot/utils/services.py b/bot/utils/services.py index 087b9f969..5949c9e48 100644 --- a/bot/utils/services.py +++ b/bot/utils/services.py @@ -1,8 +1,9 @@ import logging from typing import Optional -from aiohttp import ClientConnectorError, ClientSession +from aiohttp import ClientConnectorError +import bot from bot.constants import URLs log = logging.getLogger(__name__) @@ -10,11 +11,10 @@ log = logging.getLogger(__name__) FAILED_REQUEST_ATTEMPTS = 3 -async def send_to_paste_service(http_session: ClientSession, contents: str, *, extension: str = "") -> Optional[str]: +async def send_to_paste_service(contents: str, *, extension: str = "") -> Optional[str]: """ Upload `contents` to the paste service. - `http_session` should be the current running ClientSession from aiohttp `extension` is added to the output URL When an error occurs, `None` is returned, otherwise the generated URL with the suffix. 
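Dropping the `http_session` parameter means callers now pass only the contents and, optionally, an extension; the helper reads `bot.instance.http_session` itself and still returns `None` once all `FAILED_REQUEST_ATTEMPTS` retries fail. A small usage sketch under those assumptions (the wrapper function is hypothetical):

from bot.utils.services import send_to_paste_service

async def upload_traceback(text: str) -> str:
    # No aiohttp session argument any more; bot.instance.http_session is used internally.
    url = await send_to_paste_service(text, extension="txt")
    # None means every retry attempt against the paste service failed.
    return url if url is not None else "Upload failed; see log output for details."
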
@@ -24,7 +24,7 @@ async def send_to_paste_service(http_session: ClientSession, contents: str, *, e paste_url = URLs.paste_service.format(key="documents") for attempt in range(1, FAILED_REQUEST_ATTEMPTS + 1): try: - async with http_session.post(paste_url, data=contents) as response: + async with bot.instance.http_session.post(paste_url, data=contents) as response: response_json = await response.json() except ClientConnectorError: log.warning( diff --git a/config-default.yml b/config-default.yml index 2afdcd594..60eb437af 100644 --- a/config-default.yml +++ b/config-default.yml @@ -4,13 +4,13 @@ bot: sentry_dsn: !ENV "BOT_SENTRY_DSN" redis: - host: "redis" + host: "redis.default.svc.cluster.local" port: 6379 password: !ENV "REDIS_PASSWORD" use_fakeredis: false stats: - statsd_host: "graphite" + statsd_host: "graphite.default.svc.cluster.local" presence_update_timeout: 300 cooldowns: @@ -27,6 +27,7 @@ style: soft_red: 0xcd6d6d soft_green: 0x68c290 soft_orange: 0xf9cb54 + bright_green: 0x01d277 emojis: defcon_disabled: "<:defcondisabled:470326273952972810>" @@ -119,6 +120,8 @@ style: voice_state_green: "https://cdn.discordapp.com/emojis/656899770094452754.png" voice_state_red: "https://cdn.discordapp.com/emojis/656899769905709076.png" + green_checkmark: "https://raw.githubusercontent.com/python-discord/branding/master/icons/checkmark/green-checkmark-dist.png" + guild: id: 267624335836053506 @@ -329,7 +332,7 @@ urls: paste_service: !JOIN [*SCHEMA, *PASTE, "/{key}"] # Snekbox - snekbox_eval_api: "http://snekbox:8060/eval" + snekbox_eval_api: "http://snekbox.default.svc.cluster.local/eval" # Discord API URLs discord_api: &DISCORD_API "https://discordapp.com/api/v7/" @@ -522,6 +525,7 @@ voice_gate: minimum_messages: 50 # How many messages a user must have to be eligible for voice bot_message_delete_delay: 10 # Seconds before deleting bot's response in Voice Gate minimum_activity_blocks: 3 # Number of 10 minute blocks during which a user must have been active + voice_ping_delete_delay: 60 # Seconds before deleting the bot's ping to user in Voice Gate config: diff --git a/deployment.yaml b/deployment.yaml new file mode 100644 index 000000000..ca5ff5941 --- /dev/null +++ b/deployment.yaml @@ -0,0 +1,21 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: bot +spec: + replicas: 1 + selector: + matchLabels: + app: bot + template: + metadata: + labels: + app: bot + spec: + containers: + - name: bot + image: ghcr.io/python-discord/bot:latest + imagePullPolicy: Always + envFrom: + - secretRef: + name: bot-env diff --git a/docker-compose.yml b/docker-compose.yml index 8be5aac0e..0002d1d56 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -18,7 +18,7 @@ services: - "127.0.0.1:6379:6379" snekbox: - image: pythondiscord/snekbox:latest + image: ghcr.io/python-discord/snekbox:latest init: true ipc: none ports: @@ -26,7 +26,7 @@ services: privileged: true web: - image: pythondiscord/site:latest + image: ghcr.io/python-discord/site:latest command: ["run", "--debug"] networks: default: diff --git a/tests/bot/exts/backend/sync/test_base.py b/tests/bot/exts/backend/sync/test_base.py index 4953550f9..3ad9db9c3 100644 --- a/tests/bot/exts/backend/sync/test_base.py +++ b/tests/bot/exts/backend/sync/test_base.py @@ -15,28 +15,21 @@ class TestSyncer(Syncer): _sync = mock.AsyncMock() -class SyncerBaseTests(unittest.TestCase): - """Tests for the syncer base class.""" - - def setUp(self): - self.bot = helpers.MockBot() - - def test_instantiation_fails_without_abstract_methods(self): - """The class must 
have abstract methods implemented.""" - with self.assertRaisesRegex(TypeError, "Can't instantiate abstract class"): - Syncer(self.bot) - - class SyncerSyncTests(unittest.IsolatedAsyncioTestCase): """Tests for main function orchestrating the sync.""" def setUp(self): - self.bot = helpers.MockBot(user=helpers.MockMember(bot=True)) - self.syncer = TestSyncer(self.bot) + patcher = mock.patch("bot.instance", new=helpers.MockBot(user=helpers.MockMember(bot=True))) + self.bot = patcher.start() + self.addCleanup(patcher.stop) + self.guild = helpers.MockGuild() + TestSyncer._get_diff.reset_mock(return_value=True, side_effect=True) + TestSyncer._sync.reset_mock(return_value=True, side_effect=True) + # Make sure `_get_diff` returns a MagicMock, not an AsyncMock - self.syncer._get_diff.return_value = mock.MagicMock() + TestSyncer._get_diff.return_value = mock.MagicMock() async def test_sync_message_edited(self): """The message should be edited if one was sent, even if the sync has an API error.""" @@ -48,11 +41,11 @@ class SyncerSyncTests(unittest.IsolatedAsyncioTestCase): for message, side_effect, should_edit in subtests: with self.subTest(message=message, side_effect=side_effect, should_edit=should_edit): - self.syncer._sync.side_effect = side_effect + TestSyncer._sync.side_effect = side_effect ctx = helpers.MockContext() ctx.send.return_value = message - await self.syncer.sync(self.guild, ctx) + await TestSyncer.sync(self.guild, ctx) if should_edit: message.edit.assert_called_once() @@ -67,7 +60,7 @@ class SyncerSyncTests(unittest.IsolatedAsyncioTestCase): for ctx, message in subtests: with self.subTest(ctx=ctx, message=message): - await self.syncer.sync(self.guild, ctx) + await TestSyncer.sync(self.guild, ctx) if ctx is not None: ctx.send.assert_called_once() diff --git a/tests/bot/exts/backend/sync/test_cog.py b/tests/bot/exts/backend/sync/test_cog.py index 063a82754..22a07313e 100644 --- a/tests/bot/exts/backend/sync/test_cog.py +++ b/tests/bot/exts/backend/sync/test_cog.py @@ -29,24 +29,24 @@ class SyncCogTestCase(unittest.IsolatedAsyncioTestCase): def setUp(self): self.bot = helpers.MockBot() - self.role_syncer_patcher = mock.patch( + role_syncer_patcher = mock.patch( "bot.exts.backend.sync._syncers.RoleSyncer", autospec=Syncer, spec_set=True ) - self.user_syncer_patcher = mock.patch( + user_syncer_patcher = mock.patch( "bot.exts.backend.sync._syncers.UserSyncer", autospec=Syncer, spec_set=True ) - self.RoleSyncer = self.role_syncer_patcher.start() - self.UserSyncer = self.user_syncer_patcher.start() - self.cog = Sync(self.bot) + self.RoleSyncer = role_syncer_patcher.start() + self.UserSyncer = user_syncer_patcher.start() - def tearDown(self): - self.role_syncer_patcher.stop() - self.user_syncer_patcher.stop() + self.addCleanup(role_syncer_patcher.stop) + self.addCleanup(user_syncer_patcher.stop) + + self.cog = Sync(self.bot) @staticmethod def response_error(status: int) -> ResponseCodeError: @@ -73,8 +73,6 @@ class SyncCogTests(SyncCogTestCase): Sync(self.bot) - self.RoleSyncer.assert_called_once_with(self.bot) - self.UserSyncer.assert_called_once_with(self.bot) sync_guild.assert_called_once_with() self.bot.loop.create_task.assert_called_once_with(mock_sync_guild_coro) @@ -83,8 +81,8 @@ class SyncCogTests(SyncCogTestCase): for guild in (helpers.MockGuild(), None): with self.subTest(guild=guild): self.bot.reset_mock() - self.cog.role_syncer.reset_mock() - self.cog.user_syncer.reset_mock() + self.RoleSyncer.reset_mock() + self.UserSyncer.reset_mock() self.bot.get_guild = 
mock.MagicMock(return_value=guild) @@ -94,11 +92,11 @@ class SyncCogTests(SyncCogTestCase): self.bot.get_guild.assert_called_once_with(constants.Guild.id) if guild is None: - self.cog.role_syncer.sync.assert_not_called() - self.cog.user_syncer.sync.assert_not_called() + self.RoleSyncer.sync.assert_not_called() + self.UserSyncer.sync.assert_not_called() else: - self.cog.role_syncer.sync.assert_called_once_with(guild) - self.cog.user_syncer.sync.assert_called_once_with(guild) + self.RoleSyncer.sync.assert_called_once_with(guild) + self.UserSyncer.sync.assert_called_once_with(guild) async def patch_user_helper(self, side_effect: BaseException) -> None: """Helper to set a side effect for bot.api_client.patch and then assert it is called.""" @@ -394,14 +392,14 @@ class SyncCogCommandTests(SyncCogTestCase, CommandTestCase): ctx = helpers.MockContext() await self.cog.sync_roles_command(self.cog, ctx) - self.cog.role_syncer.sync.assert_called_once_with(ctx.guild, ctx) + self.RoleSyncer.sync.assert_called_once_with(ctx.guild, ctx) async def test_sync_users_command(self): """sync() should be called on the UserSyncer.""" ctx = helpers.MockContext() await self.cog.sync_users_command(self.cog, ctx) - self.cog.user_syncer.sync.assert_called_once_with(ctx.guild, ctx) + self.UserSyncer.sync.assert_called_once_with(ctx.guild, ctx) async def test_commands_require_admin(self): """The sync commands should only run if the author has the administrator permission.""" diff --git a/tests/bot/exts/backend/sync/test_roles.py b/tests/bot/exts/backend/sync/test_roles.py index 7b9f40cad..541074336 100644 --- a/tests/bot/exts/backend/sync/test_roles.py +++ b/tests/bot/exts/backend/sync/test_roles.py @@ -22,8 +22,9 @@ class RoleSyncerDiffTests(unittest.IsolatedAsyncioTestCase): """Tests for determining differences between roles in the DB and roles in the Guild cache.""" def setUp(self): - self.bot = helpers.MockBot() - self.syncer = RoleSyncer(self.bot) + patcher = mock.patch("bot.instance", new=helpers.MockBot()) + self.bot = patcher.start() + self.addCleanup(patcher.stop) @staticmethod def get_guild(*roles): @@ -44,7 +45,7 @@ class RoleSyncerDiffTests(unittest.IsolatedAsyncioTestCase): self.bot.api_client.get.return_value = [fake_role()] guild = self.get_guild(fake_role()) - actual_diff = await self.syncer._get_diff(guild) + actual_diff = await RoleSyncer._get_diff(guild) expected_diff = (set(), set(), set()) self.assertEqual(actual_diff, expected_diff) @@ -56,7 +57,7 @@ class RoleSyncerDiffTests(unittest.IsolatedAsyncioTestCase): self.bot.api_client.get.return_value = [fake_role(id=41, name="old"), fake_role()] guild = self.get_guild(updated_role, fake_role()) - actual_diff = await self.syncer._get_diff(guild) + actual_diff = await RoleSyncer._get_diff(guild) expected_diff = (set(), {_Role(**updated_role)}, set()) self.assertEqual(actual_diff, expected_diff) @@ -68,7 +69,7 @@ class RoleSyncerDiffTests(unittest.IsolatedAsyncioTestCase): self.bot.api_client.get.return_value = [fake_role()] guild = self.get_guild(fake_role(), new_role) - actual_diff = await self.syncer._get_diff(guild) + actual_diff = await RoleSyncer._get_diff(guild) expected_diff = ({_Role(**new_role)}, set(), set()) self.assertEqual(actual_diff, expected_diff) @@ -80,7 +81,7 @@ class RoleSyncerDiffTests(unittest.IsolatedAsyncioTestCase): self.bot.api_client.get.return_value = [fake_role(), deleted_role] guild = self.get_guild(fake_role()) - actual_diff = await self.syncer._get_diff(guild) + actual_diff = await RoleSyncer._get_diff(guild) expected_diff 
= (set(), set(), {_Role(**deleted_role)}) self.assertEqual(actual_diff, expected_diff) @@ -98,7 +99,7 @@ class RoleSyncerDiffTests(unittest.IsolatedAsyncioTestCase): ] guild = self.get_guild(fake_role(), new, updated) - actual_diff = await self.syncer._get_diff(guild) + actual_diff = await RoleSyncer._get_diff(guild) expected_diff = ({_Role(**new)}, {_Role(**updated)}, {_Role(**deleted)}) self.assertEqual(actual_diff, expected_diff) @@ -108,8 +109,9 @@ class RoleSyncerSyncTests(unittest.IsolatedAsyncioTestCase): """Tests for the API requests that sync roles.""" def setUp(self): - self.bot = helpers.MockBot() - self.syncer = RoleSyncer(self.bot) + patcher = mock.patch("bot.instance", new=helpers.MockBot()) + self.bot = patcher.start() + self.addCleanup(patcher.stop) async def test_sync_created_roles(self): """Only POST requests should be made with the correct payload.""" @@ -117,7 +119,7 @@ class RoleSyncerSyncTests(unittest.IsolatedAsyncioTestCase): role_tuples = {_Role(**role) for role in roles} diff = _Diff(role_tuples, set(), set()) - await self.syncer._sync(diff) + await RoleSyncer._sync(diff) calls = [mock.call("bot/roles", json=role) for role in roles] self.bot.api_client.post.assert_has_calls(calls, any_order=True) @@ -132,7 +134,7 @@ class RoleSyncerSyncTests(unittest.IsolatedAsyncioTestCase): role_tuples = {_Role(**role) for role in roles} diff = _Diff(set(), role_tuples, set()) - await self.syncer._sync(diff) + await RoleSyncer._sync(diff) calls = [mock.call(f"bot/roles/{role['id']}", json=role) for role in roles] self.bot.api_client.put.assert_has_calls(calls, any_order=True) @@ -147,7 +149,7 @@ class RoleSyncerSyncTests(unittest.IsolatedAsyncioTestCase): role_tuples = {_Role(**role) for role in roles} diff = _Diff(set(), set(), role_tuples) - await self.syncer._sync(diff) + await RoleSyncer._sync(diff) calls = [mock.call(f"bot/roles/{role['id']}") for role in roles] self.bot.api_client.delete.assert_has_calls(calls, any_order=True) diff --git a/tests/bot/exts/backend/sync/test_users.py b/tests/bot/exts/backend/sync/test_users.py index 9f380a15d..61673e1bb 100644 --- a/tests/bot/exts/backend/sync/test_users.py +++ b/tests/bot/exts/backend/sync/test_users.py @@ -1,4 +1,5 @@ import unittest +from unittest import mock from bot.exts.backend.sync._syncers import UserSyncer, _Diff from tests import helpers @@ -19,8 +20,9 @@ class UserSyncerDiffTests(unittest.IsolatedAsyncioTestCase): """Tests for determining differences between users in the DB and users in the Guild cache.""" def setUp(self): - self.bot = helpers.MockBot() - self.syncer = UserSyncer(self.bot) + patcher = mock.patch("bot.instance", new=helpers.MockBot()) + self.bot = patcher.start() + self.addCleanup(patcher.stop) @staticmethod def get_guild(*members): @@ -57,7 +59,7 @@ class UserSyncerDiffTests(unittest.IsolatedAsyncioTestCase): } guild = self.get_guild() - actual_diff = await self.syncer._get_diff(guild) + actual_diff = await UserSyncer._get_diff(guild) expected_diff = ([], [], None) self.assertEqual(actual_diff, expected_diff) @@ -73,7 +75,7 @@ class UserSyncerDiffTests(unittest.IsolatedAsyncioTestCase): guild = self.get_guild(fake_user()) guild.get_member.return_value = self.get_mock_member(fake_user()) - actual_diff = await self.syncer._get_diff(guild) + actual_diff = await UserSyncer._get_diff(guild) expected_diff = ([], [], None) self.assertEqual(actual_diff, expected_diff) @@ -94,7 +96,7 @@ class UserSyncerDiffTests(unittest.IsolatedAsyncioTestCase): self.get_mock_member(fake_user()) ] - actual_diff = await 
self.syncer._get_diff(guild) + actual_diff = await UserSyncer._get_diff(guild) expected_diff = ([], [{"id": 99, "name": "new"}], None) self.assertEqual(actual_diff, expected_diff) @@ -114,7 +116,7 @@ class UserSyncerDiffTests(unittest.IsolatedAsyncioTestCase): self.get_mock_member(fake_user()), self.get_mock_member(new_user) ] - actual_diff = await self.syncer._get_diff(guild) + actual_diff = await UserSyncer._get_diff(guild) expected_diff = ([new_user], [], None) self.assertEqual(actual_diff, expected_diff) @@ -133,7 +135,7 @@ class UserSyncerDiffTests(unittest.IsolatedAsyncioTestCase): None ] - actual_diff = await self.syncer._get_diff(guild) + actual_diff = await UserSyncer._get_diff(guild) expected_diff = ([], [{"id": 63, "in_guild": False}], None) self.assertEqual(actual_diff, expected_diff) @@ -157,7 +159,7 @@ class UserSyncerDiffTests(unittest.IsolatedAsyncioTestCase): None ] - actual_diff = await self.syncer._get_diff(guild) + actual_diff = await UserSyncer._get_diff(guild) expected_diff = ([new_user], [{"id": 55, "name": "updated"}, {"id": 63, "in_guild": False}], None) self.assertEqual(actual_diff, expected_diff) @@ -176,7 +178,7 @@ class UserSyncerDiffTests(unittest.IsolatedAsyncioTestCase): None ] - actual_diff = await self.syncer._get_diff(guild) + actual_diff = await UserSyncer._get_diff(guild) expected_diff = ([], [], None) self.assertEqual(actual_diff, expected_diff) @@ -186,15 +188,16 @@ class UserSyncerSyncTests(unittest.IsolatedAsyncioTestCase): """Tests for the API requests that sync users.""" def setUp(self): - self.bot = helpers.MockBot() - self.syncer = UserSyncer(self.bot) + patcher = mock.patch("bot.instance", new=helpers.MockBot()) + self.bot = patcher.start() + self.addCleanup(patcher.stop) async def test_sync_created_users(self): """Only POST requests should be made with the correct payload.""" users = [fake_user(id=111), fake_user(id=222)] diff = _Diff(users, [], None) - await self.syncer._sync(diff) + await UserSyncer._sync(diff) self.bot.api_client.post.assert_called_once_with("bot/users", json=diff.created) @@ -206,7 +209,7 @@ class UserSyncerSyncTests(unittest.IsolatedAsyncioTestCase): users = [fake_user(id=111), fake_user(id=222)] diff = _Diff([], users, None) - await self.syncer._sync(diff) + await UserSyncer._sync(diff) self.bot.api_client.patch.assert_called_once_with("bot/users/bulk_patch", json=diff.updated) diff --git a/tests/bot/exts/utils/test_snekbox.py b/tests/bot/exts/utils/test_snekbox.py index 9a42d0610..321a92445 100644 --- a/tests/bot/exts/utils/test_snekbox.py +++ b/tests/bot/exts/utils/test_snekbox.py @@ -42,9 +42,7 @@ class SnekboxTests(unittest.IsolatedAsyncioTestCase): async def test_upload_output(self, mock_paste_util): """Upload the eval output to the URLs.paste_service.format(key="documents") endpoint.""" await self.cog.upload_output("Test output.") - mock_paste_util.assert_called_once_with( - self.bot.http_session, "Test output.", extension="txt" - ) + mock_paste_util.assert_called_once_with("Test output.", extension="txt") def test_prepare_input(self): cases = ( diff --git a/tests/bot/rules/test_discord_emojis.py b/tests/bot/rules/test_discord_emojis.py index 9a72723e2..66c2d9f92 100644 --- a/tests/bot/rules/test_discord_emojis.py +++ b/tests/bot/rules/test_discord_emojis.py @@ -5,11 +5,12 @@ from tests.bot.rules import DisallowedCase, RuleTest from tests.helpers import MockMessage discord_emoji = "<:abcd:1234>" # Discord emojis follow the format <:name:id> +unicode_emoji = "🧪" -def make_msg(author: str, n_emojis: int) -> 
MockMessage: +def make_msg(author: str, n_emojis: int, emoji: str = discord_emoji) -> MockMessage: """Build a MockMessage instance with content containing `n_emojis` arbitrary emojis.""" - return MockMessage(author=author, content=discord_emoji * n_emojis) + return MockMessage(author=author, content=emoji * n_emojis) class DiscordEmojisRuleTests(RuleTest): @@ -20,16 +21,22 @@ class DiscordEmojisRuleTests(RuleTest): self.config = {"max": 2, "interval": 10} async def test_allows_messages_within_limit(self): - """Cases with a total amount of discord emojis within limit.""" + """Cases with a total amount of discord and unicode emojis within limit.""" cases = ( [make_msg("bob", 2)], [make_msg("alice", 1), make_msg("bob", 2), make_msg("alice", 1)], + [make_msg("bob", 2, unicode_emoji)], + [ + make_msg("alice", 1, unicode_emoji), + make_msg("bob", 2, unicode_emoji), + make_msg("alice", 1, unicode_emoji) + ], ) await self.run_allowed(cases) async def test_disallows_messages_beyond_limit(self): - """Cases with more than the allowed amount of discord emojis.""" + """Cases with more than the allowed amount of discord and unicode emojis.""" cases = ( DisallowedCase( [make_msg("bob", 3)], @@ -41,6 +48,20 @@ class DiscordEmojisRuleTests(RuleTest): ("alice",), 4, ), + DisallowedCase( + [make_msg("bob", 3, unicode_emoji)], + ("bob",), + 3, + ), + DisallowedCase( + [ + make_msg("alice", 2, unicode_emoji), + make_msg("bob", 2, unicode_emoji), + make_msg("alice", 2, unicode_emoji) + ], + ("alice",), + 4 + ) ) await self.run_disallowed(cases) diff --git a/tests/bot/utils/test_services.py b/tests/bot/utils/test_services.py index 5e0855704..1b48f6560 100644 --- a/tests/bot/utils/test_services.py +++ b/tests/bot/utils/test_services.py @@ -5,11 +5,14 @@ from unittest.mock import AsyncMock, MagicMock, Mock, patch from aiohttp import ClientConnectorError from bot.utils.services import FAILED_REQUEST_ATTEMPTS, send_to_paste_service +from tests.helpers import MockBot class PasteTests(unittest.IsolatedAsyncioTestCase): def setUp(self) -> None: - self.http_session = MagicMock() + patcher = patch("bot.instance", new=MockBot()) + self.bot = patcher.start() + self.addCleanup(patcher.stop) @patch("bot.utils.services.URLs.paste_service", "https://paste_service.com/{key}") async def test_url_and_sent_contents(self): @@ -17,10 +20,10 @@ class PasteTests(unittest.IsolatedAsyncioTestCase): response = MagicMock( json=AsyncMock(return_value={"key": ""}) ) - self.http_session.post().__aenter__.return_value = response - self.http_session.post.reset_mock() - await send_to_paste_service(self.http_session, "Content") - self.http_session.post.assert_called_once_with("https://paste_service.com/documents", data="Content") + self.bot.http_session.post.return_value.__aenter__.return_value = response + self.bot.http_session.post.reset_mock() + await send_to_paste_service("Content") + self.bot.http_session.post.assert_called_once_with("https://paste_service.com/documents", data="Content") @patch("bot.utils.services.URLs.paste_service", "https://paste_service.com/{key}") async def test_paste_returns_correct_url_on_success(self): @@ -34,41 +37,41 @@ class PasteTests(unittest.IsolatedAsyncioTestCase): response = MagicMock( json=AsyncMock(return_value={"key": key}) ) - self.http_session.post().__aenter__.return_value = response + self.bot.http_session.post.return_value.__aenter__.return_value = response for expected_output, extension in test_cases: with self.subTest(msg=f"Send contents with extension {repr(extension)}"): self.assertEqual( - 
await send_to_paste_service(self.http_session, "", extension=extension), + await send_to_paste_service("", extension=extension), expected_output ) async def test_request_repeated_on_json_errors(self): """Json with error message and invalid json are handled as errors and requests repeated.""" test_cases = ({"message": "error"}, {"unexpected_key": None}, {}) - self.http_session.post().__aenter__.return_value = response = MagicMock() - self.http_session.post.reset_mock() + self.bot.http_session.post.return_value.__aenter__.return_value = response = MagicMock() + self.bot.http_session.post.reset_mock() for error_json in test_cases: with self.subTest(error_json=error_json): response.json = AsyncMock(return_value=error_json) - result = await send_to_paste_service(self.http_session, "") - self.assertEqual(self.http_session.post.call_count, FAILED_REQUEST_ATTEMPTS) + result = await send_to_paste_service("") + self.assertEqual(self.bot.http_session.post.call_count, FAILED_REQUEST_ATTEMPTS) self.assertIsNone(result) - self.http_session.post.reset_mock() + self.bot.http_session.post.reset_mock() async def test_request_repeated_on_connection_errors(self): """Requests are repeated in the case of connection errors.""" - self.http_session.post = MagicMock(side_effect=ClientConnectorError(Mock(), Mock())) - result = await send_to_paste_service(self.http_session, "") - self.assertEqual(self.http_session.post.call_count, FAILED_REQUEST_ATTEMPTS) + self.bot.http_session.post = MagicMock(side_effect=ClientConnectorError(Mock(), Mock())) + result = await send_to_paste_service("") + self.assertEqual(self.bot.http_session.post.call_count, FAILED_REQUEST_ATTEMPTS) self.assertIsNone(result) async def test_general_error_handled_and_request_repeated(self): """All `Exception`s are handled, logged and request repeated.""" - self.http_session.post = MagicMock(side_effect=Exception) - result = await send_to_paste_service(self.http_session, "") - self.assertEqual(self.http_session.post.call_count, FAILED_REQUEST_ATTEMPTS) + self.bot.http_session.post = MagicMock(side_effect=Exception) + result = await send_to_paste_service("") + self.assertEqual(self.bot.http_session.post.call_count, FAILED_REQUEST_ATTEMPTS) self.assertLogs("bot.utils", logging.ERROR) self.assertIsNone(result) |
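All of the reworked tests share the same fixture shape: rather than handing a `MockBot` to each object under test, they patch the module-level `bot.instance` for the duration of the test and register the cleanup automatically. A condensed sketch of that pattern (the test class and assertion are illustrative, not part of the diff):

import unittest
from unittest import mock

import bot
from tests.helpers import MockBot

class PatchedInstanceTests(unittest.IsolatedAsyncioTestCase):
    def setUp(self) -> None:
        # Swap the module-level singleton for this test only; addCleanup undoes it.
        patcher = mock.patch("bot.instance", new=MockBot())
        self.bot = patcher.start()
        self.addCleanup(patcher.stop)

    async def test_code_under_test_sees_the_mock(self):
        # Anything reaching for bot.instance (syncers, paste service, channel utils)
        # now talks to the MockBot configured in setUp.
        self.assertIs(bot.instance, self.bot)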