-rw-r--r--  .coveragerc | 1
-rw-r--r--  .github/dependabot.yml | 6
-rw-r--r--  .github/workflows/deploy.yaml | 1
-rw-r--r--  .github/workflows/lint-test.yaml | 75
-rw-r--r--  .gitignore | 1
-rw-r--r--  .pre-commit-config.yaml | 1
-rw-r--r--  Dockerfile | 17
-rw-r--r--  docker-compose.yml | 2
-rwxr-xr-x  manage.py | 14
-rw-r--r--  poetry.lock | 1062
-rw-r--r--  pydis_site/apps/api/__init__.py | 1
-rw-r--r--  pydis_site/apps/api/github_utils.py | 6
-rw-r--r--  pydis_site/apps/api/migrations/0013_specialsnake_image.py | 3
-rw-r--r--  pydis_site/apps/api/models/bot/message.py | 11
-rw-r--r--  pydis_site/apps/api/models/bot/metricity.py | 28
-rw-r--r--  pydis_site/apps/api/tests/test_filterlists.py | 4
-rw-r--r--  pydis_site/apps/api/tests/test_github_utils.py | 9
-rw-r--r--  pydis_site/apps/api/tests/test_infractions.py | 15
-rw-r--r--  pydis_site/apps/api/tests/test_users.py | 84
-rw-r--r--  pydis_site/apps/api/views.py | 43
-rw-r--r--  pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py | 2
-rw-r--r--  pydis_site/apps/api/viewsets/bot/aoc_link.py | 2
-rw-r--r--  pydis_site/apps/api/viewsets/bot/infraction.py | 19
-rw-r--r--  pydis_site/apps/api/viewsets/bot/nomination.py | 2
-rw-r--r--  pydis_site/apps/api/viewsets/bot/reminder.py | 2
-rw-r--r--  pydis_site/apps/api/viewsets/bot/user.py | 59
-rw-r--r--  pydis_site/apps/content/migrations/0001_add_tags.py | 35
-rw-r--r--  pydis_site/apps/content/migrations/__init__.py | 0
-rw-r--r--  pydis_site/apps/content/models/__init__.py | 3
-rw-r--r--  pydis_site/apps/content/models/tag.py | 80
-rw-r--r--  pydis_site/apps/content/resources/guides/pydis-guides/contributing.md | 2
-rw-r--r--  pydis_site/apps/content/resources/guides/pydis-guides/contributing/bot.md | 12
-rw-r--r--  pydis_site/apps/content/resources/guides/pydis-guides/contributing/site.md | 2
-rw-r--r--  pydis_site/apps/content/resources/guides/pydis-guides/contributing/style-guide.md | 3
-rw-r--r--  pydis_site/apps/content/resources/guides/pydis-guides/off-topic-etiquette.md | 2
-rw-r--r--  pydis_site/apps/content/resources/guides/python-guides/fix-ssl-certificate.md | 23
-rw-r--r--  pydis_site/apps/content/resources/guides/python-guides/keeping-tokens-safe.md | 29
-rw-r--r--  pydis_site/apps/content/resources/guides/python-guides/vps-services.md | 31
-rw-r--r--  pydis_site/apps/content/resources/tags/_info.yml | 3
-rw-r--r--  pydis_site/apps/content/tests/test_utils.py | 289
-rw-r--r--  pydis_site/apps/content/tests/test_views.py | 222
-rw-r--r--  pydis_site/apps/content/urls.py | 27
-rw-r--r--  pydis_site/apps/content/utils.py | 282
-rw-r--r--  pydis_site/apps/content/views/__init__.py | 3
-rw-r--r--  pydis_site/apps/content/views/page_category.py | 14
-rw-r--r--  pydis_site/apps/content/views/tags.py | 124
-rw-r--r--  pydis_site/apps/home/tests/test_repodata_helpers.py | 6
-rw-r--r--  pydis_site/apps/home/views/home.py | 8
-rw-r--r--  pydis_site/apps/redirect/urls.py | 7
-rw-r--r--  pydis_site/settings.py | 64
-rw-r--r--  pydis_site/static/css/content/color.css | 7
-rw-r--r--  pydis_site/static/css/content/tag.css | 13
-rw-r--r--  pydis_site/static/images/content/fix-ssl-certificate/pem.png | bin 0 -> 11619 bytes
-rw-r--r--  pydis_site/static/images/content/regenerating_token.jpg | bin 0 -> 180570 bytes
-rw-r--r--  pydis_site/static/js/content/listing.js | 41
-rw-r--r--  pydis_site/templates/base/navbar.html | 3
-rw-r--r--  pydis_site/templates/content/base.html | 6
-rw-r--r--  pydis_site/templates/content/listing.html | 27
-rw-r--r--  pydis_site/templates/content/page.html | 8
-rw-r--r--  pydis_site/templates/content/tag.html | 40
-rw-r--r--  pydis_site/templates/events/pages/code-jams/9/_index.html | 4
-rw-r--r--  pydis_site/urls.py | 4
-rw-r--r--  pyproject.toml | 67
-rw-r--r--  static-builds/netlify_build.py | 11
64 files changed, 2250 insertions, 722 deletions
diff --git a/.coveragerc b/.coveragerc
index b4a9bbe4..039654db 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -2,7 +2,6 @@
branch = true
source =
pydis_site
- pydis_site/apps/admin
pydis_site/apps/api
pydis_site/apps/home
pydis_site/apps/staff
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 00000000..b38df29f
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,6 @@
+version: 2
+updates:
+ - package-ecosystem: "pip"
+ directory: "/"
+ schedule:
+ interval: "daily"
diff --git a/.github/workflows/deploy.yaml b/.github/workflows/deploy.yaml
index 0e315327..57712dc7 100644
--- a/.github/workflows/deploy.yaml
+++ b/.github/workflows/deploy.yaml
@@ -29,7 +29,6 @@ jobs:
uses: actions/checkout@v2
with:
repository: python-discord/kubernetes
- token: ${{ secrets.REPO_TOKEN }}
- name: Authenticate with Kubernetes
uses: azure/k8s-set-context@v1
diff --git a/.github/workflows/lint-test.yaml b/.github/workflows/lint-test.yaml
index a167ce32..f82e1d4f 100644
--- a/.github/workflows/lint-test.yaml
+++ b/.github/workflows/lint-test.yaml
@@ -10,76 +10,26 @@ on:
jobs:
lint-test:
runs-on: ubuntu-latest
- env:
- # Configure pip to cache dependencies and do a user install
- PIP_NO_CACHE_DIR: false
- PIP_USER: 1
-
- # Make sure package manager does not use virtualenv
- POETRY_VIRTUALENVS_CREATE: false
-
- # Specify explicit paths for python dependencies and the pre-commit
- # environment so we know which directories to cache
- POETRY_CACHE_DIR: ${{ github.workspace }}/.cache/py-user-base
- PYTHONUSERBASE: ${{ github.workspace }}/.cache/py-user-base
- PRE_COMMIT_HOME: ${{ github.workspace }}/.cache/pre-commit-cache
steps:
- - name: Add custom PYTHONUSERBASE to PATH
- run: echo '${{ env.PYTHONUSERBASE }}/bin/' >> $GITHUB_PATH
-
- name: Checkout repository
uses: actions/checkout@v2
- - name: Setup python
- id: python
- uses: actions/setup-python@v2
+ - name: Install Python Dependencies
+ uses: HassanAbouelela/actions/setup-python@setup-python_v1.3.1
with:
- python-version: '3.9'
+ dev: true
+ python_version: '3.10'
# Start the database early to give it a chance to get ready before
# we start running tests.
- name: Run database using docker-compose
run: docker-compose run -d -p 7777:5432 --name pydis_web postgres
- # This step caches our Python dependencies. To make sure we
- # only restore a cache when the dependencies, the python version,
- # the runner operating system, and the dependency location haven't
- # changed, we create a cache key that is a composite of those states.
- #
- # Only when the context is exactly the same, we will restore the cache.
- - name: Python Dependency Caching
- uses: actions/cache@v2
- id: python_cache
- with:
- path: ${{ env.PYTHONUSERBASE }}
- key: "python-0-${{ runner.os }}-${{ env.PYTHONUSERBASE }}-\
- ${{ steps.python.outputs.python-version }}-\
- ${{ hashFiles('./pyproject.toml', './poetry.lock') }}"
-
- # Install our dependencies if we did not restore a dependency cache
- - name: Install dependencies using poetry
- if: steps.python_cache.outputs.cache-hit != 'true'
- run: |
- pip install poetry
- poetry install
-
- # This step caches our pre-commit environment. To make sure we
- # do create a new environment when our pre-commit setup changes,
- # we create a cache key based on relevant factors.
- - name: Pre-commit Environment Caching
- uses: actions/cache@v2
- with:
- path: ${{ env.PRE_COMMIT_HOME }}
- key: "precommit-0-${{ runner.os }}-${{ env.PRE_COMMIT_HOME }}-\
- ${{ steps.python.outputs.python-version }}-\
- ${{ hashFiles('./.pre-commit-config.yaml') }}"
-
# We will not run `flake8` here, as we will use a separate flake8
- # action. As pre-commit does not support user installs, we set
- # PIP_USER=0 to not do a user install.
+ # action.
- name: Run pre-commit hooks
- run: export PIP_USER=0; SKIP=flake8 pre-commit run --all-files
+ run: SKIP=flake8 pre-commit run --all-files
# Run flake8 and have it format the linting errors in the format of
# the GitHub Workflow command to register error annotations. This
@@ -99,17 +49,20 @@ jobs:
python manage.py makemigrations --check
coverage run manage.py test --no-input
coverage report -m
+ coverage lcov
env:
CI: True
DATABASE_URL: postgres://pysite:pysite@localhost:7777/pysite
METRICITY_DB_URL: postgres://pysite:pysite@localhost:7777/metricity
+ PYTHONWARNINGS: error
# This step will publish the coverage reports coveralls.io and
- # print a "job" link in the output of the GitHub Action
- - name: Publish coverage report to coveralls.io
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- run: coveralls
+ # link the report to the commit
+ - name: Publish Coverage Report
+ uses: coverallsapp/[email protected]
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ path-to-lcov: ./coverage.lcov
- name: Tear down docker-compose containers
run: docker-compose stop
diff --git a/.gitignore b/.gitignore
index 4fc4417d..911acdcf 100644
--- a/.gitignore
+++ b/.gitignore
@@ -36,6 +36,7 @@ pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
+*.lcov
htmlcov/
.tox/
.coverage
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 25781752..b2a03559 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -22,4 +22,3 @@ repos:
entry: poetry run flake8
language: system
types: [python]
- require_serial: true
diff --git a/Dockerfile b/Dockerfile
index 2b039fab..454e58d5 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,21 +1,14 @@
-FROM --platform=linux/amd64 python:3.9-slim-buster
+FROM ghcr.io/chrislovering/python-poetry-base:3.10-slim
# Allow service to handle stops gracefully
STOPSIGNAL SIGQUIT
-# Set pip to have cleaner logs and no saved cache
-ENV PIP_NO_CACHE_DIR=false \
- POETRY_VIRTUALENVS_CREATE=false
-
-# Install poetry
-RUN pip install -U poetry
-
# Copy the project files into working directory
WORKDIR /app
# Install project dependencies
COPY pyproject.toml poetry.lock ./
-RUN poetry install --no-dev
+RUN poetry install --without dev
# Set Git SHA environment variable
ARG git_sha="development"
@@ -34,14 +27,14 @@ RUN \
SECRET_KEY=dummy_value \
DATABASE_URL=postgres://localhost \
METRICITY_DB_URL=postgres://localhost \
- python manage.py collectstatic --noinput --clear
+ poetry run python manage.py collectstatic --noinput --clear
# Build static files if we are doing a static build
ARG STATIC_BUILD=false
RUN if [ $STATIC_BUILD = "TRUE" ] ; \
- then SECRET_KEY=dummy_value python manage.py distill-local build --traceback --force ; \
+ then SECRET_KEY=dummy_value poetry run python manage.py distill-local build --traceback --force ; \
fi
# Run web server through custom manager
-ENTRYPOINT ["python", "manage.py"]
+ENTRYPOINT ["poetry", "run", "python", "manage.py"]
CMD ["run"]
diff --git a/docker-compose.yml b/docker-compose.yml
index a6f4fd18..61554ae4 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -11,7 +11,7 @@
version: "3.8"
services:
postgres:
- image: postgres:13-alpine
+ image: postgres:15-alpine
ports:
- "7777:5432"
environment:
diff --git a/manage.py b/manage.py
index 697960c6..afca6121 100755
--- a/manage.py
+++ b/manage.py
@@ -7,6 +7,7 @@ from pathlib import Path
import django
from django.contrib.auth import get_user_model
from django.core.management import call_command, execute_from_command_line
+from django.test.utils import ignore_warnings
DEFAULT_ENVS = {
"DJANGO_SETTINGS_MODULE": "pydis_site.settings",
@@ -154,7 +155,16 @@ class SiteManager:
def run_tests(self) -> None:
"""Prepare and run the test suite."""
self.prepare_environment()
- call_command(*sys.argv[1:])
+ # The whitenoise package expects a staticfiles directory to exist during startup,
+ # else it raises a warning. This is fine under normal application, but during
+ # tests, staticfiles are not, and do not need to be generated.
+ # The following line suppresses the warning.
+ # Reference: https://github.com/evansd/whitenoise/issues/215
+ with ignore_warnings(
+ message=r"No directory at: .*staticfiles",
+ module="whitenoise.base",
+ ):
+ call_command(*sys.argv[1:])
def clean_up_static_files(build_folder: Path) -> None:
@@ -185,7 +195,7 @@ def main() -> None:
# Pass any others directly to standard management commands
else:
- _static_build = "distill" in sys.argv[1]
+ _static_build = len(sys.argv) > 1 and "distill" in sys.argv[1]
if _static_build:
# Build a static version of the site with no databases and API support
diff --git a/poetry.lock b/poetry.lock
index 1bee4397..e9824ca9 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,6 +1,6 @@
[[package]]
name = "anyio"
-version = "3.6.1"
+version = "3.6.2"
description = "High level compatibility layer for multiple asynchronous event loop implementations"
category = "main"
optional = false
@@ -11,9 +11,9 @@ idna = ">=2.8"
sniffio = ">=1.1"
[package.extras]
-doc = ["packaging", "sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"]
-test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"]
-trio = ["trio (>=0.16)"]
+doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
+test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"]
+trio = ["trio (>=0.16,<0.22)"]
[[package]]
name = "asgiref"
@@ -24,21 +24,21 @@ optional = false
python-versions = ">=3.7"
[package.extras]
-tests = ["pytest", "pytest-asyncio", "mypy (>=0.800)"]
+tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"]
[[package]]
name = "attrs"
-version = "21.4.0"
+version = "22.1.0"
description = "Classes Without Boilerplate"
category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = ">=3.5"
[package.extras]
-dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
-docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
-tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
-tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"]
+dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"]
+docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
+tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"]
+tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
[[package]]
name = "bandit"
@@ -55,13 +55,13 @@ PyYAML = ">=5.3.1"
stevedore = ">=1.20.0"
[package.extras]
-test = ["coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml", "beautifulsoup4 (>=4.8.0)", "pylint (==1.9.4)"]
+test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml"]
toml = ["toml"]
-yaml = ["pyyaml"]
+yaml = ["PyYAML"]
[[package]]
name = "certifi"
-version = "2022.6.15"
+version = "2022.9.24"
description = "Python package for providing Mozilla's CA Bundle."
category = "main"
optional = false
@@ -88,53 +88,37 @@ python-versions = ">=3.6.1"
[[package]]
name = "charset-normalizer"
-version = "2.1.0"
+version = "2.1.1"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
category = "main"
optional = false
python-versions = ">=3.6.0"
[package.extras]
-unicode_backport = ["unicodedata2"]
+unicode-backport = ["unicodedata2"]
[[package]]
name = "colorama"
-version = "0.4.5"
+version = "0.4.6"
description = "Cross-platform colored terminal text."
category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
[[package]]
name = "coverage"
-version = "5.5"
+version = "6.5.0"
description = "Code coverage measurement for Python"
category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
-
-[package.extras]
-toml = ["toml"]
-
-[[package]]
-name = "coveralls"
-version = "2.2.0"
-description = "Show coverage stats online via coveralls.io"
-category = "dev"
-optional = false
-python-versions = ">= 3.5"
-
-[package.dependencies]
-coverage = ">=4.1,<6.0"
-docopt = ">=0.6.1"
-requests = ">=1.0.0"
+python-versions = ">=3.7"
[package.extras]
-yaml = ["PyYAML (>=3.10)"]
+toml = ["tomli"]
[[package]]
name = "cryptography"
-version = "37.0.4"
+version = "38.0.3"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
category = "main"
optional = false
@@ -145,15 +129,15 @@ cffi = ">=1.12"
[package.extras]
docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"]
-docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"]
+docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"]
pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"]
-sdist = ["setuptools_rust (>=0.11.4)"]
+sdist = ["setuptools-rust (>=0.11.4)"]
ssh = ["bcrypt (>=3.1.5)"]
-test = ["pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"]
+test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"]
[[package]]
name = "distlib"
-version = "0.3.4"
+version = "0.3.6"
description = "Distribution utilities"
category = "dev"
optional = false
@@ -161,14 +145,14 @@ python-versions = "*"
[[package]]
name = "django"
-version = "4.0.6"
+version = "4.1.3"
description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design."
category = "main"
optional = false
python-versions = ">=3.8"
[package.dependencies]
-asgiref = ">=3.4.1,<4"
+asgiref = ">=3.5.2,<4"
sqlparse = ">=0.2.2"
tzdata = {version = "*", markers = "sys_platform == \"win32\""}
@@ -178,7 +162,7 @@ bcrypt = ["bcrypt"]
[[package]]
name = "django-distill"
-version = "2.9.2"
+version = "3.0.1"
description = "Static site renderer and publisher for Django."
category = "main"
optional = false
@@ -190,22 +174,27 @@ requests = "*"
[[package]]
name = "django-environ"
-version = "0.4.5"
-description = "Django-environ allows you to utilize 12factor inspired environment variables to configure your Django application."
+version = "0.9.0"
+description = "A package that allows you to utilize 12factor inspired environment variables to configure your Django application."
category = "main"
optional = false
-python-versions = "*"
+python-versions = ">=3.4,<4"
+
+[package.extras]
+develop = ["coverage[toml] (>=5.0a4)", "furo (>=2021.8.17b43,<2021.9.0)", "pytest (>=4.6.11)", "sphinx (>=3.5.0)", "sphinx-notfound-page"]
+docs = ["furo (>=2021.8.17b43,<2021.9.0)", "sphinx (>=3.5.0)", "sphinx-notfound-page"]
+testing = ["coverage[toml] (>=5.0a4)", "pytest (>=4.6.11)"]
[[package]]
name = "django-filter"
-version = "21.1"
+version = "22.1"
description = "Django-filter is a reusable Django application for allowing users to filter querysets dynamically."
category = "main"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
[package.dependencies]
-Django = ">=2.2"
+Django = ">=3.2"
[[package]]
name = "django-prometheus"
@@ -231,68 +220,60 @@ Django = ">=2.0"
libsass = ">=0.19,<1.0"
[package.extras]
-dev = ["flake8 (>=3.8,<4.0)", "flake8-annotations (>=2.0,<3.0)", "flake8-bugbear (>=20.1,<21.0)", "flake8-docstrings (>=1.4,<2.0)", "flake8-import-order (>=0.18,<1.0)", "flake8-tidy-imports (>=4.0,<5.0)", "flake8-todo (>=0.7,<1.0)", "flake8-string-format (>=0.3,<1.0)", "pdoc (>=0.3,<1.0)", "pep8-naming (>=0.9,<1.0)", "pre-commit (>=2.1,<3.0)", "PyGithub (>=1.43,<2.0)", "wheel (>=0.33,<1.0)"]
+dev = ["PyGithub (>=1.43,<2.0)", "flake8 (>=3.8,<4.0)", "flake8-annotations (>=2.0,<3.0)", "flake8-bugbear (>=20.1,<21.0)", "flake8-docstrings (>=1.4,<2.0)", "flake8-import-order (>=0.18,<1.0)", "flake8-string-format (>=0.3,<1.0)", "flake8-tidy-imports (>=4.0,<5.0)", "flake8-todo (>=0.7,<1.0)", "pdoc (>=0.3,<1.0)", "pep8-naming (>=0.9,<1.0)", "pre-commit (>=2.1,<3.0)", "wheel (>=0.33,<1.0)"]
[[package]]
name = "djangorestframework"
-version = "3.13.1"
+version = "3.14.0"
description = "Web APIs for Django, made easy."
category = "main"
optional = false
python-versions = ">=3.6"
[package.dependencies]
-django = ">=2.2"
+django = ">=3.0"
pytz = "*"
[[package]]
-name = "docopt"
-version = "0.6.2"
-description = "Pythonic argument parser, that will make you smile"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[[package]]
name = "filelock"
-version = "3.7.1"
+version = "3.8.0"
description = "A platform independent file lock."
category = "dev"
optional = false
python-versions = ">=3.7"
[package.extras]
-docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"]
-testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"]
+docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"]
+testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"]
[[package]]
name = "flake8"
-version = "3.9.2"
+version = "5.0.4"
description = "the modular source code checker: pep8 pyflakes and co"
category = "dev"
optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+python-versions = ">=3.6.1"
[package.dependencies]
-mccabe = ">=0.6.0,<0.7.0"
-pycodestyle = ">=2.7.0,<2.8.0"
-pyflakes = ">=2.3.0,<2.4.0"
+mccabe = ">=0.7.0,<0.8.0"
+pycodestyle = ">=2.9.0,<2.10.0"
+pyflakes = ">=2.5.0,<2.6.0"
[[package]]
name = "flake8-annotations"
-version = "2.9.0"
+version = "2.9.1"
description = "Flake8 Type Annotation Checks"
category = "dev"
optional = false
python-versions = ">=3.7,<4.0"
[package.dependencies]
-attrs = ">=21.4,<22.0"
+attrs = ">=21.4"
flake8 = ">=3.7"
[[package]]
name = "flake8-bandit"
-version = "3.0.0"
+version = "4.1.1"
description = "Automated security testing with bandit and flake8."
category = "dev"
optional = false
@@ -300,24 +281,22 @@ python-versions = ">=3.6"
[package.dependencies]
bandit = ">=1.7.3"
-flake8 = "*"
-flake8-polyfill = "*"
-pycodestyle = "*"
+flake8 = ">=5.0.0"
[[package]]
name = "flake8-bugbear"
-version = "20.11.1"
+version = "22.10.27"
description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle."
category = "dev"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
[package.dependencies]
attrs = ">=19.2.0"
flake8 = ">=3.0.0"
[package.extras]
-dev = ["coverage", "black", "hypothesis", "hypothesmith"]
+dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "tox"]
[[package]]
name = "flake8-docstrings"
@@ -341,17 +320,7 @@ python-versions = "*"
[package.dependencies]
pycodestyle = "*"
-
-[[package]]
-name = "flake8-polyfill"
-version = "1.0.2"
-description = "Polyfill package for Flake8 plugins"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[package.dependencies]
-flake8 = "*"
+setuptools = "*"
[[package]]
name = "flake8-string-format"
@@ -398,8 +367,8 @@ python-versions = ">=3.6"
smmap = ">=3.0.1,<6"
[[package]]
-name = "gitpython"
-version = "3.1.27"
+name = "GitPython"
+version = "3.1.29"
description = "GitPython is a python library used to interact with Git repositories"
category = "dev"
optional = false
@@ -410,15 +379,18 @@ gitdb = ">=4.0.1,<5"
[[package]]
name = "gunicorn"
-version = "20.0.4"
+version = "20.1.0"
description = "WSGI HTTP Server for UNIX"
category = "main"
optional = false
-python-versions = ">=3.4"
+python-versions = ">=3.5"
+
+[package.dependencies]
+setuptools = ">=3.0"
[package.extras]
-eventlet = ["eventlet (>=0.9.7)"]
-gevent = ["gevent (>=0.13)"]
+eventlet = ["eventlet (>=0.24.1)"]
+gevent = ["gevent (>=1.4.0)"]
setproctitle = ["setproctitle"]
tornado = ["tornado (>=0.2)"]
@@ -450,7 +422,7 @@ socks = ["socksio (>=1.0.0,<2.0.0)"]
[[package]]
name = "httpx"
-version = "0.23.0"
+version = "0.23.1"
description = "The next generation HTTP client."
category = "main"
optional = false
@@ -458,19 +430,19 @@ python-versions = ">=3.7"
[package.dependencies]
certifi = "*"
-httpcore = ">=0.15.0,<0.16.0"
+httpcore = ">=0.15.0,<0.17.0"
rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]}
sniffio = "*"
[package.extras]
-brotli = ["brotlicffi", "brotli"]
-cli = ["click (>=8.0.0,<9.0.0)", "rich (>=10,<13)", "pygments (>=2.0.0,<3.0.0)"]
+brotli = ["brotli", "brotlicffi"]
+cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"]
http2 = ["h2 (>=3,<5)"]
socks = ["socksio (>=1.0.0,<2.0.0)"]
[[package]]
name = "identify"
-version = "2.5.1"
+version = "2.5.8"
description = "File identification library for Python"
category = "dev"
optional = false
@@ -481,29 +453,13 @@ license = ["ukkonen"]
[[package]]
name = "idna"
-version = "3.3"
+version = "3.4"
description = "Internationalized Domain Names in Applications (IDNA)"
category = "main"
optional = false
python-versions = ">=3.5"
[[package]]
-name = "importlib-metadata"
-version = "4.12.0"
-description = "Read metadata from Python packages"
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-zipp = ">=0.5"
-
-[package.extras]
-docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"]
-perf = ["ipython"]
-testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"]
-
-[[package]]
name = "libsass"
version = "0.21.0"
description = "Sass for Python: A straightforward binding of libsass for Python."
@@ -515,26 +471,23 @@ python-versions = "*"
six = "*"
[[package]]
-name = "markdown"
-version = "3.3.7"
+name = "Markdown"
+version = "3.4.1"
description = "Python implementation of Markdown."
category = "main"
optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""}
+python-versions = ">=3.7"
[package.extras]
testing = ["coverage", "pyyaml"]
[[package]]
name = "mccabe"
-version = "0.6.1"
+version = "0.7.0"
description = "McCabe checker, plugin for flake8"
category = "dev"
optional = false
-python-versions = "*"
+python-versions = ">=3.6"
[[package]]
name = "mslex"
@@ -552,9 +505,12 @@ category = "dev"
optional = false
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*"
+[package.dependencies]
+setuptools = "*"
+
[[package]]
name = "pbr"
-version = "5.9.0"
+version = "5.11.0"
description = "Python Build Reasonableness"
category = "dev"
optional = false
@@ -562,11 +518,11 @@ python-versions = ">=2.6"
[[package]]
name = "pep8-naming"
-version = "0.13.0"
+version = "0.13.2"
description = "Check PEP-8 naming conventions, plugin for flake8"
category = "dev"
optional = false
-python-versions = "*"
+python-versions = ">=3.7"
[package.dependencies]
flake8 = ">=3.9.1"
@@ -580,8 +536,8 @@ optional = false
python-versions = ">=3.7"
[package.extras]
-docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"]
-test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"]
+docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"]
+test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"]
[[package]]
name = "pre-commit"
@@ -601,7 +557,7 @@ virtualenv = ">=20.0.8"
[[package]]
name = "prometheus-client"
-version = "0.14.1"
+version = "0.15.0"
description = "Python client for the Prometheus monitoring system."
category = "main"
optional = false
@@ -612,30 +568,30 @@ twisted = ["twisted"]
[[package]]
name = "psutil"
-version = "5.9.1"
+version = "5.9.3"
description = "Cross-platform lib for process and system monitoring in Python."
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[package.extras]
-test = ["ipaddress", "mock", "enum34", "pywin32", "wmi"]
+test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"]
[[package]]
name = "psycopg2-binary"
-version = "2.8.6"
+version = "2.9.5"
description = "psycopg2 - Python-PostgreSQL Database Adapter"
category = "main"
optional = false
-python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*"
+python-versions = ">=3.6"
[[package]]
name = "pycodestyle"
-version = "2.7.0"
+version = "2.9.1"
description = "Python style guide checker"
category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+python-versions = ">=3.6"
[[package]]
name = "pycparser"
@@ -661,44 +617,55 @@ toml = ["toml"]
[[package]]
name = "pyfakefs"
-version = "4.5.6"
+version = "5.0.0"
description = "pyfakefs implements a fake file system that mocks the Python file system modules."
category = "dev"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
[[package]]
name = "pyflakes"
-version = "2.3.1"
+version = "2.5.0"
description = "passive checker of Python programs"
category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+python-versions = ">=3.6"
[[package]]
-name = "pyjwt"
-version = "2.4.0"
+name = "PyJWT"
+version = "2.6.0"
description = "JSON Web Token implementation in Python"
category = "main"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
[package.dependencies]
-cryptography = {version = ">=3.3.1", optional = true, markers = "extra == \"crypto\""}
+cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""}
[package.extras]
-crypto = ["cryptography (>=3.3.1)"]
-dev = ["sphinx", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.3.1)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "mypy", "pre-commit"]
-docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"]
-tests = ["pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)"]
+crypto = ["cryptography (>=3.4.0)"]
+dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"]
+docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"]
+tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]
+
+[[package]]
+name = "pymdown-extensions"
+version = "9.8"
+description = "Extension pack for Python Markdown."
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+markdown = ">=3.2"
[[package]]
name = "python-dotenv"
-version = "0.17.1"
+version = "0.21.0"
description = "Read key-value pairs from a .env file and set them as environment variables"
category = "dev"
optional = false
-python-versions = "*"
+python-versions = ">=3.7"
[package.extras]
cli = ["click (>=5.0)"]
@@ -716,23 +683,23 @@ PyYAML = "*"
[package.extras]
docs = ["sphinx"]
-test = ["pytest", "toml", "pyaml"]
+test = ["pyaml", "pytest", "toml"]
[[package]]
name = "pytz"
-version = "2022.1"
+version = "2022.5"
description = "World timezone definitions, modern and historical"
category = "main"
optional = false
python-versions = "*"
[[package]]
-name = "pyyaml"
-version = "5.4.1"
+name = "PyYAML"
+version = "6.0"
description = "YAML parser and emitter for Python"
category = "main"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+python-versions = ">=3.6"
[[package]]
name = "requests"
@@ -750,7 +717,7 @@ urllib3 = ">=1.21.1,<1.27"
[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
-use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"]
+use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "rfc3986"
@@ -768,7 +735,7 @@ idna2008 = ["idna"]
[[package]]
name = "sentry-sdk"
-version = "0.20.3"
+version = "1.11.0"
description = "Python client for Sentry (https://sentry.io)"
category = "main"
optional = false
@@ -776,7 +743,7 @@ python-versions = "*"
[package.dependencies]
certifi = "*"
-urllib3 = ">=1.10.0"
+urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""}
[package.extras]
aiohttp = ["aiohttp (>=3.5)"]
@@ -786,15 +753,33 @@ celery = ["celery (>=3)"]
chalice = ["chalice (>=1.16.0)"]
django = ["django (>=1.8)"]
falcon = ["falcon (>=1.4)"]
-flask = ["flask (>=0.11)", "blinker (>=1.1)"]
-pure_eval = ["pure-eval", "executing", "asttokens"]
+fastapi = ["fastapi (>=0.79.0)"]
+flask = ["blinker (>=1.1)", "flask (>=0.11)"]
+httpx = ["httpx (>=0.16.0)"]
+pure-eval = ["asttokens", "executing", "pure-eval"]
+pymongo = ["pymongo (>=3.1)"]
pyspark = ["pyspark (>=2.4.4)"]
+quart = ["blinker (>=1.1)", "quart (>=0.16.1)"]
rq = ["rq (>=0.6)"]
sanic = ["sanic (>=0.8)"]
sqlalchemy = ["sqlalchemy (>=1.2)"]
+starlette = ["starlette (>=0.19.1)"]
tornado = ["tornado (>=5)"]
[[package]]
+name = "setuptools"
+version = "65.5.0"
+description = "Easily download, build, install, upgrade, and uninstall Python packages"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
+testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
+testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
+
+[[package]]
name = "six"
version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
@@ -812,11 +797,11 @@ python-versions = ">=3.6"
[[package]]
name = "sniffio"
-version = "1.2.0"
+version = "1.3.0"
description = "Sniff out which async library your code is running under"
category = "main"
optional = false
-python-versions = ">=3.5"
+python-versions = ">=3.7"
[[package]]
name = "snowballstemmer"
@@ -828,7 +813,7 @@ python-versions = "*"
[[package]]
name = "sqlparse"
-version = "0.4.2"
+version = "0.4.3"
description = "A non-validating SQL parser."
category = "main"
optional = false
@@ -836,27 +821,28 @@ python-versions = ">=3.5"
[[package]]
name = "stevedore"
-version = "3.5.0"
+version = "4.1.0"
description = "Manage dynamic plugins for Python applications"
category = "dev"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.8"
[package.dependencies]
pbr = ">=2.0.0,<2.1.0 || >2.1.0"
[[package]]
name = "taskipy"
-version = "1.7.0"
+version = "1.10.3"
description = "tasks runner for python projects"
category = "dev"
optional = false
python-versions = ">=3.6,<4.0"
[package.dependencies]
-mslex = ">=0.3.0,<0.4.0"
+colorama = ">=0.4.4,<0.5.0"
+mslex = {version = ">=0.3.0,<0.4.0", markers = "sys_platform == \"win32\""}
psutil = ">=5.7.2,<6.0.0"
-toml = ">=0.10.0,<0.11.0"
+tomli = {version = ">=2.0.1,<3.0.0", markers = "python_version >= \"3.7\" and python_version < \"4.0\""}
[[package]]
name = "toml"
@@ -867,8 +853,16 @@ optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
+name = "tomli"
+version = "2.0.1"
+description = "A lil' TOML parser"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[[package]]
name = "tzdata"
-version = "2022.1"
+version = "2022.5"
description = "Provider of IANA time zone data"
category = "main"
optional = false
@@ -876,188 +870,279 @@ python-versions = ">=2"
[[package]]
name = "urllib3"
-version = "1.26.10"
+version = "1.26.12"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4"
[package.extras]
-brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"]
-secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"]
+brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
+secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[[package]]
name = "virtualenv"
-version = "20.15.1"
+version = "20.16.6"
description = "Virtual Python Environment builder"
category = "dev"
optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+python-versions = ">=3.6"
[package.dependencies]
-distlib = ">=0.3.1,<1"
-filelock = ">=3.2,<4"
-platformdirs = ">=2,<3"
-six = ">=1.9.0,<2"
+distlib = ">=0.3.6,<1"
+filelock = ">=3.4.1,<4"
+platformdirs = ">=2.4,<3"
[package.extras]
-docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"]
-testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"]
+docs = ["proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-argparse (>=0.3.2)", "sphinx-rtd-theme (>=1)", "towncrier (>=22.8)"]
+testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"]
[[package]]
name = "whitenoise"
-version = "5.3.0"
+version = "6.2.0"
description = "Radically simplified static file serving for WSGI applications"
category = "main"
optional = false
-python-versions = ">=3.5, <4"
-
-[package.extras]
-brotli = ["brotli"]
-
-[[package]]
-name = "zipp"
-version = "3.8.0"
-description = "Backport of pathlib-compatible object wrapper for zip files"
-category = "main"
-optional = false
python-versions = ">=3.7"
[package.extras]
-docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"]
-testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"]
+brotli = ["Brotli"]
[metadata]
lock-version = "1.1"
-python-versions = "3.9.*"
-content-hash = "c656c07f40d32ee7d30c19a7084b40e1e851209a362a3fe882aa03c2fd286454"
+python-versions = "3.10.*"
+content-hash = "ac9503ea4d6f2a3d81a1b2add7134953caab182786a1de82e1b1c54f8b2de795"
[metadata.files]
anyio = [
- {file = "anyio-3.6.1-py3-none-any.whl", hash = "sha256:cb29b9c70620506a9a8f87a309591713446953302d7d995344d0d7c6c0c9a7be"},
- {file = "anyio-3.6.1.tar.gz", hash = "sha256:413adf95f93886e442aea925f3ee43baa5a765a64a0f52c6081894f9992fdd0b"},
+ {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"},
+ {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"},
]
asgiref = [
{file = "asgiref-3.5.2-py3-none-any.whl", hash = "sha256:1d2880b792ae8757289136f1db2b7b99100ce959b2aa57fd69dab783d05afac4"},
{file = "asgiref-3.5.2.tar.gz", hash = "sha256:4a29362a6acebe09bf1d6640db38c1dc3d9217c68e6f9f6204d72667fc19a424"},
]
attrs = [
- {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"},
- {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
+ {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"},
+ {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"},
+]
+bandit = [
+ {file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"},
+ {file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"},
]
-bandit = []
certifi = [
- {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"},
- {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"},
+ {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"},
+ {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"},
+]
+cffi = [
+ {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"},
+ {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"},
+ {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"},
+ {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"},
+ {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"},
+ {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"},
+ {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"},
+ {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"},
+ {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"},
+ {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"},
+ {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"},
+ {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"},
+ {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"},
+ {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"},
+ {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"},
+ {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"},
+ {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"},
+ {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"},
+ {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"},
+ {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"},
+ {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"},
+ {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"},
+ {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"},
+ {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"},
+ {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"},
+ {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"},
+ {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"},
+ {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"},
+ {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"},
+ {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"},
+ {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"},
+ {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"},
+ {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"},
+ {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"},
+ {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"},
+ {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"},
+ {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"},
+ {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"},
+ {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"},
+ {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"},
+ {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"},
+ {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"},
+ {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"},
+ {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"},
+ {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"},
+ {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"},
+ {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"},
+ {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"},
+ {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"},
+ {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"},
+ {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"},
+ {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"},
+ {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"},
+ {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"},
+ {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"},
+ {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"},
+ {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"},
+ {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"},
+ {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"},
+ {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"},
+ {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"},
+ {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"},
+ {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"},
+ {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"},
]
-cffi = []
cfgv = [
{file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"},
{file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"},
]
-charset-normalizer = []
+charset-normalizer = [
+ {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"},
+ {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"},
+]
colorama = [
- {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"},
- {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"},
+ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
coverage = [
- {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"},
- {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"},
- {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"},
- {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"},
- {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"},
- {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"},
- {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"},
- {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"},
- {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"},
- {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"},
- {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"},
- {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"},
- {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"},
- {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"},
- {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"},
- {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"},
- {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"},
- {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"},
- {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"},
- {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"},
- {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"},
- {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"},
- {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"},
- {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"},
- {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"},
- {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"},
- {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"},
- {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"},
- {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"},
- {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"},
- {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"},
- {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"},
- {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"},
- {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"},
- {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"},
- {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"},
- {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"},
- {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"},
- {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"},
- {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"},
- {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"},
- {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"},
- {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"},
- {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"},
- {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"},
- {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"},
- {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"},
- {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"},
- {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"},
- {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"},
- {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"},
- {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"},
-]
-coveralls = [
- {file = "coveralls-2.2.0-py2.py3-none-any.whl", hash = "sha256:2301a19500b06649d2ec4f2858f9c69638d7699a4c63027c5d53daba666147cc"},
- {file = "coveralls-2.2.0.tar.gz", hash = "sha256:b990ba1f7bc4288e63340be0433698c1efe8217f78c689d254c2540af3d38617"},
-]
-cryptography = []
+ {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"},
+ {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"},
+ {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"},
+ {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"},
+ {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"},
+ {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"},
+ {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"},
+ {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"},
+ {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"},
+ {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"},
+ {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"},
+ {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"},
+ {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"},
+ {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"},
+ {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"},
+ {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"},
+ {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"},
+ {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"},
+ {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"},
+ {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"},
+ {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"},
+ {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"},
+ {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"},
+ {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"},
+ {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"},
+ {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"},
+ {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"},
+ {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"},
+ {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"},
+ {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"},
+ {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"},
+ {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"},
+ {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"},
+ {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"},
+ {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"},
+ {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"},
+ {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"},
+ {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"},
+ {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"},
+ {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"},
+ {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"},
+ {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"},
+ {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"},
+ {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"},
+ {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"},
+ {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"},
+ {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"},
+ {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"},
+ {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"},
+ {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"},
+]
+cryptography = [
+ {file = "cryptography-38.0.3-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320"},
+ {file = "cryptography-38.0.3-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722"},
+ {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f"},
+ {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828"},
+ {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959"},
+ {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2"},
+ {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c"},
+ {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0"},
+ {file = "cryptography-38.0.3-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748"},
+ {file = "cryptography-38.0.3-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146"},
+ {file = "cryptography-38.0.3-cp36-abi3-win32.whl", hash = "sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0"},
+ {file = "cryptography-38.0.3-cp36-abi3-win_amd64.whl", hash = "sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220"},
+ {file = "cryptography-38.0.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd"},
+ {file = "cryptography-38.0.3-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55"},
+ {file = "cryptography-38.0.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b"},
+ {file = "cryptography-38.0.3-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36"},
+ {file = "cryptography-38.0.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d"},
+ {file = "cryptography-38.0.3-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7"},
+ {file = "cryptography-38.0.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249"},
+ {file = "cryptography-38.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50"},
+ {file = "cryptography-38.0.3-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0"},
+ {file = "cryptography-38.0.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8"},
+ {file = "cryptography-38.0.3-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436"},
+ {file = "cryptography-38.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548"},
+ {file = "cryptography-38.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a"},
+ {file = "cryptography-38.0.3.tar.gz", hash = "sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd"},
+]
distlib = [
- {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"},
- {file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"},
+ {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"},
+ {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"},
+]
+django = [
+ {file = "Django-4.1.3-py3-none-any.whl", hash = "sha256:6b1de6886cae14c7c44d188f580f8ba8da05750f544c80ae5ad43375ab293cd5"},
+ {file = "Django-4.1.3.tar.gz", hash = "sha256:678bbfc8604eb246ed54e2063f0765f13b321a50526bdc8cb1f943eda7fa31f1"},
]
-django = []
django-distill = [
- {file = "django-distill-2.9.2.tar.gz", hash = "sha256:91d5f45c2ff78b8efd4805ff5ec17df4ba815bbf51ca12a2cea65727d2f1d42e"},
+ {file = "django-distill-3.0.1.tar.gz", hash = "sha256:8bbac5e45d2afc61cc718d587c6026267c985305f5e599465f2ebc4b0cba9ebf"},
]
django-environ = [
- {file = "django-environ-0.4.5.tar.gz", hash = "sha256:6c9d87660142608f63ec7d5ce5564c49b603ea8ff25da595fd6098f6dc82afde"},
- {file = "django_environ-0.4.5-py2.py3-none-any.whl", hash = "sha256:c57b3c11ec1f319d9474e3e5a79134f40174b17c7cc024bbb2fad84646b120c4"},
+ {file = "django-environ-0.9.0.tar.gz", hash = "sha256:bff5381533056328c9ac02f71790bd5bf1cea81b1beeb648f28b81c9e83e0a21"},
+ {file = "django_environ-0.9.0-py2.py3-none-any.whl", hash = "sha256:f21a5ef8cc603da1870bbf9a09b7e5577ab5f6da451b843dbcc721a7bca6b3d9"},
]
django-filter = [
- {file = "django-filter-21.1.tar.gz", hash = "sha256:632a251fa8f1aadb4b8cceff932bb52fe2f826dd7dfe7f3eac40e5c463d6836e"},
- {file = "django_filter-21.1-py3-none-any.whl", hash = "sha256:f4a6737a30104c98d2e2a5fb93043f36dd7978e0c7ddc92f5998e85433ea5063"},
+ {file = "django-filter-22.1.tar.gz", hash = "sha256:ed473b76e84f7e83b2511bb2050c3efb36d135207d0128dfe3ae4b36e3594ba5"},
+ {file = "django_filter-22.1-py3-none-any.whl", hash = "sha256:ed429e34760127e3520a67f415bec4c905d4649fbe45d0d6da37e6ff5e0287eb"},
]
django-prometheus = [
{file = "django-prometheus-2.2.0.tar.gz", hash = "sha256:240378a1307c408bd5fc85614a3a57f1ce633d4a222c9e291e2bbf325173b801"},
{file = "django_prometheus-2.2.0-py2.py3-none-any.whl", hash = "sha256:e6616770d8820b8834762764bf1b76ec08e1b98e72a6f359d488a2e15fe3537c"},
]
-django-simple-bulma = []
-djangorestframework = []
-docopt = [
- {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"},
+django-simple-bulma = [
+ {file = "django-simple-bulma-2.5.0.tar.gz", hash = "sha256:d4e9f6ea857954a9bdc7a4f16453834a578cd04da5c3a96b2a3241bfcfabead2"},
+ {file = "django_simple_bulma-2.5.0-py3-none-any.whl", hash = "sha256:c413b031494d80f674068a782440c6ec5f20a12501ee7464d6f781a5777fa89c"},
+]
+djangorestframework = [
+ {file = "djangorestframework-3.14.0-py3-none-any.whl", hash = "sha256:eb63f58c9f218e1a7d064d17a70751f528ed4e1d35547fdade9aaf4cd103fd08"},
+ {file = "djangorestframework-3.14.0.tar.gz", hash = "sha256:579a333e6256b09489cbe0a067e66abe55c6595d8926be6b99423786334350c8"},
+]
+filelock = [
+ {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"},
+ {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"},
]
-filelock = []
flake8 = [
- {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"},
- {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"},
+ {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"},
+ {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"},
]
flake8-annotations = [
- {file = "flake8-annotations-2.9.0.tar.gz", hash = "sha256:63fb3f538970b6a8dfd84125cf5af16f7b22e52d5032acb3b7eb23645ecbda9b"},
- {file = "flake8_annotations-2.9.0-py3-none-any.whl", hash = "sha256:84f46de2964cb18fccea968d9eafce7cf857e34d913d515120795b9af6498d56"},
+ {file = "flake8-annotations-2.9.1.tar.gz", hash = "sha256:11f09efb99ae63c8f9d6b492b75fe147fbc323179fddfe00b2e56eefeca42f57"},
+ {file = "flake8_annotations-2.9.1-py3-none-any.whl", hash = "sha256:a4385158a7a9fc8af1d8820a2f4c8d03387997006a83f5f8bfe5bc6085bdf88a"},
+]
+flake8-bandit = [
+ {file = "flake8_bandit-4.1.1-py3-none-any.whl", hash = "sha256:4c8a53eb48f23d4ef1e59293657181a3c989d0077c9952717e98a0eace43e06d"},
+ {file = "flake8_bandit-4.1.1.tar.gz", hash = "sha256:068e09287189cbfd7f986e92605adea2067630b75380c6b5733dab7d87f9a84e"},
]
-flake8-bandit = []
flake8-bugbear = [
- {file = "flake8-bugbear-20.11.1.tar.gz", hash = "sha256:528020129fea2dea33a466b9d64ab650aa3e5f9ffc788b70ea4bc6cf18283538"},
- {file = "flake8_bugbear-20.11.1-py36.py37.py38-none-any.whl", hash = "sha256:f35b8135ece7a014bc0aee5b5d485334ac30a6da48494998cc1fabf7ec70d703"},
+ {file = "flake8-bugbear-22.10.27.tar.gz", hash = "sha256:a6708608965c9e0de5fff13904fed82e0ba21ac929fe4896459226a797e11cd5"},
+ {file = "flake8_bugbear-22.10.27-py3-none-any.whl", hash = "sha256:6ad0ab754507319060695e2f2be80e6d8977cfcea082293089a9226276bd825d"},
]
flake8-docstrings = [
{file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"},
@@ -1067,10 +1152,6 @@ flake8-import-order = [
{file = "flake8-import-order-0.18.1.tar.gz", hash = "sha256:a28dc39545ea4606c1ac3c24e9d05c849c6e5444a50fb7e9cdd430fc94de6e92"},
{file = "flake8_import_order-0.18.1-py2.py3-none-any.whl", hash = "sha256:90a80e46886259b9c396b578d75c749801a41ee969a235e163cfe1be7afd2543"},
]
-flake8-polyfill = [
- {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"},
- {file = "flake8_polyfill-1.0.2-py2.py3-none-any.whl", hash = "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9"},
-]
flake8-string-format = [
{file = "flake8-string-format-0.3.0.tar.gz", hash = "sha256:65f3da786a1461ef77fca3780b314edb2853c377f2e35069723348c8917deaa2"},
{file = "flake8_string_format-0.3.0-py2.py3-none-any.whl", hash = "sha256:812ff431f10576a74c89be4e85b8e075a705be39bc40c4b4278b5b13e2afa9af"},
@@ -1086,29 +1167,34 @@ gitdb = [
{file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"},
{file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"},
]
-gitpython = [
- {file = "GitPython-3.1.27-py3-none-any.whl", hash = "sha256:5b68b000463593e05ff2b261acff0ff0972df8ab1b70d3cdbd41b546c8b8fc3d"},
- {file = "GitPython-3.1.27.tar.gz", hash = "sha256:1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"},
+GitPython = [
+ {file = "GitPython-3.1.29-py3-none-any.whl", hash = "sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f"},
+ {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"},
]
gunicorn = [
- {file = "gunicorn-20.0.4-py2.py3-none-any.whl", hash = "sha256:cd4a810dd51bf497552cf3f863b575dabd73d6ad6a91075b65936b151cbf4f9c"},
- {file = "gunicorn-20.0.4.tar.gz", hash = "sha256:1904bb2b8a43658807108d59c3f3d56c2b6121a701161de0ddf9ad140073c626"},
+ {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"},
+ {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"},
]
h11 = [
{file = "h11-0.12.0-py3-none-any.whl", hash = "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6"},
{file = "h11-0.12.0.tar.gz", hash = "sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"},
]
-httpcore = []
-httpx = []
+httpcore = [
+ {file = "httpcore-0.15.0-py3-none-any.whl", hash = "sha256:1105b8b73c025f23ff7c36468e4432226cbb959176eab66864b8e31c4ee27fa6"},
+ {file = "httpcore-0.15.0.tar.gz", hash = "sha256:18b68ab86a3ccf3e7dc0f43598eaddcf472b602aba29f9aa6ab85fe2ada3980b"},
+]
+httpx = [
+ {file = "httpx-0.23.1-py3-none-any.whl", hash = "sha256:0b9b1f0ee18b9978d637b0776bfd7f54e2ca278e063e3586d8f01cda89e042a8"},
+ {file = "httpx-0.23.1.tar.gz", hash = "sha256:202ae15319be24efe9a8bd4ed4360e68fde7b38bcc2ce87088d416f026667d19"},
+]
identify = [
- {file = "identify-2.5.1-py2.py3-none-any.whl", hash = "sha256:0dca2ea3e4381c435ef9c33ba100a78a9b40c0bab11189c7cf121f75815efeaa"},
- {file = "identify-2.5.1.tar.gz", hash = "sha256:3d11b16f3fe19f52039fb7e39c9c884b21cb1b586988114fbe42671f03de3e82"},
+ {file = "identify-2.5.8-py2.py3-none-any.whl", hash = "sha256:48b7925fe122720088aeb7a6c34f17b27e706b72c61070f27fe3789094233440"},
+ {file = "identify-2.5.8.tar.gz", hash = "sha256:7a214a10313b9489a0d61467db2856ae8d0b8306fc923e03a9effa53d8aedc58"},
]
idna = [
- {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
- {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
+ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
+ {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
]
-importlib-metadata = []
libsass = [
{file = "libsass-0.21.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:06c8776417fe930714bdc930a3d7e795ae3d72be6ac883ff72a1b8f7c49e5ffb"},
{file = "libsass-0.21.0-cp27-cp27m-win32.whl", hash = "sha256:a005f298f64624f313a3ac618ab03f844c71d84ae4f4a4aec4b68d2a4ffe75eb"},
@@ -1121,98 +1207,156 @@ libsass = [
{file = "libsass-0.21.0-cp38-abi3-macosx_12_0_arm64.whl", hash = "sha256:c9ec490609752c1d81ff6290da33485aa7cb6d7365ac665b74464c1b7d97f7da"},
{file = "libsass-0.21.0.tar.gz", hash = "sha256:d5ba529d9ce668be9380563279f3ffe988f27bc5b299c5a28453df2e0b0fbaf2"},
]
-markdown = []
+Markdown = [
+ {file = "Markdown-3.4.1-py3-none-any.whl", hash = "sha256:08fb8465cffd03d10b9dd34a5c3fea908e20391a2a90b88d66362cb05beed186"},
+ {file = "Markdown-3.4.1.tar.gz", hash = "sha256:3b809086bb6efad416156e00a0da66fe47618a5d6918dd688f53f40c8e4cfeff"},
+]
mccabe = [
- {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
- {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
+ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
+ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
]
mslex = [
{file = "mslex-0.3.0-py2.py3-none-any.whl", hash = "sha256:380cb14abf8fabf40e56df5c8b21a6d533dc5cbdcfe42406bbf08dda8f42e42a"},
{file = "mslex-0.3.0.tar.gz", hash = "sha256:4a1ac3f25025cad78ad2fe499dd16d42759f7a3801645399cce5c404415daa97"},
]
-nodeenv = []
-pbr = []
-pep8-naming = []
+nodeenv = [
+ {file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"},
+ {file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"},
+]
+pbr = [
+ {file = "pbr-5.11.0-py2.py3-none-any.whl", hash = "sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a"},
+ {file = "pbr-5.11.0.tar.gz", hash = "sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe"},
+]
+pep8-naming = [
+ {file = "pep8-naming-0.13.2.tar.gz", hash = "sha256:93eef62f525fd12a6f8c98f4dcc17fa70baae2f37fa1f73bec00e3e44392fa48"},
+ {file = "pep8_naming-0.13.2-py3-none-any.whl", hash = "sha256:59e29e55c478db69cffbe14ab24b5bd2cd615c0413edf790d47d3fb7ba9a4e23"},
+]
platformdirs = [
{file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
{file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"},
]
-pre-commit = []
-prometheus-client = []
+pre-commit = [
+ {file = "pre_commit-2.20.0-py2.py3-none-any.whl", hash = "sha256:51a5ba7c480ae8072ecdb6933df22d2f812dc897d5fe848778116129a681aac7"},
+ {file = "pre_commit-2.20.0.tar.gz", hash = "sha256:a978dac7bc9ec0bcee55c18a277d553b0f419d259dadb4b9418ff2d00eb43959"},
+]
+prometheus-client = [
+ {file = "prometheus_client-0.15.0-py3-none-any.whl", hash = "sha256:db7c05cbd13a0f79975592d112320f2605a325969b270a94b71dcabc47b931d2"},
+ {file = "prometheus_client-0.15.0.tar.gz", hash = "sha256:be26aa452490cfcf6da953f9436e95a9f2b4d578ca80094b4458930e5f584ab1"},
+]
psutil = [
- {file = "psutil-5.9.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:799759d809c31aab5fe4579e50addf84565e71c1dc9f1c31258f159ff70d3f87"},
- {file = "psutil-5.9.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9272167b5f5fbfe16945be3db475b3ce8d792386907e673a209da686176552af"},
- {file = "psutil-5.9.1-cp27-cp27m-win32.whl", hash = "sha256:0904727e0b0a038830b019551cf3204dd48ef5c6868adc776e06e93d615fc5fc"},
- {file = "psutil-5.9.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e7e10454cb1ab62cc6ce776e1c135a64045a11ec4c6d254d3f7689c16eb3efd2"},
- {file = "psutil-5.9.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:56960b9e8edcca1456f8c86a196f0c3d8e3e361320071c93378d41445ffd28b0"},
- {file = "psutil-5.9.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:44d1826150d49ffd62035785a9e2c56afcea66e55b43b8b630d7706276e87f22"},
- {file = "psutil-5.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7be9d7f5b0d206f0bbc3794b8e16fb7dbc53ec9e40bbe8787c6f2d38efcf6c9"},
- {file = "psutil-5.9.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd9246e4cdd5b554a2ddd97c157e292ac11ef3e7af25ac56b08b455c829dca8"},
- {file = "psutil-5.9.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29a442e25fab1f4d05e2655bb1b8ab6887981838d22effa2396d584b740194de"},
- {file = "psutil-5.9.1-cp310-cp310-win32.whl", hash = "sha256:20b27771b077dcaa0de1de3ad52d22538fe101f9946d6dc7869e6f694f079329"},
- {file = "psutil-5.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:58678bbadae12e0db55186dc58f2888839228ac9f41cc7848853539b70490021"},
- {file = "psutil-5.9.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3a76ad658641172d9c6e593de6fe248ddde825b5866464c3b2ee26c35da9d237"},
- {file = "psutil-5.9.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6a11e48cb93a5fa606306493f439b4aa7c56cb03fc9ace7f6bfa21aaf07c453"},
- {file = "psutil-5.9.1-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:068935df39055bf27a29824b95c801c7a5130f118b806eee663cad28dca97685"},
- {file = "psutil-5.9.1-cp36-cp36m-win32.whl", hash = "sha256:0f15a19a05f39a09327345bc279c1ba4a8cfb0172cc0d3c7f7d16c813b2e7d36"},
- {file = "psutil-5.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:db417f0865f90bdc07fa30e1aadc69b6f4cad7f86324b02aa842034efe8d8c4d"},
- {file = "psutil-5.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:91c7ff2a40c373d0cc9121d54bc5f31c4fa09c346528e6a08d1845bce5771ffc"},
- {file = "psutil-5.9.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fea896b54f3a4ae6f790ac1d017101252c93f6fe075d0e7571543510f11d2676"},
- {file = "psutil-5.9.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3054e923204b8e9c23a55b23b6df73a8089ae1d075cb0bf711d3e9da1724ded4"},
- {file = "psutil-5.9.1-cp37-cp37m-win32.whl", hash = "sha256:d2d006286fbcb60f0b391741f520862e9b69f4019b4d738a2a45728c7e952f1b"},
- {file = "psutil-5.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:b14ee12da9338f5e5b3a3ef7ca58b3cba30f5b66f7662159762932e6d0b8f680"},
- {file = "psutil-5.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:19f36c16012ba9cfc742604df189f2f28d2720e23ff7d1e81602dbe066be9fd1"},
- {file = "psutil-5.9.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:944c4b4b82dc4a1b805329c980f270f170fdc9945464223f2ec8e57563139cf4"},
- {file = "psutil-5.9.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b6750a73a9c4a4e689490ccb862d53c7b976a2a35c4e1846d049dcc3f17d83b"},
- {file = "psutil-5.9.1-cp38-cp38-win32.whl", hash = "sha256:a8746bfe4e8f659528c5c7e9af5090c5a7d252f32b2e859c584ef7d8efb1e689"},
- {file = "psutil-5.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:79c9108d9aa7fa6fba6e668b61b82facc067a6b81517cab34d07a84aa89f3df0"},
- {file = "psutil-5.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:28976df6c64ddd6320d281128817f32c29b539a52bdae5e192537bc338a9ec81"},
- {file = "psutil-5.9.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b88f75005586131276634027f4219d06e0561292be8bd6bc7f2f00bdabd63c4e"},
- {file = "psutil-5.9.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:645bd4f7bb5b8633803e0b6746ff1628724668681a434482546887d22c7a9537"},
- {file = "psutil-5.9.1-cp39-cp39-win32.whl", hash = "sha256:32c52611756096ae91f5d1499fe6c53b86f4a9ada147ee42db4991ba1520e574"},
- {file = "psutil-5.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:f65f9a46d984b8cd9b3750c2bdb419b2996895b005aefa6cbaba9a143b1ce2c5"},
- {file = "psutil-5.9.1.tar.gz", hash = "sha256:57f1819b5d9e95cdfb0c881a8a5b7d542ed0b7c522d575706a80bedc848c8954"},
+ {file = "psutil-5.9.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b4a247cd3feaae39bb6085fcebf35b3b8ecd9b022db796d89c8f05067ca28e71"},
+ {file = "psutil-5.9.3-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:5fa88e3d5d0b480602553d362c4b33a63e0c40bfea7312a7bf78799e01e0810b"},
+ {file = "psutil-5.9.3-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:767ef4fa33acda16703725c0473a91e1832d296c37c63896c7153ba81698f1ab"},
+ {file = "psutil-5.9.3-cp27-cp27m-win32.whl", hash = "sha256:9a4af6ed1094f867834f5f07acd1250605a0874169a5fcadbcec864aec2496a6"},
+ {file = "psutil-5.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:fa5e32c7d9b60b2528108ade2929b115167fe98d59f89555574715054f50fa31"},
+ {file = "psutil-5.9.3-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:fe79b4ad4836e3da6c4650cb85a663b3a51aef22e1a829c384e18fae87e5e727"},
+ {file = "psutil-5.9.3-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:db8e62016add2235cc87fb7ea000ede9e4ca0aa1f221b40cef049d02d5d2593d"},
+ {file = "psutil-5.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:941a6c2c591da455d760121b44097781bc970be40e0e43081b9139da485ad5b7"},
+ {file = "psutil-5.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:71b1206e7909792d16933a0d2c1c7f04ae196186c51ba8567abae1d041f06dcb"},
+ {file = "psutil-5.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f57d63a2b5beaf797b87024d018772439f9d3103a395627b77d17a8d72009543"},
+ {file = "psutil-5.9.3-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7507f6c7b0262d3e7b0eeda15045bf5881f4ada70473b87bc7b7c93b992a7d7"},
+ {file = "psutil-5.9.3-cp310-cp310-win32.whl", hash = "sha256:1b540599481c73408f6b392cdffef5b01e8ff7a2ac8caae0a91b8222e88e8f1e"},
+ {file = "psutil-5.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:547ebb02031fdada635452250ff39942db8310b5c4a8102dfe9384ee5791e650"},
+ {file = "psutil-5.9.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d8c3cc6bb76492133474e130a12351a325336c01c96a24aae731abf5a47fe088"},
+ {file = "psutil-5.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07d880053c6461c9b89cd5d4808f3b8336665fa3acdefd6777662c5ed73a851a"},
+ {file = "psutil-5.9.3-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e8b50241dd3c2ed498507f87a6602825073c07f3b7e9560c58411c14fe1e1c9"},
+ {file = "psutil-5.9.3-cp36-cp36m-win32.whl", hash = "sha256:828c9dc9478b34ab96be75c81942d8df0c2bb49edbb481f597314d92b6441d89"},
+ {file = "psutil-5.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:ed15edb14f52925869250b1375f0ff58ca5c4fa8adefe4883cfb0737d32f5c02"},
+ {file = "psutil-5.9.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d266cd05bd4a95ca1c2b9b5aac50d249cf7c94a542f47e0b22928ddf8b80d1ef"},
+ {file = "psutil-5.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e4939ff75149b67aef77980409f156f0082fa36accc475d45c705bb00c6c16a"},
+ {file = "psutil-5.9.3-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68fa227c32240c52982cb931801c5707a7f96dd8927f9102d6c7771ea1ff5698"},
+ {file = "psutil-5.9.3-cp37-cp37m-win32.whl", hash = "sha256:beb57d8a1ca0ae0eb3d08ccaceb77e1a6d93606f0e1754f0d60a6ebd5c288837"},
+ {file = "psutil-5.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:12500d761ac091f2426567f19f95fd3f15a197d96befb44a5c1e3cbe6db5752c"},
+ {file = "psutil-5.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba38cf9984d5462b506e239cf4bc24e84ead4b1d71a3be35e66dad0d13ded7c1"},
+ {file = "psutil-5.9.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:46907fa62acaac364fff0b8a9da7b360265d217e4fdeaca0a2397a6883dffba2"},
+ {file = "psutil-5.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a04a1836894c8279e5e0a0127c0db8e198ca133d28be8a2a72b4db16f6cf99c1"},
+ {file = "psutil-5.9.3-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a4e07611997acf178ad13b842377e3d8e9d0a5bac43ece9bfc22a96735d9a4f"},
+ {file = "psutil-5.9.3-cp38-cp38-win32.whl", hash = "sha256:6ced1ad823ecfa7d3ce26fe8aa4996e2e53fb49b7fed8ad81c80958501ec0619"},
+ {file = "psutil-5.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:35feafe232d1aaf35d51bd42790cbccb882456f9f18cdc411532902370d660df"},
+ {file = "psutil-5.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:538fcf6ae856b5e12d13d7da25ad67f02113c96f5989e6ad44422cb5994ca7fc"},
+ {file = "psutil-5.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a3d81165b8474087bb90ec4f333a638ccfd1d69d34a9b4a1a7eaac06648f9fbe"},
+ {file = "psutil-5.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a7826e68b0cf4ce2c1ee385d64eab7d70e3133171376cac53d7c1790357ec8f"},
+ {file = "psutil-5.9.3-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ec296f565191f89c48f33d9544d8d82b0d2af7dd7d2d4e6319f27a818f8d1cc"},
+ {file = "psutil-5.9.3-cp39-cp39-win32.whl", hash = "sha256:9ec95df684583b5596c82bb380c53a603bb051cf019d5c849c47e117c5064395"},
+ {file = "psutil-5.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4bd4854f0c83aa84a5a40d3b5d0eb1f3c128f4146371e03baed4589fe4f3c931"},
+ {file = "psutil-5.9.3.tar.gz", hash = "sha256:7ccfcdfea4fc4b0a02ca2c31de7fcd186beb9cff8207800e14ab66f79c773af6"},
]
psycopg2-binary = [
- {file = "psycopg2-binary-2.8.6.tar.gz", hash = "sha256:11b9c0ebce097180129e422379b824ae21c8f2a6596b159c7659e2e5a00e1aa0"},
- {file = "psycopg2_binary-2.8.6-cp27-cp27m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:d14b140a4439d816e3b1229a4a525df917d6ea22a0771a2a78332273fd9528a4"},
- {file = "psycopg2_binary-2.8.6-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:1fabed9ea2acc4efe4671b92c669a213db744d2af8a9fc5d69a8e9bc14b7a9db"},
- {file = "psycopg2_binary-2.8.6-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:f5ab93a2cb2d8338b1674be43b442a7f544a0971da062a5da774ed40587f18f5"},
- {file = "psycopg2_binary-2.8.6-cp27-cp27m-win32.whl", hash = "sha256:b4afc542c0ac0db720cf516dd20c0846f71c248d2b3d21013aa0d4ef9c71ca25"},
- {file = "psycopg2_binary-2.8.6-cp27-cp27m-win_amd64.whl", hash = "sha256:e74a55f6bad0e7d3968399deb50f61f4db1926acf4a6d83beaaa7df986f48b1c"},
- {file = "psycopg2_binary-2.8.6-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:0deac2af1a587ae12836aa07970f5cb91964f05a7c6cdb69d8425ff4c15d4e2c"},
- {file = "psycopg2_binary-2.8.6-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ad20d2eb875aaa1ea6d0f2916949f5c08a19c74d05b16ce6ebf6d24f2c9f75d1"},
- {file = "psycopg2_binary-2.8.6-cp34-cp34m-win32.whl", hash = "sha256:950bc22bb56ee6ff142a2cb9ee980b571dd0912b0334aa3fe0fe3788d860bea2"},
- {file = "psycopg2_binary-2.8.6-cp34-cp34m-win_amd64.whl", hash = "sha256:b8a3715b3c4e604bcc94c90a825cd7f5635417453b253499664f784fc4da0152"},
- {file = "psycopg2_binary-2.8.6-cp35-cp35m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:d1b4ab59e02d9008efe10ceabd0b31e79519da6fb67f7d8e8977118832d0f449"},
- {file = "psycopg2_binary-2.8.6-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:ac0c682111fbf404525dfc0f18a8b5f11be52657d4f96e9fcb75daf4f3984859"},
- {file = "psycopg2_binary-2.8.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7d92a09b788cbb1aec325af5fcba9fed7203897bbd9269d5691bb1e3bce29550"},
- {file = "psycopg2_binary-2.8.6-cp35-cp35m-win32.whl", hash = "sha256:aaa4213c862f0ef00022751161df35804127b78adf4a2755b9f991a507e425fd"},
- {file = "psycopg2_binary-2.8.6-cp35-cp35m-win_amd64.whl", hash = "sha256:c2507d796fca339c8fb03216364cca68d87e037c1f774977c8fc377627d01c71"},
- {file = "psycopg2_binary-2.8.6-cp36-cp36m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:ee69dad2c7155756ad114c02db06002f4cded41132cc51378e57aad79cc8e4f4"},
- {file = "psycopg2_binary-2.8.6-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:e82aba2188b9ba309fd8e271702bd0d0fc9148ae3150532bbb474f4590039ffb"},
- {file = "psycopg2_binary-2.8.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d5227b229005a696cc67676e24c214740efd90b148de5733419ac9aaba3773da"},
- {file = "psycopg2_binary-2.8.6-cp36-cp36m-win32.whl", hash = "sha256:a0eb43a07386c3f1f1ebb4dc7aafb13f67188eab896e7397aa1ee95a9c884eb2"},
- {file = "psycopg2_binary-2.8.6-cp36-cp36m-win_amd64.whl", hash = "sha256:e1f57aa70d3f7cc6947fd88636a481638263ba04a742b4a37dd25c373e41491a"},
- {file = "psycopg2_binary-2.8.6-cp37-cp37m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:833709a5c66ca52f1d21d41865a637223b368c0ee76ea54ca5bad6f2526c7679"},
- {file = "psycopg2_binary-2.8.6-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:ba28584e6bca48c59eecbf7efb1576ca214b47f05194646b081717fa628dfddf"},
- {file = "psycopg2_binary-2.8.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:6a32f3a4cb2f6e1a0b15215f448e8ce2da192fd4ff35084d80d5e39da683e79b"},
- {file = "psycopg2_binary-2.8.6-cp37-cp37m-win32.whl", hash = "sha256:0e4dc3d5996760104746e6cfcdb519d9d2cd27c738296525d5867ea695774e67"},
- {file = "psycopg2_binary-2.8.6-cp37-cp37m-win_amd64.whl", hash = "sha256:cec7e622ebc545dbb4564e483dd20e4e404da17ae07e06f3e780b2dacd5cee66"},
- {file = "psycopg2_binary-2.8.6-cp38-cp38-macosx_10_9_x86_64.macosx_10_9_intel.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:ba381aec3a5dc29634f20692349d73f2d21f17653bda1decf0b52b11d694541f"},
- {file = "psycopg2_binary-2.8.6-cp38-cp38-manylinux1_i686.whl", hash = "sha256:a0c50db33c32594305b0ef9abc0cb7db13de7621d2cadf8392a1d9b3c437ef77"},
- {file = "psycopg2_binary-2.8.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2dac98e85565d5688e8ab7bdea5446674a83a3945a8f416ad0110018d1501b94"},
- {file = "psycopg2_binary-2.8.6-cp38-cp38-win32.whl", hash = "sha256:bd1be66dde2b82f80afb9459fc618216753f67109b859a361cf7def5c7968729"},
- {file = "psycopg2_binary-2.8.6-cp38-cp38-win_amd64.whl", hash = "sha256:8cd0fb36c7412996859cb4606a35969dd01f4ea34d9812a141cd920c3b18be77"},
- {file = "psycopg2_binary-2.8.6-cp39-cp39-macosx_10_9_x86_64.macosx_10_9_intel.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:89705f45ce07b2dfa806ee84439ec67c5d9a0ef20154e0e475e2b2ed392a5b83"},
- {file = "psycopg2_binary-2.8.6-cp39-cp39-manylinux1_i686.whl", hash = "sha256:42ec1035841b389e8cc3692277a0bd81cdfe0b65d575a2c8862cec7a80e62e52"},
- {file = "psycopg2_binary-2.8.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7312e931b90fe14f925729cde58022f5d034241918a5c4f9797cac62f6b3a9dd"},
- {file = "psycopg2_binary-2.8.6-cp39-cp39-win32.whl", hash = "sha256:6422f2ff0919fd720195f64ffd8f924c1395d30f9a495f31e2392c2efafb5056"},
- {file = "psycopg2_binary-2.8.6-cp39-cp39-win_amd64.whl", hash = "sha256:15978a1fbd225583dd8cdaf37e67ccc278b5abecb4caf6b2d6b8e2b948e953f6"},
+ {file = "psycopg2-binary-2.9.5.tar.gz", hash = "sha256:33e632d0885b95a8b97165899006c40e9ecdc634a529dca7b991eb7de4ece41c"},
+ {file = "psycopg2_binary-2.9.5-cp310-cp310-macosx_10_15_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:0775d6252ccb22b15da3b5d7adbbf8cfe284916b14b6dc0ff503a23edb01ee85"},
+ {file = "psycopg2_binary-2.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ec46ed947801652c9643e0b1dc334cfb2781232e375ba97312c2fc256597632"},
+ {file = "psycopg2_binary-2.9.5-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3520d7af1ebc838cc6084a3281145d5cd5bdd43fdef139e6db5af01b92596cb7"},
+ {file = "psycopg2_binary-2.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cbc554ba47ecca8cd3396ddaca85e1ecfe3e48dd57dc5e415e59551affe568e"},
+ {file = "psycopg2_binary-2.9.5-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:5d28ecdf191db558d0c07d0f16524ee9d67896edf2b7990eea800abeb23ebd61"},
+ {file = "psycopg2_binary-2.9.5-cp310-cp310-manylinux_2_24_ppc64le.whl", hash = "sha256:b9c33d4aef08dfecbd1736ceab8b7b3c4358bf10a0121483e5cd60d3d308cc64"},
+ {file = "psycopg2_binary-2.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:05b3d479425e047c848b9782cd7aac9c6727ce23181eb9647baf64ffdfc3da41"},
+ {file = "psycopg2_binary-2.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:1e491e6489a6cb1d079df8eaa15957c277fdedb102b6a68cfbf40c4994412fd0"},
+ {file = "psycopg2_binary-2.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:9e32cedc389bcb76d9f24ea8a012b3cb8385ee362ea437e1d012ffaed106c17d"},
+ {file = "psycopg2_binary-2.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:46850a640df62ae940e34a163f72e26aca1f88e2da79148e1862faaac985c302"},
+ {file = "psycopg2_binary-2.9.5-cp310-cp310-win32.whl", hash = "sha256:3d790f84201c3698d1bfb404c917f36e40531577a6dda02e45ba29b64d539867"},
+ {file = "psycopg2_binary-2.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:1764546ffeaed4f9428707be61d68972eb5ede81239b46a45843e0071104d0dd"},
+ {file = "psycopg2_binary-2.9.5-cp311-cp311-macosx_10_9_universal2.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:426c2ae999135d64e6a18849a7d1ad0e1bd007277e4a8f4752eaa40a96b550ff"},
+ {file = "psycopg2_binary-2.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7cf1d44e710ca3a9ce952bda2855830fe9f9017ed6259e01fcd71ea6287565f5"},
+ {file = "psycopg2_binary-2.9.5-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:024030b13bdcbd53d8a93891a2cf07719715724fc9fee40243f3bd78b4264b8f"},
+ {file = "psycopg2_binary-2.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcda1c84a1c533c528356da5490d464a139b6e84eb77cc0b432e38c5c6dd7882"},
+ {file = "psycopg2_binary-2.9.5-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:2ef892cabdccefe577088a79580301f09f2a713eb239f4f9f62b2b29cafb0577"},
+ {file = "psycopg2_binary-2.9.5-cp311-cp311-manylinux_2_24_ppc64le.whl", hash = "sha256:af0516e1711995cb08dc19bbd05bec7dbdebf4185f68870595156718d237df3e"},
+ {file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e72c91bda9880f097c8aa3601a2c0de6c708763ba8128006151f496ca9065935"},
+ {file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e67b3c26e9b6d37b370c83aa790bbc121775c57bfb096c2e77eacca25fd0233b"},
+ {file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5fc447058d083b8c6ac076fc26b446d44f0145308465d745fba93a28c14c9e32"},
+ {file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d892bfa1d023c3781a3cab8dd5af76b626c483484d782e8bd047c180db590e4c"},
+ {file = "psycopg2_binary-2.9.5-cp311-cp311-win32.whl", hash = "sha256:2abccab84d057723d2ca8f99ff7b619285d40da6814d50366f61f0fc385c3903"},
+ {file = "psycopg2_binary-2.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:bef7e3f9dc6f0c13afdd671008534be5744e0e682fb851584c8c3a025ec09720"},
+ {file = "psycopg2_binary-2.9.5-cp36-cp36m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:6e63814ec71db9bdb42905c925639f319c80e7909fb76c3b84edc79dadef8d60"},
+ {file = "psycopg2_binary-2.9.5-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:212757ffcecb3e1a5338d4e6761bf9c04f750e7d027117e74aa3cd8a75bb6fbd"},
+ {file = "psycopg2_binary-2.9.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f8a9bcab7b6db2e3dbf65b214dfc795b4c6b3bb3af922901b6a67f7cb47d5f8"},
+ {file = "psycopg2_binary-2.9.5-cp36-cp36m-manylinux_2_24_aarch64.whl", hash = "sha256:56b2957a145f816726b109ee3d4e6822c23f919a7d91af5a94593723ed667835"},
+ {file = "psycopg2_binary-2.9.5-cp36-cp36m-manylinux_2_24_ppc64le.whl", hash = "sha256:f95b8aca2703d6a30249f83f4fe6a9abf2e627aa892a5caaab2267d56be7ab69"},
+ {file = "psycopg2_binary-2.9.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:70831e03bd53702c941da1a1ad36c17d825a24fbb26857b40913d58df82ec18b"},
+ {file = "psycopg2_binary-2.9.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:dbc332beaf8492b5731229a881807cd7b91b50dbbbaf7fe2faf46942eda64a24"},
+ {file = "psycopg2_binary-2.9.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:2d964eb24c8b021623df1c93c626671420c6efadbdb8655cb2bd5e0c6fa422ba"},
+ {file = "psycopg2_binary-2.9.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:95076399ec3b27a8f7fa1cc9a83417b1c920d55cf7a97f718a94efbb96c7f503"},
+ {file = "psycopg2_binary-2.9.5-cp36-cp36m-win32.whl", hash = "sha256:3fc33295cfccad697a97a76dec3f1e94ad848b7b163c3228c1636977966b51e2"},
+ {file = "psycopg2_binary-2.9.5-cp36-cp36m-win_amd64.whl", hash = "sha256:02551647542f2bf89073d129c73c05a25c372fc0a49aa50e0de65c3c143d8bd0"},
+ {file = "psycopg2_binary-2.9.5-cp37-cp37m-macosx_10_15_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:63e318dbe52709ed10d516a356f22a635e07a2e34c68145484ed96a19b0c4c68"},
+ {file = "psycopg2_binary-2.9.5-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7e518a0911c50f60313cb9e74a169a65b5d293770db4770ebf004245f24b5c5"},
+ {file = "psycopg2_binary-2.9.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9d38a4656e4e715d637abdf7296e98d6267df0cc0a8e9a016f8ba07e4aa3eeb"},
+ {file = "psycopg2_binary-2.9.5-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:68d81a2fe184030aa0c5c11e518292e15d342a667184d91e30644c9d533e53e1"},
+ {file = "psycopg2_binary-2.9.5-cp37-cp37m-manylinux_2_24_ppc64le.whl", hash = "sha256:7ee3095d02d6f38bd7d9a5358fcc9ea78fcdb7176921528dd709cc63f40184f5"},
+ {file = "psycopg2_binary-2.9.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:46512486be6fbceef51d7660dec017394ba3e170299d1dc30928cbedebbf103a"},
+ {file = "psycopg2_binary-2.9.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b911dfb727e247340d36ae20c4b9259e4a64013ab9888ccb3cbba69b77fd9636"},
+ {file = "psycopg2_binary-2.9.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:422e3d43b47ac20141bc84b3d342eead8d8099a62881a501e97d15f6addabfe9"},
+ {file = "psycopg2_binary-2.9.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c5682a45df7d9642eff590abc73157c887a68f016df0a8ad722dcc0f888f56d7"},
+ {file = "psycopg2_binary-2.9.5-cp37-cp37m-win32.whl", hash = "sha256:b8104f709590fff72af801e916817560dbe1698028cd0afe5a52d75ceb1fce5f"},
+ {file = "psycopg2_binary-2.9.5-cp37-cp37m-win_amd64.whl", hash = "sha256:7b3751857da3e224f5629400736a7b11e940b5da5f95fa631d86219a1beaafec"},
+ {file = "psycopg2_binary-2.9.5-cp38-cp38-macosx_10_15_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:043a9fd45a03858ff72364b4b75090679bd875ee44df9c0613dc862ca6b98460"},
+ {file = "psycopg2_binary-2.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9ffdc51001136b699f9563b1c74cc1f8c07f66ef7219beb6417a4c8aaa896c28"},
+ {file = "psycopg2_binary-2.9.5-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c15ba5982c177bc4b23a7940c7e4394197e2d6a424a2d282e7c236b66da6d896"},
+ {file = "psycopg2_binary-2.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc85b3777068ed30aff8242be2813038a929f2084f69e43ef869daddae50f6ee"},
+ {file = "psycopg2_binary-2.9.5-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:215d6bf7e66732a514f47614f828d8c0aaac9a648c46a831955cb103473c7147"},
+ {file = "psycopg2_binary-2.9.5-cp38-cp38-manylinux_2_24_ppc64le.whl", hash = "sha256:7d07f552d1e412f4b4e64ce386d4c777a41da3b33f7098b6219012ba534fb2c2"},
+ {file = "psycopg2_binary-2.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a0adef094c49f242122bb145c3c8af442070dc0e4312db17e49058c1702606d4"},
+ {file = "psycopg2_binary-2.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:00475004e5ed3e3bf5e056d66e5dcdf41a0dc62efcd57997acd9135c40a08a50"},
+ {file = "psycopg2_binary-2.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7d88db096fa19d94f433420eaaf9f3c45382da2dd014b93e4bf3215639047c16"},
+ {file = "psycopg2_binary-2.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:902844f9c4fb19b17dfa84d9e2ca053d4a4ba265723d62ea5c9c26b38e0aa1e6"},
+ {file = "psycopg2_binary-2.9.5-cp38-cp38-win32.whl", hash = "sha256:4e7904d1920c0c89105c0517dc7e3f5c20fb4e56ba9cdef13048db76947f1d79"},
+ {file = "psycopg2_binary-2.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:a36a0e791805aa136e9cbd0ffa040d09adec8610453ee8a753f23481a0057af5"},
+ {file = "psycopg2_binary-2.9.5-cp39-cp39-macosx_10_15_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:25382c7d174c679ce6927c16b6fbb68b10e56ee44b1acb40671e02d29f2fce7c"},
+ {file = "psycopg2_binary-2.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9c38d3869238e9d3409239bc05bc27d6b7c99c2a460ea337d2814b35fb4fea1b"},
+ {file = "psycopg2_binary-2.9.5-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5c6527c8efa5226a9e787507652dd5ba97b62d29b53c371a85cd13f957fe4d42"},
+ {file = "psycopg2_binary-2.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e59137cdb970249ae60be2a49774c6dfb015bd0403f05af1fe61862e9626642d"},
+ {file = "psycopg2_binary-2.9.5-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:d4c7b3a31502184e856df1f7bbb2c3735a05a8ce0ade34c5277e1577738a5c91"},
+ {file = "psycopg2_binary-2.9.5-cp39-cp39-manylinux_2_24_ppc64le.whl", hash = "sha256:b9a794cef1d9c1772b94a72eec6da144c18e18041d294a9ab47669bc77a80c1d"},
+ {file = "psycopg2_binary-2.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5254cbd4f4855e11cebf678c1a848a3042d455a22a4ce61349c36aafd4c2267"},
+ {file = "psycopg2_binary-2.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c5e65c6ac0ae4bf5bef1667029f81010b6017795dcb817ba5c7b8a8d61fab76f"},
+ {file = "psycopg2_binary-2.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:74eddec4537ab1f701a1647214734bc52cee2794df748f6ae5908e00771f180a"},
+ {file = "psycopg2_binary-2.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:01ad49d68dd8c5362e4bfb4158f2896dc6e0c02e87b8a3770fc003459f1a4425"},
+ {file = "psycopg2_binary-2.9.5-cp39-cp39-win32.whl", hash = "sha256:937880290775033a743f4836aa253087b85e62784b63fd099ee725d567a48aa1"},
+ {file = "psycopg2_binary-2.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:484405b883630f3e74ed32041a87456c5e0e63a8e3429aa93e8714c366d62bd1"},
]
pycodestyle = [
- {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"},
- {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"},
+ {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"},
+ {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"},
]
pycparser = [
{file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
@@ -1222,66 +1366,91 @@ pydocstyle = [
{file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"},
{file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"},
]
-pyfakefs = []
+pyfakefs = [
+ {file = "pyfakefs-5.0.0-py3-none-any.whl", hash = "sha256:e1b01954978fe2d9a4d75f079359d7f8d3af3bb12ff2dc8633a4cc0a0dc7fbda"},
+ {file = "pyfakefs-5.0.0.tar.gz", hash = "sha256:19d1d8f1ee520891d78b6ed05c2078e0792d545f83dee33461fbaa5cc72e187d"},
+]
pyflakes = [
- {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"},
- {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"},
+ {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"},
+ {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"},
]
-pyjwt = [
- {file = "PyJWT-2.4.0-py3-none-any.whl", hash = "sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf"},
- {file = "PyJWT-2.4.0.tar.gz", hash = "sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba"},
+PyJWT = [
+ {file = "PyJWT-2.6.0-py3-none-any.whl", hash = "sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14"},
+ {file = "PyJWT-2.6.0.tar.gz", hash = "sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd"},
+]
+pymdown-extensions = [
+ {file = "pymdown_extensions-9.8-py3-none-any.whl", hash = "sha256:8e62688a8b1128acd42fa823f3d429d22f4284b5e6dd4d3cd56721559a5a211b"},
+ {file = "pymdown_extensions-9.8.tar.gz", hash = "sha256:1bd4a173095ef8c433b831af1f3cb13c10883be0c100ae613560668e594651f7"},
]
python-dotenv = [
- {file = "python-dotenv-0.17.1.tar.gz", hash = "sha256:b1ae5e9643d5ed987fc57cc2583021e38db531946518130777734f9589b3141f"},
- {file = "python_dotenv-0.17.1-py2.py3-none-any.whl", hash = "sha256:00aa34e92d992e9f8383730816359647f358f4a3be1ba45e5a5cefd27ee91544"},
+ {file = "python-dotenv-0.21.0.tar.gz", hash = "sha256:b77d08274639e3d34145dfa6c7008e66df0f04b7be7a75fd0d5292c191d79045"},
+ {file = "python_dotenv-0.21.0-py3-none-any.whl", hash = "sha256:1684eb44636dd462b66c3ee016599815514527ad99965de77f43e0944634a7e5"},
]
python-frontmatter = [
{file = "python-frontmatter-1.0.0.tar.gz", hash = "sha256:e98152e977225ddafea6f01f40b4b0f1de175766322004c826ca99842d19a7cd"},
{file = "python_frontmatter-1.0.0-py3-none-any.whl", hash = "sha256:766ae75f1b301ffc5fe3494339147e0fd80bc3deff3d7590a93991978b579b08"},
]
pytz = [
- {file = "pytz-2022.1-py2.py3-none-any.whl", hash = "sha256:e68985985296d9a66a881eb3193b0906246245294a881e7c8afe623866ac6a5c"},
- {file = "pytz-2022.1.tar.gz", hash = "sha256:1e760e2fe6a8163bc0b3d9a19c4f84342afa0a2affebfaa84b01b978a02ecaa7"},
-]
-pyyaml = [
- {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"},
- {file = "PyYAML-5.4.1-cp27-cp27m-win32.whl", hash = "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393"},
- {file = "PyYAML-5.4.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8"},
- {file = "PyYAML-5.4.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185"},
- {file = "PyYAML-5.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253"},
- {file = "PyYAML-5.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc"},
- {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347"},
- {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541"},
- {file = "PyYAML-5.4.1-cp36-cp36m-win32.whl", hash = "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5"},
- {file = "PyYAML-5.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df"},
- {file = "PyYAML-5.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018"},
- {file = "PyYAML-5.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63"},
- {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa"},
- {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0"},
- {file = "PyYAML-5.4.1-cp37-cp37m-win32.whl", hash = "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b"},
- {file = "PyYAML-5.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf"},
- {file = "PyYAML-5.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46"},
- {file = "PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb"},
- {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247"},
- {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc"},
- {file = "PyYAML-5.4.1-cp38-cp38-win32.whl", hash = "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc"},
- {file = "PyYAML-5.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696"},
- {file = "PyYAML-5.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77"},
- {file = "PyYAML-5.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183"},
- {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122"},
- {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6"},
- {file = "PyYAML-5.4.1-cp39-cp39-win32.whl", hash = "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10"},
- {file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"},
- {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"},
-]
-requests = []
+ {file = "pytz-2022.5-py2.py3-none-any.whl", hash = "sha256:335ab46900b1465e714b4fda4963d87363264eb662aab5e65da039c25f1f5b22"},
+ {file = "pytz-2022.5.tar.gz", hash = "sha256:c4d88f472f54d615e9cd582a5004d1e5f624854a6a27a6211591c251f22a6914"},
+]
+PyYAML = [
+ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"},
+ {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"},
+ {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"},
+ {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"},
+ {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"},
+ {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"},
+ {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"},
+ {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"},
+ {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"},
+ {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"},
+ {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"},
+ {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"},
+ {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"},
+ {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"},
+ {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"},
+ {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"},
+ {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"},
+ {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"},
+ {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"},
+ {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"},
+ {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"},
+ {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"},
+ {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"},
+ {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"},
+ {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"},
+ {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"},
+ {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"},
+ {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"},
+ {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"},
+ {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"},
+ {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"},
+ {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"},
+ {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"},
+ {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"},
+ {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"},
+ {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"},
+ {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"},
+ {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"},
+ {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"},
+ {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
+]
+requests = [
+ {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"},
+ {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"},
+]
rfc3986 = [
{file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"},
{file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"},
]
sentry-sdk = [
- {file = "sentry-sdk-0.20.3.tar.gz", hash = "sha256:4ae8d1ced6c67f1c8ea51d82a16721c166c489b76876c9f2c202b8a50334b237"},
- {file = "sentry_sdk-0.20.3-py2.py3-none-any.whl", hash = "sha256:e75c8c58932bda8cd293ea8e4b242527129e1caaec91433d21b8b2f20fee030b"},
+ {file = "sentry-sdk-1.11.0.tar.gz", hash = "sha256:e7b78a1ddf97a5f715a50ab8c3f7a93f78b114c67307785ee828ef67a5d6f117"},
+ {file = "sentry_sdk-1.11.0-py2.py3-none-any.whl", hash = "sha256:f467e6c7fac23d4d42bc83eb049c400f756cd2d65ab44f0cc1165d0c7c3d40bc"},
+]
+setuptools = [
+ {file = "setuptools-65.5.0-py3-none-any.whl", hash = "sha256:f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356"},
+ {file = "setuptools-65.5.0.tar.gz", hash = "sha256:512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17"},
]
six = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
@@ -1292,37 +1461,46 @@ smmap = [
{file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"},
]
sniffio = [
- {file = "sniffio-1.2.0-py3-none-any.whl", hash = "sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663"},
- {file = "sniffio-1.2.0.tar.gz", hash = "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de"},
+ {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"},
+ {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"},
]
snowballstemmer = [
{file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"},
{file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"},
]
sqlparse = [
- {file = "sqlparse-0.4.2-py3-none-any.whl", hash = "sha256:48719e356bb8b42991bdbb1e8b83223757b93789c00910a616a071910ca4a64d"},
- {file = "sqlparse-0.4.2.tar.gz", hash = "sha256:0c00730c74263a94e5a9919ade150dfc3b19c574389985446148402998287dae"},
+ {file = "sqlparse-0.4.3-py3-none-any.whl", hash = "sha256:0323c0ec29cd52bceabc1b4d9d579e311f3e4961b98d174201d5622a23b85e34"},
+ {file = "sqlparse-0.4.3.tar.gz", hash = "sha256:69ca804846bb114d2ec380e4360a8a340db83f0ccf3afceeb1404df028f57268"},
]
stevedore = [
- {file = "stevedore-3.5.0-py3-none-any.whl", hash = "sha256:a547de73308fd7e90075bb4d301405bebf705292fa90a90fc3bcf9133f58616c"},
- {file = "stevedore-3.5.0.tar.gz", hash = "sha256:f40253887d8712eaa2bb0ea3830374416736dc8ec0e22f5a65092c1174c44335"},
+ {file = "stevedore-4.1.0-py3-none-any.whl", hash = "sha256:3b1cbd592a87315f000d05164941ee5e164899f8fc0ce9a00bb0f321f40ef93e"},
+ {file = "stevedore-4.1.0.tar.gz", hash = "sha256:02518a8f0d6d29be8a445b7f2ac63753ff29e8f2a2faa01777568d5500d777a6"},
]
taskipy = [
- {file = "taskipy-1.7.0-py3-none-any.whl", hash = "sha256:9e284c10898e9dee01a3e72220b94b192b1daa0f560271503a6df1da53d03844"},
- {file = "taskipy-1.7.0.tar.gz", hash = "sha256:960e480b1004971e76454ecd1a0484e640744a30073a1069894a311467f85ed8"},
+ {file = "taskipy-1.10.3-py3-none-any.whl", hash = "sha256:4c0070ca53868d97989f7ab5c6f237525d52ee184f9b967576e8fe427ed9d0b8"},
+ {file = "taskipy-1.10.3.tar.gz", hash = "sha256:112beaf21e3d5569950b99162a1de003fa885fabee9e450757a6b874be914877"},
]
toml = [
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
]
-tzdata = []
-urllib3 = []
-virtualenv = []
-whitenoise = [
- {file = "whitenoise-5.3.0-py2.py3-none-any.whl", hash = "sha256:d963ef25639d1417e8a247be36e6aedd8c7c6f0a08adcb5a89146980a96b577c"},
- {file = "whitenoise-5.3.0.tar.gz", hash = "sha256:d234b871b52271ae7ed6d9da47ffe857c76568f11dd30e28e18c5869dbd11e12"},
+tomli = [
+ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
+ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
]
-zipp = [
- {file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"},
- {file = "zipp-3.8.0.tar.gz", hash = "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"},
+tzdata = [
+ {file = "tzdata-2022.5-py2.py3-none-any.whl", hash = "sha256:323161b22b7802fdc78f20ca5f6073639c64f1a7227c40cd3e19fd1d0ce6650a"},
+ {file = "tzdata-2022.5.tar.gz", hash = "sha256:e15b2b3005e2546108af42a0eb4ccab4d9e225e2dfbf4f77aad50c70a4b1f3ab"},
+]
+urllib3 = [
+ {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"},
+ {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"},
+]
+virtualenv = [
+ {file = "virtualenv-20.16.6-py3-none-any.whl", hash = "sha256:186ca84254abcbde98180fd17092f9628c5fe742273c02724972a1d8a2035108"},
+ {file = "virtualenv-20.16.6.tar.gz", hash = "sha256:530b850b523c6449406dfba859d6345e48ef19b8439606c5d74d7d3c9e14d76e"},
+]
+whitenoise = [
+ {file = "whitenoise-6.2.0-py3-none-any.whl", hash = "sha256:8e9c600a5c18bd17655ef668ad55b5edf6c24ce9bdca5bf607649ca4b1e8e2c2"},
+ {file = "whitenoise-6.2.0.tar.gz", hash = "sha256:8fa943c6d4cd9e27673b70c21a07b0aa120873901e099cd46cab40f7cc96d567"},
]
diff --git a/pydis_site/apps/api/__init__.py b/pydis_site/apps/api/__init__.py
index afa5b4d5..e69de29b 100644
--- a/pydis_site/apps/api/__init__.py
+++ b/pydis_site/apps/api/__init__.py
@@ -1 +0,0 @@
-default_app_config = 'pydis_site.apps.api.apps.ApiConfig'
diff --git a/pydis_site/apps/api/github_utils.py b/pydis_site/apps/api/github_utils.py
index 5d7bcdc3..44c571c3 100644
--- a/pydis_site/apps/api/github_utils.py
+++ b/pydis_site/apps/api/github_utils.py
@@ -11,8 +11,6 @@ from pydis_site import settings
MAX_RUN_TIME = datetime.timedelta(minutes=10)
"""The maximum time allowed before an action is declared timed out."""
-ISO_FORMAT_STRING = "%Y-%m-%dT%H:%M:%SZ"
-"""The datetime string format GitHub uses."""
class ArtifactProcessingError(Exception):
@@ -108,7 +106,7 @@ def authorize(owner: str, repo: str) -> httpx.Client:
client = httpx.Client(
base_url=settings.GITHUB_API,
headers={"Authorization": f"bearer {generate_token()}"},
- timeout=settings.TIMEOUT_PERIOD,
+ timeout=10,
)
try:
@@ -147,7 +145,7 @@ def authorize(owner: str, repo: str) -> httpx.Client:
def check_run_status(run: WorkflowRun) -> str:
"""Check if the provided run has been completed, otherwise raise an exception."""
- created_at = datetime.datetime.strptime(run.created_at, ISO_FORMAT_STRING)
+ created_at = datetime.datetime.strptime(run.created_at, settings.GITHUB_TIMESTAMP_FORMAT)
run_time = datetime.datetime.utcnow() - created_at
if run.status != "completed":
diff --git a/pydis_site/apps/api/migrations/0013_specialsnake_image.py b/pydis_site/apps/api/migrations/0013_specialsnake_image.py
index a0d0d318..8ba3432f 100644
--- a/pydis_site/apps/api/migrations/0013_specialsnake_image.py
+++ b/pydis_site/apps/api/migrations/0013_specialsnake_image.py
@@ -2,7 +2,6 @@
import datetime
from django.db import migrations, models
-from django.utils.timezone import utc
class Migration(migrations.Migration):
@@ -15,7 +14,7 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='specialsnake',
name='image',
- field=models.URLField(default=datetime.datetime(2018, 10, 23, 11, 51, 23, 703868, tzinfo=utc)),
+ field=models.URLField(default=datetime.datetime(2018, 10, 23, 11, 51, 23, 703868, tzinfo=datetime.timezone.utc)),
preserve_default=False,
),
]
diff --git a/pydis_site/apps/api/models/bot/message.py b/pydis_site/apps/api/models/bot/message.py
index bfa54721..89ae27e4 100644
--- a/pydis_site/apps/api/models/bot/message.py
+++ b/pydis_site/apps/api/models/bot/message.py
@@ -1,9 +1,8 @@
-from datetime import datetime
+import datetime
from django.contrib.postgres import fields as pgfields
from django.core.validators import MinValueValidator
from django.db import models
-from django.utils import timezone
from pydis_site.apps.api.models.bot.user import User
from pydis_site.apps.api.models.mixins import ModelReprMixin
@@ -60,11 +59,11 @@ class Message(ModelReprMixin, models.Model):
)
@property
- def timestamp(self) -> datetime:
+ def timestamp(self) -> datetime.datetime:
"""Attribute that represents the message timestamp as derived from the snowflake id."""
- tz_naive_datetime = datetime.utcfromtimestamp(((self.id >> 22) + 1420070400000) / 1000)
- tz_aware_datetime = timezone.make_aware(tz_naive_datetime, timezone=timezone.utc)
- return tz_aware_datetime
+ return datetime.datetime.utcfromtimestamp(
+ ((self.id >> 22) + 1420070400000) / 1000
+ ).replace(tzinfo=datetime.timezone.utc)
class Meta:
"""Metadata provided for Django's ORM."""
diff --git a/pydis_site/apps/api/models/bot/metricity.py b/pydis_site/apps/api/models/bot/metricity.py
index abd25ef0..f53dd33c 100644
--- a/pydis_site/apps/api/models/bot/metricity.py
+++ b/pydis_site/apps/api/models/bot/metricity.py
@@ -130,3 +130,31 @@ class Metricity:
raise NotFoundError()
return values
+
+ def total_messages_in_past_n_days(
+ self,
+ user_ids: list[str],
+ days: int
+ ) -> list[tuple[str, int]]:
+ """
+ Query activity by a list of users in the past `days` days.
+
+ Returns a list of (user_id, message_count) tuples.
+ """
+ self.cursor.execute(
+ """
+ SELECT
+ author_id, COUNT(*)
+ FROM messages
+ WHERE
+ author_id IN %s
+ AND NOT is_deleted
+ AND channel_id NOT IN %s
+ AND created_at > now() - interval '%s days'
+ GROUP BY author_id
+ """,
+ [tuple(user_ids), EXCLUDE_CHANNELS, days]
+ )
+ values = self.cursor.fetchall()
+
+ return values
diff --git a/pydis_site/apps/api/tests/test_filterlists.py b/pydis_site/apps/api/tests/test_filterlists.py
index 5a5bca60..9959617e 100644
--- a/pydis_site/apps/api/tests/test_filterlists.py
+++ b/pydis_site/apps/api/tests/test_filterlists.py
@@ -64,8 +64,8 @@ class FetchTests(AuthenticatedAPITestCase):
self.assertEqual(response.status_code, 200)
for api_type, model_type in zip(response.json(), FilterList.FilterListType.choices):
- self.assertEquals(api_type[0], model_type[0])
- self.assertEquals(api_type[1], model_type[1])
+ self.assertEqual(api_type[0], model_type[0])
+ self.assertEqual(api_type[1], model_type[1])
class CreationTests(AuthenticatedAPITestCase):
diff --git a/pydis_site/apps/api/tests/test_github_utils.py b/pydis_site/apps/api/tests/test_github_utils.py
index f642f689..95bafec0 100644
--- a/pydis_site/apps/api/tests/test_github_utils.py
+++ b/pydis_site/apps/api/tests/test_github_utils.py
@@ -11,6 +11,7 @@ import rest_framework.response
import rest_framework.test
from django.urls import reverse
+from pydis_site import settings
from .. import github_utils
@@ -28,7 +29,7 @@ class GeneralUtilityTests(unittest.TestCase):
"""
self.assertEqual("RS256", algorithm, "The GitHub App JWT must be signed using RS256.")
return original_encode(
- payload, "secret-encoding-key", algorithm="HS256", *args, **kwargs
+ payload, "secret-encoding-key", *args, algorithm="HS256", **kwargs
)
original_encode = jwt.encode
@@ -49,7 +50,7 @@ class CheckRunTests(unittest.TestCase):
"head_sha": "sha",
"status": "completed",
"conclusion": "success",
- "created_at": datetime.datetime.utcnow().strftime(github_utils.ISO_FORMAT_STRING),
+ "created_at": datetime.datetime.utcnow().strftime(settings.GITHUB_TIMESTAMP_FORMAT),
"artifacts_url": "url",
}
@@ -74,7 +75,7 @@ class CheckRunTests(unittest.TestCase):
# to guarantee the right conclusion
kwargs["created_at"] = (
datetime.datetime.utcnow() - github_utils.MAX_RUN_TIME - datetime.timedelta(minutes=10)
- ).strftime(github_utils.ISO_FORMAT_STRING)
+ ).strftime(settings.GITHUB_TIMESTAMP_FORMAT)
with self.assertRaises(github_utils.RunTimeoutError):
github_utils.check_run_status(github_utils.WorkflowRun(**kwargs))
@@ -178,7 +179,7 @@ class ArtifactFetcherTests(unittest.TestCase):
run = github_utils.WorkflowRun(
name="action_name",
head_sha="action_sha",
- created_at=datetime.datetime.now().strftime(github_utils.ISO_FORMAT_STRING),
+ created_at=datetime.datetime.now().strftime(settings.GITHUB_TIMESTAMP_FORMAT),
status="completed",
conclusion="success",
artifacts_url="artifacts_url"
diff --git a/pydis_site/apps/api/tests/test_infractions.py b/pydis_site/apps/api/tests/test_infractions.py
index f1107734..89ee4e23 100644
--- a/pydis_site/apps/api/tests/test_infractions.py
+++ b/pydis_site/apps/api/tests/test_infractions.py
@@ -56,15 +56,17 @@ class InfractionTests(AuthenticatedAPITestCase):
type='ban',
reason='He terk my jerb!',
hidden=True,
+ inserted_at=dt(2020, 10, 10, 0, 0, 0, tzinfo=timezone.utc),
expires_at=dt(5018, 11, 20, 15, 52, tzinfo=timezone.utc),
- active=True
+ active=True,
)
cls.ban_inactive = Infraction.objects.create(
user_id=cls.user.id,
actor_id=cls.user.id,
type='ban',
reason='James is an ass, and we won\'t be working with him again.',
- active=False
+ active=False,
+ inserted_at=dt(2020, 10, 10, 0, 1, 0, tzinfo=timezone.utc),
)
cls.mute_permanent = Infraction.objects.create(
user_id=cls.user.id,
@@ -72,7 +74,8 @@ class InfractionTests(AuthenticatedAPITestCase):
type='mute',
reason='He has a filthy mouth and I am his soap.',
active=True,
- expires_at=None
+ inserted_at=dt(2020, 10, 10, 0, 2, 0, tzinfo=timezone.utc),
+ expires_at=None,
)
cls.superstar_expires_soon = Infraction.objects.create(
user_id=cls.user.id,
@@ -80,7 +83,8 @@ class InfractionTests(AuthenticatedAPITestCase):
type='superstar',
reason='This one doesn\'t matter anymore.',
active=True,
- expires_at=dt.now(timezone.utc) + datetime.timedelta(hours=5)
+ inserted_at=dt(2020, 10, 10, 0, 3, 0, tzinfo=timezone.utc),
+ expires_at=dt.now(timezone.utc) + datetime.timedelta(hours=5),
)
cls.voiceban_expires_later = Infraction.objects.create(
user_id=cls.user.id,
@@ -88,7 +92,8 @@ class InfractionTests(AuthenticatedAPITestCase):
type='voice_ban',
reason='Jet engine mic',
active=True,
- expires_at=dt.now(timezone.utc) + datetime.timedelta(days=5)
+ inserted_at=dt(2020, 10, 10, 0, 4, 0, tzinfo=timezone.utc),
+ expires_at=dt.now(timezone.utc) + datetime.timedelta(days=5),
)
def test_list_all(self):
diff --git a/pydis_site/apps/api/tests/test_users.py b/pydis_site/apps/api/tests/test_users.py
index 5d10069d..d86e80bb 100644
--- a/pydis_site/apps/api/tests/test_users.py
+++ b/pydis_site/apps/api/tests/test_users.py
@@ -502,6 +502,90 @@ class UserMetricityTests(AuthenticatedAPITestCase):
"total_messages": total_messages
})
+ def test_metricity_activity_data(self):
+ # Given
+ self.mock_no_metricity_user() # Other functions shouldn't be used.
+ self.metricity.total_messages_in_past_n_days.return_value = [(0, 10)]
+
+ # When
+ url = reverse("api:bot:user-metricity-activity-data")
+ response = self.client.post(
+ url,
+ data=[0, 1],
+ QUERY_STRING="days=10",
+ )
+
+ # Then
+ self.assertEqual(response.status_code, 200)
+ self.metricity.total_messages_in_past_n_days.assert_called_once_with(["0", "1"], 10)
+ self.assertEqual(response.json(), {"0": 10, "1": 0})
+
+ def test_metricity_activity_data_invalid_days(self):
+ # Given
+ self.mock_no_metricity_user() # Other functions shouldn't be used.
+
+ # When
+ url = reverse("api:bot:user-metricity-activity-data")
+ response = self.client.post(
+ url,
+ data=[0, 1],
+ QUERY_STRING="days=fifty",
+ )
+
+ # Then
+ self.assertEqual(response.status_code, 400)
+ self.metricity.total_messages_in_past_n_days.assert_not_called()
+ self.assertEqual(response.json(), {"days": ["This query parameter must be an integer."]})
+
+ def test_metricity_activity_data_no_days(self):
+ # Given
+ self.mock_no_metricity_user() # Other functions shouldn't be used.
+
+ # When
+ url = reverse('api:bot:user-metricity-activity-data')
+ response = self.client.post(
+ url,
+ data=[0, 1],
+ )
+
+ # Then
+ self.assertEqual(response.status_code, 400)
+ self.metricity.total_messages_in_past_n_days.assert_not_called()
+ self.assertEqual(response.json(), {'days': ["This query parameter is required."]})
+
+ def test_metricity_activity_data_no_users(self):
+ # Given
+ self.mock_no_metricity_user() # Other functions shouldn't be used.
+
+ # When
+ url = reverse('api:bot:user-metricity-activity-data')
+ response = self.client.post(
+ url,
+ QUERY_STRING="days=10",
+ )
+
+ # Then
+ self.assertEqual(response.status_code, 400)
+ self.metricity.total_messages_in_past_n_days.assert_not_called()
+ self.assertEqual(response.json(), ['Expected a list of items but got type "dict".'])
+
+ def test_metricity_activity_data_invalid_users(self):
+ # Given
+ self.mock_no_metricity_user() # Other functions shouldn't be used.
+
+ # When
+ url = reverse('api:bot:user-metricity-activity-data')
+ response = self.client.post(
+ url,
+ data=[123, 'username'],
+ QUERY_STRING="days=10",
+ )
+
+ # Then
+ self.assertEqual(response.status_code, 400)
+ self.metricity.total_messages_in_past_n_days.assert_not_called()
+ self.assertEqual(response.json(), {'1': ['A valid integer is required.']})
+
def mock_metricity_user(self, joined_at, total_messages, total_blocks, top_channel_activity):
patcher = patch("pydis_site.apps.api.viewsets.bot.user.Metricity")
self.metricity = patcher.start()
diff --git a/pydis_site/apps/api/views.py b/pydis_site/apps/api/views.py
index ad2d948e..34167a38 100644
--- a/pydis_site/apps/api/views.py
+++ b/pydis_site/apps/api/views.py
@@ -37,12 +37,14 @@ class RulesView(APIView):
## Routes
### GET /rules
- Returns a JSON array containing the server's rules:
+ Returns a JSON array containing the server's rules
+ and keywords relating to each rule.
+ Example response:
>>> [
- ... "Eat candy.",
- ... "Wake up at 4 AM.",
- ... "Take your medicine."
+ ... ["Eat candy.", ["candy", "sweets"]],
+ ... ["Wake up at 4 AM.", ["wake_up", "early", "early_bird"]],
+ ... ["Take your medicine.", ["medicine", "health"]]
... ]
Since some of the rules require links, this view
@@ -100,6 +102,12 @@ class RulesView(APIView):
# `format` here is the result format, we have a link format here instead.
def get(self, request, format=None): # noqa: D102,ANN001,ANN201
+ """
+ Returns a list of our community rules coupled with their keywords.
+
+ Each item in the returned list is a tuple with the rule as its first item
+ and a list of keywords that match that rule as its second item.
+ """
link_format = request.query_params.get('link_format', 'md')
if link_format not in ('html', 'md'):
raise ParseError(
@@ -124,35 +132,44 @@ class RulesView(APIView):
return Response([
(
- f"Follow the {pydis_coc}."
+ f"Follow the {pydis_coc}.",
+ ["coc", "conduct", "code"]
),
(
- f"Follow the {discord_community_guidelines} and {discord_tos}."
+ f"Follow the {discord_community_guidelines} and {discord_tos}.",
+ ["discord", "guidelines", "discord_tos"]
),
(
- "Respect staff members and listen to their instructions."
+ "Respect staff members and listen to their instructions.",
+ ["respect", "staff", "instructions"]
),
(
"Use English to the best of your ability. "
- "Be polite if someone speaks English imperfectly."
+ "Be polite if someone speaks English imperfectly.",
+ ["english", "language"]
),
(
"Do not provide or request help on projects that may break laws, "
- "breach terms of services, or are malicious or inappropriate."
+ "breach terms of services, or are malicious or inappropriate.",
+ ["infraction", "tos", "breach", "malicious", "inappropriate"]
),
(
- "Do not post unapproved advertising."
+ "Do not post unapproved advertising.",
+ ["ad", "ads", "advert", "advertising"]
),
(
"Keep discussions relevant to the channel topic. "
- "Each channel's description tells you the topic."
+ "Each channel's description tells you the topic.",
+ ["off-topic", "topic", "relevance"]
),
(
"Do not help with ongoing exams. When helping with homework, "
- "help people learn how to do the assignment without doing it for them."
+ "help people learn how to do the assignment without doing it for them.",
+ ["exam", "exams", "assignment", "assignments", "homework"]
),
(
- "Do not offer or ask for paid work of any kind."
+ "Do not offer or ask for paid work of any kind.",
+ ["paid", "work", "money"]
),
])
diff --git a/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py b/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py
index 3a4cec60..97efb63c 100644
--- a/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py
+++ b/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py
@@ -70,4 +70,4 @@ class AocCompletionistBlockViewSet(
serializer_class = AocCompletionistBlockSerializer
queryset = AocCompletionistBlock.objects.all()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ("user__id", "is_blocked")
+ filterset_fields = ("user__id", "is_blocked")
diff --git a/pydis_site/apps/api/viewsets/bot/aoc_link.py b/pydis_site/apps/api/viewsets/bot/aoc_link.py
index c7a96629..3cdc342d 100644
--- a/pydis_site/apps/api/viewsets/bot/aoc_link.py
+++ b/pydis_site/apps/api/viewsets/bot/aoc_link.py
@@ -68,4 +68,4 @@ class AocAccountLinkViewSet(
serializer_class = AocAccountLinkSerializer
queryset = AocAccountLink.objects.all()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ("user__id", "aoc_username")
+ filterset_fields = ("user__id", "aoc_username")
diff --git a/pydis_site/apps/api/viewsets/bot/infraction.py b/pydis_site/apps/api/viewsets/bot/infraction.py
index 7f31292f..93d29391 100644
--- a/pydis_site/apps/api/viewsets/bot/infraction.py
+++ b/pydis_site/apps/api/viewsets/bot/infraction.py
@@ -1,9 +1,8 @@
-from datetime import datetime
+import datetime
from django.db import IntegrityError
from django.db.models import QuerySet
from django.http.request import HttpRequest
-from django.utils import timezone
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
@@ -154,7 +153,7 @@ class InfractionViewSet(
queryset = Infraction.objects.all()
pagination_class = LimitOffsetPaginationExtended
filter_backends = (DjangoFilterBackend, SearchFilter, OrderingFilter)
- filter_fields = ('user__id', 'actor__id', 'active', 'hidden', 'type')
+ filterset_fields = ('user__id', 'actor__id', 'active', 'hidden', 'type')
search_fields = ('$reason',)
frozen_fields = ('id', 'inserted_at', 'type', 'user', 'actor', 'hidden')
@@ -185,23 +184,21 @@ class InfractionViewSet(
filter_expires_after = self.request.query_params.get('expires_after')
if filter_expires_after:
try:
- expires_after_parsed = datetime.fromisoformat(filter_expires_after)
+ expires_after_parsed = datetime.datetime.fromisoformat(filter_expires_after)
except ValueError:
raise ValidationError({'expires_after': ['failed to convert to datetime']})
- additional_filters['expires_at__gte'] = timezone.make_aware(
- expires_after_parsed,
- timezone=timezone.utc,
+ additional_filters['expires_at__gte'] = expires_after_parsed.replace(
+ tzinfo=datetime.timezone.utc
)
filter_expires_before = self.request.query_params.get('expires_before')
if filter_expires_before:
try:
- expires_before_parsed = datetime.fromisoformat(filter_expires_before)
+ expires_before_parsed = datetime.datetime.fromisoformat(filter_expires_before)
except ValueError:
raise ValidationError({'expires_before': ['failed to convert to datetime']})
- additional_filters['expires_at__lte'] = timezone.make_aware(
- expires_before_parsed,
- timezone=timezone.utc,
+ additional_filters['expires_at__lte'] = expires_before_parsed.replace(
+ tzinfo=datetime.timezone.utc
)
if 'expires_at__lte' in additional_filters and 'expires_at__gte' in additional_filters:
diff --git a/pydis_site/apps/api/viewsets/bot/nomination.py b/pydis_site/apps/api/viewsets/bot/nomination.py
index 144daab0..6af42bcb 100644
--- a/pydis_site/apps/api/viewsets/bot/nomination.py
+++ b/pydis_site/apps/api/viewsets/bot/nomination.py
@@ -172,7 +172,7 @@ class NominationViewSet(CreateModelMixin, RetrieveModelMixin, ListModelMixin, Ge
serializer_class = NominationSerializer
queryset = Nomination.objects.all()
filter_backends = (DjangoFilterBackend, SearchFilter, OrderingFilter)
- filter_fields = ('user__id', 'active')
+ filterset_fields = ('user__id', 'active')
frozen_fields = ('id', 'inserted_at', 'user', 'ended_at')
frozen_on_create = ('ended_at', 'end_reason', 'active', 'inserted_at', 'reviewed')
diff --git a/pydis_site/apps/api/viewsets/bot/reminder.py b/pydis_site/apps/api/viewsets/bot/reminder.py
index 78d7cb3b..5f997052 100644
--- a/pydis_site/apps/api/viewsets/bot/reminder.py
+++ b/pydis_site/apps/api/viewsets/bot/reminder.py
@@ -125,4 +125,4 @@ class ReminderViewSet(
serializer_class = ReminderSerializer
queryset = Reminder.objects.prefetch_related('author')
filter_backends = (DjangoFilterBackend, SearchFilter)
- filter_fields = ('active', 'author__id')
+ filterset_fields = ('active', 'author__id')
diff --git a/pydis_site/apps/api/viewsets/bot/user.py b/pydis_site/apps/api/viewsets/bot/user.py
index 3318b2b9..db73a83c 100644
--- a/pydis_site/apps/api/viewsets/bot/user.py
+++ b/pydis_site/apps/api/viewsets/bot/user.py
@@ -3,8 +3,9 @@ from collections import OrderedDict
from django.db.models import Q
from django_filters.rest_framework import DjangoFilterBackend
-from rest_framework import status
+from rest_framework import fields, status
from rest_framework.decorators import action
+from rest_framework.exceptions import ParseError
from rest_framework.pagination import PageNumberPagination
from rest_framework.request import Request
from rest_framework.response import Response
@@ -138,6 +139,29 @@ class UserViewSet(ModelViewSet):
- 200: returned on success
- 404: if a user with the given `snowflake` could not be found
+ ### POST /bot/users/metricity_activity_data
+ Returns a mapping of user ID to message count in a given period for
+ the given user IDs.
+
+ #### Required Query Parameters
+ - days: how many days into the past to count messages from.
+
+ #### Request Format
+ >>> [
+ ... 409107086526644234,
+ ... 493839819168808962
+ ... ]
+
+ #### Response Format
+ >>> {
+ ... "409107086526644234": 54,
+ ... "493839819168808962": 0
+ ... }
+
+ #### Status codes
+ - 200: returned on success
+ - 400: if request body or query parameters were missing or invalid
+
### POST /bot/users
Adds a single or multiple new users.
The roles attached to the user(s) must be roles known by the site.
@@ -237,7 +261,7 @@ class UserViewSet(ModelViewSet):
queryset = User.objects.all().order_by("id")
pagination_class = UserListPagination
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('name', 'discriminator')
+ filterset_fields = ('name', 'discriminator')
def get_serializer(self, *args, **kwargs) -> ModelSerializer:
"""Set Serializer many attribute to True if request body contains a list."""
@@ -298,3 +322,34 @@ class UserViewSet(ModelViewSet):
except NotFoundError:
return Response(dict(detail="User not found in metricity"),
status=status.HTTP_404_NOT_FOUND)
+
+ @action(detail=False, methods=["POST"])
+ def metricity_activity_data(self, request: Request) -> Response:
+ """Request handler for metricity_activity_data endpoint."""
+ if "days" in request.query_params:
+ try:
+ days = int(request.query_params["days"])
+ except ValueError:
+ raise ParseError(detail={
+ "days": ["This query parameter must be an integer."]
+ })
+ else:
+ raise ParseError(detail={
+ "days": ["This query parameter is required."]
+ })
+
+ user_id_list_validator = fields.ListField(
+ child=fields.IntegerField(min_value=0),
+ allow_empty=False
+ )
+ user_ids = [
+ str(user_id) for user_id in
+ user_id_list_validator.run_validation(request.data)
+ ]
+
+ with Metricity() as metricity:
+ data = metricity.total_messages_in_past_n_days(user_ids, days)
+
+ default_data = {user_id: 0 for user_id in user_ids}
+ response_data = default_data | dict(data)
+ return Response(response_data, status=status.HTTP_200_OK)
diff --git a/pydis_site/apps/content/migrations/0001_add_tags.py b/pydis_site/apps/content/migrations/0001_add_tags.py
new file mode 100644
index 00000000..2c31e4c1
--- /dev/null
+++ b/pydis_site/apps/content/migrations/0001_add_tags.py
@@ -0,0 +1,35 @@
+# Generated by Django 4.0.6 on 2022-08-23 09:06
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ initial = True
+
+ dependencies = [
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='Commit',
+ fields=[
+ ('sha', models.CharField(help_text='The SHA hash of this commit.', max_length=40, primary_key=True, serialize=False)),
+ ('message', models.TextField(help_text='The commit message.')),
+ ('date', models.DateTimeField(help_text='The date and time the commit was created.')),
+ ('authors', models.TextField(help_text='The person(s) who created the commit. This is a serialized JSON object. Refer to the GitHub documentation on the commit endpoint (schema/commit.author & schema/commit.committer) for more info. https://docs.github.com/en/rest/commits/commits#get-a-commit')),
+ ],
+ ),
+ migrations.CreateModel(
+ name='Tag',
+ fields=[
+ ('last_updated', models.DateTimeField(auto_now=True, help_text='The date and time this data was last fetched.')),
+ ('sha', models.CharField(help_text="The tag's hash, as calculated by GitHub.", max_length=40)),
+ ('name', models.CharField(help_text="The tag's name.", max_length=50, primary_key=True, serialize=False)),
+ ('group', models.CharField(help_text='The group the tag belongs to.', max_length=50, null=True)),
+ ('body', models.TextField(help_text='The content of the tag.')),
+ ('last_commit', models.ForeignKey(help_text='The commit this file was last touched in.', null=True, on_delete=django.db.models.deletion.CASCADE, to='content.commit')),
+ ],
+ ),
+ ]
diff --git a/pydis_site/apps/content/migrations/__init__.py b/pydis_site/apps/content/migrations/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/pydis_site/apps/content/migrations/__init__.py
diff --git a/pydis_site/apps/content/models/__init__.py b/pydis_site/apps/content/models/__init__.py
new file mode 100644
index 00000000..60007e27
--- /dev/null
+++ b/pydis_site/apps/content/models/__init__.py
@@ -0,0 +1,3 @@
+from .tag import Commit, Tag
+
+__all__ = ["Commit", "Tag"]
diff --git a/pydis_site/apps/content/models/tag.py b/pydis_site/apps/content/models/tag.py
new file mode 100644
index 00000000..1a20d775
--- /dev/null
+++ b/pydis_site/apps/content/models/tag.py
@@ -0,0 +1,80 @@
+import collections.abc
+import json
+
+from django.db import models
+
+
+class Commit(models.Model):
+ """A git commit from the Python Discord Bot project."""
+
+ URL_BASE = "https://github.com/python-discord/bot/commit/"
+
+ sha = models.CharField(
+ help_text="The SHA hash of this commit.",
+ primary_key=True,
+ max_length=40,
+ )
+ message = models.TextField(help_text="The commit message.")
+ date = models.DateTimeField(help_text="The date and time the commit was created.")
+ authors = models.TextField(help_text=(
+ "The person(s) who created the commit. This is a serialized JSON object. "
+ "Refer to the GitHub documentation on the commit endpoint "
+ "(schema/commit.author & schema/commit.committer) for more info. "
+ "https://docs.github.com/en/rest/commits/commits#get-a-commit"
+ ))
+
+ @property
+ def url(self) -> str:
+ """The URL to the commit on GitHub."""
+ return self.URL_BASE + self.sha
+
+ def lines(self) -> collections.abc.Iterable[str]:
+ """Return each line in the commit message."""
+ for line in self.message.split("\n"):
+ yield line
+
+ def format_authors(self) -> collections.abc.Iterable[str]:
+ """Return a nice representation of the author(s)' name and email."""
+ for author in json.loads(self.authors):
+ yield f"{author['name']} <{author['email']}>"
+
+
+class Tag(models.Model):
+ """A tag from the python-discord bot repository."""
+
+ URL_BASE = "https://github.com/python-discord/bot/tree/main/bot/resources/tags"
+
+ last_updated = models.DateTimeField(
+ help_text="The date and time this data was last fetched.",
+ auto_now=True,
+ )
+ sha = models.CharField(
+ help_text="The tag's hash, as calculated by GitHub.",
+ max_length=40,
+ )
+ last_commit = models.ForeignKey(
+ Commit,
+ help_text="The commit this file was last touched in.",
+ null=True,
+ on_delete=models.CASCADE,
+ )
+ name = models.CharField(
+ help_text="The tag's name.",
+ primary_key=True,
+ max_length=50,
+ )
+ group = models.CharField(
+ help_text="The group the tag belongs to.",
+ null=True,
+ max_length=50,
+ )
+ body = models.TextField(help_text="The content of the tag.")
+
+ @property
+ def url(self) -> str:
+ """Get the URL of the tag on GitHub."""
+ url = Tag.URL_BASE
+ if self.group:
+ url += f"/{self.group}"
+ url += f"/{self.name}.md"
+ return url
diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/contributing.md b/pydis_site/apps/content/resources/guides/pydis-guides/contributing.md
index 6231fe87..2822d046 100644
--- a/pydis_site/apps/content/resources/guides/pydis-guides/contributing.md
+++ b/pydis_site/apps/content/resources/guides/pydis-guides/contributing.md
@@ -119,7 +119,7 @@ As mentioned in the Contributing Guidelines, we have a simple style guide for ou
[**Style Guide**](./style-guide/)
### 4. Create an issue
-The first step to any new contribution is an issue describing a problem with the current codebase or proposing a new feature. All the open issues are viewable on the GitHub repositories, for instance here is the [issues page for Sir Lancebot](https://github.com/python-discord/sir-lancebot/issues). If you have something that you want to implement open a new issue to present your idea. Otherwise you can browse the unassigned issues and ask to be assigned to one that you're interested in, either in the comments on the issue or in the [`#dev-contrib`](https://discord.gg/2h3qBv8Xaa) channel on Discord.
+The first step to any new contribution is an issue describing a problem with the current codebase or proposing a new feature. All the open issues are viewable on the GitHub repositories; for instance, here is the [issues page for Sir Lancebot](https://github.com/python-discord/sir-lancebot/issues). If you have something that you want to implement, open a new issue to present your idea. Otherwise, you can browse the unassigned issues and ask to be assigned to one that you're interested in, either in the comments on the issue or in the [`#dev-contrib`](https://discord.gg/2h3qBv8Xaa) channel on Discord.
[**How to write a good issue**](./issues/)
diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/bot.md b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/bot.md
index ad446cc8..02316bca 100644
--- a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/bot.md
+++ b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/bot.md
@@ -88,6 +88,7 @@ urls:
# Snekbox
snekbox_eval_api: "http://localhost:8060/eval"
+ snekbox_311_eval_api: "http://localhost:8065/eval"
##### << Replace the following � characters with the channel IDs in your test server >> #####
# This assumes the template was used: https://discord.new/zmHtscpYN9E3
@@ -481,10 +482,14 @@ You are now almost ready to run the Python bot. The simplest way to do so is wit
In your `config.yml` file:
* Set `urls.site` to `"web:8000"`.
-* If you wish to work with snekbox set `urls.snekbox_eval_api` to `"http://snekbox:8060/eval"`.
+* If you wish to work with snekbox set the following:
+ * `urls.snekbox_eval_api` to `"http://snekbox:8060/eval"`
+ * `urls.snekbox_311_eval_api` to `"http://snekbox-311:8060/eval"`.
Assuming you have Docker installed **and running**, enter the cloned repo in the command line and type `docker-compose up`.
+If working with snekbox, you can run `docker-compose --profile 3.10 up` to also start up a 3.10 snekbox container, in addition to the default 3.11 container!
+
After pulling the images and building the containers, your bot will start. Enter your server and type `!help` (or whatever prefix you chose instead of `!`).
Your bot is now running, but this method makes debugging with an IDE a fairly involved process. For additional running methods, continue reading the following sections.
@@ -494,12 +499,13 @@ The advantage of this method is that you can run the bot's code in your preferre
* Append the following line to your `.env` file: `BOT_API_KEY=badbot13m0n8f570f942013fc818f234916ca531`.
* In your `config.yml` file, set `urls.site` to `"localhost:8000"`. If you wish to keep using `web:8000`, then [COMPOSE_PROJECT_NAME](../docker/#compose-project-names) has to be set.
-* To work with snekbox, set `urls.snekbox_eval_api` to `"http://localhost:8060/eval"`
+* To work with snekbox, set `urls.snekbox_eval_api` to `"http://localhost:8060/eval"` and `urls.snekbox_311_eval_api` to `"http://localhost:8065/eval"`
You will need to start the services separately, but if you got the previous section with Docker working, that's pretty simple:
* `docker-compose up web` to start the site container. This is required.
* `docker-compose up snekbox` to start the snekbox container. You only need this if you're planning on working on the snekbox cog.
+* `docker-compose up snekbox-311` to start the snekbox 3.11 container. You only need this if you're planning on working on the snekbox cog.
* `docker-compose up redis` to start the Redis container. You only need this if you're not using fakeredis. For more info refer to [Working with Redis](#optional-working-with-redis).
You can start several services together: `docker-compose up web snekbox redis`.
@@ -507,7 +513,7 @@ You can start several services together: `docker-compose up web snekbox redis`.
##### Setting Up a Development Environment
The bot's code is Python code like any other. To run it locally, you will need the right version of Python with the necessary packages installed:
-1. Make sure you have [Python 3.9](https://www.python.org/downloads/) installed. It helps if it is your system's default Python version.
+1. Make sure you have [Python 3.10](https://www.python.org/downloads/) installed. It helps if it is your system's default Python version.
2. [Install Poetry](https://github.com/python-poetry/poetry#installation).
3. [Install the dependencies](../installing-project-dependencies).
diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/site.md b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/site.md
index 520e41ad..9786698b 100644
--- a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/site.md
+++ b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/site.md
@@ -9,7 +9,7 @@ You should have already forked the [`site`](https://github.com/python-discord/si
### Requirements
-- [Python 3.9](https://www.python.org/downloads/)
+- [Python 3.10](https://www.python.org/downloads/)
- [Poetry](https://python-poetry.org/docs/#installation)
- `pip install poetry`
- [Git](https://git-scm.com/downloads)
diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/style-guide.md b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/style-guide.md
index 4dba45c8..b26c467c 100644
--- a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/style-guide.md
+++ b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/style-guide.md
@@ -202,6 +202,3 @@ def foo(input_1: int, input_2: dict[str, int]) -> bool:
This tells us that `foo` accepts an `int` and a `dict`, with `str` keys and `int` values, and returns a `bool`.
In previous examples, we have purposely omitted annotations to keep focus on the specific points they represent.
-
-> **Note:** if the project is running Python 3.8 or below you have to use `typing.Dict` instead of `dict`, but our three main projects are all >=3.9.
-> See [PEP 585](https://www.python.org/dev/peps/pep-0585/) for more information.
diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/off-topic-etiquette.md b/pydis_site/apps/content/resources/guides/pydis-guides/off-topic-etiquette.md
index f8031834..5e785cd9 100644
--- a/pydis_site/apps/content/resources/guides/pydis-guides/off-topic-etiquette.md
+++ b/pydis_site/apps/content/resources/guides/pydis-guides/off-topic-etiquette.md
@@ -5,7 +5,7 @@ icon: fab fa-discord
---
## Why do we need off-topic etiquette?
-Everyone wants to have good conversations in our off-topic channels, but with tens of thousands of members, this might mean different things to different people.
+Everyone wants to have good conversations in our off-topic channels, but with hundreds of thousands of members, this might mean different things to different people.
To facilitate the best experience for everyone, here are some guidelines on conversation etiquette.
## Three things you shouldn't do
diff --git a/pydis_site/apps/content/resources/guides/python-guides/fix-ssl-certificate.md b/pydis_site/apps/content/resources/guides/python-guides/fix-ssl-certificate.md
new file mode 100644
index 00000000..096e3a90
--- /dev/null
+++ b/pydis_site/apps/content/resources/guides/python-guides/fix-ssl-certificate.md
@@ -0,0 +1,23 @@
+---
+title: Fixing an SSL Certificate Verification Error
+description: A guide on fixing verification of an SSL certificate.
+---
+
+This guide covers fixing the error Python raises as [ssl.SSLCertVerificationError](https://docs.python.org/3/library/ssl.html#ssl.SSLCertVerificationError).
+
+# How to fix SSL Certificate issue on Windows
+
+Firstly, try updating your OS; it wouldn't hurt to try.
+
+If you're still having the issue after that, you will need to download the SSL certificate yourself.
+
+Sectigo (the certificate vendor) provides a download link for the [SSL certificate](https://crt.sh/?id=2835394).
+
+The following picture shows where to find the download link (in the bottom left corner of the page):
+![location of certificate](/static/images/content/fix-ssl-certificate/pem.png)
+
+You have to set up the certificate yourself. To do that, you can just click on it, or if that doesn't work, refer to [this link](https://portal.threatpulse.com/docs/sol/Solutions/ManagePolicy/SSL/ssl_chrome_cert_ta.htm).
+
+# How to fix SSL Certificate issue on Mac
+
+Navigate to your `Applications/Python 3.x/` folder and double-click the `Install Certificates.command` to fix this.
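+
+# Verifying the fix
+
+Once the certificate is installed, a quick way to confirm the error is gone is to make any HTTPS request from Python. This is only a sanity check; the URL below is just an example (any HTTPS site will do) and it assumes the `requests` package is installed:
+
+```python
+import requests
+
+# If certificate verification is set up correctly, this request completes
+# without raising ssl.SSLCertVerificationError.
+response = requests.get("https://pypi.org")
+print(response.status_code)
+```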
diff --git a/pydis_site/apps/content/resources/guides/python-guides/keeping-tokens-safe.md b/pydis_site/apps/content/resources/guides/python-guides/keeping-tokens-safe.md
new file mode 100644
index 00000000..9d523b4b
--- /dev/null
+++ b/pydis_site/apps/content/resources/guides/python-guides/keeping-tokens-safe.md
@@ -0,0 +1,29 @@
+---
+title: Keeping Discord Bot Tokens Safe
+description: How to keep your bot tokens safe and safety measures you can take.
+---
+It's **very** important to keep a bot token safe,
+primarily because anyone who has the bot token can do whatever they want with the bot --
+such as destroying servers your bot has been added to and getting your bot banned from the API.
+
+# How to Avoid Leaking your Token
+To help prevent leaking your token,
+you should ensure that you don't upload it to a public site or code hosting service,
+such as Replit or GitHub, as these show your code publicly.
+The best practice for storing tokens is generally to use `.env` files
+([click here](https://vcokltfre.dev/tips/tokens/.) for more information on storing tokens safely).
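+
+As a minimal sketch of that pattern (the `BOT_TOKEN` name below is just an example, not a requirement), store the token in a `.env` file that is listed in `.gitignore`, then load it at runtime with the `python-dotenv` package:
+
+```python
+import os
+
+from dotenv import load_dotenv  # provided by the python-dotenv package
+
+load_dotenv()  # read key=value pairs from the .env file into the environment
+token = os.getenv("BOT_TOKEN")  # fetch the token instead of hard-coding it
+```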
+
+# What should I do if my token does get leaked?
+
+If for whatever reason your token gets leaked, you should immediately follow these steps:
+- Go to the list of [Discord Bot Applications](https://discord.com/developers/applications) you have and select the bot application that had the token leaked.
+- Select the Bot (1) tab on the left-hand side, next to a small image of a puzzle piece. After doing so, you should see a small section named TOKEN (under your bot's USERNAME and next to its avatar image).
+- Press the Regenerate button to regenerate your bot token and invalidate the old one.
+
+![Steps to Take to Reset your Discord Bot](/static/images/content/regenerating_token.jpg)
+
+Following these steps will create a new token for your bot, making it secure again and terminating any connections from the leaked token.
+The old token will stop working, though, so make sure to replace it with the new one in your code if you haven't already.
+
+# Summary
+Make sure you keep your token secure by storing it safely, not sending it to anyone you don't trust, and regenerating your token if it does get leaked.
diff --git a/pydis_site/apps/content/resources/guides/python-guides/vps-services.md b/pydis_site/apps/content/resources/guides/python-guides/vps-services.md
deleted file mode 100644
index 0acd3e55..00000000
--- a/pydis_site/apps/content/resources/guides/python-guides/vps-services.md
+++ /dev/null
@@ -1,31 +0,0 @@
----
-title: VPS Services
-description: On different VPS services
----
-
-If you need to run your bot 24/7 (with no downtime), you should consider using a virtual private server (VPS). This is a list of VPS services that are sufficient for running Discord bots.
-
-* Europe
- * [netcup](https://www.netcup.eu/)
- * Germany & Austria data centres.
- * Great affiliate program.
- * [Yandex Cloud](https://cloud.yandex.ru/)
- * Vladimir, Ryazan, and Moscow region data centres.
- * [Scaleway](https://www.scaleway.com/)
- * France data centre.
- * [Time 4 VPS](https://www.time4vps.eu/)
- * Lithuania data centre.
-* US
- * [GalaxyGate](https://galaxygate.net/)
- * New York data centre.
- * Great affiliate program.
-* Global
- * [Linode](https://www.linode.com/)
- * [Digital Ocean](https://www.digitalocean.com/)
- * [OVHcloud](https://www.ovhcloud.com/)
- * [Vultr](https://www.vultr.com/)
-
----
-# Free hosts
-There are no reliable free options for VPS hosting. If you would rather not pay for a hosting service, you can consider self-hosting.
-Any modern hardware should be sufficient for running a bot. An old computer with a few GB of ram could be suitable, or a Raspberry Pi.
diff --git a/pydis_site/apps/content/resources/tags/_info.yml b/pydis_site/apps/content/resources/tags/_info.yml
new file mode 100644
index 00000000..054125ec
--- /dev/null
+++ b/pydis_site/apps/content/resources/tags/_info.yml
@@ -0,0 +1,3 @@
+title: Tags
+description: Useful snippets that are often used in the server.
+icon: fas fa-tags
diff --git a/pydis_site/apps/content/tests/test_utils.py b/pydis_site/apps/content/tests/test_utils.py
index be5ea897..462818b5 100644
--- a/pydis_site/apps/content/tests/test_utils.py
+++ b/pydis_site/apps/content/tests/test_utils.py
@@ -1,12 +1,34 @@
+import datetime
+import json
+import tarfile
+import tempfile
+import textwrap
from pathlib import Path
+from unittest import mock
+import httpx
+import markdown
from django.http import Http404
+from django.test import TestCase
-from pydis_site.apps.content import utils
+from pydis_site import settings
+from pydis_site.apps.content import models, utils
from pydis_site.apps.content.tests.helpers import (
BASE_PATH, MockPagesTestCase, PARSED_CATEGORY_INFO, PARSED_HTML, PARSED_METADATA
)
+_time = datetime.datetime(2022, 10, 10, 10, 10, 10, tzinfo=datetime.timezone.utc)
+_time_str = _time.strftime(settings.GITHUB_TIMESTAMP_FORMAT)
+TEST_COMMIT_KWARGS = {
+ "sha": "123",
+ "message": "Hello world\n\nThis is a commit message",
+ "date": _time,
+ "authors": json.dumps([
+ {"name": "Author 1", "email": "[email protected]", "date": _time_str},
+ {"name": "Author 2", "email": "[email protected]", "date": _time_str},
+ ]),
+}
+
class GetCategoryTests(MockPagesTestCase):
"""Tests for the get_category function."""
@@ -96,3 +118,268 @@ class GetPageTests(MockPagesTestCase):
def test_get_nonexistent_page_returns_404(self):
with self.assertRaises(Http404):
utils.get_page(Path(BASE_PATH, "invalid"))
+
+
+class TagUtilsTests(TestCase):
+ """Tests for the tag-related utilities."""
+
+ def setUp(self) -> None:
+ super().setUp()
+ self.commit = models.Commit.objects.create(**TEST_COMMIT_KWARGS)
+
+ @mock.patch.object(utils, "fetch_tags")
+ def test_static_fetch(self, fetch_mock: mock.Mock):
+ """Test that the static fetch function is only called at most once during static builds."""
+ tags = [models.Tag(name="Name", body="body")]
+ fetch_mock.return_value = tags
+ result = utils.get_tags_static()
+ second_result = utils.get_tags_static()
+
+ fetch_mock.assert_called_once()
+ self.assertEqual(tags, result)
+ self.assertEqual(tags, second_result)
+
+ @mock.patch("httpx.Client.get")
+ def test_mocked_fetch(self, get_mock: mock.Mock):
+ """Test that proper data is returned from fetch, but with a mocked API response."""
+ fake_request = httpx.Request("GET", "https://google.com")
+
+ # Metadata requests
+ returns = [httpx.Response(
+ request=fake_request,
+ status_code=200,
+ json=[
+ {"type": "file", "name": "first_tag.md", "sha": "123"},
+ {"type": "file", "name": "second_tag.md", "sha": "456"},
+ {"type": "dir", "name": "some_group", "sha": "789", "url": "/some_group"},
+ ]
+ ), httpx.Response(
+ request=fake_request,
+ status_code=200,
+ json=[{"type": "file", "name": "grouped_tag.md", "sha": "789123"}]
+ )]
+
+ # Main content request
+ bodies = (
+ "This is the first tag!",
+ textwrap.dedent("""
+ ---
+ frontmatter: empty
+ ---
+ This tag has frontmatter!
+ """),
+ "This is a grouped tag!",
+ )
+
+ # Generate a tar archive with a few tags
+ with tempfile.TemporaryDirectory() as tar_folder:
+ tar_folder = Path(tar_folder)
+ with tempfile.TemporaryDirectory() as folder:
+ folder = Path(folder)
+ (folder / "ignored_file.md").write_text("This is an ignored file.")
+ tags_folder = folder / "bot/resources/tags"
+ tags_folder.mkdir(parents=True)
+
+ (tags_folder / "first_tag.md").write_text(bodies[0])
+ (tags_folder / "second_tag.md").write_text(bodies[1])
+
+ group_folder = tags_folder / "some_group"
+ group_folder.mkdir()
+ (group_folder / "grouped_tag.md").write_text(bodies[2])
+
+ with tarfile.open(tar_folder / "temp.tar", "w") as file:
+ file.add(folder, recursive=True)
+
+ body = (tar_folder / "temp.tar").read_bytes()
+
+ returns.append(httpx.Response(
+ status_code=200,
+ content=body,
+ request=fake_request,
+ ))
+
+ get_mock.side_effect = returns
+ result = utils.fetch_tags()
+
+ def sort(_tag: models.Tag) -> str:
+ return _tag.name
+
+ self.assertEqual(sorted([
+ models.Tag(name="first_tag", body=bodies[0], sha="123"),
+ models.Tag(name="second_tag", body=bodies[1], sha="245"),
+ models.Tag(name="grouped_tag", body=bodies[2], group=group_folder.name, sha="789123"),
+ ], key=sort), sorted(result, key=sort))
+
+ def test_get_real_tag(self):
+ """Test that a single tag is returned if it exists."""
+ tag = models.Tag.objects.create(name="real-tag", last_commit=self.commit)
+ result = utils.get_tag("real-tag")
+
+ self.assertEqual(tag, result)
+
+ def test_get_grouped_tag(self):
+ """Test fetching a tag from a group."""
+ tag = models.Tag.objects.create(
+ name="real-tag", group="real-group", last_commit=self.commit
+ )
+ result = utils.get_tag("real-group/real-tag")
+
+ self.assertEqual(tag, result)
+
+ def test_get_group(self):
+ """Test fetching a group of tags."""
+ included = [
+ models.Tag.objects.create(name="tag-1", group="real-group"),
+ models.Tag.objects.create(name="tag-2", group="real-group"),
+ models.Tag.objects.create(name="tag-3", group="real-group"),
+ ]
+
+ models.Tag.objects.create(name="not-included-1")
+ models.Tag.objects.create(name="not-included-2", group="other-group")
+
+ result = utils.get_tag("real-group")
+ self.assertListEqual(included, result)
+
+ def test_get_tag_404(self):
+ """Test that an error is raised when we fetch a non-existing tag."""
+ models.Tag.objects.create(name="real-tag")
+ with self.assertRaises(models.Tag.DoesNotExist):
+ utils.get_tag("fake")
+
+ @mock.patch.object(utils, "get_tag_category")
+ def test_category_pages(self, get_mock: mock.Mock):
+ """Test that the category pages function calls the correct method for tags."""
+ tag = models.Tag.objects.create(name="tag")
+ get_mock.return_value = tag
+ result = utils.get_category_pages(settings.CONTENT_PAGES_PATH / "tags")
+ self.assertEqual(tag, result)
+ get_mock.assert_called_once_with(collapse_groups=True)
+
+ def test_get_category_root(self):
+ """Test that all tags are returned and formatted properly for the tag root page."""
+ body = "normal body"
+ base = {"description": markdown.markdown(body), "icon": "fas fa-tag"}
+
+ models.Tag.objects.create(name="tag-1", body=body),
+ models.Tag.objects.create(name="tag-2", body=body),
+ models.Tag.objects.create(name="tag-3", body=body),
+
+ models.Tag.objects.create(name="tag-4", body=body, group="tag-group")
+ models.Tag.objects.create(name="tag-5", body=body, group="tag-group")
+
+ result = utils.get_tag_category(collapse_groups=True)
+
+ self.assertDictEqual({
+ "tag-1": {**base, "title": "tag-1"},
+ "tag-2": {**base, "title": "tag-2"},
+ "tag-3": {**base, "title": "tag-3"},
+ "tag-group": {
+ "title": "tag-group",
+ "description": "Contains the following tags: tag-4, tag-5",
+ "icon": "fas fa-tags"
+ }
+ }, result)
+
+ def test_get_category_group(self):
+ """Test the function for a group root page."""
+ body = "normal body"
+ base = {"description": markdown.markdown(body), "icon": "fas fa-tag"}
+
+ included = [
+ models.Tag.objects.create(name="tag-1", body=body, group="group"),
+ models.Tag.objects.create(name="tag-2", body=body, group="group"),
+ ]
+ models.Tag.objects.create(name="not-included", body=body)
+
+ result = utils.get_tag_category(included, collapse_groups=False)
+ self.assertDictEqual({
+ "tag-1": {**base, "title": "tag-1"},
+ "tag-2": {**base, "title": "tag-2"},
+ }, result)
+
+ def test_tag_url(self):
+ """Test that tag URLs are generated correctly."""
+ cases = [
+ ({"name": "tag"}, f"{models.Tag.URL_BASE}/tag.md"),
+ ({"name": "grouped", "group": "abc"}, f"{models.Tag.URL_BASE}/abc/grouped.md"),
+ ]
+
+ for options, url in cases:
+ tag = models.Tag(**options)
+ with self.subTest(tag=tag):
+ self.assertEqual(url, tag.url)
+
+ @mock.patch("httpx.Client.get")
+ def test_get_tag_commit(self, get_mock: mock.Mock):
+ """Test the get commit function with a normal tag."""
+ tag = models.Tag.objects.create(name="example")
+
+ authors = json.loads(self.commit.authors)
+
+ get_mock.return_value = httpx.Response(
+ request=httpx.Request("GET", "https://google.com"),
+ status_code=200,
+ json=[{
+ "sha": self.commit.sha,
+ "commit": {
+ "message": self.commit.message,
+ "author": authors[0],
+ "committer": authors[1],
+ }
+ }]
+ )
+
+ result = utils.get_tag(tag.name)
+ self.assertEqual(tag, result)
+
+ get_mock.assert_called_once()
+ call_params = get_mock.call_args[1]["params"]
+
+ self.assertEqual({"path": "/bot/resources/tags/example.md"}, call_params)
+ self.assertEqual(self.commit, models.Tag.objects.get(name=tag.name).last_commit)
+
+ @mock.patch("httpx.Client.get")
+ def test_get_group_tag_commit(self, get_mock: mock.Mock):
+ """Test the get commit function with a group tag."""
+ tag = models.Tag.objects.create(name="example", group="group-name")
+
+ authors = json.loads(self.commit.authors)
+ authors.pop()
+ self.commit.authors = json.dumps(authors)
+ self.commit.save()
+
+ get_mock.return_value = httpx.Response(
+ request=httpx.Request("GET", "https://google.com"),
+ status_code=200,
+ json=[{
+ "sha": self.commit.sha,
+ "commit": {
+ "message": self.commit.message,
+ "author": authors[0],
+ "committer": authors[0],
+ }
+ }]
+ )
+
+ utils.set_tag_commit(tag)
+
+ get_mock.assert_called_once()
+ call_params = get_mock.call_args[1]["params"]
+
+ self.assertEqual({"path": "/bot/resources/tags/group-name/example.md"}, call_params)
+ self.assertEqual(self.commit, models.Tag.objects.get(name=tag.name).last_commit)
+
+ @mock.patch.object(utils, "set_tag_commit")
+    def test_existing_commit(self, set_commit_mock: mock.Mock):
+        """Test that an existing commit is reused when the tag data has not changed."""
+ tag = models.Tag.objects.create(name="tag-name", body="old body", last_commit=self.commit)
+
+ # This is only applied to the object, not to the database
+ tag.last_commit = None
+
+ utils.record_tags([tag])
+ self.assertEqual(self.commit, tag.last_commit)
+
+ result = utils.get_tag("tag-name")
+ self.assertEqual(tag, result)
+ set_commit_mock.assert_not_called()
diff --git a/pydis_site/apps/content/tests/test_views.py b/pydis_site/apps/content/tests/test_views.py
index eadad7e3..3ef9bcc4 100644
--- a/pydis_site/apps/content/tests/test_views.py
+++ b/pydis_site/apps/content/tests/test_views.py
@@ -1,12 +1,18 @@
+import textwrap
from pathlib import Path
from unittest import TestCase
+import django.test
+import markdown
from django.http import Http404
from django.test import RequestFactory, SimpleTestCase, override_settings
+from django.urls import reverse
+from pydis_site.apps.content.models import Commit, Tag
from pydis_site.apps.content.tests.helpers import (
BASE_PATH, MockPagesTestCase, PARSED_CATEGORY_INFO, PARSED_HTML, PARSED_METADATA
)
+from pydis_site.apps.content.tests.test_utils import TEST_COMMIT_KWARGS
from pydis_site.apps.content.views import PageOrCategoryView
@@ -172,7 +178,7 @@ class PageOrCategoryViewTests(MockPagesTestCase, SimpleTestCase, TestCase):
for item in context["breadcrumb_items"]:
item["path"] = Path(item["path"])
- self.assertEquals(
+ self.assertEqual(
context["breadcrumb_items"],
[
{"name": PARSED_CATEGORY_INFO["title"], "path": Path(".")},
@@ -180,3 +186,217 @@ class PageOrCategoryViewTests(MockPagesTestCase, SimpleTestCase, TestCase):
{"name": PARSED_CATEGORY_INFO["title"], "path": Path("category/subcategory")},
]
)
+
+
+class TagViewTests(django.test.TestCase):
+ """Tests for the TagView class."""
+
+ def setUp(self):
+ """Set test helpers, then set up fake filesystem."""
+ super().setUp()
+ self.commit = Commit.objects.create(**TEST_COMMIT_KWARGS)
+
+ def test_routing(self):
+ """Test that the correct template is returned for each route."""
+ Tag.objects.create(name="example", last_commit=self.commit)
+ Tag.objects.create(name="grouped-tag", group="group-name", last_commit=self.commit)
+
+ cases = [
+ ("/pages/tags/example/", "content/tag.html"),
+ ("/pages/tags/group-name/", "content/listing.html"),
+ ("/pages/tags/group-name/grouped-tag/", "content/tag.html"),
+ ]
+
+ for url, template in cases:
+ with self.subTest(url=url):
+ response = self.client.get(url)
+ self.assertEqual(200, response.status_code)
+ self.assertTemplateUsed(response, template)
+
+ def test_valid_tag_returns_200(self):
+ """Test that a page is returned for a valid tag."""
+ Tag.objects.create(name="example", body="This is the tag body.", last_commit=self.commit)
+ response = self.client.get("/pages/tags/example/")
+ self.assertEqual(200, response.status_code)
+ self.assertIn("This is the tag body", response.content.decode("utf-8"))
+ self.assertTemplateUsed(response, "content/tag.html")
+
+ def test_invalid_tag_404(self):
+ """Test that a tag which doesn't exist raises a 404."""
+ response = self.client.get("/pages/tags/non-existent/")
+ self.assertEqual(404, response.status_code)
+
+ def test_context_tag(self):
+ """Test that the context contains the required data for a tag."""
+ body = textwrap.dedent("""
+ ---
+ unused: frontmatter
+ ----
+ Tag content here.
+ """)
+
+ tag = Tag.objects.create(name="example", body=body, last_commit=self.commit)
+ response = self.client.get("/pages/tags/example/")
+ expected = {
+ "page_title": "example",
+ "page": markdown.markdown("Tag content here."),
+ "tag": tag,
+ "breadcrumb_items": [
+ {"name": "Pages", "path": "."},
+ {"name": "Tags", "path": "tags"},
+ ]
+ }
+ for key in expected:
+ self.assertEqual(
+ expected[key], response.context.get(key), f"context.{key} did not match"
+ )
+
+ def test_context_grouped_tag(self):
+ """
+ Test the context for a tag in a group.
+
+        The only difference between this and a regular tag is the breadcrumbs,
+ so only those are checked.
+ """
+ Tag.objects.create(
+ name="example", body="Body text", group="group-name", last_commit=self.commit
+ )
+ response = self.client.get("/pages/tags/group-name/example/")
+ self.assertListEqual([
+ {"name": "Pages", "path": "."},
+ {"name": "Tags", "path": "tags"},
+ {"name": "group-name", "path": "tags/group-name"},
+ ], response.context.get("breadcrumb_items"))
+
+ def test_group_page(self):
+ """Test rendering of a group's root page."""
+ Tag.objects.create(name="tag-1", body="Body 1", group="group-name", last_commit=self.commit)
+ Tag.objects.create(name="tag-2", body="Body 2", group="group-name", last_commit=self.commit)
+ Tag.objects.create(name="not-included", last_commit=self.commit)
+
+ response = self.client.get("/pages/tags/group-name/")
+ content = response.content.decode("utf-8")
+
+ self.assertInHTML("<div class='level-left'>group-name</div>", content)
+ self.assertInHTML(
+ f"<a class='level-item fab fa-github' href='{Tag.URL_BASE}/group-name'>",
+ content
+ )
+ self.assertIn(">tag-1</span>", content)
+ self.assertIn(">tag-2</span>", content)
+ self.assertNotIn(
+ ">not-included</span>",
+ content,
+ "Tags not in this group shouldn't be rendered."
+ )
+
+ self.assertInHTML("<p>Body 1</p>", content)
+
+ def test_markdown(self):
+ """Test that markdown content is rendered properly."""
+ body = textwrap.dedent("""
+ ```py
+ Hello world!
+ ```
+
+ **This text is in bold**
+ """)
+
+ Tag.objects.create(name="example", body=body, last_commit=self.commit)
+ response = self.client.get("/pages/tags/example/")
+ content = response.content.decode("utf-8")
+
+ self.assertInHTML('<code class="language-py">Hello world!</code>', content)
+ self.assertInHTML("<strong>This text is in bold</strong>", content)
+
+ def test_embed(self):
+ """Test that an embed from the frontmatter is treated correctly."""
+ body = textwrap.dedent("""
+ ---
+ embed:
+ title: Embed title
+ image:
+ url: https://google.com
+ ---
+ Tag body.
+ """)
+
+ Tag.objects.create(name="example", body=body, last_commit=self.commit)
+ response = self.client.get("/pages/tags/example/")
+ content = response.content.decode("utf-8")
+
+ self.assertInHTML('<img alt="Embed title" src="https://google.com"/>', content)
+ self.assertInHTML("<p>Tag body.</p>", content)
+
+ def test_embed_title(self):
+ """Test that the page title gets set to the embed title."""
+ body = textwrap.dedent("""
+ ---
+ embed:
+ title: Embed title
+ ---
+ """)
+
+ Tag.objects.create(name="example", body=body, last_commit=self.commit)
+ response = self.client.get("/pages/tags/example/")
+ self.assertEqual(
+ "Embed title",
+ response.context.get("page_title"),
+ "The page title must match the embed title."
+ )
+
+ def test_hyperlinked_item(self):
+ """Test hyperlinking of tags works as intended."""
+ filler_before, filler_after = "empty filler text\n\n", "more\nfiller"
+ body = filler_before + "`!tags return`" + filler_after
+ Tag.objects.create(name="example", body=body, last_commit=self.commit)
+
+ other_url = reverse("content:tag", kwargs={"location": "return"})
+ response = self.client.get("/pages/tags/example/")
+ self.assertEqual(
+ markdown.markdown(filler_before + f"[`!tags return`]({other_url})" + filler_after),
+ response.context.get("page")
+ )
+
+ def test_hyperlinked_group(self):
+ """Test hyperlinking with a group works as intended."""
+ Tag.objects.create(
+ name="example", body="!tags group-name grouped-tag", last_commit=self.commit
+ )
+ Tag.objects.create(name="grouped-tag", group="group-name")
+
+ other_url = reverse("content:tag", kwargs={"location": "group-name/grouped-tag"})
+ response = self.client.get("/pages/tags/example/")
+ self.assertEqual(
+ markdown.markdown(f"[!tags group-name grouped-tag]({other_url})"),
+ response.context.get("page")
+ )
+
+ def test_hyperlinked_extra_text(self):
+ """Test hyperlinking when a tag is followed by extra, unrelated text."""
+ Tag.objects.create(
+ name="example", body="!tags other unrelated text", last_commit=self.commit
+ )
+ Tag.objects.create(name="other")
+
+ other_url = reverse("content:tag", kwargs={"location": "other"})
+ response = self.client.get("/pages/tags/example/")
+ self.assertEqual(
+ markdown.markdown(f"[!tags other]({other_url}) unrelated text"),
+ response.context.get("page")
+ )
+
+ def test_tag_root_page(self):
+ """Test the root tag page which lists all tags."""
+ Tag.objects.create(name="tag-1", last_commit=self.commit)
+ Tag.objects.create(name="tag-2", last_commit=self.commit)
+ Tag.objects.create(name="tag-3", last_commit=self.commit)
+
+ response = self.client.get("/pages/tags/")
+ content = response.content.decode("utf-8")
+
+ self.assertTemplateUsed(response, "content/listing.html")
+ self.assertInHTML('<div class="level-left">Tags</div>', content)
+
+ for tag_number in range(1, 4):
+ self.assertIn(f"tag-{tag_number}</span>", content)
diff --git a/pydis_site/apps/content/urls.py b/pydis_site/apps/content/urls.py
index f8496095..a7695a27 100644
--- a/pydis_site/apps/content/urls.py
+++ b/pydis_site/apps/content/urls.py
@@ -3,7 +3,7 @@ from pathlib import Path
from django_distill import distill_path
-from . import views
+from . import utils, views
app_name = "content"
@@ -29,15 +29,38 @@ def __get_all_files(root: Path, folder: typing.Optional[Path] = None) -> list[st
return results
-def get_all_pages() -> typing.Iterator[dict[str, str]]:
+DISTILL_RETURN = typing.Iterator[dict[str, str]]
+
+
+def get_all_pages() -> DISTILL_RETURN:
"""Yield a dict of all page categories."""
for location in __get_all_files(Path("pydis_site", "apps", "content", "resources")):
yield {"location": location}
+def get_all_tags() -> DISTILL_RETURN:
+ """Return all tag names and groups in static builds."""
+    # Seed the set with None so that removing it afterwards always succeeds,
+    # whether or not any ungrouped tag added it during the loop
+ groups = {None}
+ for tag in utils.get_tags_static():
+ groups.add(tag.group)
+ yield {"location": (f"{tag.group}/" if tag.group else "") + tag.name}
+
+ groups.remove(None)
+ for group in groups:
+ yield {"location": group}
+
+
urlpatterns = [
distill_path("", views.PageOrCategoryView.as_view(), name='pages'),
distill_path(
+ "tags/<path:location>/",
+ views.TagView.as_view(),
+ name="tag",
+ distill_func=get_all_tags
+ ),
+ distill_path(
"<path:location>/",
views.PageOrCategoryView.as_view(),
name='page_category',
diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py
index d3f270ff..c12893ef 100644
--- a/pydis_site/apps/content/utils.py
+++ b/pydis_site/apps/content/utils.py
@@ -1,14 +1,41 @@
+import datetime
+import functools
+import json
+import tarfile
+import tempfile
+from io import BytesIO
from pathlib import Path
-from typing import Dict, Tuple
import frontmatter
+import httpx
import markdown
import yaml
from django.http import Http404
+from django.utils import timezone
from markdown.extensions.toc import TocExtension
+from pydis_site import settings
+from .models import Commit, Tag
-def get_category(path: Path) -> Dict[str, str]:
+TAG_CACHE_TTL = datetime.timedelta(hours=1)
+
+
+def github_client(**kwargs) -> httpx.Client:
+ """Get a client to access the GitHub API with important settings pre-configured."""
+ client = httpx.Client(
+ base_url=settings.GITHUB_API,
+ follow_redirects=True,
+ timeout=settings.TIMEOUT_PERIOD,
+ **kwargs
+ )
+ if settings.GITHUB_TOKEN: # pragma: no cover
+ if not client.headers.get("Authorization"):
+ client.headers = {"Authorization": f"token {settings.GITHUB_TOKEN}"}
+
+ return client
+
+
+def get_category(path: Path) -> dict[str, str]:
"""Load category information by name from _info.yml."""
if not path.is_dir():
raise Http404("Category not found.")
@@ -16,7 +43,7 @@ def get_category(path: Path) -> Dict[str, str]:
return yaml.safe_load(path.joinpath("_info.yml").read_text(encoding="utf-8"))
-def get_categories(path: Path) -> Dict[str, Dict]:
+def get_categories(path: Path) -> dict[str, dict]:
"""Get information for all categories."""
categories = {}
@@ -27,8 +54,253 @@ def get_categories(path: Path) -> Dict[str, Dict]:
return categories
-def get_category_pages(path: Path) -> Dict[str, Dict]:
+def get_tags_static() -> list[Tag]:
+ """
+ Fetch tag information in static builds.
+
+ This also includes some fake tags to preview the tag groups feature.
+ This will return a cached value, so it should only be used for static builds.
+ """
+ tags = fetch_tags()
+ for tag in tags[3:5]: # pragma: no cover
+ tag.group = "very-cool-group"
+ return tags
+
+
+def fetch_tags() -> list[Tag]:
+ """
+ Fetch tag data from the GitHub API.
+
+ The entire repository is downloaded and extracted locally because
+    getting file content would require one request per file, which can get rate-limited.
+ """
+ with github_client() as client:
+ # Grab metadata
+ metadata = client.get("/repos/python-discord/bot/contents/bot/resources")
+ metadata.raise_for_status()
+
+ hashes = {}
+ for entry in metadata.json():
+ if entry["type"] == "dir":
+ # Tag group
+ files = client.get(entry["url"])
+ files.raise_for_status()
+ files = files.json()
+ else:
+ files = [entry]
+
+ for file in files:
+ hashes[file["name"]] = file["sha"]
+
+ # Download the files
+ tar_file = client.get("/repos/python-discord/bot/tarball")
+ tar_file.raise_for_status()
+
+ tags = []
+ with tempfile.TemporaryDirectory() as folder:
+ with tarfile.open(fileobj=BytesIO(tar_file.content)) as repo:
+ included = []
+ for file in repo.getmembers():
+ if "/bot/resources/tags" in file.path:
+ included.append(file)
+ repo.extractall(folder, included)
+
+ for tag_file in Path(folder).rglob("*.md"):
+ name = tag_file.name
+ group = None
+ if tag_file.parent.name != "tags":
+ # Tags in sub-folders are considered part of a group
+ group = tag_file.parent.name
+
+ tags.append(Tag(
+ name=name.removesuffix(".md"),
+ sha=hashes[name],
+ group=group,
+ body=tag_file.read_text(encoding="utf-8"),
+ last_commit=None,
+ ))
+
+ return tags
+
+
+def set_tag_commit(tag: Tag) -> None:
+ """Fetch commit information from the API, and save it for the tag."""
+ if settings.STATIC_BUILD: # pragma: no cover
+ # Static builds request every page during build, which can ratelimit it.
+ # Instead, we return some fake data.
+ tag.last_commit = Commit(
+ sha="68da80efc00d9932a209d5cccd8d344cec0f09ea",
+ message="Initial Commit\n\nTHIS IS FAKE DEMO DATA",
+ date=datetime.datetime(2018, 2, 3, 12, 20, 26, tzinfo=datetime.timezone.utc),
+ authors=json.dumps([{"name": "Joseph", "email": "[email protected]"}]),
+ )
+ return
+
+ path = "/bot/resources/tags"
+ if tag.group:
+ path += f"/{tag.group}"
+ path += f"/{tag.name}.md"
+
+ # Fetch and set the commit
+ with github_client() as client:
+ data = client.get("/repos/python-discord/bot/commits", params={"path": path})
+ data.raise_for_status()
+ data = data.json()[0]
+
+ commit = data["commit"]
+ author, committer = commit["author"], commit["committer"]
+
+ date = datetime.datetime.strptime(committer["date"], settings.GITHUB_TIMESTAMP_FORMAT)
+ date = date.replace(tzinfo=datetime.timezone.utc)
+
+ if author["email"] == committer["email"]:
+ authors = [author]
+ else:
+ authors = [author, committer]
+
+ commit_obj, _ = Commit.objects.get_or_create(
+ sha=data["sha"],
+ message=commit["message"],
+ date=date,
+ authors=json.dumps(authors),
+ )
+ tag.last_commit = commit_obj
+ tag.save()
+
+
+def record_tags(tags: list[Tag]) -> None:
+ """Sync the database with an updated set of tags."""
+ # Remove entries which no longer exist
+ Tag.objects.exclude(name__in=[tag.name for tag in tags]).delete()
+
+ # Insert/update the tags
+ for new_tag in tags:
+ try:
+ old_tag = Tag.objects.get(name=new_tag.name)
+ except Tag.DoesNotExist:
+ # The tag is not in the database yet,
+            # pretend its previous state is the current state
+ old_tag = new_tag
+
+ if old_tag.sha == new_tag.sha and old_tag.last_commit is not None:
+ # We still have an up-to-date commit entry
+ new_tag.last_commit = old_tag.last_commit
+
+ new_tag.save()
+
+ # Drop old, unused commits
+ Commit.objects.filter(tag__isnull=True).delete()
+
+
+def get_tags() -> list[Tag]:
+ """Return a list of all tags visible to the application, from the cache or API."""
+ if settings.STATIC_BUILD: # pragma: no cover
+ last_update = None
+ else:
+ last_update = (
+ Tag.objects.values_list("last_updated", flat=True)
+ .order_by("last_updated").first()
+ )
+
+ if last_update is None or timezone.now() >= (last_update + TAG_CACHE_TTL):
+ # Stale or empty cache
+ if settings.STATIC_BUILD: # pragma: no cover
+ tags = get_tags_static()
+ else:
+ tags = fetch_tags()
+ record_tags(tags)
+
+ return tags
+ else:
+ # Get tags from database
+ return list(Tag.objects.all())
+
+
+def get_tag(path: str, *, skip_sync: bool = False) -> Tag | list[Tag]:
+ """
+ Return a tag based on the search location.
+
+ If certain tag data is out of sync (for instance a commit date is missing),
+ an extra request will be made to sync the information.
+
+ The tag name and group must match. If only one argument is provided in the path,
+    it's assumed to be either a group name or the name of an ungrouped tag.
+
+ If it's a group name, a list of tags which belong to it is returned.
+ """
+ path = path.split("/")
+ if len(path) == 2:
+ group, name = path
+ else:
+ name = path[0]
+ group = None
+
+ matches = []
+ for tag in get_tags():
+ if tag.name == name and tag.group == group:
+ if tag.last_commit is None and not skip_sync:
+ set_tag_commit(tag)
+ return tag
+ elif tag.group == name and group is None:
+ matches.append(tag)
+
+ if matches:
+ return matches
+
+ raise Tag.DoesNotExist()
+
+
+def get_tag_category(tags: list[Tag] | None = None, *, collapse_groups: bool) -> dict[str, dict]:
+ """
+ Generate context data for `tags`, or all tags if None.
+
+ If `tags` is None, `get_tag` is used to populate the data.
+ If `collapse_groups` is True, tags with parent groups are not included in the list,
+    and instead the parent itself is included as a single entry with its sub-tags
+ in the description.
+ """
+ if not tags:
+ tags = get_tags()
+
+ data = []
+ groups = {}
+
+ # Create all the metadata for the tags
+ for tag in tags:
+ if tag.group is None or not collapse_groups:
+ content = frontmatter.parse(tag.body)[1]
+ data.append({
+ "title": tag.name,
+ "description": markdown.markdown(content, extensions=["pymdownx.superfences"]),
+ "icon": "fas fa-tag",
+ })
+ else:
+ if tag.group not in groups:
+ groups[tag.group] = {
+ "title": tag.group,
+ "description": [tag.name],
+ "icon": "fas fa-tags",
+ }
+ else:
+ groups[tag.group]["description"].append(tag.name)
+
+ # Flatten group description into a single string
+ for group in groups.values():
+ # If the following string is updated, make sure to update it in the frontend JS as well
+ group["description"] = "Contains the following tags: " + ", ".join(group["description"])
+ data.append(group)
+
+ # Sort the tags, and return them in the proper format
+ return {tag["title"]: tag for tag in sorted(data, key=lambda tag: tag["title"].casefold())}
+
+
+def get_category_pages(path: Path) -> dict[str, dict]:
"""Get all page names and their metadata at a category path."""
+ # Special handling for tags
+ if path == Path(__file__).parent / "resources/tags":
+ return get_tag_category(collapse_groups=True)
+
pages = {}
for item in path.glob("*.md"):
@@ -39,7 +311,7 @@ def get_category_pages(path: Path) -> Dict[str, Dict]:
return pages
-def get_page(path: Path) -> Tuple[str, Dict]:
+def get_page(path: Path) -> tuple[str, dict]:
"""Get one specific page."""
if not path.is_file():
raise Http404("Page not found.")
diff --git a/pydis_site/apps/content/views/__init__.py b/pydis_site/apps/content/views/__init__.py
index 70ea1c7a..a969b1dc 100644
--- a/pydis_site/apps/content/views/__init__.py
+++ b/pydis_site/apps/content/views/__init__.py
@@ -1,3 +1,4 @@
from .page_category import PageOrCategoryView
+from .tags import TagView
-__all__ = ["PageOrCategoryView"]
+__all__ = ["PageOrCategoryView", "TagView"]
diff --git a/pydis_site/apps/content/views/page_category.py b/pydis_site/apps/content/views/page_category.py
index 356eb021..062c2bc1 100644
--- a/pydis_site/apps/content/views/page_category.py
+++ b/pydis_site/apps/content/views/page_category.py
@@ -1,4 +1,3 @@
-import typing as t
from pathlib import Path
import frontmatter
@@ -6,7 +5,7 @@ from django.conf import settings
from django.http import Http404, HttpRequest, HttpResponse
from django.views.generic import TemplateView
-from pydis_site.apps.content import utils
+from pydis_site.apps.content import models, utils
class PageOrCategoryView(TemplateView):
@@ -25,7 +24,7 @@ class PageOrCategoryView(TemplateView):
return super().dispatch(request, *args, **kwargs)
- def get_template_names(self) -> t.List[str]:
+ def get_template_names(self) -> list[str]:
"""Checks if the view uses the page template or listing template."""
if self.page_path.is_file():
template_name = "content/page.html"
@@ -36,7 +35,7 @@ class PageOrCategoryView(TemplateView):
return [template_name]
- def get_context_data(self, **kwargs) -> t.Dict[str, t.Any]:
+ def get_context_data(self, **kwargs) -> dict[str, any]:
"""Assign proper context variables based on what resource user requests."""
context = super().get_context_data(**kwargs)
@@ -73,7 +72,7 @@ class PageOrCategoryView(TemplateView):
return context
@staticmethod
- def _get_page_context(path: Path) -> t.Dict[str, t.Any]:
+ def _get_page_context(path: Path) -> dict[str, any]:
page, metadata = utils.get_page(path)
return {
"page": page,
@@ -84,7 +83,7 @@ class PageOrCategoryView(TemplateView):
}
@staticmethod
- def _get_category_context(path: Path) -> t.Dict[str, t.Any]:
+ def _get_category_context(path: Path) -> dict[str, any]:
category = utils.get_category(path)
return {
"categories": utils.get_categories(path),
@@ -92,4 +91,7 @@ class PageOrCategoryView(TemplateView):
"page_title": category["title"],
"page_description": category["description"],
"icon": category.get("icon"),
+ "app_name": "content:page_category",
+ "is_tag_listing": "/resources/tags" in path.as_posix(),
+ "tag_url": models.Tag.URL_BASE,
}
diff --git a/pydis_site/apps/content/views/tags.py b/pydis_site/apps/content/views/tags.py
new file mode 100644
index 00000000..4f4bb5a2
--- /dev/null
+++ b/pydis_site/apps/content/views/tags.py
@@ -0,0 +1,124 @@
+import re
+import typing
+
+import frontmatter
+import markdown
+from django.conf import settings
+from django.http import Http404
+from django.urls import reverse
+from django.views.generic import TemplateView
+
+from pydis_site.apps.content import utils
+from pydis_site.apps.content.models import Tag
+
+# The following regex tries to parse a tag command
+# It'll read up to two words separated by spaces
+# If the command does not include a group, the tag name will be in the `first` group
+# If there's a second word after the command, or if there's a tag group, extra logic
+# is necessary to determine whether it's a tag with a group, or a tag with text after it
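+# For example, "`!tags return`" captures "return" in `first`, while "!tags group-name grouped-tag"
+# captures "group-name" in `first` and " grouped-tag" (with its leading space) in `second`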
+COMMAND_REGEX = re.compile(r"`*!tags? (?P<first>[\w-]+)(?P<second> [\w-]+)?`*")
+
+
+class TagView(TemplateView):
+ """Handles tag pages."""
+
+ tag: typing.Union[Tag, list[Tag]]
+ is_group: bool
+
+ def setup(self, *args, **kwargs) -> None:
+ """Look for a tag, and configure the view."""
+ super().setup(*args, **kwargs)
+
+ try:
+ self.tag = utils.get_tag(kwargs.get("location"))
+ self.is_group = isinstance(self.tag, list)
+ except Tag.DoesNotExist:
+ raise Http404
+
+ def get_template_names(self) -> list[str]:
+ """Either return the tag page template, or the listing."""
+ if self.is_group:
+ template_name = "content/listing.html"
+ else:
+ template_name = "content/tag.html"
+
+ return [template_name]
+
+ def get_context_data(self, **kwargs) -> dict:
+ """Get the relevant context for this tag page or group."""
+ context = super().get_context_data(**kwargs)
+ context["breadcrumb_items"] = [{
+ "name": utils.get_category(settings.CONTENT_PAGES_PATH / location)["title"],
+ "path": location,
+ } for location in (".", "tags")]
+
+ if self.is_group:
+ self._set_group_context(context, self.tag)
+ else:
+ self._set_tag_context(context, self.tag)
+
+ return context
+
+ @staticmethod
+ def _set_tag_context(context: dict[str, any], tag: Tag) -> None:
+ """Update the context with the information for a tag page."""
+ context.update({
+ "page_title": tag.name,
+ "tag": tag,
+ })
+
+ if tag.group:
+ # Add group names to the breadcrumbs
+ context["breadcrumb_items"].append({
+ "name": tag.group,
+ "path": f"tags/{tag.group}",
+ })
+
+ # Clean up tag body
+ body = frontmatter.parse(tag.body)
+ content = body[1]
+
+ # Check for tags which can be hyperlinked
+ def sub(match: re.Match) -> str:
+ first, second = match.groups()
+ location = first
+ text, extra = match.group(), ""
+
+ if second is not None:
+ # Possibly a tag group
+ try:
+ new_location = f"{first}/{second.strip()}"
+ utils.get_tag(new_location, skip_sync=True)
+ location = new_location
+ except Tag.DoesNotExist:
+ # Not a group, remove the second argument from the link
+ extra = text[text.find(second):]
+ text = text[:text.find(second)]
+
+ link = reverse("content:tag", kwargs={"location": location})
+ return f"[{text}]({link}){extra}"
+ content = COMMAND_REGEX.sub(sub, content)
+
+ # Add support for some embed elements
+ if embed := body[0].get("embed"):
+ context["page_title"] = embed["title"]
+ if image := embed.get("image"):
+ content = f"![{embed['title']}]({image['url']})\n\n" + content
+
+ # Insert the content
+ context["page"] = markdown.markdown(content, extensions=["pymdownx.superfences"])
+
+ @staticmethod
+ def _set_group_context(context: dict[str, any], tags: list[Tag]) -> None:
+ """Update the context with the information for a group of tags."""
+ group = tags[0].group
+ context.update({
+ "categories": {},
+ "pages": utils.get_tag_category(tags, collapse_groups=False),
+ "page_title": group,
+ "icon": "fab fa-tags",
+ "is_tag_listing": True,
+ "app_name": "content:tag",
+ "path": f"{group}/",
+ "tag_url": f"{tags[0].URL_BASE}/{group}"
+ })
diff --git a/pydis_site/apps/home/tests/test_repodata_helpers.py b/pydis_site/apps/home/tests/test_repodata_helpers.py
index 4007eded..a963f733 100644
--- a/pydis_site/apps/home/tests/test_repodata_helpers.py
+++ b/pydis_site/apps/home/tests/test_repodata_helpers.py
@@ -42,7 +42,7 @@ class TestRepositoryMetadataHelpers(TestCase):
metadata = self.home_view._get_repo_data()
self.assertIsInstance(metadata[0], RepositoryMetadata)
- self.assertEquals(len(metadata), len(self.home_view.repos))
+ self.assertEqual(len(metadata), len(self.home_view.repos))
def test_returns_cached_metadata(self):
"""Test if the _get_repo_data helper returns cached data when available."""
@@ -82,7 +82,7 @@ class TestRepositoryMetadataHelpers(TestCase):
repo = self.home_view.repos[0]
self.assertIsInstance(api_data, dict)
- self.assertEquals(len(api_data), len(self.home_view.repos))
+ self.assertEqual(len(api_data), len(self.home_view.repos))
self.assertIn(repo, api_data.keys())
self.assertIn("stargazers_count", api_data[repo])
@@ -126,7 +126,7 @@ class TestRepositoryMetadataHelpers(TestCase):
with self.assertLogs():
metadata = self.home_view._get_repo_data()
- self.assertEquals(len(metadata), 0)
+ self.assertEqual(len(metadata), 0)
def test_cleans_up_stale_metadata(self):
"""Tests that we clean up stale metadata when we start the HomeView."""
diff --git a/pydis_site/apps/home/views/home.py b/pydis_site/apps/home/views/home.py
index 9bb1f8fd..8a165682 100644
--- a/pydis_site/apps/home/views/home.py
+++ b/pydis_site/apps/home/views/home.py
@@ -32,9 +32,7 @@ class HomeView(View):
def __init__(self):
"""Clean up stale RepositoryMetadata."""
- self._static_build = settings.env("STATIC_BUILD")
-
- if not self._static_build:
+ if not settings.STATIC_BUILD:
RepositoryMetadata.objects.exclude(repo_name__in=self.repos).delete()
# If no token is defined (for example in local development), then
@@ -94,7 +92,7 @@ class HomeView(View):
def _get_repo_data(self) -> List[RepositoryMetadata]:
"""Build a list of RepositoryMetadata objects that we can use to populate the front page."""
# First off, load the timestamp of the least recently updated entry.
- if self._static_build:
+ if settings.STATIC_BUILD:
last_update = None
else:
last_update = (
@@ -121,7 +119,7 @@ class HomeView(View):
for api_data in api_repositories.values()
]
- if settings.env("STATIC_BUILD"):
+ if settings.STATIC_BUILD:
return data
else:
return RepositoryMetadata.objects.bulk_create(data)
diff --git a/pydis_site/apps/redirect/urls.py b/pydis_site/apps/redirect/urls.py
index f7ddf45b..067cccc3 100644
--- a/pydis_site/apps/redirect/urls.py
+++ b/pydis_site/apps/redirect/urls.py
@@ -3,6 +3,7 @@ import re
import yaml
from django import conf
+from django.http import HttpResponse
from django.urls import URLPattern, path
from django_distill import distill_path
@@ -31,7 +32,7 @@ class Redirect:
def map_redirect(name: str, data: Redirect) -> list[URLPattern]:
"""Return a pattern using the Redirects app, or a static HTML redirect for static builds."""
- if not settings.env("STATIC_BUILD"):
+ if not settings.STATIC_BUILD:
# Normal dynamic redirect
return [path(
data.original_path,
@@ -53,7 +54,7 @@ def map_redirect(name: str, data: Redirect) -> list[URLPattern]:
class RedirectFunc:
def __init__(self, new_url: str, _name: str):
- self.result = REDIRECT_TEMPLATE.format(url=new_url)
+ self.result = HttpResponse(REDIRECT_TEMPLATE.format(url=new_url))
self.__qualname__ = _name
def __call__(self, *args, **kwargs):
@@ -95,7 +96,7 @@ def map_redirect(name: str, data: Redirect) -> list[URLPattern]:
return [distill_path(
data.original_path,
- lambda *args: REDIRECT_TEMPLATE.format(url=new_redirect),
+ lambda *args: HttpResponse(REDIRECT_TEMPLATE.format(url=new_redirect)),
name=name,
)]
diff --git a/pydis_site/settings.py b/pydis_site/settings.py
index bbf1d3aa..e9e0ba67 100644
--- a/pydis_site/settings.py
+++ b/pydis_site/settings.py
@@ -38,17 +38,22 @@ GITHUB_API = "https://api.github.com"
GITHUB_TOKEN = env("GITHUB_TOKEN")
GITHUB_APP_ID = env("GITHUB_APP_ID")
GITHUB_APP_KEY = env("GITHUB_APP_KEY")
+GITHUB_TIMESTAMP_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
+"""The datetime string format GitHub uses."""
+
+STATIC_BUILD: bool = env("STATIC_BUILD")
if GITHUB_APP_KEY and (key_file := Path(GITHUB_APP_KEY)).is_file():
# Allow the OAuth key to be loaded from a file
GITHUB_APP_KEY = key_file.read_text(encoding="utf-8")
-sentry_sdk.init(
- dsn=env('SITE_DSN'),
- integrations=[DjangoIntegration()],
- send_default_pii=True,
- release=f"site@{GIT_SHA}"
-)
+if not STATIC_BUILD:
+ sentry_sdk.init(
+ dsn=env('SITE_DSN'),
+ integrations=[DjangoIntegration()],
+ send_default_pii=True,
+ release=f"site@{GIT_SHA}"
+ )
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@@ -99,7 +104,7 @@ else:
NON_STATIC_APPS = [
'pydis_site.apps.api',
'pydis_site.apps.staff',
-] if not env("STATIC_BUILD") else []
+] if not STATIC_BUILD else []
INSTALLED_APPS = [
*NON_STATIC_APPS,
@@ -128,25 +133,29 @@ INSTALLED_APPS = [
if not env("BUILDING_DOCKER"):
INSTALLED_APPS.append("django_prometheus")
-NON_STATIC_MIDDLEWARE = [
- 'django_prometheus.middleware.PrometheusBeforeMiddleware',
-] if not env("STATIC_BUILD") else []
-
-# Ensure that Prometheus middlewares are first and last here.
-MIDDLEWARE = [
- *NON_STATIC_MIDDLEWARE,
-
- 'django.middleware.security.SecurityMiddleware',
- 'whitenoise.middleware.WhiteNoiseMiddleware',
- 'django.contrib.sessions.middleware.SessionMiddleware',
- 'django.middleware.common.CommonMiddleware',
- 'django.middleware.csrf.CsrfViewMiddleware',
- 'django.contrib.auth.middleware.AuthenticationMiddleware',
- 'django.contrib.messages.middleware.MessageMiddleware',
- 'django.middleware.clickjacking.XFrameOptionsMiddleware',
-
- 'django_prometheus.middleware.PrometheusAfterMiddleware'
-]
+if STATIC_BUILD:
+ # The only middleware required during static builds
+ MIDDLEWARE = [
+ 'django.contrib.sessions.middleware.SessionMiddleware',
+ 'django.contrib.auth.middleware.AuthenticationMiddleware',
+ 'django.contrib.messages.middleware.MessageMiddleware',
+ ]
+else:
+ # Ensure that Prometheus middlewares are first and last here.
+ MIDDLEWARE = [
+ 'django_prometheus.middleware.PrometheusBeforeMiddleware',
+
+ 'django.middleware.security.SecurityMiddleware',
+ 'whitenoise.middleware.WhiteNoiseMiddleware',
+ 'django.contrib.sessions.middleware.SessionMiddleware',
+ 'django.middleware.common.CommonMiddleware',
+ 'django.middleware.csrf.CsrfViewMiddleware',
+ 'django.contrib.auth.middleware.AuthenticationMiddleware',
+ 'django.contrib.messages.middleware.MessageMiddleware',
+ 'django.middleware.clickjacking.XFrameOptionsMiddleware',
+
+ 'django_prometheus.middleware.PrometheusAfterMiddleware'
+ ]
ROOT_URLCONF = 'pydis_site.urls'
@@ -175,7 +184,7 @@ WSGI_APPLICATION = 'pydis_site.wsgi.application'
DATABASES = {
'default': env.db(),
'metricity': env.db('METRICITY_DB_URL'),
-} if not env("STATIC_BUILD") else {}
+} if not STATIC_BUILD else {}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
@@ -200,7 +209,6 @@ AUTH_PASSWORD_VALIDATORS = [
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
-USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
diff --git a/pydis_site/static/css/content/color.css b/pydis_site/static/css/content/color.css
new file mode 100644
index 00000000..f4801c28
--- /dev/null
+++ b/pydis_site/static/css/content/color.css
@@ -0,0 +1,7 @@
+.content .fa-github {
+ color: black;
+}
+
+.content .fa-github:hover {
+ color: #7289DA;
+}
diff --git a/pydis_site/static/css/content/tag.css b/pydis_site/static/css/content/tag.css
new file mode 100644
index 00000000..79795f9e
--- /dev/null
+++ b/pydis_site/static/css/content/tag.css
@@ -0,0 +1,13 @@
+.content a * {
+ /* This is the original color, but propagated down the chain */
+ /* which allows for elements inside links, such as codeblocks */
+ color: #7289DA;
+}
+
+.content a *:hover {
+ color: dimgray;
+}
+
+span.update-time {
+ text-decoration: black underline dotted;
+}
diff --git a/pydis_site/static/images/content/fix-ssl-certificate/pem.png b/pydis_site/static/images/content/fix-ssl-certificate/pem.png
new file mode 100644
index 00000000..face520f
--- /dev/null
+++ b/pydis_site/static/images/content/fix-ssl-certificate/pem.png
Binary files differ
diff --git a/pydis_site/static/images/content/regenerating_token.jpg b/pydis_site/static/images/content/regenerating_token.jpg
new file mode 100644
index 00000000..7b2588dc
--- /dev/null
+++ b/pydis_site/static/images/content/regenerating_token.jpg
Binary files differ
diff --git a/pydis_site/static/js/content/listing.js b/pydis_site/static/js/content/listing.js
new file mode 100644
index 00000000..4b722632
--- /dev/null
+++ b/pydis_site/static/js/content/listing.js
@@ -0,0 +1,41 @@
+/**
+ * Trim a tag listing to only show a few lines of content.
+ */
+function trimTag() {
+ const containers = document.getElementsByClassName("tag-container");
+ for (const container of containers) {
+ if (container.textContent.startsWith("Contains the following tags:")) {
+ // Tag group, no need to trim
+ continue;
+ }
+
+ // Remove every element after the first two paragraphs
+ while (container.children.length > 2) {
+ container.removeChild(container.lastChild);
+ }
+
+ // Trim down the elements if they are too long
+ const containerLength = container.textContent.length;
+ if (containerLength > 300) {
+ if (containerLength - container.firstChild.textContent.length > 300) {
+ // The first element alone takes up more than 300 characters
+ container.removeChild(container.lastChild);
+ }
+
+ let last = container.lastChild.lastChild;
+ while (container.textContent.length > 300 && container.lastChild.childNodes.length > 0) {
+ last = container.lastChild.lastChild;
+ last.remove();
+ }
+
+ if (container.textContent.length > 300 && (last instanceof HTMLElement && last.tagName !== "CODE")) {
+ // Add back the final element (up to a period if possible)
+ const stop = last.textContent.indexOf(".");
+ last.textContent = last.textContent.slice(0, stop > 0 ? stop + 1: null);
+ container.lastChild.appendChild(last);
+ }
+ }
+ }
+}
+
+trimTag();
diff --git a/pydis_site/templates/base/navbar.html b/pydis_site/templates/base/navbar.html
index d7fb4f4c..931693c8 100644
--- a/pydis_site/templates/base/navbar.html
+++ b/pydis_site/templates/base/navbar.html
@@ -67,6 +67,9 @@
<a class="navbar-item" href="{% url "resources:index" %}">
Resources
</a>
+ <a class="navbar-item" href="{% url "content:pages" %}">
+ Content
+ </a>
<a class="navbar-item" href="{% url "events:index" %}">
Events
</a>
diff --git a/pydis_site/templates/content/base.html b/pydis_site/templates/content/base.html
index 4a19a275..2fd721a3 100644
--- a/pydis_site/templates/content/base.html
+++ b/pydis_site/templates/content/base.html
@@ -8,6 +8,10 @@
<meta property="og:description" content="{{ page_description }}" />
<link rel="stylesheet" href="{% static "css/content/page.css" %}">
<link rel="stylesheet" href="{% static "css/collapsibles.css" %}">
+ <link rel="stylesheet"
+ href="//cdnjs.cloudflare.com/ajax/libs/highlight.js/10.7.1/styles/atom-one-dark.min.css">
+ <script src="//cdnjs.cloudflare.com/ajax/libs/highlight.js/10.7.1/highlight.min.js"></script>
+ <script>hljs.highlightAll();</script>
<script src="{% static "js/collapsibles.js" %}"></script>
{% endblock %}
@@ -35,7 +39,7 @@
<section class="section">
<div class="container">
<div class="content">
- <h1 class="title">{{ page_title }}</h1>
+ <h1 class="title">{% block title_element %}{{ page_title }}{% endblock %}</h1>
{% block page_content %}{% endblock %}
</div>
</div>
diff --git a/pydis_site/templates/content/listing.html b/pydis_site/templates/content/listing.html
index ef0ef919..934b95f6 100644
--- a/pydis_site/templates/content/listing.html
+++ b/pydis_site/templates/content/listing.html
@@ -1,6 +1,22 @@
+{# Base navigation screen for resources #}
{% extends 'content/base.html' %}
+{% load static %}
+
+{# Show a GitHub button on tag pages #}
+{% block title_element %}
+{% if is_tag_listing %}
+ <link rel="stylesheet" href="{% static "css/content/color.css" %}">
+ <div class="level">
+ <div class="level-left">{{ block.super }}</div>
+ <div class="level-right">
+ <a class="level-item fab fa-github" href="{{ tag_url }}"></a>
+ </div>
+ </div>
+{% endif %}
+{% endblock %}
{% block page_content %}
+ {# Nested Categories #}
{% for category, data in categories.items %}
<div class="box" style="max-width: 800px;">
<span class="icon is-size-4 is-medium">
@@ -13,15 +29,22 @@
<p class="is-italic">{{ data.description }}</p>
</div>
{% endfor %}
+
+ {# Single Pages #}
{% for page, data in pages.items %}
<div class="box" style="max-width: 800px;">
<span class="icon is-size-4 is-medium">
<i class="{{ data.icon|default:"fab fa-python" }} is-size-3 is-black has-icon-padding" aria-hidden="true"></i>
</span>
- <a href="{% url "content:page_category" location=path|add:page %}">
+ <a href="{% url app_name location=path|add:page %}">
<span class="is-size-4 has-text-weight-bold">{{ data.title }}</span>
</a>
- <p class="is-italic">{{ data.description }}</p>
+ {% if is_tag_listing %}
+ <div class="tag-container">{{ data.description | safe }}</div>
+ {% else %}
+ <p class="is-italic">{{ data.description }}</p>
+ {% endif %}
</div>
{% endfor %}
+ <script src="{% static 'js/content/listing.js' %}"></script>
{% endblock %}
diff --git a/pydis_site/templates/content/page.html b/pydis_site/templates/content/page.html
index 759286f6..679ecec6 100644
--- a/pydis_site/templates/content/page.html
+++ b/pydis_site/templates/content/page.html
@@ -1,13 +1,5 @@
{% extends 'content/base.html' %}
-{% block head %}
- {{ block.super }}
- <link rel="stylesheet"
- href="//cdnjs.cloudflare.com/ajax/libs/highlight.js/10.7.1/styles/atom-one-dark.min.css">
- <script src="//cdnjs.cloudflare.com/ajax/libs/highlight.js/10.7.1/highlight.min.js"></script>
- <script>hljs.initHighlightingOnLoad();</script>
-{% endblock %}
-
{% block page_content %}
{% if relevant_links or toc %}
<div class="columns is-variable is-8">
diff --git a/pydis_site/templates/content/tag.html b/pydis_site/templates/content/tag.html
new file mode 100644
index 00000000..fa9e44f5
--- /dev/null
+++ b/pydis_site/templates/content/tag.html
@@ -0,0 +1,40 @@
+{% extends "content/page.html" %}
+{% load static %}
+
+{% block head %}
+ {{ block.super }}
+ <link rel="stylesheet" href="{% static 'css/content/color.css' %}"/>
+ <link rel="stylesheet" href="{% static 'css/content/tag.css' %}"/>
+ <title>{{ tag.name }}</title>
+{% endblock %}
+
+{% block title_element %}
+ <div class="level mb-2">
+ <div class="level-left">{{ block.super }}</div>
+ <div class="level-right">
+ <a class="level-item fab fa-github" href="{{ tag.url }}"></a>
+ </div>
+ </div>
+
+ <div class="dropdown is-size-6 is-hoverable">
+ <div class="dropdown-trigger ">
+ <a aria-haspopup="menu" href="{{ tag.last_commit.url }}">
+ <span class="update-time">
+ Last Updated: {{ tag.last_commit.date | date:"F j, Y g:i A e" }}
+ </span>
+ </a>
+ </div>
+ <div class="dropdown-menu">
+ <div class="dropdown-content">
+ <div class="dropdown-item">Last edited by:</div>
+ {% for user in tag.last_commit.format_authors %}
+ <div class="dropdown-item">{{ user }}</div>
+ {% endfor %}
+ <div class="dropdown-divider"></div>
+ {% for line in tag.last_commit.lines %}
+ <div class="dropdown-item">{{ line }}</div>
+ {% endfor %}
+ </div>
+ </div>
+ </div>
+{% endblock %}
diff --git a/pydis_site/templates/events/pages/code-jams/9/_index.html b/pydis_site/templates/events/pages/code-jams/9/_index.html
index 7c2617d7..ca7c4f90 100644
--- a/pydis_site/templates/events/pages/code-jams/9/_index.html
+++ b/pydis_site/templates/events/pages/code-jams/9/_index.html
@@ -27,8 +27,8 @@
<li><strike>Wednesday, July 6 - Voting for the theme opens</strike></li>
<li><strike>Wednesday, July 13 - The Qualifier closes</strike></li>
<li><strike>Thursday, July 21 - Code Jam Begins</strike></li>
- <li>Sunday, July 31 - Coding portion of the jam ends</li>
- <li>Sunday, August 4 - Code Jam submissions are closed</li>
+ <li><strike>Sunday, July 31 - Coding portion of the jam ends</strike></li>
+ <li><strike>Sunday, August 4 - Code Jam submissions are closed</strike></li>
</ul>
<h3 id="qualifier"><a href="#how-to-join">The Qualifier</a></h3>
diff --git a/pydis_site/urls.py b/pydis_site/urls.py
index 6cd31f26..0f2f6aeb 100644
--- a/pydis_site/urls.py
+++ b/pydis_site/urls.py
@@ -12,7 +12,7 @@ NON_STATIC_PATTERNS = [
path('pydis-api/', include('pydis_site.apps.api.urls', namespace='internal_api')),
path('', include('django_prometheus.urls')),
-] if not settings.env("STATIC_BUILD") else []
+] if not settings.STATIC_BUILD else []
urlpatterns = (
@@ -29,7 +29,7 @@ urlpatterns = (
)
-if not settings.env("STATIC_BUILD"):
+if not settings.STATIC_BUILD:
urlpatterns += (
path('staff/', include('pydis_site.apps.staff.urls', namespace='staff')),
)
diff --git a/pyproject.toml b/pyproject.toml
index 037f837c..ecd71dac 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -6,42 +6,41 @@ authors = ["Python Discord <[email protected]>"]
license = "MIT"
[tool.poetry.dependencies]
-python = "3.9.*"
-django = "~=4.0"
-django-environ = "~=0.4.5"
-django-filter = "~=21.1"
-djangorestframework = "~=3.13"
-psycopg2-binary = "~=2.8.0"
-django-simple-bulma = "~=2.4"
-whitenoise = "~=5.0"
-httpx = "~=0.23.0"
-pyyaml = "~=5.1"
-gunicorn = "~=20.0.4"
-sentry-sdk = "~=0.19"
-markdown = "~=3.3.4"
-python-frontmatter = "~=1.0"
-django-prometheus = "~=2.1"
-django-distill = "~=2.9.0"
-PyJWT = {version = "~=2.4.0", extras = ["crypto"]}
+python = "3.10.*"
+django = "4.1.3"
+django-environ = "0.9.0"
+django-filter = "22.1"
+djangorestframework = "3.14.0"
+psycopg2-binary = "2.9.5"
+django-simple-bulma = "2.5.0"
+whitenoise = "6.2.0"
+httpx = "0.23.1"
+pyyaml = "6.0"
+gunicorn = "20.1.0"
+sentry-sdk = "1.11.0"
+markdown = "3.4.1"
+python-frontmatter = "1.0.0"
+django-prometheus = "2.2.0"
+django-distill = "3.0.1"
+PyJWT = {version = "2.6.0", extras = ["crypto"]}
+pymdown-extensions = "9.8"
[tool.poetry.dev-dependencies]
-coverage = "~=5.0"
-flake8 = "~=3.7"
-flake8-annotations = "~=2.0"
-flake8-bandit = "~=3.0"
-flake8-bugbear = "~=20.1"
-flake8-docstrings = "~=1.5"
-flake8-import-order = "~=0.18"
-flake8-string-format = "~=0.3"
-flake8-tidy-imports = "~=4.0"
-flake8-todo = "~=0.7"
-mccabe = "~=0.6.1"
-pep8-naming = "~=0.9"
-pre-commit = "~=2.1"
-pyfakefs = "~=4.5"
-coveralls = "~=2.1"
-taskipy = "~=1.7.0"
-python-dotenv = "~=0.17.1"
+coverage = "6.5.0"
+flake8 = "5.0.4"
+flake8-annotations = "2.9.1"
+flake8-bandit = "4.1.1"
+flake8-bugbear = "22.10.27"
+flake8-docstrings = "1.6.0"
+flake8-import-order = "0.18.1"
+flake8-tidy-imports = "4.8.0"
+flake8-string-format = "0.3.0"
+flake8-todo = "0.7"
+pep8-naming = "0.13.2"
+pre-commit = "2.20.0"
+pyfakefs = "5.0.0"
+taskipy = "1.10.3"
+python-dotenv = "0.21.0"
[build-system]
requires = ["poetry-core>=1.0.0"]
diff --git a/static-builds/netlify_build.py b/static-builds/netlify_build.py
index f3a53f72..36520c28 100644
--- a/static-builds/netlify_build.py
+++ b/static-builds/netlify_build.py
@@ -28,6 +28,11 @@ def raise_response(response: httpx.Response) -> None:
if __name__ == "__main__":
+ client = httpx.Client(
+ follow_redirects=True,
+ timeout=3 * 60,
+ )
+
owner, repo = parse.urlparse(os.getenv("REPOSITORY_URL")).path.lstrip("/").split("/")[0:2]
download_url = "/".join([
@@ -40,19 +45,19 @@ if __name__ == "__main__":
os.getenv("ARTIFACT_NAME"),
])
print(f"Fetching download URL from {download_url}")
- response = httpx.get(download_url, follow_redirects=True)
+ response = client.get(download_url)
raise_response(response)
# The workflow is still pending, retry in a bit
while response.status_code == 202:
print(f"{response.json()['error']}. Retrying in 10 seconds.")
time.sleep(10)
- response = httpx.get(download_url, follow_redirects=True)
+ response = client.get(download_url)
raise_response(response)
url = response.json()["url"]
print(f"Downloading build from {url}")
- zipped_content = httpx.get(url, follow_redirects=True, timeout=3 * 60)
+ zipped_content = client.get(url)
zipped_content.raise_for_status()
zip_file = Path("temp.zip")