author     2022-11-20 07:12:25 -0800
committer  2022-11-20 07:12:25 -0800
commit     6d8cafc45438f63918ccd300686e368354e1e4f0 (patch)
tree       39fd46a19381b0f358867c952e7a4b8ab43ab990
parent     Rename file for consistency (diff)
parent     Merge pull request #789 from python-discord/messages-in-past-n-days-endpoint (diff)
Merge branch 'main' into main
95 files changed, 3174 insertions, 2028 deletions
diff --git a/.coveragerc b/.coveragerc
index b4a9bbe4..039654db 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -2,7 +2,6 @@
 branch = true
 source =
     pydis_site
-    pydis_site/apps/admin
    pydis_site/apps/api
    pydis_site/apps/home
    pydis_site/apps/staff
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 00000000..b38df29f
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,6 @@
+version: 2
+updates:
+  - package-ecosystem: "pip"
+    directory: "/"
+    schedule:
+      interval: "daily"
diff --git a/.github/workflows/deploy.yaml b/.github/workflows/deploy.yaml
index 0e315327..57712dc7 100644
--- a/.github/workflows/deploy.yaml
+++ b/.github/workflows/deploy.yaml
@@ -29,7 +29,6 @@ jobs:
         uses: actions/checkout@v2
         with:
           repository: python-discord/kubernetes
-          token: ${{ secrets.REPO_TOKEN }}
 
       - name: Authenticate with Kubernetes
         uses: azure/k8s-set-context@v1
diff --git a/.github/workflows/lint-test.yaml b/.github/workflows/lint-test.yaml
index f97cd758..f82e1d4f 100644
--- a/.github/workflows/lint-test.yaml
+++ b/.github/workflows/lint-test.yaml
@@ -10,76 +10,26 @@ on:
 jobs:
   lint-test:
     runs-on: ubuntu-latest
-    env:
-      # Configure pip to cache dependencies and do a user install
-      PIP_NO_CACHE_DIR: false
-      PIP_USER: 1
-
-      # Make sure package manager does not use virtualenv
-      POETRY_VIRTUALENVS_CREATE: false
-
-      # Specify explicit paths for python dependencies and the pre-commit
-      # environment so we know which directories to cache
-      POETRY_CACHE_DIR: ${{ github.workspace }}/.cache/py-user-base
-      PYTHONUSERBASE: ${{ github.workspace }}/.cache/py-user-base
-      PRE_COMMIT_HOME: ${{ github.workspace }}/.cache/pre-commit-cache
 
     steps:
-      - name: Add custom PYTHONUSERBASE to PATH
-        run: echo '${{ env.PYTHONUSERBASE }}/bin/' >> $GITHUB_PATH
-
       - name: Checkout repository
         uses: actions/checkout@v2
 
-      - name: Setup python
-        id: python
-        uses: actions/setup-python@v2
+      - name: Install Python Dependencies
+        uses: HassanAbouelela/actions/setup-python@setup-python_v1.3.1
         with:
-          python-version: '3.9'
+          dev: true
+          python_version: '3.10'
 
       # Start the database early to give it a chance to get ready before
       # we start running tests.
       - name: Run database using docker-compose
         run: docker-compose run -d -p 7777:5432 --name pydis_web postgres
 
-      # This step caches our Python dependencies. To make sure we
-      # only restore a cache when the dependencies, the python version,
-      # the runner operating system, and the dependency location haven't
-      # changed, we create a cache key that is a composite of those states.
-      #
-      # Only when the context is exactly the same, we will restore the cache.
-      - name: Python Dependency Caching
-        uses: actions/cache@v2
-        id: python_cache
-        with:
-          path: ${{ env.PYTHONUSERBASE }}
-          key: "python-0-${{ runner.os }}-${{ env.PYTHONUSERBASE }}-\
-            ${{ steps.python.outputs.python-version }}-\
-            ${{ hashFiles('./pyproject.toml', './poetry.lock') }}"
-
-      # Install our dependencies if we did not restore a dependency cache
-      - name: Install dependencies using poetry
-        if: steps.python_cache.outputs.cache-hit != 'true'
-        run: |
-          pip install poetry
-          poetry install
-
-      # This step caches our pre-commit environment. To make sure we
-      # do create a new environment when our pre-commit setup changes,
-      # we create a cache key based on relevant factors.
-      - name: Pre-commit Environment Caching
-        uses: actions/cache@v2
-        with:
-          path: ${{ env.PRE_COMMIT_HOME }}
-          key: "precommit-0-${{ runner.os }}-${{ env.PRE_COMMIT_HOME }}-\
-            ${{ steps.python.outputs.python-version }}-\
-            ${{ hashFiles('./.pre-commit-config.yaml') }}"
-
       # We will not run `flake8` here, as we will use a separate flake8
-      # action. As pre-commit does not support user installs, we set
-      # PIP_USER=0 to not do a user install.
+      # action.
       - name: Run pre-commit hooks
-        run: export PIP_USER=0; SKIP=flake8 pre-commit run --all-files
+        run: SKIP=flake8 pre-commit run --all-files
 
       # Run flake8 and have it format the linting errors in the format of
       # the GitHub Workflow command to register error annotations. This
@@ -97,20 +47,22 @@ jobs:
       - name: Migrations and run tests with coverage.py
         run: |
           python manage.py makemigrations --check
-          python manage.py migrate
           coverage run manage.py test --no-input
           coverage report -m
+          coverage lcov
         env:
           CI: True
           DATABASE_URL: postgres://pysite:pysite@localhost:7777/pysite
           METRICITY_DB_URL: postgres://pysite:pysite@localhost:7777/metricity
+          PYTHONWARNINGS: error
 
       # This step will publish the coverage reports to coveralls.io and
-      # print a "job" link in the output of the GitHub Action
-      - name: Publish coverage report to coveralls.io
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        run: coveralls
+      # link the report to the commit
+      - name: Publish Coverage Report
+        uses: coverallsapp/[email protected]
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          path-to-lcov: ./coverage.lcov
 
       - name: Tear down docker-compose containers
         run: docker-compose stop
diff --git a/.gitignore b/.gitignore
--- a/.gitignore
+++ b/.gitignore
@@ -36,6 +36,7 @@ pip-log.txt
 pip-delete-this-directory.txt
 
 # Unit test / coverage reports
+*.lcov
 htmlcov/
 .tox/
 .coverage
@@ -132,3 +133,6 @@ log.*
 
 # Mac/OSX
 .DS_Store
+
+# Private keys
+*.pem
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 25781752..b2a03559 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -22,4 +22,3 @@ repos:
         entry: poetry run flake8
         language: system
         types: [python]
-        require_serial: true
diff --git a/Dockerfile b/Dockerfile
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,21 +1,14 @@
-FROM --platform=linux/amd64 python:3.9-slim-buster
+FROM ghcr.io/chrislovering/python-poetry-base:3.10-slim
 
 # Allow service to handle stops gracefully
 STOPSIGNAL SIGQUIT
 
-# Set pip to have cleaner logs and no saved cache
-ENV PIP_NO_CACHE_DIR=false \
-    POETRY_VIRTUALENVS_CREATE=false
-
-# Install poetry
-RUN pip install -U poetry
-
 # Copy the project files into working directory
 WORKDIR /app
 
 # Install project dependencies
 COPY pyproject.toml poetry.lock ./
-RUN poetry install --no-dev
+RUN poetry install --without dev
 
 # Set Git SHA environment variable
 ARG git_sha="development"
@@ -34,14 +27,14 @@ RUN \
     SECRET_KEY=dummy_value \
     DATABASE_URL=postgres://localhost \
     METRICITY_DB_URL=postgres://localhost \
-    python manage.py collectstatic --noinput --clear
+    poetry run python manage.py collectstatic --noinput --clear
 
 # Build static files if we are doing a static build
 ARG STATIC_BUILD=false
 RUN if [ $STATIC_BUILD = "TRUE" ] ; \
-    then SECRET_KEY=dummy_value python manage.py distill-local build --traceback --force ; \
+    then SECRET_KEY=dummy_value poetry run python manage.py distill-local build --traceback --force ; \
     fi
 
 # Run web server through custom manager
-ENTRYPOINT ["python", "manage.py"]
+ENTRYPOINT ["poetry", "run", "python", "manage.py"]
 CMD ["run"]
diff --git a/docker-compose.yml b/docker-compose.yml
index eb987624..61554ae4 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -8,12 +8,12 @@
 # and additionally use the Django development server which is
 # unsuitable for production.
-version: "3.6"
+version: "3.8"
 
 services:
   postgres:
-    image: postgres:13-alpine
+    image: postgres:15-alpine
     ports:
-      - "127.0.0.1:7777:5432"
+      - "7777:5432"
     environment:
       POSTGRES_DB: pysite
       POSTGRES_PASSWORD: pysite
@@ -38,7 +38,7 @@ services:
       - admin.web
       - staff.web
     ports:
-      - "127.0.0.1:8000:8000"
+      - "8000:8000"
     depends_on:
       postgres:
         condition: service_healthy
diff --git a/manage.py b/manage.py
--- a/manage.py
+++ b/manage.py
@@ -7,6 +7,7 @@ from pathlib import Path
 import django
 from django.contrib.auth import get_user_model
 from django.core.management import call_command, execute_from_command_line
+from django.test.utils import ignore_warnings
 
 DEFAULT_ENVS = {
     "DJANGO_SETTINGS_MODULE": "pydis_site.settings",
@@ -95,13 +96,15 @@ class SiteManager:
             name="pythondiscord.local:8000"
         )
 
-    def prepare_server(self) -> None:
-        """Perform preparation tasks before running the server."""
+    def prepare_environment(self) -> None:
+        """Perform common preparation tasks."""
         django.setup()
 
         print("Applying migrations.")
         call_command("migrate", verbosity=self.verbosity)
 
+    def prepare_server(self) -> None:
+        """Perform runserver-specific preparation tasks."""
         if self.debug:
             # In Production, collectstatic is run in the Docker image
             print("Collecting static files.")
@@ -121,6 +124,7 @@ class SiteManager:
 
         # Prevent preparing twice when in dev mode due to reloader
         if not self.debug or in_reloader:
+            self.prepare_environment()
             self.prepare_server()
 
         print("Starting server.")
@@ -148,6 +152,20 @@ class SiteManager:
         # Run gunicorn for the production server.
         gunicorn.app.wsgiapp.run()
 
+    def run_tests(self) -> None:
+        """Prepare and run the test suite."""
+        self.prepare_environment()
+        # The whitenoise package expects a staticfiles directory to exist during startup,
+        # else it raises a warning. This is fine under normal operation, but during
+        # tests, staticfiles are not, and do not need to be, generated.
+        # The following line suppresses the warning.
+        # Reference: https://github.com/evansd/whitenoise/issues/215
+        with ignore_warnings(
+            message=r"No directory at: .*staticfiles",
+            module="whitenoise.base",
+        ):
+            call_command(*sys.argv[1:])
+
 
 def clean_up_static_files(build_folder: Path) -> None:
     """Recursively loop over the build directory and fix links."""
@@ -168,12 +186,16 @@ def clean_up_static_files(build_folder: Path) -> None:
 def main() -> None:
     """Entry point for Django management script."""
     # Use the custom site manager for launching the server
-    if len(sys.argv) > 1 and sys.argv[1] == "run":
-        SiteManager(sys.argv).run_server()
+    if len(sys.argv) > 1 and sys.argv[1] in ("run", "test"):
+        manager = SiteManager(sys.argv)
+
+        if sys.argv[1] == "run":
+            manager.run_server()
+        elif sys.argv[1] == "test":
+            manager.run_tests()
 
     # Pass any others directly to standard management commands
     else:
-        _static_build = "distill" in sys.argv[1]
+        _static_build = len(sys.argv) > 1 and "distill" in sys.argv[1]
 
         if _static_build:
             # Build a static version of the site with no databases and API support
diff --git a/poetry.lock b/poetry.lock
index 3b26c275..c17c3286 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,31 +1,48 @@
 [[package]]
+name = "anyio"
+version = "3.6.2"
+description = "High level compatibility layer for multiple asynchronous event loop implementations"
+category = "main"
+optional = false
+python-versions = ">=3.6.2"
+
+[package.dependencies]
+idna = ">=2.8"
+sniffio = ">=1.1"
+
+[package.extras]
+doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
+test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"]
+trio = ["trio (>=0.16,<0.22)"]
+
+[[package]]
 name = "asgiref"
-version = "3.5.0"
+version = "3.5.2"
 description = "ASGI specs, helper code, and adapters"
 category = "main"
 optional = false
 python-versions = ">=3.7"
 
 [package.extras]
-tests = ["pytest", "pytest-asyncio", "mypy (>=0.800)"]
+tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"]
 
 [[package]]
 name = "attrs"
-version = "21.4.0"
+version = "22.1.0"
 description = "Classes Without Boilerplate"
 category = "dev"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = ">=3.5"
 
 [package.extras]
-dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
-docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
-tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
-tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"]
+dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"]
+docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
+tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"]
+tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
 
 [[package]]
 name = "bandit"
-version = "1.7.2" +version = "1.7.4" description = "Security oriented static analyser for python code." category = "dev" optional = false @@ -40,16 +57,27 @@ stevedore = ">=1.20.0" [package.extras] test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml"] toml = ["toml"] -yaml = ["pyyaml"] +yaml = ["PyYAML"] [[package]] name = "certifi" -version = "2021.10.8" +version = "2022.9.24" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false +python-versions = ">=3.6" + +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." +category = "main" +optional = false python-versions = "*" +[package.dependencies] +pycparser = "*" + [[package]] name = "cfgv" version = "3.3.1" @@ -60,53 +88,56 @@ python-versions = ">=3.6.1" [[package]] name = "charset-normalizer" -version = "2.0.11" +version = "2.1.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false -python-versions = ">=3.5.0" +python-versions = ">=3.6.0" [package.extras] -unicode_backport = ["unicodedata2"] +unicode-backport = ["unicodedata2"] [[package]] name = "colorama" -version = "0.4.4" +version = "0.4.6" description = "Cross-platform colored terminal text." category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" [[package]] name = "coverage" -version = "5.5" +version = "6.5.0" description = "Code coverage measurement for Python" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +python-versions = ">=3.7" [package.extras] -toml = ["toml"] +toml = ["tomli"] [[package]] -name = "coveralls" -version = "2.2.0" -description = "Show coverage stats online via coveralls.io" -category = "dev" +name = "cryptography" +version = "38.0.3" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +category = "main" optional = false -python-versions = ">= 3.5" +python-versions = ">=3.6" [package.dependencies] -coverage = ">=4.1,<6.0" -docopt = ">=0.6.1" -requests = ">=1.0.0" +cffi = ">=1.12" [package.extras] -yaml = ["PyYAML (>=3.10)"] +docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] +docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] +sdist = ["setuptools-rust (>=0.11.4)"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] [[package]] name = "distlib" -version = "0.3.4" +version = "0.3.6" description = "Distribution utilities" category = "dev" optional = false @@ -114,24 +145,24 @@ python-versions = "*" [[package]] name = "django" -version = "3.1.14" -description = "A high-level Python Web framework that encourages rapid development and clean, pragmatic design." +version = "4.1.3" +description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." 
category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" [package.dependencies] -asgiref = ">=3.2.10,<4" -pytz = "*" +asgiref = ">=3.5.2,<4" sqlparse = ">=0.2.2" +tzdata = {version = "*", markers = "sys_platform == \"win32\""} [package.extras] -argon2 = ["argon2-cffi (>=16.1.0)"] +argon2 = ["argon2-cffi (>=19.1.0)"] bcrypt = ["bcrypt"] [[package]] name = "django-distill" -version = "2.9.2" +version = "3.0.1" description = "Static site renderer and publisher for Django." category = "main" optional = false @@ -143,22 +174,27 @@ requests = "*" [[package]] name = "django-environ" -version = "0.4.5" -description = "Django-environ allows you to utilize 12factor inspired environment variables to configure your Django application." +version = "0.9.0" +description = "A package that allows you to utilize 12factor inspired environment variables to configure your Django application." category = "main" optional = false -python-versions = "*" +python-versions = ">=3.4,<4" + +[package.extras] +develop = ["coverage[toml] (>=5.0a4)", "furo (>=2021.8.17b43,<2021.9.0)", "pytest (>=4.6.11)", "sphinx (>=3.5.0)", "sphinx-notfound-page"] +docs = ["furo (>=2021.8.17b43,<2021.9.0)", "sphinx (>=3.5.0)", "sphinx-notfound-page"] +testing = ["coverage[toml] (>=5.0a4)", "pytest (>=4.6.11)"] [[package]] name = "django-filter" -version = "21.1" +version = "22.1" description = "Django-filter is a reusable Django application for allowing users to filter querysets dynamically." category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] -Django = ">=2.2" +Django = ">=3.2" [[package]] name = "django-prometheus" @@ -173,7 +209,7 @@ prometheus-client = ">=0.7" [[package]] name = "django-simple-bulma" -version = "2.4.0" +version = "2.5.0" description = "Django application to add the Bulma CSS framework and its extensions" category = "main" optional = false @@ -184,91 +220,83 @@ Django = ">=2.0" libsass = ">=0.19,<1.0" [package.extras] -dev = ["flake8 (>=3.8,<4.0)", "flake8-annotations (>=2.0,<3.0)", "flake8-bugbear (>=20.1,<21.0)", "flake8-docstrings (>=1.4,<2.0)", "flake8-import-order (>=0.18,<1.0)", "flake8-tidy-imports (>=4.0,<5.0)", "flake8-todo (>=0.7,<1.0)", "flake8-string-format (>=0.3,<1.0)", "pdoc (>=0.3,<1.0)", "pep8-naming (>=0.9,<1.0)", "pre-commit (>=2.1,<3.0)", "PyGithub (>=1.43,<2.0)", "wheel (>=0.33,<1.0)"] +dev = ["PyGithub (>=1.43,<2.0)", "flake8 (>=3.8,<4.0)", "flake8-annotations (>=2.0,<3.0)", "flake8-bugbear (>=20.1,<21.0)", "flake8-docstrings (>=1.4,<2.0)", "flake8-import-order (>=0.18,<1.0)", "flake8-string-format (>=0.3,<1.0)", "flake8-tidy-imports (>=4.0,<5.0)", "flake8-todo (>=0.7,<1.0)", "pdoc (>=0.3,<1.0)", "pep8-naming (>=0.9,<1.0)", "pre-commit (>=2.1,<3.0)", "wheel (>=0.33,<1.0)"] [[package]] name = "djangorestframework" -version = "3.12.4" +version = "3.14.0" description = "Web APIs for Django, made easy." category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [package.dependencies] -django = ">=2.2" - -[[package]] -name = "docopt" -version = "0.6.2" -description = "Pythonic argument parser, that will make you smile" -category = "dev" -optional = false -python-versions = "*" +django = ">=3.0" +pytz = "*" [[package]] name = "filelock" -version = "3.4.2" +version = "3.8.0" description = "A platform independent file lock." 
category = "dev" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"] -testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"] +docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] +testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] [[package]] name = "flake8" -version = "3.9.2" +version = "5.0.4" description = "the modular source code checker: pep8 pyflakes and co" category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.6.1" [package.dependencies] -mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.7.0,<2.8.0" -pyflakes = ">=2.3.0,<2.4.0" +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.9.0,<2.10.0" +pyflakes = ">=2.5.0,<2.6.0" [[package]] name = "flake8-annotations" -version = "2.7.0" +version = "2.9.1" description = "Flake8 Type Annotation Checks" category = "dev" optional = false -python-versions = ">=3.6.2,<4.0.0" +python-versions = ">=3.7,<4.0" [package.dependencies] -flake8 = ">=3.7,<5.0" +attrs = ">=21.4" +flake8 = ">=3.7" [[package]] name = "flake8-bandit" -version = "2.1.2" +version = "4.1.1" description = "Automated security testing with bandit and flake8." category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.6" [package.dependencies] -bandit = "*" -flake8 = "*" -flake8-polyfill = "*" -pycodestyle = "*" +bandit = ">=1.7.3" +flake8 = ">=5.0.0" [[package]] name = "flake8-bugbear" -version = "20.11.1" +version = "22.10.27" description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] attrs = ">=19.2.0" flake8 = ">=3.0.0" [package.extras] -dev = ["coverage", "black", "hypothesis", "hypothesmith"] +dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "tox"] [[package]] name = "flake8-docstrings" @@ -292,17 +320,7 @@ python-versions = "*" [package.dependencies] pycodestyle = "*" - -[[package]] -name = "flake8-polyfill" -version = "1.0.2" -description = "Polyfill package for Flake8 plugins" -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -flake8 = "*" +setuptools = "*" [[package]] name = "flake8-string-format" @@ -317,7 +335,7 @@ flake8 = "*" [[package]] name = "flake8-tidy-imports" -version = "4.6.0" +version = "4.8.0" description = "A flake8 plugin that helps you write tidier imports." 
category = "dev" optional = false @@ -349,8 +367,8 @@ python-versions = ">=3.6" smmap = ">=3.0.1,<6" [[package]] -name = "gitpython" -version = "3.1.26" +name = "GitPython" +version = "3.1.29" description = "GitPython is a python library used to interact with Git repositories" category = "dev" optional = false @@ -361,21 +379,70 @@ gitdb = ">=4.0.1,<5" [[package]] name = "gunicorn" -version = "20.0.4" +version = "20.1.0" description = "WSGI HTTP Server for UNIX" category = "main" optional = false -python-versions = ">=3.4" +python-versions = ">=3.5" + +[package.dependencies] +setuptools = ">=3.0" [package.extras] -eventlet = ["eventlet (>=0.9.7)"] -gevent = ["gevent (>=0.13)"] +eventlet = ["eventlet (>=0.24.1)"] +gevent = ["gevent (>=1.4.0)"] setproctitle = ["setproctitle"] tornado = ["tornado (>=0.2)"] [[package]] +name = "h11" +version = "0.12.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "httpcore" +version = "0.15.0" +description = "A minimal low-level HTTP client." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +anyio = ">=3.0.0,<4.0.0" +certifi = "*" +h11 = ">=0.11,<0.13" +sniffio = ">=1.0.0,<2.0.0" + +[package.extras] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] + +[[package]] +name = "httpx" +version = "0.23.0" +description = "The next generation HTTP client." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +certifi = "*" +httpcore = ">=0.15.0,<0.16.0" +rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] + +[[package]] name = "identify" -version = "2.4.6" +version = "2.5.8" description = "File identification library for Python" category = "dev" optional = false @@ -386,29 +453,13 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.3" +version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" category = "main" optional = false python-versions = ">=3.5" [[package]] -name = "importlib-metadata" -version = "4.10.1" -description = "Read metadata from Python packages" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -perf = ["ipython"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] - -[[package]] name = "libsass" version = "0.21.0" description = "Sass for Python: A straightforward binding of libsass for Python." @@ -420,26 +471,23 @@ python-versions = "*" six = "*" [[package]] -name = "markdown" -version = "3.3.6" +name = "Markdown" +version = "3.4.1" description = "Python implementation of Markdown." 
category = "main" optional = false -python-versions = ">=3.6" - -[package.dependencies] -importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} +python-versions = ">=3.7" [package.extras] testing = ["coverage", "pyyaml"] [[package]] name = "mccabe" -version = "0.6.1" +version = "0.7.0" description = "McCabe checker, plugin for flake8" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.6" [[package]] name = "mslex" @@ -451,15 +499,18 @@ python-versions = ">=3.5" [[package]] name = "nodeenv" -version = "1.6.0" +version = "1.7.0" description = "Node.js virtual environment builder" category = "dev" optional = false -python-versions = "*" +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" + +[package.dependencies] +setuptools = "*" [[package]] name = "pbr" -version = "5.8.0" +version = "5.11.0" description = "Python Build Reasonableness" category = "dev" optional = false @@ -467,35 +518,34 @@ python-versions = ">=2.6" [[package]] name = "pep8-naming" -version = "0.12.1" +version = "0.13.2" description = "Check PEP-8 naming conventions, plugin for flake8" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.7" [package.dependencies] flake8 = ">=3.9.1" -flake8-polyfill = ">=1.0.2,<2" [[package]] name = "platformdirs" -version = "2.4.1" +version = "2.5.2" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false python-versions = ">=3.7" [package.extras] -docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] +docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"] test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] [[package]] name = "pre-commit" -version = "2.17.0" +version = "2.20.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." category = "dev" optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.7" [package.dependencies] cfgv = ">=2.0.0" @@ -507,7 +557,7 @@ virtualenv = ">=20.0.8" [[package]] name = "prometheus-client" -version = "0.13.1" +version = "0.15.0" description = "Python client for the Prometheus monitoring system." category = "main" optional = false @@ -518,29 +568,37 @@ twisted = ["twisted"] [[package]] name = "psutil" -version = "5.9.0" +version = "5.9.3" description = "Cross-platform lib for process and system monitoring in Python." 
category = "dev" optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.extras] -test = ["ipaddress", "mock", "unittest2", "enum34", "pywin32", "wmi"] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] [[package]] name = "psycopg2-binary" -version = "2.8.6" +version = "2.9.5" description = "psycopg2 - Python-PostgreSQL Database Adapter" category = "main" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" +python-versions = ">=3.6" [[package]] name = "pycodestyle" -version = "2.7.0" +version = "2.9.1" description = "Python style guide checker" category = "dev" optional = false +python-versions = ">=3.6" + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +category = "main" +optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] @@ -559,27 +617,55 @@ toml = ["toml"] [[package]] name = "pyfakefs" -version = "4.5.4" +version = "5.0.0" description = "pyfakefs implements a fake file system that mocks the Python file system modules." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "pyflakes" -version = "2.3.1" +version = "2.5.0" description = "passive checker of Python programs" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" + +[[package]] +name = "PyJWT" +version = "2.6.0" +description = "JSON Web Token implementation in Python" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + +[[package]] +name = "pymdown-extensions" +version = "9.8" +description = "Extension pack for Python Markdown." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +markdown = ">=3.2" [[package]] name = "python-dotenv" -version = "0.17.1" +version = "0.21.0" description = "Read key-value pairs from a .env file and set them as environment variables" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.7" [package.extras] cli = ["click (>=5.0)"] @@ -597,45 +683,59 @@ PyYAML = "*" [package.extras] docs = ["sphinx"] -test = ["pytest", "toml", "pyaml"] +test = ["pyaml", "pytest", "toml"] [[package]] name = "pytz" -version = "2021.3" +version = "2022.5" description = "World timezone definitions, modern and historical" category = "main" optional = false python-versions = "*" [[package]] -name = "pyyaml" -version = "5.4.1" +name = "PyYAML" +version = "6.0" description = "YAML parser and emitter for Python" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.6" [[package]] name = "requests" -version = "2.27.1" +version = "2.28.1" description = "Python HTTP for Humans." 
category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.7, <4" [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} -idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} +charset-normalizer = ">=2,<3" +idna = ">=2.5,<4" urllib3 = ">=1.21.1,<1.27" [package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] -use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rfc3986" +version = "1.5.0" +description = "Validating URI References per RFC 3986" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +idna = {version = "*", optional = true, markers = "extra == \"idna2008\""} + +[package.extras] +idna2008 = ["idna"] [[package]] name = "sentry-sdk" -version = "0.20.3" +version = "1.11.0" description = "Python client for Sentry (https://sentry.io)" category = "main" optional = false @@ -643,7 +743,7 @@ python-versions = "*" [package.dependencies] certifi = "*" -urllib3 = ">=1.10.0" +urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} [package.extras] aiohttp = ["aiohttp (>=3.5)"] @@ -653,15 +753,33 @@ celery = ["celery (>=3)"] chalice = ["chalice (>=1.16.0)"] django = ["django (>=1.8)"] falcon = ["falcon (>=1.4)"] -flask = ["flask (>=0.11)", "blinker (>=1.1)"] -pure_eval = ["pure-eval", "executing", "asttokens"] +fastapi = ["fastapi (>=0.79.0)"] +flask = ["blinker (>=1.1)", "flask (>=0.11)"] +httpx = ["httpx (>=0.16.0)"] +pure-eval = ["asttokens", "executing", "pure-eval"] +pymongo = ["pymongo (>=3.1)"] pyspark = ["pyspark (>=2.4.4)"] +quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] rq = ["rq (>=0.6)"] sanic = ["sanic (>=0.8)"] sqlalchemy = ["sqlalchemy (>=1.2)"] +starlette = ["starlette (>=0.19.1)"] tornado = ["tornado (>=5)"] [[package]] +name = "setuptools" +version = "65.5.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" @@ -678,6 +796,14 @@ optional = false python-versions = ">=3.6" [[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] name = "snowballstemmer" 
version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." @@ -687,7 +813,7 @@ python-versions = "*" [[package]] name = "sqlparse" -version = "0.4.2" +version = "0.4.3" description = "A non-validating SQL parser." category = "main" optional = false @@ -695,27 +821,28 @@ python-versions = ">=3.5" [[package]] name = "stevedore" -version = "3.5.0" +version = "4.1.0" description = "Manage dynamic plugins for Python applications" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" [package.dependencies] pbr = ">=2.0.0,<2.1.0 || >2.1.0" [[package]] name = "taskipy" -version = "1.7.0" +version = "1.10.3" description = "tasks runner for python projects" category = "dev" optional = false python-versions = ">=3.6,<4.0" [package.dependencies] -mslex = ">=0.3.0,<0.4.0" +colorama = ">=0.4.4,<0.5.0" +mslex = {version = ">=0.3.0,<0.4.0", markers = "sys_platform == \"win32\""} psutil = ">=5.7.2,<6.0.0" -toml = ">=0.10.0,<0.11.0" +tomli = {version = ">=2.0.1,<3.0.0", markers = "python_version >= \"3.7\" and python_version < \"4.0\""} [[package]] name = "toml" @@ -726,203 +853,296 @@ optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "tzdata" +version = "2022.5" +description = "Provider of IANA time zone data" +category = "main" +optional = false +python-versions = ">=2" + +[[package]] name = "urllib3" -version = "1.26.8" +version = "1.26.12" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" [package.extras] -brotli = ["brotlipy (>=0.6.0)"] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "virtualenv" -version = "20.13.0" +version = "20.16.6" description = "Virtual Python Environment builder" category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.6" [package.dependencies] -distlib = ">=0.3.1,<1" -filelock = ">=3.2,<4" -platformdirs = ">=2,<3" -six = ">=1.9.0,<2" +distlib = ">=0.3.6,<1" +filelock = ">=3.4.1,<4" +platformdirs = ">=2.4,<3" [package.extras] -docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"] -testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"] +docs = ["proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-argparse (>=0.3.2)", "sphinx-rtd-theme (>=1)", "towncrier (>=22.8)"] +testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"] [[package]] name = 
"whitenoise" -version = "5.3.0" +version = "6.2.0" description = "Radically simplified static file serving for WSGI applications" category = "main" optional = false -python-versions = ">=3.5, <4" - -[package.extras] -brotli = ["brotli"] - -[[package]] -name = "zipp" -version = "3.7.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" -optional = false python-versions = ">=3.7" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] +brotli = ["Brotli"] [metadata] lock-version = "1.1" -python-versions = "3.9.*" -content-hash = "fc9b20c33c65a289122d710844285ac20d7598e65c7f8237f8903509f5b2dea4" +python-versions = "3.10.*" +content-hash = "bd31b8df83e8098e6a18a2cddc41ef40215cc0e20269900bedd59330a7363951" [metadata.files] +anyio = [ + {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"}, + {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"}, +] asgiref = [ - {file = "asgiref-3.5.0-py3-none-any.whl", hash = "sha256:88d59c13d634dcffe0510be048210188edd79aeccb6a6c9028cdad6f31d730a9"}, - {file = "asgiref-3.5.0.tar.gz", hash = "sha256:2f8abc20f7248433085eda803936d98992f1343ddb022065779f37c5da0181d0"}, + {file = "asgiref-3.5.2-py3-none-any.whl", hash = "sha256:1d2880b792ae8757289136f1db2b7b99100ce959b2aa57fd69dab783d05afac4"}, + {file = "asgiref-3.5.2.tar.gz", hash = "sha256:4a29362a6acebe09bf1d6640db38c1dc3d9217c68e6f9f6204d72667fc19a424"}, ] attrs = [ - {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, - {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, + {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, + {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, ] bandit = [ - {file = "bandit-1.7.2-py3-none-any.whl", hash = "sha256:e20402cadfd126d85b68ed4c8862959663c8c372dbbb1fca8f8e2c9f55a067ec"}, - {file = "bandit-1.7.2.tar.gz", hash = "sha256:6d11adea0214a43813887bfe71a377b5a9955e4c826c8ffd341b494e3ab25260"}, + {file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"}, + {file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"}, ] certifi = [ - {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, - {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, + {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, + {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, +] +cffi = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = 
"sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = 
"cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, ] cfgv = [ {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, ] charset-normalizer = [ - {file = "charset-normalizer-2.0.11.tar.gz", hash = "sha256:98398a9d69ee80548c762ba991a4728bfc3836768ed226b3945908d1a688371c"}, - {file = "charset_normalizer-2.0.11-py3-none-any.whl", hash = "sha256:2842d8f5e82a1f6aa437380934d5e1cd4fcf2003b06fed6940769c164a480a45"}, + {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, + {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, ] colorama = [ - {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, - {file = "colorama-0.4.4.tar.gz", hash = 
"sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] coverage = [ - {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, - {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, - {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, - {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, - {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, - {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, - {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, - {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, - {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, - {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, - {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, - {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, - {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, - {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, - {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, - {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, - {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, - {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, - {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = 
"sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, - {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, - {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, - {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, - {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, - {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, - {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, - {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, - {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, - {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, - {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, - {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, - {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, - {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, - {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, - {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, - {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, - {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, - {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, - {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, - {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, - {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, - {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, - {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, - {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, - {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, 
- {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, - {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, - {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, - {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, -] -coveralls = [ - {file = "coveralls-2.2.0-py2.py3-none-any.whl", hash = "sha256:2301a19500b06649d2ec4f2858f9c69638d7699a4c63027c5d53daba666147cc"}, - {file = "coveralls-2.2.0.tar.gz", hash = "sha256:b990ba1f7bc4288e63340be0433698c1efe8217f78c689d254c2540af3d38617"}, + {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, + {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, + {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, + {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, + {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, + {file = 
"coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, + {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, + {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, + {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, + {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, + {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, + {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, + {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, + {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, + {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, + {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, + {file = 
"coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, + {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, + {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, + {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, + {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, +] +cryptography = [ + {file = "cryptography-38.0.3-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320"}, + {file = "cryptography-38.0.3-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722"}, + {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f"}, + {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828"}, + {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959"}, + {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2"}, + {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c"}, + {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0"}, + {file = "cryptography-38.0.3-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748"}, + {file = "cryptography-38.0.3-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146"}, + {file = "cryptography-38.0.3-cp36-abi3-win32.whl", hash = "sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0"}, + {file = "cryptography-38.0.3-cp36-abi3-win_amd64.whl", hash = "sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220"}, + {file = "cryptography-38.0.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd"}, + {file = "cryptography-38.0.3-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55"}, + {file = "cryptography-38.0.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b"}, + {file = "cryptography-38.0.3-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36"}, + {file = "cryptography-38.0.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d"}, + {file = "cryptography-38.0.3-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7"}, + {file = "cryptography-38.0.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249"}, + {file = "cryptography-38.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50"}, + {file = "cryptography-38.0.3-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0"}, + {file = "cryptography-38.0.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8"}, + {file = "cryptography-38.0.3-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436"}, + {file = "cryptography-38.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548"}, + {file = "cryptography-38.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a"}, + {file = "cryptography-38.0.3.tar.gz", hash = "sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd"}, ] distlib = [ - {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"}, - {file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"}, + {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"}, + {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"}, ] django = [ - {file = "Django-3.1.14-py3-none-any.whl", hash = "sha256:0fabc786489af16ad87a8c170ba9d42bfd23f7b699bd5ef05675864e8d012859"}, - {file = "Django-3.1.14.tar.gz", hash = "sha256:72a4a5a136a214c39cf016ccdd6b69e2aa08c7479c66d93f3a9b5e4bb9d8a347"}, + {file = "Django-4.1.3-py3-none-any.whl", hash = "sha256:6b1de6886cae14c7c44d188f580f8ba8da05750f544c80ae5ad43375ab293cd5"}, + {file = "Django-4.1.3.tar.gz", hash = "sha256:678bbfc8604eb246ed54e2063f0765f13b321a50526bdc8cb1f943eda7fa31f1"}, ] django-distill = [ - {file = "django-distill-2.9.2.tar.gz", hash = "sha256:91d5f45c2ff78b8efd4805ff5ec17df4ba815bbf51ca12a2cea65727d2f1d42e"}, + {file = "django-distill-3.0.1.tar.gz", hash = "sha256:8bbac5e45d2afc61cc718d587c6026267c985305f5e599465f2ebc4b0cba9ebf"}, ] django-environ = [ - {file = "django-environ-0.4.5.tar.gz", hash = "sha256:6c9d87660142608f63ec7d5ce5564c49b603ea8ff25da595fd6098f6dc82afde"}, - {file = 
"django_environ-0.4.5-py2.py3-none-any.whl", hash = "sha256:c57b3c11ec1f319d9474e3e5a79134f40174b17c7cc024bbb2fad84646b120c4"}, + {file = "django-environ-0.9.0.tar.gz", hash = "sha256:bff5381533056328c9ac02f71790bd5bf1cea81b1beeb648f28b81c9e83e0a21"}, + {file = "django_environ-0.9.0-py2.py3-none-any.whl", hash = "sha256:f21a5ef8cc603da1870bbf9a09b7e5577ab5f6da451b843dbcc721a7bca6b3d9"}, ] django-filter = [ - {file = "django-filter-21.1.tar.gz", hash = "sha256:632a251fa8f1aadb4b8cceff932bb52fe2f826dd7dfe7f3eac40e5c463d6836e"}, - {file = "django_filter-21.1-py3-none-any.whl", hash = "sha256:f4a6737a30104c98d2e2a5fb93043f36dd7978e0c7ddc92f5998e85433ea5063"}, + {file = "django-filter-22.1.tar.gz", hash = "sha256:ed473b76e84f7e83b2511bb2050c3efb36d135207d0128dfe3ae4b36e3594ba5"}, + {file = "django_filter-22.1-py3-none-any.whl", hash = "sha256:ed429e34760127e3520a67f415bec4c905d4649fbe45d0d6da37e6ff5e0287eb"}, ] django-prometheus = [ {file = "django-prometheus-2.2.0.tar.gz", hash = "sha256:240378a1307c408bd5fc85614a3a57f1ce633d4a222c9e291e2bbf325173b801"}, {file = "django_prometheus-2.2.0-py2.py3-none-any.whl", hash = "sha256:e6616770d8820b8834762764bf1b76ec08e1b98e72a6f359d488a2e15fe3537c"}, ] django-simple-bulma = [ - {file = "django-simple-bulma-2.4.0.tar.gz", hash = "sha256:99a15261b0c61062a128af3c6a45da9c066d6a4a548c9063464e0fb7a5438aa1"}, - {file = "django_simple_bulma-2.4.0-py3-none-any.whl", hash = "sha256:95d5e26bebbf6a0184e33df844a0ff534bdfd91431e413d1a844d47a75c55fff"}, + {file = "django-simple-bulma-2.5.0.tar.gz", hash = "sha256:d4e9f6ea857954a9bdc7a4f16453834a578cd04da5c3a96b2a3241bfcfabead2"}, + {file = "django_simple_bulma-2.5.0-py3-none-any.whl", hash = "sha256:c413b031494d80f674068a782440c6ec5f20a12501ee7464d6f781a5777fa89c"}, ] djangorestframework = [ - {file = "djangorestframework-3.12.4-py3-none-any.whl", hash = "sha256:6d1d59f623a5ad0509fe0d6bfe93cbdfe17b8116ebc8eda86d45f6e16e819aaf"}, - {file = "djangorestframework-3.12.4.tar.gz", hash = "sha256:f747949a8ddac876e879190df194b925c177cdeb725a099db1460872f7c0a7f2"}, -] -docopt = [ - {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, + {file = "djangorestframework-3.14.0-py3-none-any.whl", hash = "sha256:eb63f58c9f218e1a7d064d17a70751f528ed4e1d35547fdade9aaf4cd103fd08"}, + {file = "djangorestframework-3.14.0.tar.gz", hash = "sha256:579a333e6256b09489cbe0a067e66abe55c6595d8926be6b99423786334350c8"}, ] filelock = [ - {file = "filelock-3.4.2-py3-none-any.whl", hash = "sha256:cf0fc6a2f8d26bd900f19bf33915ca70ba4dd8c56903eeb14e1e7a2fd7590146"}, - {file = "filelock-3.4.2.tar.gz", hash = "sha256:38b4f4c989f9d06d44524df1b24bd19e167d851f19b50bf3e3559952dddc5b80"}, + {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, + {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, ] flake8 = [ - {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, - {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, + {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, + {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, ] flake8-annotations = [ - {file = "flake8-annotations-2.7.0.tar.gz", hash = 
"sha256:52e53c05b0c06cac1c2dec192ea2c36e85081238add3bd99421d56f574b9479b"}, - {file = "flake8_annotations-2.7.0-py3-none-any.whl", hash = "sha256:3edfbbfb58e404868834fe6ec3eaf49c139f64f0701259f707d043185545151e"}, + {file = "flake8-annotations-2.9.1.tar.gz", hash = "sha256:11f09efb99ae63c8f9d6b492b75fe147fbc323179fddfe00b2e56eefeca42f57"}, + {file = "flake8_annotations-2.9.1-py3-none-any.whl", hash = "sha256:a4385158a7a9fc8af1d8820a2f4c8d03387997006a83f5f8bfe5bc6085bdf88a"}, ] flake8-bandit = [ - {file = "flake8_bandit-2.1.2.tar.gz", hash = "sha256:687fc8da2e4a239b206af2e54a90093572a60d0954f3054e23690739b0b0de3b"}, + {file = "flake8_bandit-4.1.1-py3-none-any.whl", hash = "sha256:4c8a53eb48f23d4ef1e59293657181a3c989d0077c9952717e98a0eace43e06d"}, + {file = "flake8_bandit-4.1.1.tar.gz", hash = "sha256:068e09287189cbfd7f986e92605adea2067630b75380c6b5733dab7d87f9a84e"}, ] flake8-bugbear = [ - {file = "flake8-bugbear-20.11.1.tar.gz", hash = "sha256:528020129fea2dea33a466b9d64ab650aa3e5f9ffc788b70ea4bc6cf18283538"}, - {file = "flake8_bugbear-20.11.1-py36.py37.py38-none-any.whl", hash = "sha256:f35b8135ece7a014bc0aee5b5d485334ac30a6da48494998cc1fabf7ec70d703"}, + {file = "flake8-bugbear-22.10.27.tar.gz", hash = "sha256:a6708608965c9e0de5fff13904fed82e0ba21ac929fe4896459226a797e11cd5"}, + {file = "flake8_bugbear-22.10.27-py3-none-any.whl", hash = "sha256:6ad0ab754507319060695e2f2be80e6d8977cfcea082293089a9226276bd825d"}, ] flake8-docstrings = [ {file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"}, @@ -932,17 +1152,13 @@ flake8-import-order = [ {file = "flake8-import-order-0.18.1.tar.gz", hash = "sha256:a28dc39545ea4606c1ac3c24e9d05c849c6e5444a50fb7e9cdd430fc94de6e92"}, {file = "flake8_import_order-0.18.1-py2.py3-none-any.whl", hash = "sha256:90a80e46886259b9c396b578d75c749801a41ee969a235e163cfe1be7afd2543"}, ] -flake8-polyfill = [ - {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"}, - {file = "flake8_polyfill-1.0.2-py2.py3-none-any.whl", hash = "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9"}, -] flake8-string-format = [ {file = "flake8-string-format-0.3.0.tar.gz", hash = "sha256:65f3da786a1461ef77fca3780b314edb2853c377f2e35069723348c8917deaa2"}, {file = "flake8_string_format-0.3.0-py2.py3-none-any.whl", hash = "sha256:812ff431f10576a74c89be4e85b8e075a705be39bc40c4b4278b5b13e2afa9af"}, ] flake8-tidy-imports = [ - {file = "flake8-tidy-imports-4.6.0.tar.gz", hash = "sha256:3e193d8c4bb4492408a90e956d888b27eed14c698387c9b38230da3dad78058f"}, - {file = "flake8_tidy_imports-4.6.0-py3-none-any.whl", hash = "sha256:6ae9f55d628156e19d19f4c359dd5d3e95431a9bd514f5e2748c53c1398c66b2"}, + {file = "flake8-tidy-imports-4.8.0.tar.gz", hash = "sha256:df44f9c841b5dfb3a7a1f0da8546b319d772c2a816a1afefcce43e167a593d83"}, + {file = "flake8_tidy_imports-4.8.0-py3-none-any.whl", hash = "sha256:25bd9799358edefa0e010ce2c587b093c3aba942e96aeaa99b6d0500ae1bf09c"}, ] flake8-todo = [ {file = "flake8-todo-0.7.tar.gz", hash = "sha256:6e4c5491ff838c06fe5a771b0e95ee15fc005ca57196011011280fc834a85915"}, @@ -951,25 +1167,33 @@ gitdb = [ {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, ] -gitpython = [ - {file = "GitPython-3.1.26-py3-none-any.whl", hash = 
"sha256:26ac35c212d1f7b16036361ca5cff3ec66e11753a0d677fb6c48fa4e1a9dd8d6"}, - {file = "GitPython-3.1.26.tar.gz", hash = "sha256:fc8868f63a2e6d268fb25f481995ba185a85a66fcad126f039323ff6635669ee"}, +GitPython = [ + {file = "GitPython-3.1.29-py3-none-any.whl", hash = "sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f"}, + {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"}, ] gunicorn = [ - {file = "gunicorn-20.0.4-py2.py3-none-any.whl", hash = "sha256:cd4a810dd51bf497552cf3f863b575dabd73d6ad6a91075b65936b151cbf4f9c"}, - {file = "gunicorn-20.0.4.tar.gz", hash = "sha256:1904bb2b8a43658807108d59c3f3d56c2b6121a701161de0ddf9ad140073c626"}, + {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"}, + {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"}, +] +h11 = [ + {file = "h11-0.12.0-py3-none-any.whl", hash = "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6"}, + {file = "h11-0.12.0.tar.gz", hash = "sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"}, +] +httpcore = [ + {file = "httpcore-0.15.0-py3-none-any.whl", hash = "sha256:1105b8b73c025f23ff7c36468e4432226cbb959176eab66864b8e31c4ee27fa6"}, + {file = "httpcore-0.15.0.tar.gz", hash = "sha256:18b68ab86a3ccf3e7dc0f43598eaddcf472b602aba29f9aa6ab85fe2ada3980b"}, +] +httpx = [ + {file = "httpx-0.23.0-py3-none-any.whl", hash = "sha256:42974f577483e1e932c3cdc3cd2303e883cbfba17fe228b0f63589764d7b9c4b"}, + {file = "httpx-0.23.0.tar.gz", hash = "sha256:f28eac771ec9eb4866d3fb4ab65abd42d38c424739e80c08d8d20570de60b0ef"}, ] identify = [ - {file = "identify-2.4.6-py2.py3-none-any.whl", hash = "sha256:cf06b1639e0dca0c184b1504d8b73448c99a68e004a80524c7923b95f7b6837c"}, - {file = "identify-2.4.6.tar.gz", hash = "sha256:233679e3f61a02015d4293dbccf16aa0e4996f868bd114688b8c124f18826706"}, + {file = "identify-2.5.8-py2.py3-none-any.whl", hash = "sha256:48b7925fe122720088aeb7a6c34f17b27e706b72c61070f27fe3789094233440"}, + {file = "identify-2.5.8.tar.gz", hash = "sha256:7a214a10313b9489a0d61467db2856ae8d0b8306fc923e03a9effa53d8aedc58"}, ] idna = [ - {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, - {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, -] -importlib-metadata = [ - {file = "importlib_metadata-4.10.1-py3-none-any.whl", hash = "sha256:899e2a40a8c4a1aec681feef45733de8a6c58f3f6a0dbed2eb6574b4387a77b6"}, - {file = "importlib_metadata-4.10.1.tar.gz", hash = "sha256:951f0d8a5b7260e9db5e41d429285b5f451e928479f19d80818878527d36e95e"}, + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, ] libsass = [ {file = "libsass-0.21.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:06c8776417fe930714bdc930a3d7e795ae3d72be6ac883ff72a1b8f7c49e5ffb"}, @@ -983,179 +1207,250 @@ libsass = [ {file = "libsass-0.21.0-cp38-abi3-macosx_12_0_arm64.whl", hash = "sha256:c9ec490609752c1d81ff6290da33485aa7cb6d7365ac665b74464c1b7d97f7da"}, {file = "libsass-0.21.0.tar.gz", hash = "sha256:d5ba529d9ce668be9380563279f3ffe988f27bc5b299c5a28453df2e0b0fbaf2"}, ] -markdown = [ - {file = "Markdown-3.3.6-py3-none-any.whl", hash 
= "sha256:9923332318f843411e9932237530df53162e29dc7a4e2b91e35764583c46c9a3"}, - {file = "Markdown-3.3.6.tar.gz", hash = "sha256:76df8ae32294ec39dcf89340382882dfa12975f87f45c3ed1ecdb1e8cefc7006"}, +Markdown = [ + {file = "Markdown-3.4.1-py3-none-any.whl", hash = "sha256:08fb8465cffd03d10b9dd34a5c3fea908e20391a2a90b88d66362cb05beed186"}, + {file = "Markdown-3.4.1.tar.gz", hash = "sha256:3b809086bb6efad416156e00a0da66fe47618a5d6918dd688f53f40c8e4cfeff"}, ] mccabe = [ - {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, - {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] mslex = [ {file = "mslex-0.3.0-py2.py3-none-any.whl", hash = "sha256:380cb14abf8fabf40e56df5c8b21a6d533dc5cbdcfe42406bbf08dda8f42e42a"}, {file = "mslex-0.3.0.tar.gz", hash = "sha256:4a1ac3f25025cad78ad2fe499dd16d42759f7a3801645399cce5c404415daa97"}, ] nodeenv = [ - {file = "nodeenv-1.6.0-py2.py3-none-any.whl", hash = "sha256:621e6b7076565ddcacd2db0294c0381e01fd28945ab36bcf00f41c5daf63bef7"}, - {file = "nodeenv-1.6.0.tar.gz", hash = "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b"}, + {file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"}, + {file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"}, ] pbr = [ - {file = "pbr-5.8.0-py2.py3-none-any.whl", hash = "sha256:176e8560eaf61e127817ef93d8a844803abb27a4d4637f0ff3bb783129be2e0a"}, - {file = "pbr-5.8.0.tar.gz", hash = "sha256:672d8ebee84921862110f23fcec2acea191ef58543d34dfe9ef3d9f13c31cddf"}, + {file = "pbr-5.11.0-py2.py3-none-any.whl", hash = "sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a"}, + {file = "pbr-5.11.0.tar.gz", hash = "sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe"}, ] pep8-naming = [ - {file = "pep8-naming-0.12.1.tar.gz", hash = "sha256:bb2455947757d162aa4cad55dba4ce029005cd1692f2899a21d51d8630ca7841"}, - {file = "pep8_naming-0.12.1-py2.py3-none-any.whl", hash = "sha256:4a8daeaeb33cfcde779309fc0c9c0a68a3bbe2ad8a8308b763c5068f86eb9f37"}, + {file = "pep8-naming-0.13.2.tar.gz", hash = "sha256:93eef62f525fd12a6f8c98f4dcc17fa70baae2f37fa1f73bec00e3e44392fa48"}, + {file = "pep8_naming-0.13.2-py3-none-any.whl", hash = "sha256:59e29e55c478db69cffbe14ab24b5bd2cd615c0413edf790d47d3fb7ba9a4e23"}, ] platformdirs = [ - {file = "platformdirs-2.4.1-py3-none-any.whl", hash = "sha256:1d7385c7db91728b83efd0ca99a5afb296cab9d0ed8313a45ed8ba17967ecfca"}, - {file = "platformdirs-2.4.1.tar.gz", hash = "sha256:440633ddfebcc36264232365d7840a970e75e1018d15b4327d11f91909045fda"}, + {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, + {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, ] pre-commit = [ - {file = "pre_commit-2.17.0-py2.py3-none-any.whl", hash = "sha256:725fa7459782d7bec5ead072810e47351de01709be838c2ce1726b9591dad616"}, - {file = "pre_commit-2.17.0.tar.gz", hash = "sha256:c1a8040ff15ad3d648c70cc3e55b93e4d2d5b687320955505587fd79bbaed06a"}, + {file = 
"pre_commit-2.20.0-py2.py3-none-any.whl", hash = "sha256:51a5ba7c480ae8072ecdb6933df22d2f812dc897d5fe848778116129a681aac7"}, + {file = "pre_commit-2.20.0.tar.gz", hash = "sha256:a978dac7bc9ec0bcee55c18a277d553b0f419d259dadb4b9418ff2d00eb43959"}, ] prometheus-client = [ - {file = "prometheus_client-0.13.1-py3-none-any.whl", hash = "sha256:357a447fd2359b0a1d2e9b311a0c5778c330cfbe186d880ad5a6b39884652316"}, - {file = "prometheus_client-0.13.1.tar.gz", hash = "sha256:ada41b891b79fca5638bd5cfe149efa86512eaa55987893becd2c6d8d0a5dfc5"}, + {file = "prometheus_client-0.15.0-py3-none-any.whl", hash = "sha256:db7c05cbd13a0f79975592d112320f2605a325969b270a94b71dcabc47b931d2"}, + {file = "prometheus_client-0.15.0.tar.gz", hash = "sha256:be26aa452490cfcf6da953f9436e95a9f2b4d578ca80094b4458930e5f584ab1"}, ] psutil = [ - {file = "psutil-5.9.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:55ce319452e3d139e25d6c3f85a1acf12d1607ddedea5e35fb47a552c051161b"}, - {file = "psutil-5.9.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:7336292a13a80eb93c21f36bde4328aa748a04b68c13d01dfddd67fc13fd0618"}, - {file = "psutil-5.9.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:cb8d10461c1ceee0c25a64f2dd54872b70b89c26419e147a05a10b753ad36ec2"}, - {file = "psutil-5.9.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:7641300de73e4909e5d148e90cc3142fb890079e1525a840cf0dfd39195239fd"}, - {file = "psutil-5.9.0-cp27-none-win32.whl", hash = "sha256:ea42d747c5f71b5ccaa6897b216a7dadb9f52c72a0fe2b872ef7d3e1eacf3ba3"}, - {file = "psutil-5.9.0-cp27-none-win_amd64.whl", hash = "sha256:ef216cc9feb60634bda2f341a9559ac594e2eeaadd0ba187a4c2eb5b5d40b91c"}, - {file = "psutil-5.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90a58b9fcae2dbfe4ba852b57bd4a1dded6b990a33d6428c7614b7d48eccb492"}, - {file = "psutil-5.9.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff0d41f8b3e9ebb6b6110057e40019a432e96aae2008951121ba4e56040b84f3"}, - {file = "psutil-5.9.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:742c34fff804f34f62659279ed5c5b723bb0195e9d7bd9907591de9f8f6558e2"}, - {file = "psutil-5.9.0-cp310-cp310-win32.whl", hash = "sha256:8293942e4ce0c5689821f65ce6522ce4786d02af57f13c0195b40e1edb1db61d"}, - {file = "psutil-5.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:9b51917c1af3fa35a3f2dabd7ba96a2a4f19df3dec911da73875e1edaf22a40b"}, - {file = "psutil-5.9.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e9805fed4f2a81de98ae5fe38b75a74c6e6ad2df8a5c479594c7629a1fe35f56"}, - {file = "psutil-5.9.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c51f1af02334e4b516ec221ee26b8fdf105032418ca5a5ab9737e8c87dafe203"}, - {file = "psutil-5.9.0-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32acf55cb9a8cbfb29167cd005951df81b567099295291bcfd1027365b36591d"}, - {file = "psutil-5.9.0-cp36-cp36m-win32.whl", hash = "sha256:e5c783d0b1ad6ca8a5d3e7b680468c9c926b804be83a3a8e95141b05c39c9f64"}, - {file = "psutil-5.9.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d62a2796e08dd024b8179bd441cb714e0f81226c352c802fca0fd3f89eeacd94"}, - {file = "psutil-5.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3d00a664e31921009a84367266b35ba0aac04a2a6cad09c550a89041034d19a0"}, - {file = 
"psutil-5.9.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7779be4025c540d1d65a2de3f30caeacc49ae7a2152108adeaf42c7534a115ce"}, - {file = "psutil-5.9.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:072664401ae6e7c1bfb878c65d7282d4b4391f1bc9a56d5e03b5a490403271b5"}, - {file = "psutil-5.9.0-cp37-cp37m-win32.whl", hash = "sha256:df2c8bd48fb83a8408c8390b143c6a6fa10cb1a674ca664954de193fdcab36a9"}, - {file = "psutil-5.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1d7b433519b9a38192dfda962dd8f44446668c009833e1429a52424624f408b4"}, - {file = "psutil-5.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c3400cae15bdb449d518545cbd5b649117de54e3596ded84aacabfbb3297ead2"}, - {file = "psutil-5.9.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2237f35c4bbae932ee98902a08050a27821f8f6dfa880a47195e5993af4702d"}, - {file = "psutil-5.9.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1070a9b287846a21a5d572d6dddd369517510b68710fca56b0e9e02fd24bed9a"}, - {file = "psutil-5.9.0-cp38-cp38-win32.whl", hash = "sha256:76cebf84aac1d6da5b63df11fe0d377b46b7b500d892284068bacccf12f20666"}, - {file = "psutil-5.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:3151a58f0fbd8942ba94f7c31c7e6b310d2989f4da74fcbf28b934374e9bf841"}, - {file = "psutil-5.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:539e429da49c5d27d5a58e3563886057f8fc3868a5547b4f1876d9c0f007bccf"}, - {file = "psutil-5.9.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58c7d923dc209225600aec73aa2c4ae8ea33b1ab31bc11ef8a5933b027476f07"}, - {file = "psutil-5.9.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3611e87eea393f779a35b192b46a164b1d01167c9d323dda9b1e527ea69d697d"}, - {file = "psutil-5.9.0-cp39-cp39-win32.whl", hash = "sha256:4e2fb92e3aeae3ec3b7b66c528981fd327fb93fd906a77215200404444ec1845"}, - {file = "psutil-5.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:7d190ee2eaef7831163f254dc58f6d2e2a22e27382b936aab51c835fc080c3d3"}, - {file = "psutil-5.9.0.tar.gz", hash = "sha256:869842dbd66bb80c3217158e629d6fceaecc3a3166d3d1faee515b05dd26ca25"}, + {file = "psutil-5.9.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b4a247cd3feaae39bb6085fcebf35b3b8ecd9b022db796d89c8f05067ca28e71"}, + {file = "psutil-5.9.3-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:5fa88e3d5d0b480602553d362c4b33a63e0c40bfea7312a7bf78799e01e0810b"}, + {file = "psutil-5.9.3-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:767ef4fa33acda16703725c0473a91e1832d296c37c63896c7153ba81698f1ab"}, + {file = "psutil-5.9.3-cp27-cp27m-win32.whl", hash = "sha256:9a4af6ed1094f867834f5f07acd1250605a0874169a5fcadbcec864aec2496a6"}, + {file = "psutil-5.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:fa5e32c7d9b60b2528108ade2929b115167fe98d59f89555574715054f50fa31"}, + {file = "psutil-5.9.3-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:fe79b4ad4836e3da6c4650cb85a663b3a51aef22e1a829c384e18fae87e5e727"}, + {file = "psutil-5.9.3-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:db8e62016add2235cc87fb7ea000ede9e4ca0aa1f221b40cef049d02d5d2593d"}, + {file = "psutil-5.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:941a6c2c591da455d760121b44097781bc970be40e0e43081b9139da485ad5b7"}, + {file = 
"psutil-5.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:71b1206e7909792d16933a0d2c1c7f04ae196186c51ba8567abae1d041f06dcb"}, + {file = "psutil-5.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f57d63a2b5beaf797b87024d018772439f9d3103a395627b77d17a8d72009543"}, + {file = "psutil-5.9.3-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7507f6c7b0262d3e7b0eeda15045bf5881f4ada70473b87bc7b7c93b992a7d7"}, + {file = "psutil-5.9.3-cp310-cp310-win32.whl", hash = "sha256:1b540599481c73408f6b392cdffef5b01e8ff7a2ac8caae0a91b8222e88e8f1e"}, + {file = "psutil-5.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:547ebb02031fdada635452250ff39942db8310b5c4a8102dfe9384ee5791e650"}, + {file = "psutil-5.9.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d8c3cc6bb76492133474e130a12351a325336c01c96a24aae731abf5a47fe088"}, + {file = "psutil-5.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07d880053c6461c9b89cd5d4808f3b8336665fa3acdefd6777662c5ed73a851a"}, + {file = "psutil-5.9.3-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e8b50241dd3c2ed498507f87a6602825073c07f3b7e9560c58411c14fe1e1c9"}, + {file = "psutil-5.9.3-cp36-cp36m-win32.whl", hash = "sha256:828c9dc9478b34ab96be75c81942d8df0c2bb49edbb481f597314d92b6441d89"}, + {file = "psutil-5.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:ed15edb14f52925869250b1375f0ff58ca5c4fa8adefe4883cfb0737d32f5c02"}, + {file = "psutil-5.9.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d266cd05bd4a95ca1c2b9b5aac50d249cf7c94a542f47e0b22928ddf8b80d1ef"}, + {file = "psutil-5.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e4939ff75149b67aef77980409f156f0082fa36accc475d45c705bb00c6c16a"}, + {file = "psutil-5.9.3-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68fa227c32240c52982cb931801c5707a7f96dd8927f9102d6c7771ea1ff5698"}, + {file = "psutil-5.9.3-cp37-cp37m-win32.whl", hash = "sha256:beb57d8a1ca0ae0eb3d08ccaceb77e1a6d93606f0e1754f0d60a6ebd5c288837"}, + {file = "psutil-5.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:12500d761ac091f2426567f19f95fd3f15a197d96befb44a5c1e3cbe6db5752c"}, + {file = "psutil-5.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba38cf9984d5462b506e239cf4bc24e84ead4b1d71a3be35e66dad0d13ded7c1"}, + {file = "psutil-5.9.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:46907fa62acaac364fff0b8a9da7b360265d217e4fdeaca0a2397a6883dffba2"}, + {file = "psutil-5.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a04a1836894c8279e5e0a0127c0db8e198ca133d28be8a2a72b4db16f6cf99c1"}, + {file = "psutil-5.9.3-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a4e07611997acf178ad13b842377e3d8e9d0a5bac43ece9bfc22a96735d9a4f"}, + {file = "psutil-5.9.3-cp38-cp38-win32.whl", hash = "sha256:6ced1ad823ecfa7d3ce26fe8aa4996e2e53fb49b7fed8ad81c80958501ec0619"}, + {file = "psutil-5.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:35feafe232d1aaf35d51bd42790cbccb882456f9f18cdc411532902370d660df"}, + {file = "psutil-5.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:538fcf6ae856b5e12d13d7da25ad67f02113c96f5989e6ad44422cb5994ca7fc"}, + 
{file = "psutil-5.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a3d81165b8474087bb90ec4f333a638ccfd1d69d34a9b4a1a7eaac06648f9fbe"}, + {file = "psutil-5.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a7826e68b0cf4ce2c1ee385d64eab7d70e3133171376cac53d7c1790357ec8f"}, + {file = "psutil-5.9.3-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ec296f565191f89c48f33d9544d8d82b0d2af7dd7d2d4e6319f27a818f8d1cc"}, + {file = "psutil-5.9.3-cp39-cp39-win32.whl", hash = "sha256:9ec95df684583b5596c82bb380c53a603bb051cf019d5c849c47e117c5064395"}, + {file = "psutil-5.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4bd4854f0c83aa84a5a40d3b5d0eb1f3c128f4146371e03baed4589fe4f3c931"}, + {file = "psutil-5.9.3.tar.gz", hash = "sha256:7ccfcdfea4fc4b0a02ca2c31de7fcd186beb9cff8207800e14ab66f79c773af6"}, ] psycopg2-binary = [ - {file = "psycopg2-binary-2.8.6.tar.gz", hash = "sha256:11b9c0ebce097180129e422379b824ae21c8f2a6596b159c7659e2e5a00e1aa0"}, - {file = "psycopg2_binary-2.8.6-cp27-cp27m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:d14b140a4439d816e3b1229a4a525df917d6ea22a0771a2a78332273fd9528a4"}, - {file = "psycopg2_binary-2.8.6-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:1fabed9ea2acc4efe4671b92c669a213db744d2af8a9fc5d69a8e9bc14b7a9db"}, - {file = "psycopg2_binary-2.8.6-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:f5ab93a2cb2d8338b1674be43b442a7f544a0971da062a5da774ed40587f18f5"}, - {file = "psycopg2_binary-2.8.6-cp27-cp27m-win32.whl", hash = "sha256:b4afc542c0ac0db720cf516dd20c0846f71c248d2b3d21013aa0d4ef9c71ca25"}, - {file = "psycopg2_binary-2.8.6-cp27-cp27m-win_amd64.whl", hash = "sha256:e74a55f6bad0e7d3968399deb50f61f4db1926acf4a6d83beaaa7df986f48b1c"}, - {file = "psycopg2_binary-2.8.6-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:0deac2af1a587ae12836aa07970f5cb91964f05a7c6cdb69d8425ff4c15d4e2c"}, - {file = "psycopg2_binary-2.8.6-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ad20d2eb875aaa1ea6d0f2916949f5c08a19c74d05b16ce6ebf6d24f2c9f75d1"}, - {file = "psycopg2_binary-2.8.6-cp34-cp34m-win32.whl", hash = "sha256:950bc22bb56ee6ff142a2cb9ee980b571dd0912b0334aa3fe0fe3788d860bea2"}, - {file = "psycopg2_binary-2.8.6-cp34-cp34m-win_amd64.whl", hash = "sha256:b8a3715b3c4e604bcc94c90a825cd7f5635417453b253499664f784fc4da0152"}, - {file = "psycopg2_binary-2.8.6-cp35-cp35m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:d1b4ab59e02d9008efe10ceabd0b31e79519da6fb67f7d8e8977118832d0f449"}, - {file = "psycopg2_binary-2.8.6-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:ac0c682111fbf404525dfc0f18a8b5f11be52657d4f96e9fcb75daf4f3984859"}, - {file = "psycopg2_binary-2.8.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7d92a09b788cbb1aec325af5fcba9fed7203897bbd9269d5691bb1e3bce29550"}, - {file = "psycopg2_binary-2.8.6-cp35-cp35m-win32.whl", hash = "sha256:aaa4213c862f0ef00022751161df35804127b78adf4a2755b9f991a507e425fd"}, - {file = "psycopg2_binary-2.8.6-cp35-cp35m-win_amd64.whl", hash = "sha256:c2507d796fca339c8fb03216364cca68d87e037c1f774977c8fc377627d01c71"}, - {file = "psycopg2_binary-2.8.6-cp36-cp36m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:ee69dad2c7155756ad114c02db06002f4cded41132cc51378e57aad79cc8e4f4"}, - {file = 
"psycopg2_binary-2.8.6-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:e82aba2188b9ba309fd8e271702bd0d0fc9148ae3150532bbb474f4590039ffb"}, - {file = "psycopg2_binary-2.8.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d5227b229005a696cc67676e24c214740efd90b148de5733419ac9aaba3773da"}, - {file = "psycopg2_binary-2.8.6-cp36-cp36m-win32.whl", hash = "sha256:a0eb43a07386c3f1f1ebb4dc7aafb13f67188eab896e7397aa1ee95a9c884eb2"}, - {file = "psycopg2_binary-2.8.6-cp36-cp36m-win_amd64.whl", hash = "sha256:e1f57aa70d3f7cc6947fd88636a481638263ba04a742b4a37dd25c373e41491a"}, - {file = "psycopg2_binary-2.8.6-cp37-cp37m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:833709a5c66ca52f1d21d41865a637223b368c0ee76ea54ca5bad6f2526c7679"}, - {file = "psycopg2_binary-2.8.6-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:ba28584e6bca48c59eecbf7efb1576ca214b47f05194646b081717fa628dfddf"}, - {file = "psycopg2_binary-2.8.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:6a32f3a4cb2f6e1a0b15215f448e8ce2da192fd4ff35084d80d5e39da683e79b"}, - {file = "psycopg2_binary-2.8.6-cp37-cp37m-win32.whl", hash = "sha256:0e4dc3d5996760104746e6cfcdb519d9d2cd27c738296525d5867ea695774e67"}, - {file = "psycopg2_binary-2.8.6-cp37-cp37m-win_amd64.whl", hash = "sha256:cec7e622ebc545dbb4564e483dd20e4e404da17ae07e06f3e780b2dacd5cee66"}, - {file = "psycopg2_binary-2.8.6-cp38-cp38-macosx_10_9_x86_64.macosx_10_9_intel.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:ba381aec3a5dc29634f20692349d73f2d21f17653bda1decf0b52b11d694541f"}, - {file = "psycopg2_binary-2.8.6-cp38-cp38-manylinux1_i686.whl", hash = "sha256:a0c50db33c32594305b0ef9abc0cb7db13de7621d2cadf8392a1d9b3c437ef77"}, - {file = "psycopg2_binary-2.8.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2dac98e85565d5688e8ab7bdea5446674a83a3945a8f416ad0110018d1501b94"}, - {file = "psycopg2_binary-2.8.6-cp38-cp38-win32.whl", hash = "sha256:bd1be66dde2b82f80afb9459fc618216753f67109b859a361cf7def5c7968729"}, - {file = "psycopg2_binary-2.8.6-cp38-cp38-win_amd64.whl", hash = "sha256:8cd0fb36c7412996859cb4606a35969dd01f4ea34d9812a141cd920c3b18be77"}, - {file = "psycopg2_binary-2.8.6-cp39-cp39-macosx_10_9_x86_64.macosx_10_9_intel.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:89705f45ce07b2dfa806ee84439ec67c5d9a0ef20154e0e475e2b2ed392a5b83"}, - {file = "psycopg2_binary-2.8.6-cp39-cp39-manylinux1_i686.whl", hash = "sha256:42ec1035841b389e8cc3692277a0bd81cdfe0b65d575a2c8862cec7a80e62e52"}, - {file = "psycopg2_binary-2.8.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7312e931b90fe14f925729cde58022f5d034241918a5c4f9797cac62f6b3a9dd"}, - {file = "psycopg2_binary-2.8.6-cp39-cp39-win32.whl", hash = "sha256:6422f2ff0919fd720195f64ffd8f924c1395d30f9a495f31e2392c2efafb5056"}, - {file = "psycopg2_binary-2.8.6-cp39-cp39-win_amd64.whl", hash = "sha256:15978a1fbd225583dd8cdaf37e67ccc278b5abecb4caf6b2d6b8e2b948e953f6"}, + {file = "psycopg2-binary-2.9.5.tar.gz", hash = "sha256:33e632d0885b95a8b97165899006c40e9ecdc634a529dca7b991eb7de4ece41c"}, + {file = "psycopg2_binary-2.9.5-cp310-cp310-macosx_10_15_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:0775d6252ccb22b15da3b5d7adbbf8cfe284916b14b6dc0ff503a23edb01ee85"}, + {file = "psycopg2_binary-2.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ec46ed947801652c9643e0b1dc334cfb2781232e375ba97312c2fc256597632"}, + {file = "psycopg2_binary-2.9.5-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash 
= "sha256:3520d7af1ebc838cc6084a3281145d5cd5bdd43fdef139e6db5af01b92596cb7"}, + {file = "psycopg2_binary-2.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cbc554ba47ecca8cd3396ddaca85e1ecfe3e48dd57dc5e415e59551affe568e"}, + {file = "psycopg2_binary-2.9.5-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:5d28ecdf191db558d0c07d0f16524ee9d67896edf2b7990eea800abeb23ebd61"}, + {file = "psycopg2_binary-2.9.5-cp310-cp310-manylinux_2_24_ppc64le.whl", hash = "sha256:b9c33d4aef08dfecbd1736ceab8b7b3c4358bf10a0121483e5cd60d3d308cc64"}, + {file = "psycopg2_binary-2.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:05b3d479425e047c848b9782cd7aac9c6727ce23181eb9647baf64ffdfc3da41"}, + {file = "psycopg2_binary-2.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:1e491e6489a6cb1d079df8eaa15957c277fdedb102b6a68cfbf40c4994412fd0"}, + {file = "psycopg2_binary-2.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:9e32cedc389bcb76d9f24ea8a012b3cb8385ee362ea437e1d012ffaed106c17d"}, + {file = "psycopg2_binary-2.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:46850a640df62ae940e34a163f72e26aca1f88e2da79148e1862faaac985c302"}, + {file = "psycopg2_binary-2.9.5-cp310-cp310-win32.whl", hash = "sha256:3d790f84201c3698d1bfb404c917f36e40531577a6dda02e45ba29b64d539867"}, + {file = "psycopg2_binary-2.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:1764546ffeaed4f9428707be61d68972eb5ede81239b46a45843e0071104d0dd"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-macosx_10_9_universal2.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:426c2ae999135d64e6a18849a7d1ad0e1bd007277e4a8f4752eaa40a96b550ff"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7cf1d44e710ca3a9ce952bda2855830fe9f9017ed6259e01fcd71ea6287565f5"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:024030b13bdcbd53d8a93891a2cf07719715724fc9fee40243f3bd78b4264b8f"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcda1c84a1c533c528356da5490d464a139b6e84eb77cc0b432e38c5c6dd7882"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:2ef892cabdccefe577088a79580301f09f2a713eb239f4f9f62b2b29cafb0577"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-manylinux_2_24_ppc64le.whl", hash = "sha256:af0516e1711995cb08dc19bbd05bec7dbdebf4185f68870595156718d237df3e"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e72c91bda9880f097c8aa3601a2c0de6c708763ba8128006151f496ca9065935"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e67b3c26e9b6d37b370c83aa790bbc121775c57bfb096c2e77eacca25fd0233b"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5fc447058d083b8c6ac076fc26b446d44f0145308465d745fba93a28c14c9e32"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d892bfa1d023c3781a3cab8dd5af76b626c483484d782e8bd047c180db590e4c"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-win32.whl", hash = "sha256:2abccab84d057723d2ca8f99ff7b619285d40da6814d50366f61f0fc385c3903"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:bef7e3f9dc6f0c13afdd671008534be5744e0e682fb851584c8c3a025ec09720"}, + {file = 
"psycopg2_binary-2.9.5-cp36-cp36m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:6e63814ec71db9bdb42905c925639f319c80e7909fb76c3b84edc79dadef8d60"}, + {file = "psycopg2_binary-2.9.5-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:212757ffcecb3e1a5338d4e6761bf9c04f750e7d027117e74aa3cd8a75bb6fbd"}, + {file = "psycopg2_binary-2.9.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f8a9bcab7b6db2e3dbf65b214dfc795b4c6b3bb3af922901b6a67f7cb47d5f8"}, + {file = "psycopg2_binary-2.9.5-cp36-cp36m-manylinux_2_24_aarch64.whl", hash = "sha256:56b2957a145f816726b109ee3d4e6822c23f919a7d91af5a94593723ed667835"}, + {file = "psycopg2_binary-2.9.5-cp36-cp36m-manylinux_2_24_ppc64le.whl", hash = "sha256:f95b8aca2703d6a30249f83f4fe6a9abf2e627aa892a5caaab2267d56be7ab69"}, + {file = "psycopg2_binary-2.9.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:70831e03bd53702c941da1a1ad36c17d825a24fbb26857b40913d58df82ec18b"}, + {file = "psycopg2_binary-2.9.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:dbc332beaf8492b5731229a881807cd7b91b50dbbbaf7fe2faf46942eda64a24"}, + {file = "psycopg2_binary-2.9.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:2d964eb24c8b021623df1c93c626671420c6efadbdb8655cb2bd5e0c6fa422ba"}, + {file = "psycopg2_binary-2.9.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:95076399ec3b27a8f7fa1cc9a83417b1c920d55cf7a97f718a94efbb96c7f503"}, + {file = "psycopg2_binary-2.9.5-cp36-cp36m-win32.whl", hash = "sha256:3fc33295cfccad697a97a76dec3f1e94ad848b7b163c3228c1636977966b51e2"}, + {file = "psycopg2_binary-2.9.5-cp36-cp36m-win_amd64.whl", hash = "sha256:02551647542f2bf89073d129c73c05a25c372fc0a49aa50e0de65c3c143d8bd0"}, + {file = "psycopg2_binary-2.9.5-cp37-cp37m-macosx_10_15_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:63e318dbe52709ed10d516a356f22a635e07a2e34c68145484ed96a19b0c4c68"}, + {file = "psycopg2_binary-2.9.5-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7e518a0911c50f60313cb9e74a169a65b5d293770db4770ebf004245f24b5c5"}, + {file = "psycopg2_binary-2.9.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9d38a4656e4e715d637abdf7296e98d6267df0cc0a8e9a016f8ba07e4aa3eeb"}, + {file = "psycopg2_binary-2.9.5-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:68d81a2fe184030aa0c5c11e518292e15d342a667184d91e30644c9d533e53e1"}, + {file = "psycopg2_binary-2.9.5-cp37-cp37m-manylinux_2_24_ppc64le.whl", hash = "sha256:7ee3095d02d6f38bd7d9a5358fcc9ea78fcdb7176921528dd709cc63f40184f5"}, + {file = "psycopg2_binary-2.9.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:46512486be6fbceef51d7660dec017394ba3e170299d1dc30928cbedebbf103a"}, + {file = "psycopg2_binary-2.9.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b911dfb727e247340d36ae20c4b9259e4a64013ab9888ccb3cbba69b77fd9636"}, + {file = "psycopg2_binary-2.9.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:422e3d43b47ac20141bc84b3d342eead8d8099a62881a501e97d15f6addabfe9"}, + {file = "psycopg2_binary-2.9.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c5682a45df7d9642eff590abc73157c887a68f016df0a8ad722dcc0f888f56d7"}, + {file = "psycopg2_binary-2.9.5-cp37-cp37m-win32.whl", hash = "sha256:b8104f709590fff72af801e916817560dbe1698028cd0afe5a52d75ceb1fce5f"}, + {file = "psycopg2_binary-2.9.5-cp37-cp37m-win_amd64.whl", hash = 
"sha256:7b3751857da3e224f5629400736a7b11e940b5da5f95fa631d86219a1beaafec"}, + {file = "psycopg2_binary-2.9.5-cp38-cp38-macosx_10_15_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:043a9fd45a03858ff72364b4b75090679bd875ee44df9c0613dc862ca6b98460"}, + {file = "psycopg2_binary-2.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9ffdc51001136b699f9563b1c74cc1f8c07f66ef7219beb6417a4c8aaa896c28"}, + {file = "psycopg2_binary-2.9.5-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c15ba5982c177bc4b23a7940c7e4394197e2d6a424a2d282e7c236b66da6d896"}, + {file = "psycopg2_binary-2.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc85b3777068ed30aff8242be2813038a929f2084f69e43ef869daddae50f6ee"}, + {file = "psycopg2_binary-2.9.5-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:215d6bf7e66732a514f47614f828d8c0aaac9a648c46a831955cb103473c7147"}, + {file = "psycopg2_binary-2.9.5-cp38-cp38-manylinux_2_24_ppc64le.whl", hash = "sha256:7d07f552d1e412f4b4e64ce386d4c777a41da3b33f7098b6219012ba534fb2c2"}, + {file = "psycopg2_binary-2.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a0adef094c49f242122bb145c3c8af442070dc0e4312db17e49058c1702606d4"}, + {file = "psycopg2_binary-2.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:00475004e5ed3e3bf5e056d66e5dcdf41a0dc62efcd57997acd9135c40a08a50"}, + {file = "psycopg2_binary-2.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7d88db096fa19d94f433420eaaf9f3c45382da2dd014b93e4bf3215639047c16"}, + {file = "psycopg2_binary-2.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:902844f9c4fb19b17dfa84d9e2ca053d4a4ba265723d62ea5c9c26b38e0aa1e6"}, + {file = "psycopg2_binary-2.9.5-cp38-cp38-win32.whl", hash = "sha256:4e7904d1920c0c89105c0517dc7e3f5c20fb4e56ba9cdef13048db76947f1d79"}, + {file = "psycopg2_binary-2.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:a36a0e791805aa136e9cbd0ffa040d09adec8610453ee8a753f23481a0057af5"}, + {file = "psycopg2_binary-2.9.5-cp39-cp39-macosx_10_15_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:25382c7d174c679ce6927c16b6fbb68b10e56ee44b1acb40671e02d29f2fce7c"}, + {file = "psycopg2_binary-2.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9c38d3869238e9d3409239bc05bc27d6b7c99c2a460ea337d2814b35fb4fea1b"}, + {file = "psycopg2_binary-2.9.5-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5c6527c8efa5226a9e787507652dd5ba97b62d29b53c371a85cd13f957fe4d42"}, + {file = "psycopg2_binary-2.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e59137cdb970249ae60be2a49774c6dfb015bd0403f05af1fe61862e9626642d"}, + {file = "psycopg2_binary-2.9.5-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:d4c7b3a31502184e856df1f7bbb2c3735a05a8ce0ade34c5277e1577738a5c91"}, + {file = "psycopg2_binary-2.9.5-cp39-cp39-manylinux_2_24_ppc64le.whl", hash = "sha256:b9a794cef1d9c1772b94a72eec6da144c18e18041d294a9ab47669bc77a80c1d"}, + {file = "psycopg2_binary-2.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5254cbd4f4855e11cebf678c1a848a3042d455a22a4ce61349c36aafd4c2267"}, + {file = "psycopg2_binary-2.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c5e65c6ac0ae4bf5bef1667029f81010b6017795dcb817ba5c7b8a8d61fab76f"}, + {file = "psycopg2_binary-2.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:74eddec4537ab1f701a1647214734bc52cee2794df748f6ae5908e00771f180a"}, + {file = 
"psycopg2_binary-2.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:01ad49d68dd8c5362e4bfb4158f2896dc6e0c02e87b8a3770fc003459f1a4425"}, + {file = "psycopg2_binary-2.9.5-cp39-cp39-win32.whl", hash = "sha256:937880290775033a743f4836aa253087b85e62784b63fd099ee725d567a48aa1"}, + {file = "psycopg2_binary-2.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:484405b883630f3e74ed32041a87456c5e0e63a8e3429aa93e8714c366d62bd1"}, ] pycodestyle = [ - {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, - {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, + {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, + {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, +] +pycparser = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] pydocstyle = [ {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"}, ] pyfakefs = [ - {file = "pyfakefs-4.5.4-py3-none-any.whl", hash = "sha256:e0cc0d22cb74badf4fb2143a112817d7aea1a58ee9dca015a68bf38c3691cb52"}, - {file = "pyfakefs-4.5.4.tar.gz", hash = "sha256:5b5951e873f73bf12e3a19d8e4470c4b7962c51df753cf8c4caaf64e24a0a323"}, + {file = "pyfakefs-5.0.0-py3-none-any.whl", hash = "sha256:e1b01954978fe2d9a4d75f079359d7f8d3af3bb12ff2dc8633a4cc0a0dc7fbda"}, + {file = "pyfakefs-5.0.0.tar.gz", hash = "sha256:19d1d8f1ee520891d78b6ed05c2078e0792d545f83dee33461fbaa5cc72e187d"}, ] pyflakes = [ - {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, - {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, + {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, + {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, +] +PyJWT = [ + {file = "PyJWT-2.6.0-py3-none-any.whl", hash = "sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14"}, + {file = "PyJWT-2.6.0.tar.gz", hash = "sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd"}, +] +pymdown-extensions = [ + {file = "pymdown_extensions-9.8-py3-none-any.whl", hash = "sha256:8e62688a8b1128acd42fa823f3d429d22f4284b5e6dd4d3cd56721559a5a211b"}, + {file = "pymdown_extensions-9.8.tar.gz", hash = "sha256:1bd4a173095ef8c433b831af1f3cb13c10883be0c100ae613560668e594651f7"}, ] python-dotenv = [ - {file = "python-dotenv-0.17.1.tar.gz", hash = "sha256:b1ae5e9643d5ed987fc57cc2583021e38db531946518130777734f9589b3141f"}, - {file = "python_dotenv-0.17.1-py2.py3-none-any.whl", hash = "sha256:00aa34e92d992e9f8383730816359647f358f4a3be1ba45e5a5cefd27ee91544"}, + {file = "python-dotenv-0.21.0.tar.gz", hash = "sha256:b77d08274639e3d34145dfa6c7008e66df0f04b7be7a75fd0d5292c191d79045"}, + {file = "python_dotenv-0.21.0-py3-none-any.whl", hash = 
"sha256:1684eb44636dd462b66c3ee016599815514527ad99965de77f43e0944634a7e5"}, ] python-frontmatter = [ {file = "python-frontmatter-1.0.0.tar.gz", hash = "sha256:e98152e977225ddafea6f01f40b4b0f1de175766322004c826ca99842d19a7cd"}, {file = "python_frontmatter-1.0.0-py3-none-any.whl", hash = "sha256:766ae75f1b301ffc5fe3494339147e0fd80bc3deff3d7590a93991978b579b08"}, ] pytz = [ - {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"}, - {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, -] -pyyaml = [ - {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, - {file = "PyYAML-5.4.1-cp27-cp27m-win32.whl", hash = "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393"}, - {file = "PyYAML-5.4.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8"}, - {file = "PyYAML-5.4.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185"}, - {file = "PyYAML-5.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253"}, - {file = "PyYAML-5.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc"}, - {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347"}, - {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541"}, - {file = "PyYAML-5.4.1-cp36-cp36m-win32.whl", hash = "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5"}, - {file = "PyYAML-5.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df"}, - {file = "PyYAML-5.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018"}, - {file = "PyYAML-5.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63"}, - {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa"}, - {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0"}, - {file = "PyYAML-5.4.1-cp37-cp37m-win32.whl", hash = "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b"}, - {file = "PyYAML-5.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf"}, - {file = "PyYAML-5.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46"}, - {file = "PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb"}, - {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247"}, - {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc"}, - {file = "PyYAML-5.4.1-cp38-cp38-win32.whl", hash = 
"sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc"}, - {file = "PyYAML-5.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696"}, - {file = "PyYAML-5.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77"}, - {file = "PyYAML-5.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183"}, - {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122"}, - {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6"}, - {file = "PyYAML-5.4.1-cp39-cp39-win32.whl", hash = "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10"}, - {file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"}, - {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, + {file = "pytz-2022.5-py2.py3-none-any.whl", hash = "sha256:335ab46900b1465e714b4fda4963d87363264eb662aab5e65da039c25f1f5b22"}, + {file = "pytz-2022.5.tar.gz", hash = "sha256:c4d88f472f54d615e9cd582a5004d1e5f624854a6a27a6211591c251f22a6914"}, +] +PyYAML = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, + {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, + {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, + {file = 
"PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] requests = [ - {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, - {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, + {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, + {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, +] +rfc3986 = [ + {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, + {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, ] sentry-sdk = [ - {file = "sentry-sdk-0.20.3.tar.gz", hash = "sha256:4ae8d1ced6c67f1c8ea51d82a16721c166c489b76876c9f2c202b8a50334b237"}, - {file = "sentry_sdk-0.20.3-py2.py3-none-any.whl", hash = "sha256:e75c8c58932bda8cd293ea8e4b242527129e1caaec91433d21b8b2f20fee030b"}, + {file = "sentry-sdk-1.11.0.tar.gz", hash = "sha256:e7b78a1ddf97a5f715a50ab8c3f7a93f78b114c67307785ee828ef67a5d6f117"}, + {file = "sentry_sdk-1.11.0-py2.py3-none-any.whl", hash = "sha256:f467e6c7fac23d4d42bc83eb049c400f756cd2d65ab44f0cc1165d0c7c3d40bc"}, +] +setuptools = [ + {file = "setuptools-65.5.0-py3-none-any.whl", hash = "sha256:f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356"}, + {file = "setuptools-65.5.0.tar.gz", hash = "sha256:512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17"}, ] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, @@ -1165,39 +1460,47 @@ smmap = [ {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, ] +sniffio = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] snowballstemmer = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] sqlparse = [ - {file = "sqlparse-0.4.2-py3-none-any.whl", hash = "sha256:48719e356bb8b42991bdbb1e8b83223757b93789c00910a616a071910ca4a64d"}, - {file = "sqlparse-0.4.2.tar.gz", hash = "sha256:0c00730c74263a94e5a9919ade150dfc3b19c574389985446148402998287dae"}, + {file = "sqlparse-0.4.3-py3-none-any.whl", hash = "sha256:0323c0ec29cd52bceabc1b4d9d579e311f3e4961b98d174201d5622a23b85e34"}, + {file = "sqlparse-0.4.3.tar.gz", hash = "sha256:69ca804846bb114d2ec380e4360a8a340db83f0ccf3afceeb1404df028f57268"}, ] stevedore = [ - {file = "stevedore-3.5.0-py3-none-any.whl", hash = 
"sha256:a547de73308fd7e90075bb4d301405bebf705292fa90a90fc3bcf9133f58616c"}, - {file = "stevedore-3.5.0.tar.gz", hash = "sha256:f40253887d8712eaa2bb0ea3830374416736dc8ec0e22f5a65092c1174c44335"}, + {file = "stevedore-4.1.0-py3-none-any.whl", hash = "sha256:3b1cbd592a87315f000d05164941ee5e164899f8fc0ce9a00bb0f321f40ef93e"}, + {file = "stevedore-4.1.0.tar.gz", hash = "sha256:02518a8f0d6d29be8a445b7f2ac63753ff29e8f2a2faa01777568d5500d777a6"}, ] taskipy = [ - {file = "taskipy-1.7.0-py3-none-any.whl", hash = "sha256:9e284c10898e9dee01a3e72220b94b192b1daa0f560271503a6df1da53d03844"}, - {file = "taskipy-1.7.0.tar.gz", hash = "sha256:960e480b1004971e76454ecd1a0484e640744a30073a1069894a311467f85ed8"}, + {file = "taskipy-1.10.3-py3-none-any.whl", hash = "sha256:4c0070ca53868d97989f7ab5c6f237525d52ee184f9b967576e8fe427ed9d0b8"}, + {file = "taskipy-1.10.3.tar.gz", hash = "sha256:112beaf21e3d5569950b99162a1de003fa885fabee9e450757a6b874be914877"}, ] toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] +tomli = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] +tzdata = [ + {file = "tzdata-2022.5-py2.py3-none-any.whl", hash = "sha256:323161b22b7802fdc78f20ca5f6073639c64f1a7227c40cd3e19fd1d0ce6650a"}, + {file = "tzdata-2022.5.tar.gz", hash = "sha256:e15b2b3005e2546108af42a0eb4ccab4d9e225e2dfbf4f77aad50c70a4b1f3ab"}, +] urllib3 = [ - {file = "urllib3-1.26.8-py2.py3-none-any.whl", hash = "sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed"}, - {file = "urllib3-1.26.8.tar.gz", hash = "sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"}, + {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, + {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, ] virtualenv = [ - {file = "virtualenv-20.13.0-py2.py3-none-any.whl", hash = "sha256:339f16c4a86b44240ba7223d0f93a7887c3ca04b5f9c8129da7958447d079b09"}, - {file = "virtualenv-20.13.0.tar.gz", hash = "sha256:d8458cf8d59d0ea495ad9b34c2599487f8a7772d796f9910858376d1600dd2dd"}, + {file = "virtualenv-20.16.6-py3-none-any.whl", hash = "sha256:186ca84254abcbde98180fd17092f9628c5fe742273c02724972a1d8a2035108"}, + {file = "virtualenv-20.16.6.tar.gz", hash = "sha256:530b850b523c6449406dfba859d6345e48ef19b8439606c5d74d7d3c9e14d76e"}, ] whitenoise = [ - {file = "whitenoise-5.3.0-py2.py3-none-any.whl", hash = "sha256:d963ef25639d1417e8a247be36e6aedd8c7c6f0a08adcb5a89146980a96b577c"}, - {file = "whitenoise-5.3.0.tar.gz", hash = "sha256:d234b871b52271ae7ed6d9da47ffe857c76568f11dd30e28e18c5869dbd11e12"}, -] -zipp = [ - {file = "zipp-3.7.0-py3-none-any.whl", hash = "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375"}, - {file = "zipp-3.7.0.tar.gz", hash = "sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d"}, + {file = "whitenoise-6.2.0-py3-none-any.whl", hash = "sha256:8e9c600a5c18bd17655ef668ad55b5edf6c24ce9bdca5bf607649ca4b1e8e2c2"}, + {file = "whitenoise-6.2.0.tar.gz", hash = "sha256:8fa943c6d4cd9e27673b70c21a07b0aa120873901e099cd46cab40f7cc96d567"}, ] diff --git 
a/pydis_site/apps/api/__init__.py b/pydis_site/apps/api/__init__.py
index afa5b4d5..e69de29b 100644
--- a/pydis_site/apps/api/__init__.py
+++ b/pydis_site/apps/api/__init__.py
@@ -1 +0,0 @@
-default_app_config = 'pydis_site.apps.api.apps.ApiConfig'
diff --git a/pydis_site/apps/api/github_utils.py b/pydis_site/apps/api/github_utils.py
new file mode 100644
index 00000000..44c571c3
--- /dev/null
+++ b/pydis_site/apps/api/github_utils.py
@@ -0,0 +1,207 @@
+"""Utilities for working with the GitHub API."""
+import dataclasses
+import datetime
+import math
+import typing
+
+import httpx
+import jwt
+
+from pydis_site import settings
+
+MAX_RUN_TIME = datetime.timedelta(minutes=10)
+"""The maximum time allowed before an action is declared timed out."""
+
+
+class ArtifactProcessingError(Exception):
+    """Base exception for other errors related to processing a GitHub artifact."""
+
+    status: int
+
+
+class UnauthorizedError(ArtifactProcessingError):
+    """The application does not have permission to access the requested repo."""
+
+    status = 401
+
+
+class NotFoundError(ArtifactProcessingError):
+    """The requested resource could not be found."""
+
+    status = 404
+
+
+class ActionFailedError(ArtifactProcessingError):
+    """The requested workflow did not conclude successfully."""
+
+    status = 400
+
+
+class RunTimeoutError(ArtifactProcessingError):
+    """The requested workflow run was not ready in time."""
+
+    status = 408
+
+
+class RunPendingError(ArtifactProcessingError):
+    """The requested workflow run is still pending, try again later."""
+
+    status = 202
+
+
+@dataclasses.dataclass(frozen=True)
+class WorkflowRun:
+    """
+    A workflow run from the GitHub API.
+
+    https://docs.github.com/en/rest/actions/workflow-runs#get-a-workflow-run
+    """
+
+    name: str
+    head_sha: str
+    created_at: str
+    status: str
+    conclusion: str
+    artifacts_url: str
+
+    @classmethod
+    def from_raw(cls, data: dict[str, typing.Any]):
+        """Create an instance using the raw data from the API, discarding unused fields."""
+        return cls(**{
+            key.name: data[key.name] for key in dataclasses.fields(cls)
+        })
+
+
+def generate_token() -> str:
+    """
+    Generate a JWT token to access the GitHub API.
+
+    The token is valid for roughly 10 minutes after generation, before the API starts
+    returning 401s.
+
+    Refer to:
+    https://docs.github.com/en/developers/apps/building-github-apps/authenticating-with-github-apps#authenticating-as-a-github-app
+    """
+    now = datetime.datetime.now()
+    return jwt.encode(
+        {
+            "iat": math.floor((now - datetime.timedelta(seconds=60)).timestamp()),  # Issued at
+            "exp": math.floor((now + datetime.timedelta(minutes=9)).timestamp()),  # Expires at
+            "iss": settings.GITHUB_APP_ID,
+        },
+        settings.GITHUB_APP_KEY,
+        algorithm="RS256"
+    )
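The JWT produced by `generate_token` above only authenticates the app itself; repository access requires exchanging it for an installation token, which `authorize` below takes care of. The claims can be inspected with the same PyJWT library that issues them; a minimal, illustrative sketch (not part of the diff), assuming `settings` is configured as in this module:

    import jwt

    token = generate_token()
    # Skipping signature verification is fine for local inspection; GitHub itself
    # verifies the RS256 signature against the app's registered public key.
    claims = jwt.decode(token, options={"verify_signature": False})
    assert claims["iss"] == settings.GITHUB_APP_ID
    # A 60-second backdated "iat" plus a 9-minute "exp" gives exactly 600 seconds,
    # safely inside GitHub's 10-minute maximum lifetime for app JWTs.
    assert claims["exp"] - claims["iat"] == 600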
+
+
+def authorize(owner: str, repo: str) -> httpx.Client:
+    """
+    Get an access token for the requested repository.
+
+    The process is roughly:
+    - GET app/installations to get a list of all app installations
+    - POST <app_access_token> to get a token to access the given app
+    - GET installation/repositories and check if the requested one is part of those
+    """
+    client = httpx.Client(
+        base_url=settings.GITHUB_API,
+        headers={"Authorization": f"bearer {generate_token()}"},
+        timeout=10,
+    )
+
+    try:
+        # Get a list of app installations we have access to
+        apps = client.get("app/installations")
+        apps.raise_for_status()
+
+        for app in apps.json():
+            # Look for an installation with the right owner
+            if app["account"]["login"] != owner:
+                continue
+
+            # Get the repositories of the specified owner
+            app_token = client.post(app["access_tokens_url"])
+            app_token.raise_for_status()
+            client.headers["Authorization"] = f"bearer {app_token.json()['token']}"
+
+            repos = client.get("installation/repositories")
+            repos.raise_for_status()
+
+            # Search for the requested repository
+            for accessible_repo in repos.json()["repositories"]:
+                if accessible_repo["name"] == repo:
+                    # We've found the correct repository, and it's accessible with the current auth
+                    return client
+
+        raise NotFoundError(
+            "Could not find the requested repository. Make sure the application can access it."
+        )
+
+    except BaseException as e:
+        # Close the client if we encountered an unexpected exception
+        client.close()
+        raise e
+
+
+def check_run_status(run: WorkflowRun) -> str:
+    """Check if the provided run has been completed, otherwise raise an exception."""
+    created_at = datetime.datetime.strptime(run.created_at, settings.GITHUB_TIMESTAMP_FORMAT)
+    run_time = datetime.datetime.utcnow() - created_at
+
+    if run.status != "completed":
+        if run_time <= MAX_RUN_TIME:
+            raise RunPendingError(
+                f"The requested run is still pending. It was created "
+                f"{run_time.seconds // 60}:{run_time.seconds % 60 :>02} minutes ago."
+            )
+        else:
+            raise RunTimeoutError("The requested workflow was not ready in time.")
+
+    if run.conclusion != "success":
+        # The action failed, or did not run
+        raise ActionFailedError(f"The requested workflow ended with: {run.conclusion}")
+
+    # The requested action is ready
+    return run.artifacts_url
+
+
+def get_artifact(owner: str, repo: str, sha: str, action_name: str, artifact_name: str) -> str:
+    """Get a download URL for a build artifact."""
+    client = authorize(owner, repo)
+
+    try:
+        # Get the workflow runs for this repository
+        runs = client.get(f"/repos/{owner}/{repo}/actions/runs", params={"per_page": 100})
+        runs.raise_for_status()
+        runs = runs.json()
+
+        # Filter the runs for the one associated with the given SHA
+        for run in runs["workflow_runs"]:
+            run = WorkflowRun.from_raw(run)
+            if run.name == action_name and sha == run.head_sha:
+                break
+        else:
+            raise NotFoundError(
+                "Could not find a run matching the provided settings in the previous hundred runs."
+            )
+
+        # Check the workflow status
+        url = check_run_status(run)
+
+        # Filter the artifacts, and return the download URL
+        artifacts = client.get(url)
+        artifacts.raise_for_status()
+
+        for artifact in artifacts.json()["artifacts"]:
+            if artifact["name"] == artifact_name:
+                data = client.get(artifact["archive_download_url"])
+                if data.status_code == 302:
+                    return str(data.next_request.url)
+
+                # The following line is left untested since it should in theory be impossible
+                data.raise_for_status()  # pragma: no cover
+
+        raise NotFoundError("Could not find an artifact matching the provided name.")
+
+    finally:
+        client.close()
diff --git a/pydis_site/apps/api/migrations/0013_specialsnake_image.py b/pydis_site/apps/api/migrations/0013_specialsnake_image.py
index a0d0d318..8ba3432f 100644
--- a/pydis_site/apps/api/migrations/0013_specialsnake_image.py
+++ b/pydis_site/apps/api/migrations/0013_specialsnake_image.py
@@ -2,7 +2,6 @@
import datetime

from django.db import migrations, models
-from django.utils.timezone import utc


class Migration(migrations.Migration):
@@ -15,7 +14,7 @@ class Migration(migrations.Migration):
        migrations.AddField(
            model_name='specialsnake',
            name='image',
-            field=models.URLField(default=datetime.datetime(2018, 10, 23, 11, 51, 23, 703868, tzinfo=utc)),
+            field=models.URLField(default=datetime.datetime(2018, 10, 23, 11, 51, 23, 703868, tzinfo=datetime.timezone.utc)),
            preserve_default=False,
        ),
    ]
diff --git a/pydis_site/apps/api/migrations/0019_deletedmessage.py b/pydis_site/apps/api/migrations/0019_deletedmessage.py
index 6b848d64..25d04434 100644
--- a/pydis_site/apps/api/migrations/0019_deletedmessage.py
+++ b/pydis_site/apps/api/migrations/0019_deletedmessage.py
@@ -18,7 +18,7 @@
                ('id', models.BigIntegerField(help_text='The message ID as taken from Discord.', primary_key=True, serialize=False, validators=[django.core.validators.MinValueValidator(limit_value=0, message='Message IDs cannot be negative.')])),
                ('channel_id', models.BigIntegerField(help_text='The channel ID that this message was sent in, taken from Discord.', validators=[django.core.validators.MinValueValidator(limit_value=0, message='Channel IDs cannot be negative.')])),
                ('content', models.CharField(help_text='The content of this message, taken from Discord.', max_length=2000)),
-                ('embeds', django.contrib.postgres.fields.ArrayField(base_field=django.contrib.postgres.fields.jsonb.JSONField(validators=[pydis_site.apps.api.models.utils.validate_embed]), help_text='Embeds attached to this message.', size=None)),
+                ('embeds', django.contrib.postgres.fields.ArrayField(base_field=django.contrib.postgres.fields.jsonb.JSONField(validators=[]), help_text='Embeds attached to this message.', size=None)),
                ('author', models.ForeignKey(help_text='The author of this message.', on_delete=django.db.models.deletion.CASCADE, to='api.User')),
                ('deletion_context', models.ForeignKey(help_text='The deletion context this message is part of.', on_delete=django.db.models.deletion.CASCADE, to='api.MessageDeletionContext')),
            ],
diff --git a/pydis_site/apps/api/migrations/0051_allow_blank_message_embeds.py b/pydis_site/apps/api/migrations/0051_allow_blank_message_embeds.py
index 124c6a57..622f21d1 100644
--- a/pydis_site/apps/api/migrations/0051_allow_blank_message_embeds.py
+++ b/pydis_site/apps/api/migrations/0051_allow_blank_message_embeds.py
@@ -3,7 +3,6 @@
import django.contrib.postgres.fields
import django.contrib.postgres.fields.jsonb
from django.db import migrations
-import
pydis_site.apps.api.models.utils class Migration(migrations.Migration): @@ -16,6 +15,6 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='deletedmessage', name='embeds', - field=django.contrib.postgres.fields.ArrayField(base_field=django.contrib.postgres.fields.jsonb.JSONField(validators=[pydis_site.apps.api.models.utils.validate_embed]), blank=True, help_text='Embeds attached to this message.', size=None), + field=django.contrib.postgres.fields.ArrayField(base_field=django.contrib.postgres.fields.jsonb.JSONField(validators=[]), blank=True, help_text='Embeds attached to this message.', size=None), ), ] diff --git a/pydis_site/apps/api/migrations/0077_use_generic_jsonfield.py b/pydis_site/apps/api/migrations/0077_use_generic_jsonfield.py index 9e8f2fb9..95ef5850 100644 --- a/pydis_site/apps/api/migrations/0077_use_generic_jsonfield.py +++ b/pydis_site/apps/api/migrations/0077_use_generic_jsonfield.py @@ -2,7 +2,6 @@ import django.contrib.postgres.fields from django.db import migrations, models -import pydis_site.apps.api.models.utils class Migration(migrations.Migration): @@ -20,6 +19,6 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='deletedmessage', name='embeds', - field=django.contrib.postgres.fields.ArrayField(base_field=models.JSONField(validators=[pydis_site.apps.api.models.utils.validate_embed]), blank=True, help_text='Embeds attached to this message.', size=None), + field=django.contrib.postgres.fields.ArrayField(base_field=models.JSONField(validators=[]), blank=True, help_text='Embeds attached to this message.', size=None), ), ] diff --git a/pydis_site/apps/api/migrations/0083_remove_embed_validation.py b/pydis_site/apps/api/migrations/0083_remove_embed_validation.py new file mode 100644 index 00000000..e835bb66 --- /dev/null +++ b/pydis_site/apps/api/migrations/0083_remove_embed_validation.py @@ -0,0 +1,19 @@ +# Generated by Django 3.1.14 on 2022-06-30 09:41 + +import django.contrib.postgres.fields +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('api', '0082_otn_allow_big_solidus'), + ] + + operations = [ + migrations.AlterField( + model_name='deletedmessage', + name='embeds', + field=django.contrib.postgres.fields.ArrayField(base_field=models.JSONField(), blank=True, help_text='Embeds attached to this message.', size=None), + ), + ] diff --git a/pydis_site/apps/api/migrations/0084_infraction_last_applied.py b/pydis_site/apps/api/migrations/0084_infraction_last_applied.py new file mode 100644 index 00000000..7704ddb8 --- /dev/null +++ b/pydis_site/apps/api/migrations/0084_infraction_last_applied.py @@ -0,0 +1,26 @@ +# Generated by Django 4.0.6 on 2022-07-27 20:32 + +import django.utils.timezone +from django.db import migrations, models +from django.apps.registry import Apps + + +def set_last_applied_to_inserted_at(apps: Apps, schema_editor): + Infractions = apps.get_model("api", "infraction") + Infractions.objects.all().update(last_applied=models.F("inserted_at")) + + +class Migration(migrations.Migration): + + dependencies = [ + ('api', '0083_remove_embed_validation'), + ] + + operations = [ + migrations.AddField( + model_name='infraction', + name='last_applied', + field=models.DateTimeField(default=django.utils.timezone.now, help_text='The date and time of when this infraction was last applied.'), + ), + migrations.RunPython(set_last_applied_to_inserted_at) + ] diff --git a/pydis_site/apps/api/models/bot/infraction.py b/pydis_site/apps/api/models/bot/infraction.py 
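The `RunPython` step in `0084_infraction_last_applied.py` above backfills the new column without loading any rows into Python: `models.F("inserted_at")` makes the database copy each row's `inserted_at` into `last_applied` in a single UPDATE. Outside a migration, the equivalent ORM call would look roughly like this (illustrative sketch, not part of the diff):

    from django.db import models

    from pydis_site.apps.api.models import Infraction

    # Roughly: UPDATE api_infraction SET last_applied = inserted_at;
    Infraction.objects.update(last_applied=models.F("inserted_at"))

Note that no reverse callable is passed to `migrations.RunPython`, so the backfill cannot be unapplied; `migrations.RunPython.noop` would be the usual choice if reversibility were needed.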
index c9303024..218ee5ec 100644 --- a/pydis_site/apps/api/models/bot/infraction.py +++ b/pydis_site/apps/api/models/bot/infraction.py @@ -23,6 +23,12 @@ class Infraction(ModelReprMixin, models.Model): default=timezone.now, help_text="The date and time of the creation of this infraction." ) + last_applied = models.DateTimeField( + # This default is for backwards compatibility with bot versions + # that don't explicitly give a value. + default=timezone.now, + help_text="The date and time of when this infraction was last applied." + ) expires_at = models.DateTimeField( null=True, help_text=( diff --git a/pydis_site/apps/api/models/bot/message.py b/pydis_site/apps/api/models/bot/message.py index bab3368d..89ae27e4 100644 --- a/pydis_site/apps/api/models/bot/message.py +++ b/pydis_site/apps/api/models/bot/message.py @@ -1,13 +1,11 @@ -from datetime import datetime +import datetime from django.contrib.postgres import fields as pgfields from django.core.validators import MinValueValidator from django.db import models -from django.utils import timezone from pydis_site.apps.api.models.bot.user import User from pydis_site.apps.api.models.mixins import ModelReprMixin -from pydis_site.apps.api.models.utils import validate_embed class Message(ModelReprMixin, models.Model): @@ -48,9 +46,7 @@ class Message(ModelReprMixin, models.Model): blank=True ) embeds = pgfields.ArrayField( - models.JSONField( - validators=(validate_embed,) - ), + models.JSONField(), blank=True, help_text="Embeds attached to this message." ) @@ -63,11 +59,11 @@ class Message(ModelReprMixin, models.Model): ) @property - def timestamp(self) -> datetime: + def timestamp(self) -> datetime.datetime: """Attribute that represents the message timestamp as derived from the snowflake id.""" - tz_naive_datetime = datetime.utcfromtimestamp(((self.id >> 22) + 1420070400000) / 1000) - tz_aware_datetime = timezone.make_aware(tz_naive_datetime, timezone=timezone.utc) - return tz_aware_datetime + return datetime.datetime.utcfromtimestamp( + ((self.id >> 22) + 1420070400000) / 1000 + ).replace(tzinfo=datetime.timezone.utc) class Meta: """Metadata provided for Django's ORM.""" diff --git a/pydis_site/apps/api/models/bot/metricity.py b/pydis_site/apps/api/models/bot/metricity.py index abd25ef0..f53dd33c 100644 --- a/pydis_site/apps/api/models/bot/metricity.py +++ b/pydis_site/apps/api/models/bot/metricity.py @@ -130,3 +130,31 @@ class Metricity: raise NotFoundError() return values + + def total_messages_in_past_n_days( + self, + user_ids: list[str], + days: int + ) -> list[tuple[str, int]]: + """ + Query activity by a list of users in the past `days` days. + + Returns a list of (user_id, message_count) tuples. 
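A hypothetical call, assuming a `Metricity` instance with an open cursor to the metricity database (the IDs and counts here are made up):

    metricity = Metricity()
    counts = metricity.total_messages_in_past_n_days(["409107086526644234"], days=7)
    # e.g. [("409107086526644234", 42)] -- one tuple per user that sent any messages
    for user_id, message_count in counts:
        print(user_id, message_count)

In the query that follows, psycopg2 performs the `%s` substitution itself: `tuple(user_ids)` is adapted to a parenthesised list for the `IN` clause, and `days` is interpolated into the `interval` literal.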
+ """ + self.cursor.execute( + """ + SELECT + author_id, COUNT(*) + FROM messages + WHERE + author_id IN %s + AND NOT is_deleted + AND channel_id NOT IN %s + AND created_at > now() - interval '%s days' + GROUP BY author_id + """, + [tuple(user_ids), EXCLUDE_CHANNELS, days] + ) + values = self.cursor.fetchall() + + return values diff --git a/pydis_site/apps/api/models/utils.py b/pydis_site/apps/api/models/utils.py deleted file mode 100644 index 859394d2..00000000 --- a/pydis_site/apps/api/models/utils.py +++ /dev/null @@ -1,172 +0,0 @@ -from collections.abc import Mapping -from typing import Any, Dict - -from django.core.exceptions import ValidationError -from django.core.validators import MaxLengthValidator, MinLengthValidator - - -def is_bool_validator(value: Any) -> None: - """Validates if a given value is of type bool.""" - if not isinstance(value, bool): - raise ValidationError(f"This field must be of type bool, not {type(value)}.") - - -def validate_embed_fields(fields: dict) -> None: - """Raises a ValidationError if any of the given embed fields is invalid.""" - field_validators = { - 'name': (MaxLengthValidator(limit_value=256),), - 'value': (MaxLengthValidator(limit_value=1024),), - 'inline': (is_bool_validator,), - } - - required_fields = ('name', 'value') - - for field in fields: - if not isinstance(field, Mapping): - raise ValidationError("Embed fields must be a mapping.") - - if not all(required_field in field for required_field in required_fields): - raise ValidationError( - f"Embed fields must contain the following fields: {', '.join(required_fields)}." - ) - - for field_name, value in field.items(): - if field_name not in field_validators: - raise ValidationError(f"Unknown embed field field: {field_name!r}.") - - for validator in field_validators[field_name]: - validator(value) - - -def validate_embed_footer(footer: Dict[str, str]) -> None: - """Raises a ValidationError if the given footer is invalid.""" - field_validators = { - 'text': ( - MinLengthValidator( - limit_value=1, - message="Footer text must not be empty." - ), - MaxLengthValidator(limit_value=2048) - ), - 'icon_url': (), - 'proxy_icon_url': () - } - - if not isinstance(footer, Mapping): - raise ValidationError("Embed footer must be a mapping.") - - for field_name, value in footer.items(): - if field_name not in field_validators: - raise ValidationError(f"Unknown embed footer field: {field_name!r}.") - - for validator in field_validators[field_name]: - validator(value) - - -def validate_embed_author(author: Any) -> None: - """Raises a ValidationError if the given author is invalid.""" - field_validators = { - 'name': ( - MinLengthValidator( - limit_value=1, - message="Embed author name must not be empty." - ), - MaxLengthValidator(limit_value=256) - ), - 'url': (), - 'icon_url': (), - 'proxy_icon_url': () - } - - if not isinstance(author, Mapping): - raise ValidationError("Embed author must be a mapping.") - - for field_name, value in author.items(): - if field_name not in field_validators: - raise ValidationError(f"Unknown embed author field: {field_name!r}.") - - for validator in field_validators[field_name]: - validator(value) - - -def validate_embed(embed: Any) -> None: - """ - Validate a JSON document containing an embed as possible to send on Discord. - - This attempts to rebuild the validation used by Discord - as well as possible by checking for various embed limits so we can - ensure that any embed we store here will also be accepted as a - valid embed by the Discord API. 
- - Using this directly is possible, although not intended - you usually - stick this onto the `validators` keyword argument of model fields. - - Example: - - >>> from django.db import models - >>> from pydis_site.apps.api.models.utils import validate_embed - >>> class MyMessage(models.Model): - ... embed = models.JSONField( - ... validators=( - ... validate_embed, - ... ) - ... ) - ... # ... - ... - - Args: - embed (Any): - A dictionary describing the contents of this embed. - See the official documentation for a full reference - of accepted keys by this dictionary: - https://discordapp.com/developers/docs/resources/channel#embed-object - - Raises: - ValidationError: - In case the given embed is deemed invalid, a `ValidationError` - is raised which in turn will allow Django to display errors - as appropriate. - """ - all_keys = { - 'title', 'type', 'description', 'url', 'timestamp', - 'color', 'footer', 'image', 'thumbnail', 'video', - 'provider', 'author', 'fields' - } - one_required_of = {'description', 'fields', 'image', 'title', 'video'} - field_validators = { - 'title': ( - MinLengthValidator( - limit_value=1, - message="Embed title must not be empty." - ), - MaxLengthValidator(limit_value=256) - ), - 'description': (MaxLengthValidator(limit_value=4096),), - 'fields': ( - MaxLengthValidator(limit_value=25), - validate_embed_fields - ), - 'footer': (validate_embed_footer,), - 'author': (validate_embed_author,) - } - - if not embed: - raise ValidationError("Tag embed must not be empty.") - - elif not isinstance(embed, Mapping): - raise ValidationError("Tag embed must be a mapping.") - - elif not any(field in embed for field in one_required_of): - raise ValidationError(f"Tag embed must contain one of the fields {one_required_of}.") - - for required_key in one_required_of: - if required_key in embed and not embed[required_key]: - raise ValidationError(f"Key {required_key!r} must not be empty.") - - for field_name, value in embed.items(): - if field_name not in all_keys: - raise ValidationError(f"Unknown field name: {field_name!r}") - - if field_name in field_validators: - for validator in field_validators[field_name]: - validator(value) diff --git a/pydis_site/apps/api/pagination.py b/pydis_site/apps/api/pagination.py index 2a325460..61707d33 100644 --- a/pydis_site/apps/api/pagination.py +++ b/pydis_site/apps/api/pagination.py @@ -1,7 +1,6 @@ -import typing - from rest_framework.pagination import LimitOffsetPagination from rest_framework.response import Response +from rest_framework.utils.serializer_helpers import ReturnList class LimitOffsetPaginationExtended(LimitOffsetPagination): @@ -44,6 +43,6 @@ class LimitOffsetPaginationExtended(LimitOffsetPagination): default_limit = 100 - def get_paginated_response(self, data: typing.Any) -> Response: + def get_paginated_response(self, data: ReturnList) -> Response: """Override to skip metadata i.e. 
`count`, `next`, and `previous`.""" return Response(data) diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index e53ccffa..9228c1f4 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -176,6 +176,7 @@ class InfractionSerializer(ModelSerializer): fields = ( 'id', 'inserted_at', + 'last_applied', 'expires_at', 'active', 'user', diff --git a/pydis_site/apps/api/tests/migrations/__init__.py b/pydis_site/apps/api/tests/migrations/__init__.py deleted file mode 100644 index 38e42ffc..00000000 --- a/pydis_site/apps/api/tests/migrations/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""This submodule contains tests for functions used in data migrations.""" diff --git a/pydis_site/apps/api/tests/migrations/base.py b/pydis_site/apps/api/tests/migrations/base.py deleted file mode 100644 index 0c0a5bd0..00000000 --- a/pydis_site/apps/api/tests/migrations/base.py +++ /dev/null @@ -1,102 +0,0 @@ -"""Includes utilities for testing migrations.""" -from django.db import connection -from django.db.migrations.executor import MigrationExecutor -from django.test import TestCase - - -class MigrationsTestCase(TestCase): - """ - A `TestCase` subclass to test migration files. - - To be able to properly test a migration, we will need to inject data into the test database - before the migrations we want to test are applied, but after the older migrations have been - applied. This makes sure that we are testing "as if" we were actually applying this migration - to a database in the state it was in before introducing the new migration. - - To set up a MigrationsTestCase, create a subclass of this class and set the following - class-level attributes: - - - app: The name of the app that contains the migrations (e.g., `'api'`) - - migration_prior: The name* of the last migration file before the migrations you want to test - - migration_target: The name* of the last migration file we want to test - - *) Specify the file names without a path or the `.py` file extension. - - Additionally, overwrite the `setUpMigrationData` in the subclass to inject data into the - database before the migrations we want to test are applied. Please read the docstring of the - method for more information. An optional hook, `setUpPostMigrationData` is also provided. - """ - - # These class-level attributes should be set in classes that inherit from this base class. - app = None - migration_prior = None - migration_target = None - - @classmethod - def setUpTestData(cls): - """ - Injects data into the test database prior to the migration we're trying to test. - - This class methods reverts the test database back to the state of the last migration file - prior to the migrations we want to test. It will then allow the user to inject data into the - test database by calling the `setUpMigrationData` hook. After the data has been injected, it - will apply the migrations we want to test and call the `setUpPostMigrationData` hook. The - user can now test if the migration correctly migrated the injected test data. 
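For context, a subclass of this (now removed) helper looked roughly like the following; the migration names are taken from elsewhere in this diff, and the test body is an illustrative sketch rather than an actual test from the repository:

    class LastAppliedMigrationTests(MigrationsTestCase):
        app = "api"
        migration_prior = "0083_remove_embed_validation"
        migration_target = "0084_infraction_last_applied"

        @classmethod
        def setUpMigrationData(cls, apps):
            # Use the historical model state, never a direct model import.
            Infraction = apps.get_model("api", "Infraction")
            ...  # create Infraction rows that predate the last_applied field

        def test_last_applied_matches_inserted_at(self):
            # cls.apps was saved by setUpTestData after migrating forward.
            Infraction = self.apps.get_model("api", "Infraction")
            for infraction in Infraction.objects.all():
                self.assertEqual(infraction.last_applied, infraction.inserted_at)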
- """ - if not cls.app: - raise ValueError("The `app` attribute was not set.") - - if not cls.migration_prior or not cls.migration_target: - raise ValueError("Both ` migration_prior` and `migration_target` need to be set.") - - cls.migrate_from = [(cls.app, cls.migration_prior)] - cls.migrate_to = [(cls.app, cls.migration_target)] - - # Reverse to database state prior to the migrations we want to test - executor = MigrationExecutor(connection) - executor.migrate(cls.migrate_from) - - # Call the data injection hook with the current state of the project - old_apps = executor.loader.project_state(cls.migrate_from).apps - cls.setUpMigrationData(old_apps) - - # Run the migrations we want to test - executor = MigrationExecutor(connection) - executor.loader.build_graph() - executor.migrate(cls.migrate_to) - - # Save the project state so we're able to work with the correct model states - cls.apps = executor.loader.project_state(cls.migrate_to).apps - - # Call `setUpPostMigrationData` to potentially set up post migration data used in testing - cls.setUpPostMigrationData(cls.apps) - - @classmethod - def setUpMigrationData(cls, apps): - """ - Override this method to inject data into the test database before the migration is applied. - - This method will be called after setting up the database according to the migrations that - come before the migration(s) we are trying to test, but before the to-be-tested migration(s) - are applied. This allows us to simulate a database state just prior to the migrations we are - trying to test. - - To make sure we're creating objects according to the state the models were in at this point - in the migration history, use `apps.get_model(app_name: str, model_name: str)` to get the - appropriate model, e.g.: - - >>> Infraction = apps.get_model('api', 'Infraction') - """ - pass - - @classmethod - def setUpPostMigrationData(cls, apps): - """ - Set up additional test data after the target migration has been applied. 
- - Use `apps.get_model(app_name: str, model_name: str)` to get the correct instances of the - model classes: - - >>> Infraction = apps.get_model('api', 'Infraction') - """ - pass diff --git a/pydis_site/apps/api/tests/migrations/test_active_infraction_migration.py b/pydis_site/apps/api/tests/migrations/test_active_infraction_migration.py deleted file mode 100644 index 8dc29b34..00000000 --- a/pydis_site/apps/api/tests/migrations/test_active_infraction_migration.py +++ /dev/null @@ -1,496 +0,0 @@ -"""Tests for the data migration in `filename`.""" -import logging -from collections import ChainMap, namedtuple -from datetime import timedelta -from itertools import count -from typing import Dict, Iterable, Type, Union - -from django.db.models import Q -from django.forms.models import model_to_dict -from django.utils import timezone - -from pydis_site.apps.api.models import Infraction, User -from .base import MigrationsTestCase - -log = logging.getLogger(__name__) -log.setLevel(logging.DEBUG) - - -InfractionHistory = namedtuple('InfractionHistory', ("user_id", "infraction_history")) - - -class InfractionFactory: - """Factory that creates infractions for a User instance.""" - - infraction_id = count(1) - user_id = count(1) - default_values = { - 'active': True, - 'expires_at': None, - 'hidden': False, - } - - @classmethod - def create( - cls, - actor: User, - infractions: Iterable[Dict[str, Union[str, int, bool]]], - infraction_model: Type[Infraction] = Infraction, - user_model: Type[User] = User, - ) -> InfractionHistory: - """ - Creates `infractions` for the `user` with the given `actor`. - - The `infractions` dictionary can contain the following fields: - - `type` (required) - - `active` (default: True) - - `expires_at` (default: None; i.e, permanent) - - `hidden` (default: False). - - The parameters `infraction_model` and `user_model` can be used to pass in an instance of - both model classes from a different migration/project state. 
- """ - user_id = next(cls.user_id) - user = user_model.objects.create( - id=user_id, - name=f"Infracted user {user_id}", - discriminator=user_id, - avatar_hash=None, - ) - infraction_history = [] - - for infraction in infractions: - infraction = dict(infraction) - infraction["id"] = next(cls.infraction_id) - infraction = ChainMap(infraction, cls.default_values) - new_infraction = infraction_model.objects.create( - user=user, - actor=actor, - type=infraction["type"], - reason=f"`{infraction['type']}` infraction (ID: {infraction['id']} of {user}", - active=infraction['active'], - hidden=infraction['hidden'], - expires_at=infraction['expires_at'], - ) - infraction_history.append(new_infraction) - - return InfractionHistory(user_id=user_id, infraction_history=infraction_history) - - -class InfractionFactoryTests(MigrationsTestCase): - """Tests for the InfractionFactory.""" - - app = "api" - migration_prior = "0046_reminder_jump_url" - migration_target = "0046_reminder_jump_url" - - @classmethod - def setUpPostMigrationData(cls, apps): - """Create a default actor for all infractions.""" - cls.infraction_model = apps.get_model('api', 'Infraction') - cls.user_model = apps.get_model('api', 'User') - - cls.actor = cls.user_model.objects.create( - id=9999, - name="Unknown Moderator", - discriminator=1040, - avatar_hash=None, - ) - - def test_infraction_factory_total_count(self): - """Does the test database hold as many infractions as we tried to create?""" - InfractionFactory.create( - actor=self.actor, - infractions=( - {'type': 'kick', 'active': False, 'hidden': False}, - {'type': 'ban', 'active': True, 'hidden': False}, - {'type': 'note', 'active': False, 'hidden': True}, - ), - infraction_model=self.infraction_model, - user_model=self.user_model, - ) - database_count = Infraction.objects.all().count() - self.assertEqual(3, database_count) - - def test_infraction_factory_multiple_users(self): - """Does the test database hold as many infractions as we tried to create?""" - for _user in range(5): - InfractionFactory.create( - actor=self.actor, - infractions=( - {'type': 'kick', 'active': False, 'hidden': True}, - {'type': 'ban', 'active': True, 'hidden': False}, - ), - infraction_model=self.infraction_model, - user_model=self.user_model, - ) - - # Check if infractions and users are recorded properly in the database - database_count = Infraction.objects.all().count() - self.assertEqual(database_count, 10) - - user_count = User.objects.all().count() - self.assertEqual(user_count, 5 + 1) - - def test_infraction_factory_sets_correct_fields(self): - """Does the InfractionFactory set the correct attributes?""" - infractions = ( - { - 'type': 'note', - 'active': False, - 'hidden': True, - 'expires_at': timezone.now() - }, - {'type': 'warning', 'active': False, 'hidden': False, 'expires_at': None}, - {'type': 'watch', 'active': False, 'hidden': True, 'expires_at': None}, - {'type': 'mute', 'active': True, 'hidden': False, 'expires_at': None}, - {'type': 'kick', 'active': True, 'hidden': True, 'expires_at': None}, - {'type': 'ban', 'active': True, 'hidden': False, 'expires_at': None}, - { - 'type': 'superstar', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() - }, - ) - - InfractionFactory.create( - actor=self.actor, - infractions=infractions, - infraction_model=self.infraction_model, - user_model=self.user_model, - ) - - for infraction in infractions: - with self.subTest(**infraction): - self.assertTrue(Infraction.objects.filter(**infraction).exists()) - - -class 
ActiveInfractionMigrationTests(MigrationsTestCase): - """ - Tests the active infraction data migration. - - The active infraction data migration should do the following things: - - 1. migrates all active notes, warnings, and kicks to an inactive status; - 2. migrates all users with multiple active infractions of a single type to have only one active - infraction of that type. The infraction with the longest duration stays active. - """ - - app = "api" - migration_prior = "0046_reminder_jump_url" - migration_target = "0047_active_infractions_migration" - - @classmethod - def setUpMigrationData(cls, apps): - """Sets up an initial database state that contains the relevant test cases.""" - # Fetch the Infraction and User model in the current migration state - cls.infraction_model = apps.get_model('api', 'Infraction') - cls.user_model = apps.get_model('api', 'User') - - cls.created_infractions = {} - - # Moderator that serves as actor for all infractions - cls.user_moderator = cls.user_model.objects.create( - id=9999, - name="Olivier de Vienne", - discriminator=1040, - avatar_hash=None, - ) - - # User #1: clean user with no infractions - cls.created_infractions["no infractions"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=[], - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #2: One inactive note infraction - cls.created_infractions["one inactive note"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - {'type': 'note', 'active': False, 'hidden': True}, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #3: One active note infraction - cls.created_infractions["one active note"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - {'type': 'note', 'active': True, 'hidden': True}, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #4: One active and one inactive note infraction - cls.created_infractions["one active and one inactive note"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - {'type': 'note', 'active': False, 'hidden': True}, - {'type': 'note', 'active': True, 'hidden': True}, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #5: Once active note, one active kick, once active warning - cls.created_infractions["active note, kick, warning"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - {'type': 'note', 'active': True, 'hidden': True}, - {'type': 'kick', 'active': True, 'hidden': True}, - {'type': 'warning', 'active': True, 'hidden': True}, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #6: One inactive ban and one active ban - cls.created_infractions["one inactive and one active ban"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - {'type': 'ban', 'active': False, 'hidden': True}, - {'type': 'ban', 'active': True, 'hidden': True}, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #7: Two active permanent bans - cls.created_infractions["two active perm bans"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - {'type': 'ban', 'active': True, 'hidden': True}, - {'type': 'ban', 'active': True, 'hidden': True}, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #8: Multiple active temporary bans - cls.created_infractions["multiple active temp bans"] = 
InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() + timedelta(days=1) - }, - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() + timedelta(days=10) - }, - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() + timedelta(days=20) - }, - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() + timedelta(days=5) - }, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #9: One active permanent ban, two active temporary bans - cls.created_infractions["active perm, two active temp bans"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() + timedelta(days=10) - }, - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': None, - }, - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() + timedelta(days=7) - }, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #10: One inactive permanent ban, two active temporary bans - cls.created_infractions["one inactive perm ban, two active temp bans"] = ( - InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() + timedelta(days=10) - }, - { - 'type': 'ban', - 'active': False, - 'hidden': True, - 'expires_at': None, - }, - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() + timedelta(days=7) - }, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - ) - - # User #11: Active ban, active mute, active superstar - cls.created_infractions["active ban, mute, and superstar"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - {'type': 'ban', 'active': True, 'hidden': True}, - {'type': 'mute', 'active': True, 'hidden': True}, - {'type': 'superstar', 'active': True, 'hidden': True}, - {'type': 'watch', 'active': True, 'hidden': True}, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #12: Multiple active bans, active mutes, active superstars - cls.created_infractions["multiple active bans, mutes, stars"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - {'type': 'ban', 'active': True, 'hidden': True}, - {'type': 'ban', 'active': True, 'hidden': True}, - {'type': 'ban', 'active': True, 'hidden': True}, - {'type': 'mute', 'active': True, 'hidden': True}, - {'type': 'mute', 'active': True, 'hidden': True}, - {'type': 'mute', 'active': True, 'hidden': True}, - {'type': 'superstar', 'active': True, 'hidden': True}, - {'type': 'superstar', 'active': True, 'hidden': True}, - {'type': 'superstar', 'active': True, 'hidden': True}, - {'type': 'watch', 'active': True, 'hidden': True}, - {'type': 'watch', 'active': True, 'hidden': True}, - {'type': 'watch', 'active': True, 'hidden': True}, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - def test_all_never_active_types_became_inactive(self): - """Are all infractions of a non-active type inactive after the migration?""" - inactive_type_query = Q(type="note") | Q(type="warning") | Q(type="kick") - self.assertFalse( - self.infraction_model.objects.filter(inactive_type_query, active=True).exists() - ) - - def 
test_migration_left_clean_user_without_infractions(self): - """Do users without infractions have no infractions after the migration?""" - user_id, infraction_history = self.created_infractions["no infractions"] - self.assertFalse( - self.infraction_model.objects.filter(user__id=user_id).exists() - ) - - def test_migration_left_user_with_inactive_note_untouched(self): - """Did the migration leave users with only an inactive note untouched?""" - user_id, infraction_history = self.created_infractions["one inactive note"] - inactive_note = infraction_history[0] - self.assertTrue( - self.infraction_model.objects.filter(**model_to_dict(inactive_note)).exists() - ) - - def test_migration_only_touched_active_field_of_active_note(self): - """Does the migration only change the `active` field?""" - user_id, infraction_history = self.created_infractions["one active note"] - note = model_to_dict(infraction_history[0]) - note['active'] = False - self.assertTrue( - self.infraction_model.objects.filter(**note).exists() - ) - - def test_migration_only_touched_active_field_of_active_note_left_inactive_untouched(self): - """Does the migration only change the `active` field of active notes?""" - user_id, infraction_history = self.created_infractions["one active and one inactive note"] - for note in infraction_history: - with self.subTest(active=note.active): - note = model_to_dict(note) - note['active'] = False - self.assertTrue( - self.infraction_model.objects.filter(**note).exists() - ) - - def test_migration_migrates_all_nonactive_types_to_inactive(self): - """Do we set the `active` field of all non-active infractions to `False`?""" - user_id, infraction_history = self.created_infractions["active note, kick, warning"] - self.assertFalse( - self.infraction_model.objects.filter(user__id=user_id, active=True).exists() - ) - - def test_migration_leaves_user_with_one_active_ban_untouched(self): - """Do we leave a user with one active and one inactive ban untouched?""" - user_id, infraction_history = self.created_infractions["one inactive and one active ban"] - for infraction in infraction_history: - with self.subTest(active=infraction.active): - self.assertTrue( - self.infraction_model.objects.filter(**model_to_dict(infraction)).exists() - ) - - def test_migration_turns_double_active_perm_ban_into_single_active_perm_ban(self): - """Does the migration turn two active permanent bans into one active permanent ban?""" - user_id, infraction_history = self.created_infractions["two active perm bans"] - active_count = self.infraction_model.objects.filter(user__id=user_id, active=True).count() - self.assertEqual(active_count, 1) - - def test_migration_leaves_temporary_ban_with_longest_duration_active(self): - """Does the migration turn two active permanent bans into one active permanent ban?""" - user_id, infraction_history = self.created_infractions["multiple active temp bans"] - active_ban = self.infraction_model.objects.get(user__id=user_id, active=True) - self.assertEqual(active_ban.expires_at, infraction_history[2].expires_at) - - def test_migration_leaves_permanent_ban_active(self): - """Does the migration leave the permanent ban active?""" - user_id, infraction_history = self.created_infractions["active perm, two active temp bans"] - active_ban = self.infraction_model.objects.get(user__id=user_id, active=True) - self.assertIsNone(active_ban.expires_at) - - def test_migration_leaves_longest_temp_ban_active_with_inactive_permanent_ban(self): - """Does the longest temp ban stay active, even with an inactive perm ban 
present?""" - user_id, infraction_history = self.created_infractions[ - "one inactive perm ban, two active temp bans" - ] - active_ban = self.infraction_model.objects.get(user__id=user_id, active=True) - self.assertEqual(active_ban.expires_at, infraction_history[0].expires_at) - - def test_migration_leaves_all_active_types_active_if_one_of_each_exists(self): - """Do all active infractions stay active if only one of each is present?""" - user_id, infraction_history = self.created_infractions["active ban, mute, and superstar"] - active_count = self.infraction_model.objects.filter(user__id=user_id, active=True).count() - self.assertEqual(active_count, 4) - - def test_migration_reduces_all_active_types_to_a_single_active_infraction(self): - """Do we reduce all of the infraction types to one active infraction?""" - user_id, infraction_history = self.created_infractions["multiple active bans, mutes, stars"] - active_infractions = self.infraction_model.objects.filter(user__id=user_id, active=True) - self.assertEqual(len(active_infractions), 4) - types_observed = [infraction.type for infraction in active_infractions] - - for infraction_type in ('ban', 'mute', 'superstar', 'watch'): - with self.subTest(type=infraction_type): - self.assertIn(infraction_type, types_observed) diff --git a/pydis_site/apps/api/tests/migrations/test_base.py b/pydis_site/apps/api/tests/migrations/test_base.py deleted file mode 100644 index f69bc92c..00000000 --- a/pydis_site/apps/api/tests/migrations/test_base.py +++ /dev/null @@ -1,135 +0,0 @@ -import logging -from unittest.mock import call, patch - -from django.db.migrations.loader import MigrationLoader -from django.test import TestCase - -from .base import MigrationsTestCase, connection - -log = logging.getLogger(__name__) - - -class SpanishInquisition(MigrationsTestCase): - app = "api" - migration_prior = "scragly" - migration_target = "kosa" - - -@patch("pydis_site.apps.api.tests.migrations.base.MigrationExecutor") -class MigrationsTestCaseNoSideEffectsTests(TestCase): - """Tests the MigrationTestCase class with actual migration side effects disabled.""" - - def setUp(self): - """Set up an instance of MigrationsTestCase for use in tests.""" - self.test_case = SpanishInquisition() - - def test_missing_app_class_raises_value_error(self, _migration_executor): - """A MigrationsTestCase subclass should set the class-attribute `app`.""" - class Spam(MigrationsTestCase): - pass - - spam = Spam() - with self.assertRaises(ValueError, msg="The `app` attribute was not set."): - spam.setUpTestData() - - def test_missing_migration_class_attributes_raise_value_error(self, _migration_executor): - """A MigrationsTestCase subclass should set both `migration_prior` and `migration_target`""" - class Eggs(MigrationsTestCase): - app = "api" - migration_target = "lemon" - - class Bacon(MigrationsTestCase): - app = "api" - migration_prior = "mark" - - instances = (Eggs(), Bacon()) - - exception_message = "Both ` migration_prior` and `migration_target` need to be set." 
- for instance in instances: - with self.subTest( - migration_prior=instance.migration_prior, - migration_target=instance.migration_target, - ): - with self.assertRaises(ValueError, msg=exception_message): - instance.setUpTestData() - - @patch(f"{__name__}.SpanishInquisition.setUpMigrationData") - @patch(f"{__name__}.SpanishInquisition.setUpPostMigrationData") - def test_migration_data_hooks_are_called_once(self, pre_hook, post_hook, _migration_executor): - """The `setUpMigrationData` and `setUpPostMigrationData` hooks should be called once.""" - self.test_case.setUpTestData() - for hook in (pre_hook, post_hook): - with self.subTest(hook=repr(hook)): - hook.assert_called_once() - - def test_migration_executor_is_instantiated_twice(self, migration_executor): - """The `MigrationExecutor` should be instantiated with the database connection twice.""" - self.test_case.setUpTestData() - - expected_args = [call(connection), call(connection)] - self.assertEqual(migration_executor.call_args_list, expected_args) - - def test_project_state_is_loaded_for_correct_migration_files_twice(self, migration_executor): - """The `project_state` should first be loaded with `migrate_from`, then `migrate_to`.""" - self.test_case.setUpTestData() - - expected_args = [call(self.test_case.migrate_from), call(self.test_case.migrate_to)] - self.assertEqual(migration_executor().loader.project_state.call_args_list, expected_args) - - def test_loader_build_graph_gets_called_once(self, migration_executor): - """We should rebuild the migration graph before applying the second set of migrations.""" - self.test_case.setUpTestData() - - migration_executor().loader.build_graph.assert_called_once() - - def test_migration_executor_migrate_method_is_called_correctly_twice(self, migration_executor): - """The migrate method of the executor should be called twice with the correct arguments.""" - self.test_case.setUpTestData() - - self.assertEqual(migration_executor().migrate.call_count, 2) - calls = [call([('api', 'scragly')]), call([('api', 'kosa')])] - migration_executor().migrate.assert_has_calls(calls) - - -class LifeOfBrian(MigrationsTestCase): - app = "api" - migration_prior = "0046_reminder_jump_url" - migration_target = "0048_add_infractions_unique_constraints_active" - - @classmethod - def log_last_migration(cls): - """Parses the applied migrations dictionary to log the last applied migration.""" - loader = MigrationLoader(connection) - api_migrations = [ - migration for app, migration in loader.applied_migrations if app == cls.app - ] - last_migration = max(api_migrations, key=lambda name: int(name[:4])) - log.info(f"The last applied migration: {last_migration}") - - @classmethod - def setUpMigrationData(cls, apps): - """Method that logs the last applied migration at this point.""" - cls.log_last_migration() - - @classmethod - def setUpPostMigrationData(cls, apps): - """Method that logs the last applied migration at this point.""" - cls.log_last_migration() - - -class MigrationsTestCaseMigrationTest(TestCase): - """Tests if `MigrationsTestCase` travels to the right points in the migration history.""" - - def test_migrations_test_case_travels_to_correct_migrations_in_history(self): - """The test case should first revert to `migration_prior`, then go to `migration_target`.""" - brian = LifeOfBrian() - - with self.assertLogs(log, level=logging.INFO) as logs: - brian.setUpTestData() - - self.assertEqual(len(logs.records), 2) - - for time_point, record in zip(("migration_prior", "migration_target"), logs.records): - with 
self.subTest(time_point=time_point):
-                message = f"The last applied migration: {getattr(brian, time_point)}"
-                self.assertEqual(record.getMessage(), message)
diff --git a/pydis_site/apps/api/tests/test_filterlists.py b/pydis_site/apps/api/tests/test_filterlists.py
index 5a5bca60..9959617e 100644
--- a/pydis_site/apps/api/tests/test_filterlists.py
+++ b/pydis_site/apps/api/tests/test_filterlists.py
@@ -64,8 +64,8 @@ class FetchTests(AuthenticatedAPITestCase):
         self.assertEqual(response.status_code, 200)
         for api_type, model_type in zip(response.json(), FilterList.FilterListType.choices):
-            self.assertEquals(api_type[0], model_type[0])
-            self.assertEquals(api_type[1], model_type[1])
+            self.assertEqual(api_type[0], model_type[0])
+            self.assertEqual(api_type[1], model_type[1])
 
 
 class CreationTests(AuthenticatedAPITestCase):
diff --git a/pydis_site/apps/api/tests/test_github_utils.py b/pydis_site/apps/api/tests/test_github_utils.py
new file mode 100644
index 00000000..95bafec0
--- /dev/null
+++ b/pydis_site/apps/api/tests/test_github_utils.py
@@ -0,0 +1,286 @@
+import dataclasses
+import datetime
+import typing
+import unittest
+from unittest import mock
+
+import django.test
+import httpx
+import jwt
+import rest_framework.response
+import rest_framework.test
+from django.urls import reverse
+
+from pydis_site import settings
+from .. import github_utils
+
+
+class GeneralUtilityTests(unittest.TestCase):
+    """Test the utility methods which do not fit in another class."""
+
+    def test_token_generation(self):
+        """Test that a valid JWT token is generated."""
+        def encode(payload: dict, _: str, algorithm: str, *args, **kwargs) -> str:
+            """
+            Intercept the encode method.
+
+            The result is encoded with an algorithm which does not require a PEM key, as it may
+            not be available in testing environments.
+ """ + self.assertEqual("RS256", algorithm, "The GitHub App JWT must be signed using RS256.") + return original_encode( + payload, "secret-encoding-key", *args, algorithm="HS256", **kwargs + ) + + original_encode = jwt.encode + with mock.patch("jwt.encode", new=encode): + token = github_utils.generate_token() + decoded = jwt.decode(token, "secret-encoding-key", algorithms=["HS256"]) + + delta = datetime.timedelta(minutes=10) + self.assertAlmostEqual(decoded["exp"] - decoded["iat"], delta.total_seconds()) + self.assertLess(decoded["exp"], (datetime.datetime.now() + delta).timestamp()) + + +class CheckRunTests(unittest.TestCase): + """Tests the check_run_status utility.""" + + run_kwargs: typing.Mapping = { + "name": "run_name", + "head_sha": "sha", + "status": "completed", + "conclusion": "success", + "created_at": datetime.datetime.utcnow().strftime(settings.GITHUB_TIMESTAMP_FORMAT), + "artifacts_url": "url", + } + + def test_completed_run(self): + """Test that an already completed run returns the correct URL.""" + final_url = "some_url_string_1234" + + kwargs = dict(self.run_kwargs, artifacts_url=final_url) + result = github_utils.check_run_status(github_utils.WorkflowRun(**kwargs)) + self.assertEqual(final_url, result) + + def test_pending_run(self): + """Test that a pending run raises the proper exception.""" + kwargs = dict(self.run_kwargs, status="pending") + with self.assertRaises(github_utils.RunPendingError): + github_utils.check_run_status(github_utils.WorkflowRun(**kwargs)) + + def test_timeout_error(self): + """Test that a timeout is declared after a certain duration.""" + kwargs = dict(self.run_kwargs, status="pending") + # Set the creation time to well before the MAX_RUN_TIME + # to guarantee the right conclusion + kwargs["created_at"] = ( + datetime.datetime.utcnow() - github_utils.MAX_RUN_TIME - datetime.timedelta(minutes=10) + ).strftime(settings.GITHUB_TIMESTAMP_FORMAT) + + with self.assertRaises(github_utils.RunTimeoutError): + github_utils.check_run_status(github_utils.WorkflowRun(**kwargs)) + + def test_failed_run(self): + """Test that a failed run raises the proper exception.""" + kwargs = dict(self.run_kwargs, conclusion="failed") + with self.assertRaises(github_utils.ActionFailedError): + github_utils.check_run_status(github_utils.WorkflowRun(**kwargs)) + + +def get_response_authorize(_: httpx.Client, request: httpx.Request, **__) -> httpx.Response: + """ + Helper method for the authorize tests. + + Requests are intercepted before being sent out, and the appropriate responses are returned. 
+ """ + path = request.url.path + auth = request.headers.get("Authorization") + + if request.method == "GET": + if path == "/app/installations": + if auth == "bearer JWT initial token": + return httpx.Response(200, request=request, json=[{ + "account": {"login": "VALID_OWNER"}, + "access_tokens_url": "https://example.com/ACCESS_TOKEN_URL" + }]) + else: + return httpx.Response( + 401, json={"error": "auth app/installations"}, request=request + ) + + elif path == "/installation/repositories": + if auth == "bearer app access token": + return httpx.Response(200, request=request, json={ + "repositories": [{ + "name": "VALID_REPO" + }] + }) + else: # pragma: no cover + return httpx.Response( + 401, json={"error": "auth installation/repositories"}, request=request + ) + + elif request.method == "POST": + if path == "/ACCESS_TOKEN_URL": + if auth == "bearer JWT initial token": + return httpx.Response(200, request=request, json={"token": "app access token"}) + else: # pragma: no cover + return httpx.Response(401, json={"error": "auth access_token"}, request=request) + + # Reaching this point means something has gone wrong + return httpx.Response(500, request=request) # pragma: no cover + + [email protected]("httpx.Client.send", new=get_response_authorize) [email protected](github_utils, "generate_token", new=mock.Mock(return_value="JWT initial token")) +class AuthorizeTests(unittest.TestCase): + """Test the authorize utility.""" + + def test_invalid_apps_auth(self): + """Test that an exception is raised if authorization was attempted with an invalid token.""" + with mock.patch.object(github_utils, "generate_token", return_value="Invalid token"): + with self.assertRaises(httpx.HTTPStatusError) as error: + github_utils.authorize("VALID_OWNER", "VALID_REPO") + + exception: httpx.HTTPStatusError = error.exception + self.assertEqual(401, exception.response.status_code) + self.assertEqual("auth app/installations", exception.response.json()["error"]) + + def test_missing_repo(self): + """Test that an exception is raised when the selected owner or repo are not available.""" + with self.assertRaises(github_utils.NotFoundError): + github_utils.authorize("INVALID_OWNER", "VALID_REPO") + with self.assertRaises(github_utils.NotFoundError): + github_utils.authorize("VALID_OWNER", "INVALID_REPO") + + def test_valid_authorization(self): + """Test that an accessible repository can be accessed.""" + client = github_utils.authorize("VALID_OWNER", "VALID_REPO") + self.assertEqual("bearer app access token", client.headers.get("Authorization")) + + +class ArtifactFetcherTests(unittest.TestCase): + """Test the get_artifact utility.""" + + @staticmethod + def get_response_get_artifact(request: httpx.Request, **_) -> httpx.Response: + """ + Helper method for the get_artifact tests. + + Requests are intercepted before being sent out, and the appropriate responses are returned. 
+ """ + path = request.url.path + + if "force_error" in path: + return httpx.Response(404, request=request) + + if request.method == "GET": + if path == "/repos/owner/repo/actions/runs": + run = github_utils.WorkflowRun( + name="action_name", + head_sha="action_sha", + created_at=datetime.datetime.now().strftime(settings.GITHUB_TIMESTAMP_FORMAT), + status="completed", + conclusion="success", + artifacts_url="artifacts_url" + ) + return httpx.Response( + 200, request=request, json={"workflow_runs": [dataclasses.asdict(run)]} + ) + elif path == "/artifact_url": + return httpx.Response( + 200, request=request, json={"artifacts": [{ + "name": "artifact_name", + "archive_download_url": "artifact_download_url" + }]} + ) + elif path == "/artifact_download_url": + response = httpx.Response(302, request=request) + response.next_request = httpx.Request( + "GET", + httpx.URL("https://final_download.url") + ) + return response + + # Reaching this point means something has gone wrong + return httpx.Response(500, request=request) # pragma: no cover + + def setUp(self) -> None: + self.call_args = ["owner", "repo", "action_sha", "action_name", "artifact_name"] + self.client = httpx.Client(base_url="https://example.com") + + self.patchers = [ + mock.patch.object(self.client, "send", new=self.get_response_get_artifact), + mock.patch.object(github_utils, "authorize", return_value=self.client), + mock.patch.object(github_utils, "check_run_status", return_value="artifact_url"), + ] + + for patcher in self.patchers: + patcher.start() + + def tearDown(self) -> None: + for patcher in self.patchers: + patcher.stop() + + def test_client_closed_on_errors(self): + """Test that the client is terminated even if an error occurs at some point.""" + self.call_args[0] = "force_error" + with self.assertRaises(httpx.HTTPStatusError): + github_utils.get_artifact(*self.call_args) + self.assertTrue(self.client.is_closed) + + def test_missing(self): + """Test that an exception is raised if the requested artifact was not found.""" + cases = ( + "invalid sha", + "invalid action name", + "invalid artifact name", + ) + for i, name in enumerate(cases, 2): + with self.subTest(f"Test {name} raises an error"): + new_args = self.call_args.copy() + new_args[i] = name + + with self.assertRaises(github_utils.NotFoundError): + github_utils.get_artifact(*new_args) + + def test_valid(self): + """Test that the correct download URL is returned for valid requests.""" + url = github_utils.get_artifact(*self.call_args) + self.assertEqual("https://final_download.url", url) + self.assertTrue(self.client.is_closed) + + [email protected](github_utils, "get_artifact") +class GitHubArtifactViewTests(django.test.TestCase): + """Test the GitHub artifact fetch API view.""" + + def setUp(self): + self.kwargs = { + "owner": "test_owner", + "repo": "test_repo", + "sha": "test_sha", + "action_name": "test_action", + "artifact_name": "test_artifact", + } + self.url = reverse("api:github-artifacts", kwargs=self.kwargs) + + def test_correct_artifact(self, artifact_mock: mock.Mock): + """Test a proper response is returned with proper input.""" + artifact_mock.return_value = "final download url" + result = self.client.get(self.url) + + self.assertIsInstance(result, rest_framework.response.Response) + self.assertEqual({"url": artifact_mock.return_value}, result.data) + + def test_failed_fetch(self, artifact_mock: mock.Mock): + """Test that a proper error is returned when the request fails.""" + artifact_mock.side_effect = github_utils.NotFoundError("Test error 
message") + result = self.client.get(self.url) + + self.assertIsInstance(result, rest_framework.response.Response) + self.assertEqual({ + "error_type": github_utils.NotFoundError.__name__, + "error": "Test error message", + "requested_resource": "/".join(self.kwargs.values()) + }, result.data) diff --git a/pydis_site/apps/api/tests/test_infractions.py b/pydis_site/apps/api/tests/test_infractions.py index f1107734..89ee4e23 100644 --- a/pydis_site/apps/api/tests/test_infractions.py +++ b/pydis_site/apps/api/tests/test_infractions.py @@ -56,15 +56,17 @@ class InfractionTests(AuthenticatedAPITestCase): type='ban', reason='He terk my jerb!', hidden=True, + inserted_at=dt(2020, 10, 10, 0, 0, 0, tzinfo=timezone.utc), expires_at=dt(5018, 11, 20, 15, 52, tzinfo=timezone.utc), - active=True + active=True, ) cls.ban_inactive = Infraction.objects.create( user_id=cls.user.id, actor_id=cls.user.id, type='ban', reason='James is an ass, and we won\'t be working with him again.', - active=False + active=False, + inserted_at=dt(2020, 10, 10, 0, 1, 0, tzinfo=timezone.utc), ) cls.mute_permanent = Infraction.objects.create( user_id=cls.user.id, @@ -72,7 +74,8 @@ class InfractionTests(AuthenticatedAPITestCase): type='mute', reason='He has a filthy mouth and I am his soap.', active=True, - expires_at=None + inserted_at=dt(2020, 10, 10, 0, 2, 0, tzinfo=timezone.utc), + expires_at=None, ) cls.superstar_expires_soon = Infraction.objects.create( user_id=cls.user.id, @@ -80,7 +83,8 @@ class InfractionTests(AuthenticatedAPITestCase): type='superstar', reason='This one doesn\'t matter anymore.', active=True, - expires_at=dt.now(timezone.utc) + datetime.timedelta(hours=5) + inserted_at=dt(2020, 10, 10, 0, 3, 0, tzinfo=timezone.utc), + expires_at=dt.now(timezone.utc) + datetime.timedelta(hours=5), ) cls.voiceban_expires_later = Infraction.objects.create( user_id=cls.user.id, @@ -88,7 +92,8 @@ class InfractionTests(AuthenticatedAPITestCase): type='voice_ban', reason='Jet engine mic', active=True, - expires_at=dt.now(timezone.utc) + datetime.timedelta(days=5) + inserted_at=dt(2020, 10, 10, 0, 4, 0, tzinfo=timezone.utc), + expires_at=dt.now(timezone.utc) + datetime.timedelta(days=5), ) def test_list_all(self): diff --git a/pydis_site/apps/api/tests/test_models.py b/pydis_site/apps/api/tests/test_models.py index 0fad467c..c07d59cd 100644 --- a/pydis_site/apps/api/tests/test_models.py +++ b/pydis_site/apps/api/tests/test_models.py @@ -7,7 +7,6 @@ from pydis_site.apps.api.models import ( DeletedMessage, DocumentationLink, Infraction, - Message, MessageDeletionContext, Nomination, NominationEntry, @@ -116,17 +115,6 @@ class StringDunderMethodTests(SimpleTestCase): colour=0x5, permissions=0, position=10, ), - Message( - id=45, - author=User( - id=444, - name='bill', - discriminator=5, - ), - channel_id=666, - content="wooey", - embeds=[] - ), MessageDeletionContext( actor=User( id=5555, diff --git a/pydis_site/apps/api/tests/test_users.py b/pydis_site/apps/api/tests/test_users.py index 5d10069d..d86e80bb 100644 --- a/pydis_site/apps/api/tests/test_users.py +++ b/pydis_site/apps/api/tests/test_users.py @@ -502,6 +502,90 @@ class UserMetricityTests(AuthenticatedAPITestCase): "total_messages": total_messages }) + def test_metricity_activity_data(self): + # Given + self.mock_no_metricity_user() # Other functions shouldn't be used. 
+ self.metricity.total_messages_in_past_n_days.return_value = [(0, 10)] + + # When + url = reverse("api:bot:user-metricity-activity-data") + response = self.client.post( + url, + data=[0, 1], + QUERY_STRING="days=10", + ) + + # Then + self.assertEqual(response.status_code, 200) + self.metricity.total_messages_in_past_n_days.assert_called_once_with(["0", "1"], 10) + self.assertEqual(response.json(), {"0": 10, "1": 0}) + + def test_metricity_activity_data_invalid_days(self): + # Given + self.mock_no_metricity_user() # Other functions shouldn't be used. + + # When + url = reverse("api:bot:user-metricity-activity-data") + response = self.client.post( + url, + data=[0, 1], + QUERY_STRING="days=fifty", + ) + + # Then + self.assertEqual(response.status_code, 400) + self.metricity.total_messages_in_past_n_days.assert_not_called() + self.assertEqual(response.json(), {"days": ["This query parameter must be an integer."]}) + + def test_metricity_activity_data_no_days(self): + # Given + self.mock_no_metricity_user() # Other functions shouldn't be used. + + # When + url = reverse('api:bot:user-metricity-activity-data') + response = self.client.post( + url, + data=[0, 1], + ) + + # Then + self.assertEqual(response.status_code, 400) + self.metricity.total_messages_in_past_n_days.assert_not_called() + self.assertEqual(response.json(), {'days': ["This query parameter is required."]}) + + def test_metricity_activity_data_no_users(self): + # Given + self.mock_no_metricity_user() # Other functions shouldn't be used. + + # When + url = reverse('api:bot:user-metricity-activity-data') + response = self.client.post( + url, + QUERY_STRING="days=10", + ) + + # Then + self.assertEqual(response.status_code, 400) + self.metricity.total_messages_in_past_n_days.assert_not_called() + self.assertEqual(response.json(), ['Expected a list of items but got type "dict".']) + + def test_metricity_activity_data_invalid_users(self): + # Given + self.mock_no_metricity_user() # Other functions shouldn't be used. 
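
(Aside: the zero-filling asserted in these tests comes from the view merging a dict of default counts with Metricity's result via dict union — see the `metricity_activity_data` handler later in this diff. A tiny sketch of that behaviour, with illustrative values:)

    data = [("0", 10)]                         # (user_id, count) pairs from Metricity
    default_data = {"0": 0, "1": 0}            # every requested user starts at zero
    response_data = default_data | dict(data)  # {"0": 10, "1": 0}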
+ + # When + url = reverse('api:bot:user-metricity-activity-data') + response = self.client.post( + url, + data=[123, 'username'], + QUERY_STRING="days=10", + ) + + # Then + self.assertEqual(response.status_code, 400) + self.metricity.total_messages_in_past_n_days.assert_not_called() + self.assertEqual(response.json(), {'1': ['A valid integer is required.']}) + def mock_metricity_user(self, joined_at, total_messages, total_blocks, top_channel_activity): patcher = patch("pydis_site.apps.api.viewsets.bot.user.Metricity") self.metricity = patcher.start() diff --git a/pydis_site/apps/api/tests/test_validators.py b/pydis_site/apps/api/tests/test_validators.py index 551cc2aa..8c46fcbc 100644 --- a/pydis_site/apps/api/tests/test_validators.py +++ b/pydis_site/apps/api/tests/test_validators.py @@ -5,7 +5,6 @@ from django.test import TestCase from ..models.bot.bot_setting import validate_bot_setting_name from ..models.bot.offensive_message import future_date_validator -from ..models.utils import validate_embed REQUIRED_KEYS = ( @@ -22,234 +21,6 @@ class BotSettingValidatorTests(TestCase): validate_bot_setting_name('bad name') -class TagEmbedValidatorTests(TestCase): - def test_rejects_non_mapping(self): - with self.assertRaises(ValidationError): - validate_embed('non-empty non-mapping') - - def test_rejects_missing_required_keys(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'unknown': "key" - }) - - def test_rejects_one_correct_one_incorrect(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'provider': "??", - 'title': "" - }) - - def test_rejects_empty_required_key(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': '' - }) - - def test_rejects_list_as_embed(self): - with self.assertRaises(ValidationError): - validate_embed([]) - - def test_rejects_required_keys_and_unknown_keys(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': "the duck walked up to the lemonade stand", - 'and': "he said to the man running the stand" - }) - - def test_rejects_too_long_title(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': 'a' * 257 - }) - - def test_rejects_too_many_fields(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'fields': [{} for _ in range(26)] - }) - - def test_rejects_too_long_description(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'description': 'd' * 4097 - }) - - def test_allows_valid_embed(self): - validate_embed({ - 'title': "My embed", - 'description': "look at my embed, my embed is amazing" - }) - - def test_allows_unvalidated_fields(self): - validate_embed({ - 'title': "My embed", - 'provider': "what am I??" - }) - - def test_rejects_fields_as_list_of_non_mappings(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'fields': ['abc'] - }) - - def test_rejects_fields_with_unknown_fields(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'fields': [ - { - 'what': "is this field" - } - ] - }) - - def test_rejects_fields_with_too_long_name(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'fields': [ - { - 'name': "a" * 257 - } - ] - }) - - def test_rejects_one_correct_one_incorrect_field(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'fields': [ - { - 'name': "Totally valid", - 'value': "LOOK AT ME" - }, - { - 'name': "Totally valid", - 'value': "LOOK AT ME", - 'oh': "what is this key?" 
- } - ] - }) - - def test_rejects_missing_required_field_field(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'fields': [ - { - 'name': "Totally valid", - 'inline': True, - } - ] - }) - - def test_rejects_invalid_inline_field_field(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'fields': [ - { - 'name': "Totally valid", - 'value': "LOOK AT ME", - 'inline': "Totally not a boolean", - } - ] - }) - - def test_allows_valid_fields(self): - validate_embed({ - 'fields': [ - { - 'name': "valid", - 'value': "field", - }, - { - 'name': "valid", - 'value': "field", - 'inline': False, - }, - { - 'name': "valid", - 'value': "field", - 'inline': True, - }, - ] - }) - - def test_rejects_footer_as_non_mapping(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': "whatever", - 'footer': [] - }) - - def test_rejects_footer_with_unknown_fields(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': "whatever", - 'footer': { - 'duck': "quack" - } - }) - - def test_rejects_footer_with_empty_text(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': "whatever", - 'footer': { - 'text': "" - } - }) - - def test_allows_footer_with_proper_values(self): - validate_embed({ - 'title': "whatever", - 'footer': { - 'text': "django good" - } - }) - - def test_rejects_author_as_non_mapping(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': "whatever", - 'author': [] - }) - - def test_rejects_author_with_unknown_field(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': "whatever", - 'author': { - 'field': "that is unknown" - } - }) - - def test_rejects_author_with_empty_name(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': "whatever", - 'author': { - 'name': "" - } - }) - - def test_rejects_author_with_one_correct_one_incorrect(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': "whatever", - 'author': { - # Relies on "dictionary insertion order remembering" (D.I.O.R.) 
behaviour
-                'url': "bobswebsite.com",
-                'name': ""
-            }
-        })
-
-    def test_allows_author_with_proper_values(self):
-        validate_embed({
-            'title': "whatever",
-            'author': {
-                'name': "Bob"
-            }
-        })
-
-
 class OffensiveMessageValidatorsTests(TestCase):
     def test_accepts_future_date(self):
         future_date_validator(datetime(3000, 1, 1, tzinfo=timezone.utc))
diff --git a/pydis_site/apps/api/urls.py b/pydis_site/apps/api/urls.py
index 1e564b29..2757f176 100644
--- a/pydis_site/apps/api/urls.py
+++ b/pydis_site/apps/api/urls.py
@@ -1,7 +1,7 @@
 from django.urls import include, path
 from rest_framework.routers import DefaultRouter
 
-from .views import HealthcheckView, RulesView
+from .views import GitHubArtifactsView, HealthcheckView, RulesView
 from .viewsets import (
     AocAccountLinkViewSet,
     AocCompletionistBlockViewSet,
@@ -86,5 +86,10 @@ urlpatterns = (
     # from django_hosts.resolvers import reverse
     path('bot/', include((bot_router.urls, 'api'), namespace='bot')),
     path('healthcheck', HealthcheckView.as_view(), name='healthcheck'),
-    path('rules', RulesView.as_view(), name='rules')
+    path('rules', RulesView.as_view(), name='rules'),
+    path(
+        'github/artifact/<str:owner>/<str:repo>/<str:sha>/<str:action_name>/<str:artifact_name>',
+        GitHubArtifactsView.as_view(),
+        name="github-artifacts"
+    ),
 )
diff --git a/pydis_site/apps/api/views.py b/pydis_site/apps/api/views.py
index 816463f6..34167a38 100644
--- a/pydis_site/apps/api/views.py
+++ b/pydis_site/apps/api/views.py
@@ -1,7 +1,10 @@
 from rest_framework.exceptions import ParseError
+from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.views import APIView
 
+from . import github_utils
+
 
 class HealthcheckView(APIView):
     """
@@ -34,12 +37,14 @@ class RulesView(APIView):
     ## Routes
     ### GET /rules
-    Returns a JSON array containing the server's rules:
+    Returns a JSON array containing the server's rules
+    and keywords relating to each rule.
+
+    Example response:
     >>> [
-    ...     "Eat candy.",
-    ...     "Wake up at 4 AM.",
-    ...     "Take your medicine."
+    ...     ["Eat candy.", ["candy", "sweets"]],
+    ...     ["Wake up at 4 AM.", ["wake_up", "early", "early_bird"]],
+    ...     ["Take your medicine.", ["medicine", "health"]]
     ... ]
 
     Since some of the rules require links, this view
@@ -97,6 +102,12 @@ class RulesView(APIView):
     # `format` here is the result format, we have a link format here instead.
     def get(self, request, format=None):  # noqa: D102,ANN001,ANN201
+        """
+        Returns a list of our community rules coupled with their keywords.
+
+        Each item in the returned list is a tuple with the rule as the first item
+        and a list of keywords that match that rule as the second item.
+        """
         link_format = request.query_params.get('link_format', 'md')
         if link_format not in ('html', 'md'):
             raise ParseError(
@@ -121,34 +132,93 @@
         return Response([
             (
-                f"Follow the {pydis_coc}."
+                f"Follow the {pydis_coc}.",
+                ["coc", "conduct", "code"]
             ),
             (
-                f"Follow the {discord_community_guidelines} and {discord_tos}."
+                f"Follow the {discord_community_guidelines} and {discord_tos}.",
+                ["discord", "guidelines", "discord_tos"]
             ),
             (
-                "Respect staff members and listen to their instructions."
+                "Respect staff members and listen to their instructions.",
+                ["respect", "staff", "instructions"]
             ),
             (
                 "Use English to the best of your ability. "
-                "Be polite if someone speaks English imperfectly."
+ "Be polite if someone speaks English imperfectly.", + ["english", "language"] ), ( "Do not provide or request help on projects that may break laws, " - "breach terms of services, or are malicious or inappropriate." + "breach terms of services, or are malicious or inappropriate.", + ["infraction", "tos", "breach", "malicious", "inappropriate"] ), ( - "Do not post unapproved advertising." + "Do not post unapproved advertising.", + ["ad", "ads", "advert", "advertising"] ), ( "Keep discussions relevant to the channel topic. " - "Each channel's description tells you the topic." + "Each channel's description tells you the topic.", + ["off-topic", "topic", "relevance"] ), ( "Do not help with ongoing exams. When helping with homework, " - "help people learn how to do the assignment without doing it for them." + "help people learn how to do the assignment without doing it for them.", + ["exam", "exams", "assignment", "assignments", "homework"] ), ( - "Do not offer or ask for paid work of any kind." + "Do not offer or ask for paid work of any kind.", + ["paid", "work", "money"] ), ]) + + +class GitHubArtifactsView(APIView): + """ + Provides utilities for interacting with the GitHub API and obtaining action artifacts. + + ## Routes + ### GET /github/artifacts + Returns a download URL for the artifact requested. + + { + 'url': 'https://pipelines.actions.githubusercontent.com/...' + } + + ### Exceptions + In case of an error, the following body will be returned: + + { + "error_type": "<error class name>", + "error": "<error description>", + "requested_resource": "<owner>/<repo>/<sha>/<artifact_name>" + } + + ## Authentication + Does not require any authentication nor permissions. + """ + + authentication_classes = () + permission_classes = () + + def get( + self, + request: Request, + *, + owner: str, + repo: str, + sha: str, + action_name: str, + artifact_name: str + ) -> Response: + """Return a download URL for the requested artifact.""" + try: + url = github_utils.get_artifact(owner, repo, sha, action_name, artifact_name) + return Response({"url": url}) + except github_utils.ArtifactProcessingError as e: + return Response({ + "error_type": e.__class__.__name__, + "error": str(e), + "requested_resource": f"{owner}/{repo}/{sha}/{action_name}/{artifact_name}" + }, status=e.status) diff --git a/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py b/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py index 3a4cec60..97efb63c 100644 --- a/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py +++ b/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py @@ -70,4 +70,4 @@ class AocCompletionistBlockViewSet( serializer_class = AocCompletionistBlockSerializer queryset = AocCompletionistBlock.objects.all() filter_backends = (DjangoFilterBackend,) - filter_fields = ("user__id", "is_blocked") + filterset_fields = ("user__id", "is_blocked") diff --git a/pydis_site/apps/api/viewsets/bot/aoc_link.py b/pydis_site/apps/api/viewsets/bot/aoc_link.py index c7a96629..3cdc342d 100644 --- a/pydis_site/apps/api/viewsets/bot/aoc_link.py +++ b/pydis_site/apps/api/viewsets/bot/aoc_link.py @@ -68,4 +68,4 @@ class AocAccountLinkViewSet( serializer_class = AocAccountLinkSerializer queryset = AocAccountLink.objects.all() filter_backends = (DjangoFilterBackend,) - filter_fields = ("user__id", "aoc_username") + filterset_fields = ("user__id", "aoc_username") diff --git a/pydis_site/apps/api/viewsets/bot/infraction.py b/pydis_site/apps/api/viewsets/bot/infraction.py index 7f31292f..93d29391 100644 --- 
a/pydis_site/apps/api/viewsets/bot/infraction.py +++ b/pydis_site/apps/api/viewsets/bot/infraction.py @@ -1,9 +1,8 @@ -from datetime import datetime +import datetime from django.db import IntegrityError from django.db.models import QuerySet from django.http.request import HttpRequest -from django.utils import timezone from django_filters.rest_framework import DjangoFilterBackend from rest_framework.decorators import action from rest_framework.exceptions import ValidationError @@ -154,7 +153,7 @@ class InfractionViewSet( queryset = Infraction.objects.all() pagination_class = LimitOffsetPaginationExtended filter_backends = (DjangoFilterBackend, SearchFilter, OrderingFilter) - filter_fields = ('user__id', 'actor__id', 'active', 'hidden', 'type') + filterset_fields = ('user__id', 'actor__id', 'active', 'hidden', 'type') search_fields = ('$reason',) frozen_fields = ('id', 'inserted_at', 'type', 'user', 'actor', 'hidden') @@ -185,23 +184,21 @@ class InfractionViewSet( filter_expires_after = self.request.query_params.get('expires_after') if filter_expires_after: try: - expires_after_parsed = datetime.fromisoformat(filter_expires_after) + expires_after_parsed = datetime.datetime.fromisoformat(filter_expires_after) except ValueError: raise ValidationError({'expires_after': ['failed to convert to datetime']}) - additional_filters['expires_at__gte'] = timezone.make_aware( - expires_after_parsed, - timezone=timezone.utc, + additional_filters['expires_at__gte'] = expires_after_parsed.replace( + tzinfo=datetime.timezone.utc ) filter_expires_before = self.request.query_params.get('expires_before') if filter_expires_before: try: - expires_before_parsed = datetime.fromisoformat(filter_expires_before) + expires_before_parsed = datetime.datetime.fromisoformat(filter_expires_before) except ValueError: raise ValidationError({'expires_before': ['failed to convert to datetime']}) - additional_filters['expires_at__lte'] = timezone.make_aware( - expires_before_parsed, - timezone=timezone.utc, + additional_filters['expires_at__lte'] = expires_before_parsed.replace( + tzinfo=datetime.timezone.utc ) if 'expires_at__lte' in additional_filters and 'expires_at__gte' in additional_filters: diff --git a/pydis_site/apps/api/viewsets/bot/nomination.py b/pydis_site/apps/api/viewsets/bot/nomination.py index 144daab0..6af42bcb 100644 --- a/pydis_site/apps/api/viewsets/bot/nomination.py +++ b/pydis_site/apps/api/viewsets/bot/nomination.py @@ -172,7 +172,7 @@ class NominationViewSet(CreateModelMixin, RetrieveModelMixin, ListModelMixin, Ge serializer_class = NominationSerializer queryset = Nomination.objects.all() filter_backends = (DjangoFilterBackend, SearchFilter, OrderingFilter) - filter_fields = ('user__id', 'active') + filterset_fields = ('user__id', 'active') frozen_fields = ('id', 'inserted_at', 'user', 'ended_at') frozen_on_create = ('ended_at', 'end_reason', 'active', 'inserted_at', 'reviewed') diff --git a/pydis_site/apps/api/viewsets/bot/reminder.py b/pydis_site/apps/api/viewsets/bot/reminder.py index 78d7cb3b..5f997052 100644 --- a/pydis_site/apps/api/viewsets/bot/reminder.py +++ b/pydis_site/apps/api/viewsets/bot/reminder.py @@ -125,4 +125,4 @@ class ReminderViewSet( serializer_class = ReminderSerializer queryset = Reminder.objects.prefetch_related('author') filter_backends = (DjangoFilterBackend, SearchFilter) - filter_fields = ('active', 'author__id') + filterset_fields = ('active', 'author__id') diff --git a/pydis_site/apps/api/viewsets/bot/user.py b/pydis_site/apps/api/viewsets/bot/user.py index 
3318b2b9..db73a83c 100644
--- a/pydis_site/apps/api/viewsets/bot/user.py
+++ b/pydis_site/apps/api/viewsets/bot/user.py
@@ -3,8 +3,9 @@ from collections import OrderedDict
 
 from django.db.models import Q
 from django_filters.rest_framework import DjangoFilterBackend
-from rest_framework import status
+from rest_framework import fields, status
 from rest_framework.decorators import action
+from rest_framework.exceptions import ParseError
 from rest_framework.pagination import PageNumberPagination
 from rest_framework.request import Request
 from rest_framework.response import Response
@@ -138,6 +139,29 @@ class UserViewSet(ModelViewSet):
     - 200: returned on success
     - 404: if a user with the given `snowflake` could not be found
 
+    ### POST /bot/users/metricity_activity_data
+    Returns a mapping of user ID to message count in a given period for
+    the given user IDs.
+
+    #### Required Query Parameters
+    - days: how many days into the past to count messages from.
+
+    #### Request Format
+    >>> [
+    ...     409107086526644234,
+    ...     493839819168808962
+    ... ]
+
+    #### Response format
+    >>> {
+    ...     "409107086526644234": 54,
+    ...     "493839819168808962": 0
+    ... }
+
+    #### Status codes
+    - 200: returned on success
+    - 400: if request body or query parameters were missing or invalid
+
     ### POST /bot/users
     Adds a single or multiple new users.
     The roles attached to the user(s) must be roles known by the site.
@@ -237,7 +261,7 @@ class UserViewSet(ModelViewSet):
     queryset = User.objects.all().order_by("id")
     pagination_class = UserListPagination
     filter_backends = (DjangoFilterBackend,)
-    filter_fields = ('name', 'discriminator')
+    filterset_fields = ('name', 'discriminator')
 
     def get_serializer(self, *args, **kwargs) -> ModelSerializer:
         """Set Serializer many attribute to True if request body contains a list."""
@@ -298,3 +322,34 @@ class UserViewSet(ModelViewSet):
         except NotFoundError:
             return Response(dict(detail="User not found in metricity"),
                             status=status.HTTP_404_NOT_FOUND)
+
+    @action(detail=False, methods=["POST"])
+    def metricity_activity_data(self, request: Request) -> Response:
+        """Request handler for metricity_activity_data endpoint."""
+        if "days" in request.query_params:
+            try:
+                days = int(request.query_params["days"])
+            except ValueError:
+                raise ParseError(detail={
+                    "days": ["This query parameter must be an integer."]
+                })
+        else:
+            raise ParseError(detail={
+                "days": ["This query parameter is required."]
+            })
+
+        user_id_list_validator = fields.ListField(
+            child=fields.IntegerField(min_value=0),
+            allow_empty=False
+        )
+        user_ids = [
+            str(user_id) for user_id in
+            user_id_list_validator.run_validation(request.data)
+        ]
+
+        with Metricity() as metricity:
+            data = metricity.total_messages_in_past_n_days(user_ids, days)
+
+        default_data = {user_id: 0 for user_id in user_ids}
+        response_data = default_data | dict(data)
+        return Response(response_data, status=status.HTTP_200_OK)
diff --git a/pydis_site/apps/content/apps.py b/pydis_site/apps/content/apps.py
index 1e300a48..96019e1c 100644
--- a/pydis_site/apps/content/apps.py
+++ b/pydis_site/apps/content/apps.py
@@ -4,4 +4,4 @@ from django.apps import AppConfig
 class ContentConfig(AppConfig):
     """Django AppConfig for content app."""
 
-    name = 'content'
+    name = 'pydis_site.apps.content'
diff --git a/pydis_site/apps/content/migrations/0001_add_tags.py b/pydis_site/apps/content/migrations/0001_add_tags.py
new file mode 100644
index 00000000..2c31e4c1
--- /dev/null
+++ b/pydis_site/apps/content/migrations/0001_add_tags.py
@@ -0,0 +1,35 @@
+# Generated by Django
4.0.6 on 2022-08-23 09:06 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ] + + operations = [ + migrations.CreateModel( + name='Commit', + fields=[ + ('sha', models.CharField(help_text='The SHA hash of this commit.', max_length=40, primary_key=True, serialize=False)), + ('message', models.TextField(help_text='The commit message.')), + ('date', models.DateTimeField(help_text='The date and time the commit was created.')), + ('authors', models.TextField(help_text='The person(s) who created the commit. This is a serialized JSON object. Refer to the GitHub documentation on the commit endpoint (schema/commit.author & schema/commit.committer) for more info. https://docs.github.com/en/rest/commits/commits#get-a-commit')), + ], + ), + migrations.CreateModel( + name='Tag', + fields=[ + ('last_updated', models.DateTimeField(auto_now=True, help_text='The date and time this data was last fetched.')), + ('sha', models.CharField(help_text="The tag's hash, as calculated by GitHub.", max_length=40)), + ('name', models.CharField(help_text="The tag's name.", max_length=50, primary_key=True, serialize=False)), + ('group', models.CharField(help_text='The group the tag belongs to.', max_length=50, null=True)), + ('body', models.TextField(help_text='The content of the tag.')), + ('last_commit', models.ForeignKey(help_text='The commit this file was last touched in.', null=True, on_delete=django.db.models.deletion.CASCADE, to='content.commit')), + ], + ), + ] diff --git a/pydis_site/apps/content/migrations/__init__.py b/pydis_site/apps/content/migrations/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/pydis_site/apps/content/migrations/__init__.py diff --git a/pydis_site/apps/content/models/__init__.py b/pydis_site/apps/content/models/__init__.py new file mode 100644 index 00000000..60007e27 --- /dev/null +++ b/pydis_site/apps/content/models/__init__.py @@ -0,0 +1,3 @@ +from .tag import Commit, Tag + +__all__ = ["Commit", "Tag"] diff --git a/pydis_site/apps/content/models/tag.py b/pydis_site/apps/content/models/tag.py new file mode 100644 index 00000000..1a20d775 --- /dev/null +++ b/pydis_site/apps/content/models/tag.py @@ -0,0 +1,80 @@ +import collections.abc +import json + +from django.db import models + + +class Commit(models.Model): + """A git commit from the Python Discord Bot project.""" + + URL_BASE = "https://github.com/python-discord/bot/commit/" + + sha = models.CharField( + help_text="The SHA hash of this commit.", + primary_key=True, + max_length=40, + ) + message = models.TextField(help_text="The commit message.") + date = models.DateTimeField(help_text="The date and time the commit was created.") + authors = models.TextField(help_text=( + "The person(s) who created the commit. This is a serialized JSON object. " + "Refer to the GitHub documentation on the commit endpoint " + "(schema/commit.author & schema/commit.committer) for more info. 
" + "https://docs.github.com/en/rest/commits/commits#get-a-commit" + )) + + @property + def url(self) -> str: + """The URL to the commit on GitHub.""" + return self.URL_BASE + self.sha + + def lines(self) -> collections.abc.Iterable[str]: + """Return each line in the commit message.""" + for line in self.message.split("\n"): + yield line + + def format_authors(self) -> collections.abc.Iterable[str]: + """Return a nice representation of the author(s)' name and email.""" + for author in json.loads(self.authors): + yield f"{author['name']} <{author['email']}>" + + +class Tag(models.Model): + """A tag from the python-discord bot repository.""" + + URL_BASE = "https://github.com/python-discord/bot/tree/main/bot/resources/tags" + + last_updated = models.DateTimeField( + help_text="The date and time this data was last fetched.", + auto_now=True, + ) + sha = models.CharField( + help_text="The tag's hash, as calculated by GitHub.", + max_length=40, + ) + last_commit = models.ForeignKey( + Commit, + help_text="The commit this file was last touched in.", + null=True, + on_delete=models.CASCADE, + ) + name = models.CharField( + help_text="The tag's name.", + primary_key=True, + max_length=50, + ) + group = models.CharField( + help_text="The group the tag belongs to.", + null=True, + max_length=50, + ) + body = models.TextField(help_text="The content of the tag.") + + @property + def url(self) -> str: + """Get the URL of the tag on GitHub.""" + url = Tag.URL_BASE + if self.group: + url += f"/{self.group}" + url += f"/{self.name}.md" + return url diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/contributing.md b/pydis_site/apps/content/resources/guides/pydis-guides/contributing.md index 6231fe87..2822d046 100644 --- a/pydis_site/apps/content/resources/guides/pydis-guides/contributing.md +++ b/pydis_site/apps/content/resources/guides/pydis-guides/contributing.md @@ -119,7 +119,7 @@ As mentioned in the Contributing Guidelines, we have a simple style guide for ou [**Style Guide**](./style-guide/) ### 4. Create an issue -The first step to any new contribution is an issue describing a problem with the current codebase or proposing a new feature. All the open issues are viewable on the GitHub repositories, for instance here is the [issues page for Sir Lancebot](https://github.com/python-discord/sir-lancebot/issues). If you have something that you want to implement open a new issue to present your idea. Otherwise you can browse the unassigned issues and ask to be assigned to one that you're interested in, either in the comments on the issue or in the [`#dev-contrib`](https://discord.gg/2h3qBv8Xaa) channel on Discord. +The first step to any new contribution is an issue describing a problem with the current codebase or proposing a new feature. All the open issues are viewable on the GitHub repositories, for instance here is the [issues page for Sir Lancebot](https://github.com/python-discord/sir-lancebot/issues). If you have something that you want to implement open a new issue to present your idea. Otherwise, you can browse the unassigned issues and ask to be assigned to one that you're interested in, either in the comments on the issue or in the [`#dev-contrib`](https://discord.gg/2h3qBv8Xaa) channel on Discord. 
[**How to write a good issue**](./issues/) diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/bot.md b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/bot.md index ad446cc8..02316bca 100644 --- a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/bot.md +++ b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/bot.md @@ -88,6 +88,7 @@ urls: # Snekbox snekbox_eval_api: "http://localhost:8060/eval" + snekbox_311_eval_api: "http://localhost:8065/eval" ##### << Replace the following � characters with the channel IDs in your test server >> ##### # This assumes the template was used: https://discord.new/zmHtscpYN9E3 @@ -481,10 +482,14 @@ You are now almost ready to run the Python bot. The simplest way to do so is wit In your `config.yml` file: * Set `urls.site` to `"web:8000"`. -* If you wish to work with snekbox set `urls.snekbox_eval_api` to `"http://snekbox:8060/eval"`. +* If you wish to work with snekbox set the following: + * `urls.snekbox_eval_api` to `"http://snekbox:8060/eval"` + * `urls.snekbox_311_eval_api` to `"http://snekbox-311:8060/eval"`. Assuming you have Docker installed **and running**, enter the cloned repo in the command line and type `docker-compose up`. +If working with snekbox you can run `docker-compose --profile 3.10 up` to also start up a 3.10 snekbox container, in addition to the default 3.11 container! + After pulling the images and building the containers, your bot will start. Enter your server and type `!help` (or whatever prefix you chose instead of `!`). Your bot is now running, but this method makes debugging with an IDE a fairly involved process. For additional running methods, continue reading the following sections. @@ -494,12 +499,13 @@ The advantage of this method is that you can run the bot's code in your preferre * Append the following line to your `.env` file: `BOT_API_KEY=badbot13m0n8f570f942013fc818f234916ca531`. * In your `config.yml` file, set `urls.site` to `"localhost:8000"`. If you wish to keep using `web:8000`, then [COMPOSE_PROJECT_NAME](../docker/#compose-project-names) has to be set. -* To work with snekbox, set `urls.snekbox_eval_api` to `"http://localhost:8060/eval"` +* To work with snekbox, set `urls.snekbox_eval_api` to `"http://localhost:8060/eval"` and `urls.snekbox_311_eval_api` to `"http://localhost:8065/eval"` You will need to start the services separately, but if you got the previous section with Docker working, that's pretty simple: * `docker-compose up web` to start the site container. This is required. * `docker-compose up snekbox` to start the snekbox container. You only need this if you're planning on working on the snekbox cog. +* `docker-compose up snekbox-311` to start the snekbox 3.11 container. You only need this if you're planning on working on the snekbox cog. * `docker-compose up redis` to start the Redis container. You only need this if you're not using fakeredis. For more info refer to [Working with Redis](#optional-working-with-redis). You can start several services together: `docker-compose up web snekbox redis`. @@ -507,7 +513,7 @@ You can start several services together: `docker-compose up web snekbox redis`. ##### Setting Up a Development Environment The bot's code is Python code like any other. To run it locally, you will need the right version of Python with the necessary packages installed: -1. Make sure you have [Python 3.9](https://www.python.org/downloads/) installed. It helps if it is your system's default Python version. +1. 
Make sure you have [Python 3.10](https://www.python.org/downloads/) installed. It helps if it is your system's default Python version.
 2. [Install Poetry](https://github.com/python-poetry/poetry#installation).
 3. [Install the dependencies](../installing-project-dependencies).
diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/site.md b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/site.md
index 520e41ad..9786698b 100644
--- a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/site.md
+++ b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/site.md
@@ -9,7 +9,7 @@ You should have already forked the [`site`](https://github.com/python-discord/si
 
 ### Requirements
 
-- [Python 3.9](https://www.python.org/downloads/)
+- [Python 3.10](https://www.python.org/downloads/)
 - [Poetry](https://python-poetry.org/docs/#installation)
     - `pip install poetry`
 - [Git](https://git-scm.com/downloads)
diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/style-guide.md b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/style-guide.md
index 4dba45c8..b26c467c 100644
--- a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/style-guide.md
+++ b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/style-guide.md
@@ -202,6 +202,3 @@ def foo(input_1: int, input_2: dict[str, int]) -> bool:
 This tells us that `foo` accepts an `int` and a `dict`, with `str` keys and `int` values, and returns a `bool`.
 
 In previous examples, we have purposely omitted annotations to keep focus on the specific points they represent.
-
-> **Note:** if the project is running Python 3.8 or below you have to use `typing.Dict` instead of `dict`, but our three main projects are all >=3.9.
-> See [PEP 585](https://www.python.org/dev/peps/pep-0585/) for more information.
diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/off-topic-etiquette.md b/pydis_site/apps/content/resources/guides/pydis-guides/off-topic-etiquette.md
index f8031834..5e785cd9 100644
--- a/pydis_site/apps/content/resources/guides/pydis-guides/off-topic-etiquette.md
+++ b/pydis_site/apps/content/resources/guides/pydis-guides/off-topic-etiquette.md
@@ -5,7 +5,7 @@ icon: fab fa-discord
 ---
 
 ## Why do we need off-topic etiquette?
-Everyone wants to have good conversations in our off-topic channels, but with tens of thousands of members, this might mean different things to different people.
+Everyone wants to have good conversations in our off-topic channels, but with hundreds of thousands of members, this might mean different things to different people.
 To facilitate the best experience for everyone, here are some guidelines on conversation etiquette.
 
 ## Three things you shouldn't do
diff --git a/pydis_site/apps/content/resources/guides/python-guides/fix-ssl-certificate.md b/pydis_site/apps/content/resources/guides/python-guides/fix-ssl-certificate.md
new file mode 100644
index 00000000..096e3a90
--- /dev/null
+++ b/pydis_site/apps/content/resources/guides/python-guides/fix-ssl-certificate.md
@@ -0,0 +1,23 @@
+---
+title: Fixing an SSL Certificate Verification Error
+description: A guide on fixing SSL certificate verification errors.
+---
+
+We're fixing the error Python specifies as [ssl.SSLCertVerificationError](https://docs.python.org/3/library/ssl.html#ssl.SSLCertVerificationError).
+
+# How to fix SSL Certificate issue on Windows
+
+First, try updating your OS; it wouldn't hurt to try.
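
(Aside: before installing anything, it can help to check which certificate bundle your Python actually consults — standard library only:)

    import ssl

    # Print the CA file/path Python's ssl module uses by default.
    print(ssl.get_default_verify_paths())
    # e.g. DefaultVerifyPaths(cafile='/usr/lib/ssl/cert.pem', capath=None, ...)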
+ +If you're still having an issue after that, you will need to download the SSL certificate itself. + +Sectigo, the certificate vendor, provides a download link for the [SSL certificate](https://crt.sh/?id=2835394). You should find the download link in the bottom-left corner, as shown below: + +The picture below shows where to find the certificate on the website: + + +You have to set up the certificate yourself. To do that, you can just click on it; if that doesn't work, refer to [this link](https://portal.threatpulse.com/docs/sol/Solutions/ManagePolicy/SSL/ssl_chrome_cert_ta.htm). + +# How to fix SSL Certificate issue on Mac + +Navigate to your `Applications/Python 3.x/` folder and double-click the `Install Certificates.command` to fix this. diff --git a/pydis_site/apps/content/resources/tags/_info.yml b/pydis_site/apps/content/resources/tags/_info.yml new file mode 100644 index 00000000..054125ec --- /dev/null +++ b/pydis_site/apps/content/resources/tags/_info.yml @@ -0,0 +1,3 @@ +title: Tags +description: Useful snippets that are often used in the server. +icon: fas fa-tags diff --git a/pydis_site/apps/content/tests/test_utils.py b/pydis_site/apps/content/tests/test_utils.py index be5ea897..462818b5 100644 --- a/pydis_site/apps/content/tests/test_utils.py +++ b/pydis_site/apps/content/tests/test_utils.py @@ -1,12 +1,34 @@ +import datetime +import json +import tarfile +import tempfile +import textwrap from pathlib import Path +from unittest import mock +import httpx +import markdown from django.http import Http404 +from django.test import TestCase -from pydis_site.apps.content import utils +from pydis_site import settings +from pydis_site.apps.content import models, utils from pydis_site.apps.content.tests.helpers import ( BASE_PATH, MockPagesTestCase, PARSED_CATEGORY_INFO, PARSED_HTML, PARSED_METADATA ) +_time = datetime.datetime(2022, 10, 10, 10, 10, 10, tzinfo=datetime.timezone.utc) +_time_str = _time.strftime(settings.GITHUB_TIMESTAMP_FORMAT) +TEST_COMMIT_KWARGS = { + "sha": "123", + "message": "Hello world\n\nThis is a commit message", + "date": _time, + "authors": json.dumps([ + {"name": "Author 1", "email": "[email protected]", "date": _time_str}, + {"name": "Author 2", "email": "[email protected]", "date": _time_str}, + ]), +} + class GetCategoryTests(MockPagesTestCase): """Tests for the get_category function.""" @@ -96,3 +118,268 @@ class GetPageTests(MockPagesTestCase): def test_get_nonexistent_page_returns_404(self): with self.assertRaises(Http404): utils.get_page(Path(BASE_PATH, "invalid")) + + +class TagUtilsTests(TestCase): + """Tests for the tag-related utilities.""" + + def setUp(self) -> None: + super().setUp() + self.commit = models.Commit.objects.create(**TEST_COMMIT_KWARGS) + + @mock.patch.object(utils, "fetch_tags") + def test_static_fetch(self, fetch_mock: mock.Mock): + """Test that the static fetch function is called at most once during static builds.""" + tags = [models.Tag(name="Name", body="body")] + fetch_mock.return_value = tags + result = utils.get_tags_static() + second_result = utils.get_tags_static() + + fetch_mock.assert_called_once() + self.assertEqual(tags, result) + self.assertEqual(tags, second_result) + + @mock.patch("httpx.Client.get") + def test_mocked_fetch(self, get_mock: mock.Mock): + """Test that proper data is returned from fetch, but with a mocked API response.""" + fake_request = httpx.Request("GET", "https://google.com") + + # Metadata requests + returns = [httpx.Response( + request=fake_request, + status_code=200, + json=[ + {"type": "file", "name":
"first_tag.md", "sha": "123"}, + {"type": "file", "name": "second_tag.md", "sha": "456"}, + {"type": "dir", "name": "some_group", "sha": "789", "url": "/some_group"}, + ] + ), httpx.Response( + request=fake_request, + status_code=200, + json=[{"type": "file", "name": "grouped_tag.md", "sha": "789123"}] + )] + + # Main content request + bodies = ( + "This is the first tag!", + textwrap.dedent(""" + --- + frontmatter: empty + --- + This tag has frontmatter! + """), + "This is a grouped tag!", + ) + + # Generate a tar archive with a few tags + with tempfile.TemporaryDirectory() as tar_folder: + tar_folder = Path(tar_folder) + with tempfile.TemporaryDirectory() as folder: + folder = Path(folder) + (folder / "ignored_file.md").write_text("This is an ignored file.") + tags_folder = folder / "bot/resources/tags" + tags_folder.mkdir(parents=True) + + (tags_folder / "first_tag.md").write_text(bodies[0]) + (tags_folder / "second_tag.md").write_text(bodies[1]) + + group_folder = tags_folder / "some_group" + group_folder.mkdir() + (group_folder / "grouped_tag.md").write_text(bodies[2]) + + with tarfile.open(tar_folder / "temp.tar", "w") as file: + file.add(folder, recursive=True) + + body = (tar_folder / "temp.tar").read_bytes() + + returns.append(httpx.Response( + status_code=200, + content=body, + request=fake_request, + )) + + get_mock.side_effect = returns + result = utils.fetch_tags() + + def sort(_tag: models.Tag) -> str: + return _tag.name + + self.assertEqual(sorted([ + models.Tag(name="first_tag", body=bodies[0], sha="123"), + models.Tag(name="second_tag", body=bodies[1], sha="245"), + models.Tag(name="grouped_tag", body=bodies[2], group=group_folder.name, sha="789123"), + ], key=sort), sorted(result, key=sort)) + + def test_get_real_tag(self): + """Test that a single tag is returned if it exists.""" + tag = models.Tag.objects.create(name="real-tag", last_commit=self.commit) + result = utils.get_tag("real-tag") + + self.assertEqual(tag, result) + + def test_get_grouped_tag(self): + """Test fetching a tag from a group.""" + tag = models.Tag.objects.create( + name="real-tag", group="real-group", last_commit=self.commit + ) + result = utils.get_tag("real-group/real-tag") + + self.assertEqual(tag, result) + + def test_get_group(self): + """Test fetching a group of tags.""" + included = [ + models.Tag.objects.create(name="tag-1", group="real-group"), + models.Tag.objects.create(name="tag-2", group="real-group"), + models.Tag.objects.create(name="tag-3", group="real-group"), + ] + + models.Tag.objects.create(name="not-included-1") + models.Tag.objects.create(name="not-included-2", group="other-group") + + result = utils.get_tag("real-group") + self.assertListEqual(included, result) + + def test_get_tag_404(self): + """Test that an error is raised when we fetch a non-existing tag.""" + models.Tag.objects.create(name="real-tag") + with self.assertRaises(models.Tag.DoesNotExist): + utils.get_tag("fake") + + @mock.patch.object(utils, "get_tag_category") + def test_category_pages(self, get_mock: mock.Mock): + """Test that the category pages function calls the correct method for tags.""" + tag = models.Tag.objects.create(name="tag") + get_mock.return_value = tag + result = utils.get_category_pages(settings.CONTENT_PAGES_PATH / "tags") + self.assertEqual(tag, result) + get_mock.assert_called_once_with(collapse_groups=True) + + def test_get_category_root(self): + """Test that all tags are returned and formatted properly for the tag root page.""" + body = "normal body" + base = {"description": 
markdown.markdown(body), "icon": "fas fa-tag"} + + models.Tag.objects.create(name="tag-1", body=body), + models.Tag.objects.create(name="tag-2", body=body), + models.Tag.objects.create(name="tag-3", body=body), + + models.Tag.objects.create(name="tag-4", body=body, group="tag-group") + models.Tag.objects.create(name="tag-5", body=body, group="tag-group") + + result = utils.get_tag_category(collapse_groups=True) + + self.assertDictEqual({ + "tag-1": {**base, "title": "tag-1"}, + "tag-2": {**base, "title": "tag-2"}, + "tag-3": {**base, "title": "tag-3"}, + "tag-group": { + "title": "tag-group", + "description": "Contains the following tags: tag-4, tag-5", + "icon": "fas fa-tags" + } + }, result) + + def test_get_category_group(self): + """Test the function for a group root page.""" + body = "normal body" + base = {"description": markdown.markdown(body), "icon": "fas fa-tag"} + + included = [ + models.Tag.objects.create(name="tag-1", body=body, group="group"), + models.Tag.objects.create(name="tag-2", body=body, group="group"), + ] + models.Tag.objects.create(name="not-included", body=body) + + result = utils.get_tag_category(included, collapse_groups=False) + self.assertDictEqual({ + "tag-1": {**base, "title": "tag-1"}, + "tag-2": {**base, "title": "tag-2"}, + }, result) + + def test_tag_url(self): + """Test that tag URLs are generated correctly.""" + cases = [ + ({"name": "tag"}, f"{models.Tag.URL_BASE}/tag.md"), + ({"name": "grouped", "group": "abc"}, f"{models.Tag.URL_BASE}/abc/grouped.md"), + ] + + for options, url in cases: + tag = models.Tag(**options) + with self.subTest(tag=tag): + self.assertEqual(url, tag.url) + + @mock.patch("httpx.Client.get") + def test_get_tag_commit(self, get_mock: mock.Mock): + """Test the get commit function with a normal tag.""" + tag = models.Tag.objects.create(name="example") + + authors = json.loads(self.commit.authors) + + get_mock.return_value = httpx.Response( + request=httpx.Request("GET", "https://google.com"), + status_code=200, + json=[{ + "sha": self.commit.sha, + "commit": { + "message": self.commit.message, + "author": authors[0], + "committer": authors[1], + } + }] + ) + + result = utils.get_tag(tag.name) + self.assertEqual(tag, result) + + get_mock.assert_called_once() + call_params = get_mock.call_args[1]["params"] + + self.assertEqual({"path": "/bot/resources/tags/example.md"}, call_params) + self.assertEqual(self.commit, models.Tag.objects.get(name=tag.name).last_commit) + + @mock.patch("httpx.Client.get") + def test_get_group_tag_commit(self, get_mock: mock.Mock): + """Test the get commit function with a group tag.""" + tag = models.Tag.objects.create(name="example", group="group-name") + + authors = json.loads(self.commit.authors) + authors.pop() + self.commit.authors = json.dumps(authors) + self.commit.save() + + get_mock.return_value = httpx.Response( + request=httpx.Request("GET", "https://google.com"), + status_code=200, + json=[{ + "sha": self.commit.sha, + "commit": { + "message": self.commit.message, + "author": authors[0], + "committer": authors[0], + } + }] + ) + + utils.set_tag_commit(tag) + + get_mock.assert_called_once() + call_params = get_mock.call_args[1]["params"] + + self.assertEqual({"path": "/bot/resources/tags/group-name/example.md"}, call_params) + self.assertEqual(self.commit, models.Tag.objects.get(name=tag.name).last_commit) + + @mock.patch.object(utils, "set_tag_commit") + def test_existing_commit(self, set_commit_mock: mock.Mock): + """Test that the existing commit is kept when the tag data has not changed.""" + tag =
models.Tag.objects.create(name="tag-name", body="old body", last_commit=self.commit) + + # This is only applied to the object, not to the database + tag.last_commit = None + + utils.record_tags([tag]) + self.assertEqual(self.commit, tag.last_commit) + + result = utils.get_tag("tag-name") + self.assertEqual(tag, result) + set_commit_mock.assert_not_called() diff --git a/pydis_site/apps/content/tests/test_views.py b/pydis_site/apps/content/tests/test_views.py index eadad7e3..3ef9bcc4 100644 --- a/pydis_site/apps/content/tests/test_views.py +++ b/pydis_site/apps/content/tests/test_views.py @@ -1,12 +1,18 @@ +import textwrap from pathlib import Path from unittest import TestCase +import django.test +import markdown from django.http import Http404 from django.test import RequestFactory, SimpleTestCase, override_settings +from django.urls import reverse +from pydis_site.apps.content.models import Commit, Tag from pydis_site.apps.content.tests.helpers import ( BASE_PATH, MockPagesTestCase, PARSED_CATEGORY_INFO, PARSED_HTML, PARSED_METADATA ) +from pydis_site.apps.content.tests.test_utils import TEST_COMMIT_KWARGS from pydis_site.apps.content.views import PageOrCategoryView @@ -172,7 +178,7 @@ class PageOrCategoryViewTests(MockPagesTestCase, SimpleTestCase, TestCase): for item in context["breadcrumb_items"]: item["path"] = Path(item["path"]) - self.assertEquals( + self.assertEqual( context["breadcrumb_items"], [ {"name": PARSED_CATEGORY_INFO["title"], "path": Path(".")}, @@ -180,3 +186,217 @@ class PageOrCategoryViewTests(MockPagesTestCase, SimpleTestCase, TestCase): {"name": PARSED_CATEGORY_INFO["title"], "path": Path("category/subcategory")}, ] ) + + +class TagViewTests(django.test.TestCase): + """Tests for the TagView class.""" + + def setUp(self): + """Set test helpers, then set up fake filesystem.""" + super().setUp() + self.commit = Commit.objects.create(**TEST_COMMIT_KWARGS) + + def test_routing(self): + """Test that the correct template is returned for each route.""" + Tag.objects.create(name="example", last_commit=self.commit) + Tag.objects.create(name="grouped-tag", group="group-name", last_commit=self.commit) + + cases = [ + ("/pages/tags/example/", "content/tag.html"), + ("/pages/tags/group-name/", "content/listing.html"), + ("/pages/tags/group-name/grouped-tag/", "content/tag.html"), + ] + + for url, template in cases: + with self.subTest(url=url): + response = self.client.get(url) + self.assertEqual(200, response.status_code) + self.assertTemplateUsed(response, template) + + def test_valid_tag_returns_200(self): + """Test that a page is returned for a valid tag.""" + Tag.objects.create(name="example", body="This is the tag body.", last_commit=self.commit) + response = self.client.get("/pages/tags/example/") + self.assertEqual(200, response.status_code) + self.assertIn("This is the tag body", response.content.decode("utf-8")) + self.assertTemplateUsed(response, "content/tag.html") + + def test_invalid_tag_404(self): + """Test that a tag which doesn't exist raises a 404.""" + response = self.client.get("/pages/tags/non-existent/") + self.assertEqual(404, response.status_code) + + def test_context_tag(self): + """Test that the context contains the required data for a tag.""" + body = textwrap.dedent(""" + --- + unused: frontmatter + ---- + Tag content here. 
+ """) + + tag = Tag.objects.create(name="example", body=body, last_commit=self.commit) + response = self.client.get("/pages/tags/example/") + expected = { + "page_title": "example", + "page": markdown.markdown("Tag content here."), + "tag": tag, + "breadcrumb_items": [ + {"name": "Pages", "path": "."}, + {"name": "Tags", "path": "tags"}, + ] + } + for key in expected: + self.assertEqual( + expected[key], response.context.get(key), f"context.{key} did not match" + ) + + def test_context_grouped_tag(self): + """ + Test the context for a tag in a group. + + The only difference between this and a regular tag are the breadcrumbs, + so only those are checked. + """ + Tag.objects.create( + name="example", body="Body text", group="group-name", last_commit=self.commit + ) + response = self.client.get("/pages/tags/group-name/example/") + self.assertListEqual([ + {"name": "Pages", "path": "."}, + {"name": "Tags", "path": "tags"}, + {"name": "group-name", "path": "tags/group-name"}, + ], response.context.get("breadcrumb_items")) + + def test_group_page(self): + """Test rendering of a group's root page.""" + Tag.objects.create(name="tag-1", body="Body 1", group="group-name", last_commit=self.commit) + Tag.objects.create(name="tag-2", body="Body 2", group="group-name", last_commit=self.commit) + Tag.objects.create(name="not-included", last_commit=self.commit) + + response = self.client.get("/pages/tags/group-name/") + content = response.content.decode("utf-8") + + self.assertInHTML("<div class='level-left'>group-name</div>", content) + self.assertInHTML( + f"<a class='level-item fab fa-github' href='{Tag.URL_BASE}/group-name'>", + content + ) + self.assertIn(">tag-1</span>", content) + self.assertIn(">tag-2</span>", content) + self.assertNotIn( + ">not-included</span>", + content, + "Tags not in this group shouldn't be rendered." + ) + + self.assertInHTML("<p>Body 1</p>", content) + + def test_markdown(self): + """Test that markdown content is rendered properly.""" + body = textwrap.dedent(""" + ```py + Hello world! + ``` + + **This text is in bold** + """) + + Tag.objects.create(name="example", body=body, last_commit=self.commit) + response = self.client.get("/pages/tags/example/") + content = response.content.decode("utf-8") + + self.assertInHTML('<code class="language-py">Hello world!</code>', content) + self.assertInHTML("<strong>This text is in bold</strong>", content) + + def test_embed(self): + """Test that an embed from the frontmatter is treated correctly.""" + body = textwrap.dedent(""" + --- + embed: + title: Embed title + image: + url: https://google.com + --- + Tag body. + """) + + Tag.objects.create(name="example", body=body, last_commit=self.commit) + response = self.client.get("/pages/tags/example/") + content = response.content.decode("utf-8") + + self.assertInHTML('<img alt="Embed title" src="https://google.com"/>', content) + self.assertInHTML("<p>Tag body.</p>", content) + + def test_embed_title(self): + """Test that the page title gets set to the embed title.""" + body = textwrap.dedent(""" + --- + embed: + title: Embed title + --- + """) + + Tag.objects.create(name="example", body=body, last_commit=self.commit) + response = self.client.get("/pages/tags/example/") + self.assertEqual( + "Embed title", + response.context.get("page_title"), + "The page title must match the embed title." 
+ ) + + def test_hyperlinked_item(self): + """Test hyperlinking of tags works as intended.""" + filler_before, filler_after = "empty filler text\n\n", "more\nfiller" + body = filler_before + "`!tags return`" + filler_after + Tag.objects.create(name="example", body=body, last_commit=self.commit) + + other_url = reverse("content:tag", kwargs={"location": "return"}) + response = self.client.get("/pages/tags/example/") + self.assertEqual( + markdown.markdown(filler_before + f"[`!tags return`]({other_url})" + filler_after), + response.context.get("page") + ) + + def test_hyperlinked_group(self): + """Test hyperlinking with a group works as intended.""" + Tag.objects.create( + name="example", body="!tags group-name grouped-tag", last_commit=self.commit + ) + Tag.objects.create(name="grouped-tag", group="group-name") + + other_url = reverse("content:tag", kwargs={"location": "group-name/grouped-tag"}) + response = self.client.get("/pages/tags/example/") + self.assertEqual( + markdown.markdown(f"[!tags group-name grouped-tag]({other_url})"), + response.context.get("page") + ) + + def test_hyperlinked_extra_text(self): + """Test hyperlinking when a tag is followed by extra, unrelated text.""" + Tag.objects.create( + name="example", body="!tags other unrelated text", last_commit=self.commit + ) + Tag.objects.create(name="other") + + other_url = reverse("content:tag", kwargs={"location": "other"}) + response = self.client.get("/pages/tags/example/") + self.assertEqual( + markdown.markdown(f"[!tags other]({other_url}) unrelated text"), + response.context.get("page") + ) + + def test_tag_root_page(self): + """Test the root tag page which lists all tags.""" + Tag.objects.create(name="tag-1", last_commit=self.commit) + Tag.objects.create(name="tag-2", last_commit=self.commit) + Tag.objects.create(name="tag-3", last_commit=self.commit) + + response = self.client.get("/pages/tags/") + content = response.content.decode("utf-8") + + self.assertTemplateUsed(response, "content/listing.html") + self.assertInHTML('<div class="level-left">Tags</div>', content) + + for tag_number in range(1, 4): + self.assertIn(f"tag-{tag_number}</span>", content) diff --git a/pydis_site/apps/content/urls.py b/pydis_site/apps/content/urls.py index f8496095..a7695a27 100644 --- a/pydis_site/apps/content/urls.py +++ b/pydis_site/apps/content/urls.py @@ -3,7 +3,7 @@ from pathlib import Path from django_distill import distill_path -from . import views +from . 
import utils, views app_name = "content" @@ -29,15 +29,38 @@ def __get_all_files(root: Path, folder: typing.Optional[Path] = None) -> list[st return results -def get_all_pages() -> typing.Iterator[dict[str, str]]: +DISTILL_RETURN = typing.Iterator[dict[str, str]] + + +def get_all_pages() -> DISTILL_RETURN: """Yield a dict of all page categories.""" for location in __get_all_files(Path("pydis_site", "apps", "content", "resources")): yield {"location": location} +def get_all_tags() -> DISTILL_RETURN: + """Return all tag names and groups in static builds.""" + # We instantiate the set with None here to make filtering it out later easier + # whether it was added in the loop or not + groups = {None} + for tag in utils.get_tags_static(): + groups.add(tag.group) + yield {"location": (f"{tag.group}/" if tag.group else "") + tag.name} + + groups.remove(None) + for group in groups: + yield {"location": group} + + urlpatterns = [ distill_path("", views.PageOrCategoryView.as_view(), name='pages'), distill_path( + "tags/<path:location>/", + views.TagView.as_view(), + name="tag", + distill_func=get_all_tags + ), + distill_path( "<path:location>/", views.PageOrCategoryView.as_view(), name='page_category', diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py index d3f270ff..c12893ef 100644 --- a/pydis_site/apps/content/utils.py +++ b/pydis_site/apps/content/utils.py @@ -1,14 +1,41 @@ +import datetime +import functools +import json +import tarfile +import tempfile +from io import BytesIO from pathlib import Path -from typing import Dict, Tuple import frontmatter +import httpx import markdown import yaml from django.http import Http404 +from django.utils import timezone from markdown.extensions.toc import TocExtension +from pydis_site import settings +from .models import Commit, Tag -def get_category(path: Path) -> Dict[str, str]: +TAG_CACHE_TTL = datetime.timedelta(hours=1) + + +def github_client(**kwargs) -> httpx.Client: + """Get a client to access the GitHub API with important settings pre-configured.""" + client = httpx.Client( + base_url=settings.GITHUB_API, + follow_redirects=True, + timeout=settings.TIMEOUT_PERIOD, + **kwargs + ) + if settings.GITHUB_TOKEN: # pragma: no cover + if not client.headers.get("Authorization"): + client.headers = {"Authorization": f"token {settings.GITHUB_TOKEN}"} + + return client + + +def get_category(path: Path) -> dict[str, str]: """Load category information by name from _info.yml.""" if not path.is_dir(): raise Http404("Category not found.") @@ -16,7 +43,7 @@ def get_category(path: Path) -> Dict[str, str]: return yaml.safe_load(path.joinpath("_info.yml").read_text(encoding="utf-8")) -def get_categories(path: Path) -> Dict[str, Dict]: +def get_categories(path: Path) -> dict[str, dict]: """Get information for all categories.""" categories = {} @@ -27,8 +54,253 @@ def get_categories(path: Path) -> Dict[str, Dict]: return categories -def get_category_pages(path: Path) -> Dict[str, Dict]: +def get_tags_static() -> list[Tag]: + """ + Fetch tag information in static builds. + + This also includes some fake tags to preview the tag groups feature. + This will return a cached value, so it should only be used for static builds. + """ + tags = fetch_tags() + for tag in tags[3:5]: # pragma: no cover + tag.group = "very-cool-group" + return tags + + +def fetch_tags() -> list[Tag]: + """ + Fetch tag data from the GitHub API. 
+ + The entire repository is downloaded and extracted locally because + getting file content would require one request per file, and can get rate-limited. + """ + with github_client() as client: + # Grab metadata + metadata = client.get("/repos/python-discord/bot/contents/bot/resources") + metadata.raise_for_status() + + hashes = {} + for entry in metadata.json(): + if entry["type"] == "dir": + # Tag group + files = client.get(entry["url"]) + files.raise_for_status() + files = files.json() + else: + files = [entry] + + for file in files: + hashes[file["name"]] = file["sha"] + + # Download the files + tar_file = client.get("/repos/python-discord/bot/tarball") + tar_file.raise_for_status() + + tags = [] + with tempfile.TemporaryDirectory() as folder: + with tarfile.open(fileobj=BytesIO(tar_file.content)) as repo: + included = [] + for file in repo.getmembers(): + if "/bot/resources/tags" in file.path: + included.append(file) + repo.extractall(folder, included) + + for tag_file in Path(folder).rglob("*.md"): + name = tag_file.name + group = None + if tag_file.parent.name != "tags": + # Tags in sub-folders are considered part of a group + group = tag_file.parent.name + + tags.append(Tag( + name=name.removesuffix(".md"), + sha=hashes[name], + group=group, + body=tag_file.read_text(encoding="utf-8"), + last_commit=None, + )) + + return tags + + +def set_tag_commit(tag: Tag) -> None: + """Fetch commit information from the API, and save it for the tag.""" + if settings.STATIC_BUILD: # pragma: no cover + # Static builds request every page during build, which can rate-limit it. + # Instead, we return some fake data. + tag.last_commit = Commit( + sha="68da80efc00d9932a209d5cccd8d344cec0f09ea", + message="Initial Commit\n\nTHIS IS FAKE DEMO DATA", + date=datetime.datetime(2018, 2, 3, 12, 20, 26, tzinfo=datetime.timezone.utc), + authors=json.dumps([{"name": "Joseph", "email": "[email protected]"}]), + ) + return + + path = "/bot/resources/tags" + if tag.group: + path += f"/{tag.group}" + path += f"/{tag.name}.md" + + # Fetch and set the commit + with github_client() as client: + data = client.get("/repos/python-discord/bot/commits", params={"path": path}) + data.raise_for_status() + data = data.json()[0] + + commit = data["commit"] + author, committer = commit["author"], commit["committer"] + + date = datetime.datetime.strptime(committer["date"], settings.GITHUB_TIMESTAMP_FORMAT) + date = date.replace(tzinfo=datetime.timezone.utc) + + if author["email"] == committer["email"]: + authors = [author] + else: + authors = [author, committer] + + commit_obj, _ = Commit.objects.get_or_create( + sha=data["sha"], + message=commit["message"], + date=date, + authors=json.dumps(authors), + ) + tag.last_commit = commit_obj + tag.save() + + +def record_tags(tags: list[Tag]) -> None: + """Sync the database with an updated set of tags.""" + # Remove entries which no longer exist + Tag.objects.exclude(name__in=[tag.name for tag in tags]).delete() + + # Insert/update the tags + for new_tag in tags: + try: + old_tag = Tag.objects.get(name=new_tag.name) + except Tag.DoesNotExist: + # The tag is not in the database yet, + # pretend its previous state is the current state + old_tag = new_tag + + if old_tag.sha == new_tag.sha and old_tag.last_commit is not None: + # We still have an up-to-date commit entry + new_tag.last_commit = old_tag.last_commit + + new_tag.save() + + # Drop old, unused commits + Commit.objects.filter(tag__isnull=True).delete() + + +def get_tags() -> list[Tag]: + """Return a list of all tags visible to
the application, from the cache or API.""" + if settings.STATIC_BUILD: # pragma: no cover + last_update = None + else: + last_update = ( + Tag.objects.values_list("last_updated", flat=True) + .order_by("last_updated").first() + ) + + if last_update is None or timezone.now() >= (last_update + TAG_CACHE_TTL): + # Stale or empty cache + if settings.STATIC_BUILD: # pragma: no cover + tags = get_tags_static() + else: + tags = fetch_tags() + record_tags(tags) + + return tags + else: + # Get tags from database + return list(Tag.objects.all()) + + +def get_tag(path: str, *, skip_sync: bool = False) -> Tag | list[Tag]: + """ + Return a tag based on the search location. + + If certain tag data is out of sync (for instance a commit date is missing), + an extra request will be made to sync the information. + + The tag name and group must match. If only one argument is provided in the path, + it's assumed to be either a group name or a no-group tag name. + + If it's a group name, a list of tags which belong to it is returned. + """ + path = path.split("/") + if len(path) == 2: + group, name = path + else: + name = path[0] + group = None + + matches = [] + for tag in get_tags(): + if tag.name == name and tag.group == group: + if tag.last_commit is None and not skip_sync: + set_tag_commit(tag) + return tag + elif tag.group == name and group is None: + matches.append(tag) + + if matches: + return matches + + raise Tag.DoesNotExist() + + +def get_tag_category(tags: list[Tag] | None = None, *, collapse_groups: bool) -> dict[str, dict]: + """ + Generate context data for `tags`, or all tags if None. + + If `tags` is None, `get_tags` is used to populate the data. + If `collapse_groups` is True, tags with parent groups are not included in the list, + and instead the parent itself is included as a single entry with its sub-tags + in the description.
+ """ + if not tags: + tags = get_tags() + + data = [] + groups = {} + + # Create all the metadata for the tags + for tag in tags: + if tag.group is None or not collapse_groups: + content = frontmatter.parse(tag.body)[1] + data.append({ + "title": tag.name, + "description": markdown.markdown(content, extensions=["pymdownx.superfences"]), + "icon": "fas fa-tag", + }) + else: + if tag.group not in groups: + groups[tag.group] = { + "title": tag.group, + "description": [tag.name], + "icon": "fas fa-tags", + } + else: + groups[tag.group]["description"].append(tag.name) + + # Flatten group description into a single string + for group in groups.values(): + # If the following string is updated, make sure to update it in the frontend JS as well + group["description"] = "Contains the following tags: " + ", ".join(group["description"]) + data.append(group) + + # Sort the tags, and return them in the proper format + return {tag["title"]: tag for tag in sorted(data, key=lambda tag: tag["title"].casefold())} + + +def get_category_pages(path: Path) -> dict[str, dict]: """Get all page names and their metadata at a category path.""" + # Special handling for tags + if path == Path(__file__).parent / "resources/tags": + return get_tag_category(collapse_groups=True) + pages = {} for item in path.glob("*.md"): @@ -39,7 +311,7 @@ def get_category_pages(path: Path) -> Dict[str, Dict]: return pages -def get_page(path: Path) -> Tuple[str, Dict]: +def get_page(path: Path) -> tuple[str, dict]: """Get one specific page.""" if not path.is_file(): raise Http404("Page not found.") diff --git a/pydis_site/apps/content/views/__init__.py b/pydis_site/apps/content/views/__init__.py index 70ea1c7a..a969b1dc 100644 --- a/pydis_site/apps/content/views/__init__.py +++ b/pydis_site/apps/content/views/__init__.py @@ -1,3 +1,4 @@ from .page_category import PageOrCategoryView +from .tags import TagView -__all__ = ["PageOrCategoryView"] +__all__ = ["PageOrCategoryView", "TagView"] diff --git a/pydis_site/apps/content/views/page_category.py b/pydis_site/apps/content/views/page_category.py index 5af77aff..062c2bc1 100644 --- a/pydis_site/apps/content/views/page_category.py +++ b/pydis_site/apps/content/views/page_category.py @@ -1,18 +1,17 @@ -import typing as t from pathlib import Path import frontmatter from django.conf import settings -from django.http import Http404 +from django.http import Http404, HttpRequest, HttpResponse from django.views.generic import TemplateView -from pydis_site.apps.content import utils +from pydis_site.apps.content import models, utils class PageOrCategoryView(TemplateView): """Handles pages and page categories.""" - def dispatch(self, request: t.Any, *args, **kwargs) -> t.Any: + def dispatch(self, request: HttpRequest, *args, **kwargs) -> HttpResponse: """Conform URL path location to the filesystem path.""" self.location = Path(kwargs.get("location", "")) @@ -25,7 +24,7 @@ class PageOrCategoryView(TemplateView): return super().dispatch(request, *args, **kwargs) - def get_template_names(self) -> t.List[str]: + def get_template_names(self) -> list[str]: """Checks if the view uses the page template or listing template.""" if self.page_path.is_file(): template_name = "content/page.html" @@ -36,7 +35,7 @@ class PageOrCategoryView(TemplateView): return [template_name] - def get_context_data(self, **kwargs) -> t.Dict[str, t.Any]: + def get_context_data(self, **kwargs) -> dict[str, any]: """Assign proper context variables based on what resource user requests.""" context = super().get_context_data(**kwargs) @@ 
-73,7 +72,7 @@ class PageOrCategoryView(TemplateView): return context @staticmethod - def _get_page_context(path: Path) -> t.Dict[str, t.Any]: + def _get_page_context(path: Path) -> dict[str, any]: page, metadata = utils.get_page(path) return { "page": page, @@ -84,7 +83,7 @@ class PageOrCategoryView(TemplateView): } @staticmethod - def _get_category_context(path: Path) -> t.Dict[str, t.Any]: + def _get_category_context(path: Path) -> dict[str, any]: category = utils.get_category(path) return { "categories": utils.get_categories(path), @@ -92,4 +91,7 @@ class PageOrCategoryView(TemplateView): "page_title": category["title"], "page_description": category["description"], "icon": category.get("icon"), + "app_name": "content:page_category", + "is_tag_listing": "/resources/tags" in path.as_posix(), + "tag_url": models.Tag.URL_BASE, } diff --git a/pydis_site/apps/content/views/tags.py b/pydis_site/apps/content/views/tags.py new file mode 100644 index 00000000..4f4bb5a2 --- /dev/null +++ b/pydis_site/apps/content/views/tags.py @@ -0,0 +1,124 @@ +import re +import typing + +import frontmatter +import markdown +from django.conf import settings +from django.http import Http404 +from django.urls import reverse +from django.views.generic import TemplateView + +from pydis_site.apps.content import utils +from pydis_site.apps.content.models import Tag + +# The following regex tries to parse a tag command +# It'll read up to two words separated by spaces +# If the command does not include a group, the tag name will be in the `first` group +# If there's a second word after the command, or if there's a tag group, extra logic +# is necessary to determine whether it's a tag with a group, or a tag with text after it +COMMAND_REGEX = re.compile(r"`*!tags? (?P<first>[\w-]+)(?P<second> [\w-]+)?`*") + + +class TagView(TemplateView): + """Handles tag pages.""" + + tag: typing.Union[Tag, list[Tag]] + is_group: bool + + def setup(self, *args, **kwargs) -> None: + """Look for a tag, and configure the view.""" + super().setup(*args, **kwargs) + + try: + self.tag = utils.get_tag(kwargs.get("location")) + self.is_group = isinstance(self.tag, list) + except Tag.DoesNotExist: + raise Http404 + + def get_template_names(self) -> list[str]: + """Either return the tag page template, or the listing.""" + if self.is_group: + template_name = "content/listing.html" + else: + template_name = "content/tag.html" + + return [template_name] + + def get_context_data(self, **kwargs) -> dict: + """Get the relevant context for this tag page or group.""" + context = super().get_context_data(**kwargs) + context["breadcrumb_items"] = [{ + "name": utils.get_category(settings.CONTENT_PAGES_PATH / location)["title"], + "path": location, + } for location in (".", "tags")] + + if self.is_group: + self._set_group_context(context, self.tag) + else: + self._set_tag_context(context, self.tag) + + return context + + @staticmethod + def _set_tag_context(context: dict[str, any], tag: Tag) -> None: + """Update the context with the information for a tag page.""" + context.update({ + "page_title": tag.name, + "tag": tag, + }) + + if tag.group: + # Add group names to the breadcrumbs + context["breadcrumb_items"].append({ + "name": tag.group, + "path": f"tags/{tag.group}", + }) + + # Clean up tag body + body = frontmatter.parse(tag.body) + content = body[1] + + # Check for tags which can be hyperlinked + def sub(match: re.Match) -> str: + first, second = match.groups() + location = first + text, extra = match.group(), "" + + if second is not None: + #
Possibly a tag group + try: + new_location = f"{first}/{second.strip()}" + utils.get_tag(new_location, skip_sync=True) + location = new_location + except Tag.DoesNotExist: + # Not a group, remove the second argument from the link + extra = text[text.find(second):] + text = text[:text.find(second)] + + link = reverse("content:tag", kwargs={"location": location}) + return f"[{text}]({link}){extra}" + content = COMMAND_REGEX.sub(sub, content) + + # Add support for some embed elements + if embed := body[0].get("embed"): + context["page_title"] = embed["title"] + if image := embed.get("image"): + content = f"![{embed['title']}]({image['url']})\n\n" + content + + # Insert the content + context["page"] = markdown.markdown(content, extensions=["pymdownx.superfences"]) + + @staticmethod + def _set_group_context(context: dict[str, any], tags: list[Tag]) -> None: + """Update the context with the information for a group of tags.""" + group = tags[0].group + context.update({ + "categories": {}, + "pages": utils.get_tag_category(tags, collapse_groups=False), + "page_title": group, + "icon": "fas fa-tags", + "is_tag_listing": True, + "app_name": "content:tag", + "path": f"{group}/", + "tag_url": f"{tags[0].URL_BASE}/{group}" + }) diff --git a/pydis_site/apps/events/apps.py b/pydis_site/apps/events/apps.py index a1cf09ef..70762bc2 100644 --- a/pydis_site/apps/events/apps.py +++ b/pydis_site/apps/events/apps.py @@ -4,4 +4,4 @@ from django.apps import AppConfig class EventsConfig(AppConfig): """Django AppConfig for events app.""" - name = 'events' + name = 'pydis_site.apps.events' diff --git a/pydis_site/apps/home/tests/test_repodata_helpers.py b/pydis_site/apps/home/tests/test_repodata_helpers.py index d43bd28e..a963f733 100644 --- a/pydis_site/apps/home/tests/test_repodata_helpers.py +++ b/pydis_site/apps/home/tests/test_repodata_helpers.py @@ -36,13 +36,13 @@ class TestRepositoryMetadataHelpers(TestCase): """Executed before each test method.""" self.home_view = HomeView() - @mock.patch('requests.get', side_effect=mocked_requests_get) + @mock.patch('httpx.get', side_effect=mocked_requests_get) def test_returns_metadata(self, _: mock.MagicMock): """Test if the _get_repo_data helper actually returns what it should.""" metadata = self.home_view._get_repo_data() self.assertIsInstance(metadata[0], RepositoryMetadata) - self.assertEquals(len(metadata), len(self.home_view.repos)) + self.assertEqual(len(metadata), len(self.home_view.repos)) def test_returns_cached_metadata(self): """Test if the _get_repo_data helper returns cached data when available.""" @@ -59,7 +59,7 @@ class TestRepositoryMetadataHelpers(TestCase): self.assertIsInstance(metadata[0], RepositoryMetadata) self.assertIsInstance(str(metadata[0]), str) - @mock.patch('requests.get', side_effect=mocked_requests_get) + @mock.patch('httpx.get', side_effect=mocked_requests_get) def test_refresh_stale_metadata(self, _: mock.MagicMock): """Test if the _get_repo_data helper will refresh when the data is stale.""" repo_data = RepositoryMetadata( @@ -75,18 +75,18 @@ class TestRepositoryMetadataHelpers(TestCase): self.assertIsInstance(metadata[0], RepositoryMetadata) - @mock.patch('requests.get', side_effect=mocked_requests_get) + @mock.patch('httpx.get', side_effect=mocked_requests_get) def test_returns_api_data(self, _: mock.MagicMock): """Tests if the _get_api_data helper returns what it should.""" api_data = self.home_view._get_api_data() repo = self.home_view.repos[0] self.assertIsInstance(api_data, dict) - self.assertEquals(len(api_data),
len(self.home_view.repos)) + self.assertEqual(len(api_data), len(self.home_view.repos)) self.assertIn(repo, api_data.keys()) self.assertIn("stargazers_count", api_data[repo]) - @mock.patch('requests.get', side_effect=mocked_requests_get) + @mock.patch('httpx.get', side_effect=mocked_requests_get) def test_mocked_requests_get(self, mock_get: mock.MagicMock): """Tests if our mocked_requests_get is returning what it should.""" success_data = mock_get(HomeView.github_api) @@ -98,7 +98,7 @@ class TestRepositoryMetadataHelpers(TestCase): self.assertIsNotNone(success_data.json_data) self.assertIsNone(fail_data.json_data) - @mock.patch('requests.get') + @mock.patch('httpx.get') def test_falls_back_to_database_on_error(self, mock_get: mock.MagicMock): """Tests that fallback to the database is performed when we get garbage back.""" repo_data = RepositoryMetadata( @@ -117,7 +117,7 @@ class TestRepositoryMetadataHelpers(TestCase): [item] = metadata self.assertEqual(item, repo_data) - @mock.patch('requests.get') + @mock.patch('httpx.get') def test_falls_back_to_database_on_error_without_entries(self, mock_get: mock.MagicMock): """Tests that fallback to the database is performed when we get garbage back.""" mock_get.return_value.json.return_value = ['garbage'] @@ -126,7 +126,7 @@ class TestRepositoryMetadataHelpers(TestCase): with self.assertLogs(): metadata = self.home_view._get_repo_data() - self.assertEquals(len(metadata), 0) + self.assertEqual(len(metadata), 0) def test_cleans_up_stale_metadata(self): """Tests that we clean up stale metadata when we start the HomeView.""" diff --git a/pydis_site/apps/home/views/home.py b/pydis_site/apps/home/views/home.py index 69e706c5..8a165682 100644 --- a/pydis_site/apps/home/views/home.py +++ b/pydis_site/apps/home/views/home.py @@ -1,7 +1,7 @@ import logging from typing import Dict, List -import requests +import httpx from django.core.handlers.wsgi import WSGIRequest from django.http import HttpResponse from django.shortcuts import render @@ -32,9 +32,7 @@ class HomeView(View): def __init__(self): """Clean up stale RepositoryMetadata.""" - self._static_build = settings.env("STATIC_BUILD") - - if not self._static_build: + if not settings.STATIC_BUILD: RepositoryMetadata.objects.exclude(repo_name__in=self.repos).delete() # If no token is defined (for example in local development), then @@ -56,12 +54,12 @@ class HomeView(View): repo_dict = {} try: # Fetch the data from the GitHub API - api_data: List[dict] = requests.get( + api_data: List[dict] = httpx.get( self.github_api, headers=self.headers, timeout=settings.TIMEOUT_PERIOD ).json() - except requests.exceptions.Timeout: + except httpx.TimeoutException: log.error("Request to fetch GitHub repository metadata for timed out!") return repo_dict @@ -94,7 +92,7 @@ class HomeView(View): def _get_repo_data(self) -> List[RepositoryMetadata]: """Build a list of RepositoryMetadata objects that we can use to populate the front page.""" # First off, load the timestamp of the least recently updated entry. 
- if self._static_build: + if settings.STATIC_BUILD: last_update = None else: last_update = ( @@ -121,7 +119,7 @@ class HomeView(View): for api_data in api_repositories.values() ] - if settings.env("STATIC_BUILD"): + if settings.STATIC_BUILD: return data else: return RepositoryMetadata.objects.bulk_create(data) diff --git a/pydis_site/apps/redirect/apps.py b/pydis_site/apps/redirect/apps.py index 9b70d169..0234bc93 100644 --- a/pydis_site/apps/redirect/apps.py +++ b/pydis_site/apps/redirect/apps.py @@ -4,4 +4,4 @@ from django.apps import AppConfig class RedirectConfig(AppConfig): """AppConfig instance for Redirect app.""" - name = 'redirect' + name = 'pydis_site.apps.redirect' diff --git a/pydis_site/apps/redirect/urls.py b/pydis_site/apps/redirect/urls.py index f7ddf45b..067cccc3 100644 --- a/pydis_site/apps/redirect/urls.py +++ b/pydis_site/apps/redirect/urls.py @@ -3,6 +3,7 @@ import re import yaml from django import conf +from django.http import HttpResponse from django.urls import URLPattern, path from django_distill import distill_path @@ -31,7 +32,7 @@ class Redirect: def map_redirect(name: str, data: Redirect) -> list[URLPattern]: """Return a pattern using the Redirects app, or a static HTML redirect for static builds.""" - if not settings.env("STATIC_BUILD"): + if not settings.STATIC_BUILD: # Normal dynamic redirect return [path( data.original_path, @@ -53,7 +54,7 @@ def map_redirect(name: str, data: Redirect) -> list[URLPattern]: class RedirectFunc: def __init__(self, new_url: str, _name: str): - self.result = REDIRECT_TEMPLATE.format(url=new_url) + self.result = HttpResponse(REDIRECT_TEMPLATE.format(url=new_url)) self.__qualname__ = _name def __call__(self, *args, **kwargs): @@ -95,7 +96,7 @@ def map_redirect(name: str, data: Redirect) -> list[URLPattern]: return [distill_path( data.original_path, - lambda *args: REDIRECT_TEMPLATE.format(url=new_redirect), + lambda *args: HttpResponse(REDIRECT_TEMPLATE.format(url=new_redirect)), name=name, )] diff --git a/pydis_site/apps/resources/apps.py b/pydis_site/apps/resources/apps.py index e0c235bd..93117654 100644 --- a/pydis_site/apps/resources/apps.py +++ b/pydis_site/apps/resources/apps.py @@ -4,4 +4,4 @@ from django.apps import AppConfig class ResourcesConfig(AppConfig): """AppConfig instance for Resources app.""" - name = 'resources' + name = 'pydis_site.apps.resources' diff --git a/pydis_site/apps/resources/resources/atom.yaml b/pydis_site/apps/resources/resources/atom.yaml deleted file mode 100644 index 26e125b1..00000000 --- a/pydis_site/apps/resources/resources/atom.yaml +++ /dev/null @@ -1,14 +0,0 @@ -description: A free Electron-based editor, a "hackable text editor for the 21st century", maintained - by the GitHub team. -name: Atom -title_url: https://atom.io/ -tags: - topics: - - general - payment_tiers: - - free - difficulty: - - beginner - - intermediate - type: - - tool diff --git a/pydis_site/apps/resources/resources/neural_networks_from_scratch_in_python.yaml b/pydis_site/apps/resources/resources/neural_networks_from_scratch_in_python.yaml index c4ad1e1b..26e88cb9 100644 --- a/pydis_site/apps/resources/resources/neural_networks_from_scratch_in_python.yaml +++ b/pydis_site/apps/resources/resources/neural_networks_from_scratch_in_python.yaml @@ -2,7 +2,7 @@ description: '"Neural Networks From Scratch" is a book intended to teach you how without any libraries, so you can better understand deep learning and how all of the elements work. 
This is so you can go out and do new/novel things with deep learning as well as to become more successful with even more basic models. This book is to accompany the usual free tutorial videos and sample code from youtube.com/sentdex.' -name: Neural Networks from Scratch in Python +name: Neural Networks from Scratch title_url: https://nnfs.io/ urls: - icon: branding/goodreads diff --git a/pydis_site/apps/resources/resources/pycharm.yaml b/pydis_site/apps/resources/resources/pycharm.yaml index 574158bc..e8c787e6 100644 --- a/pydis_site/apps/resources/resources/pycharm.yaml +++ b/pydis_site/apps/resources/resources/pycharm.yaml @@ -1,6 +1,7 @@ description: The very best Python IDE, with a wealth of advanced features and convenience functions. name: PyCharm +title_image: https://resources.jetbrains.com/storage/products/pycharm/img/meta/pycharm_logo_300x300.png title_url: https://www.jetbrains.com/pycharm/ tags: topics: diff --git a/pydis_site/apps/resources/resources/the_algorithms_github.yaml b/pydis_site/apps/resources/resources/the_algorithms_github.yaml new file mode 100644 index 00000000..30a0a5da --- /dev/null +++ b/pydis_site/apps/resources/resources/the_algorithms_github.yaml @@ -0,0 +1,17 @@ +description: A git repository of Python implementations of many of the algorithms taught in algorithm + and data structure courses, as well as algorithms for neural networks, blockchains, and compression. This is + a great resource for students wanting to see algorithms implemented in a familiar language. +name: The Algorithms +title_url: https://github.com/TheAlgorithms/Python +tags: + topics: + - algorithms and data structures + - data science + - security + payment_tiers: + - free + difficulty: + - beginner + - intermediate + type: + - tutorial diff --git a/pydis_site/apps/resources/resources/vcokltfre_discord_bot_tutorial.yaml b/pydis_site/apps/resources/resources/vcokltfre_discord_bot_tutorial.yaml index 61a7b6f6..12f2a154 100644 --- a/pydis_site/apps/resources/resources/vcokltfre_discord_bot_tutorial.yaml +++ b/pydis_site/apps/resources/resources/vcokltfre_discord_bot_tutorial.yaml @@ -1,4 +1,4 @@ -description: This tutorial, written by Python Discord staff member vcokltfre, +description: This tutorial, written by vcokltfre, will walk you through all the aspects of creating your own Discord bot, starting from creating the bot user itself.
name: vcokltfre's Discord Bot Tutorial diff --git a/pydis_site/apps/resources/templatetags/get_category_icon.py b/pydis_site/apps/resources/templatetags/get_category_icon.py index 71f1393f..30bc4eaa 100644 --- a/pydis_site/apps/resources/templatetags/get_category_icon.py +++ b/pydis_site/apps/resources/templatetags/get_category_icon.py @@ -21,6 +21,7 @@ _ICONS = { "Paid": "fa-dollar-sign", "Podcast": "fa-microphone-alt", "Project Ideas": "fa-lightbulb-o", + "Security": "fa-solid fa-lock", "Software Design": "fa-paint-brush", "Subscription": "fa-credit-card", "Testing": "fa-vial", diff --git a/pydis_site/apps/staff/apps.py b/pydis_site/apps/staff/apps.py index 70a15f40..d68a80c3 100644 --- a/pydis_site/apps/staff/apps.py +++ b/pydis_site/apps/staff/apps.py @@ -4,4 +4,4 @@ from django.apps import AppConfig class StaffConfig(AppConfig): """Django AppConfig for the staff app.""" - name = 'staff' + name = 'pydis_site.apps.staff' diff --git a/pydis_site/settings.py b/pydis_site/settings.py index 17f220f3..e9e0ba67 100644 --- a/pydis_site/settings.py +++ b/pydis_site/settings.py @@ -21,7 +21,6 @@ import environ import sentry_sdk from sentry_sdk.integrations.django import DjangoIntegration - env = environ.Env( DEBUG=(bool, False), SITE_DSN=(str, ""), @@ -30,17 +29,31 @@ env = environ.Env( GIT_SHA=(str, 'development'), TIMEOUT_PERIOD=(int, 5), GITHUB_TOKEN=(str, None), + GITHUB_APP_ID=(str, None), + GITHUB_APP_KEY=(str, None), ) GIT_SHA = env("GIT_SHA") +GITHUB_API = "https://api.github.com" GITHUB_TOKEN = env("GITHUB_TOKEN") - -sentry_sdk.init( - dsn=env('SITE_DSN'), - integrations=[DjangoIntegration()], - send_default_pii=True, - release=f"site@{GIT_SHA}" -) +GITHUB_APP_ID = env("GITHUB_APP_ID") +GITHUB_APP_KEY = env("GITHUB_APP_KEY") +GITHUB_TIMESTAMP_FORMAT = "%Y-%m-%dT%H:%M:%SZ" +"""The datetime string format GitHub uses.""" + +STATIC_BUILD: bool = env("STATIC_BUILD") + +if GITHUB_APP_KEY and (key_file := Path(GITHUB_APP_KEY)).is_file(): + # Allow the OAuth key to be loaded from a file + GITHUB_APP_KEY = key_file.read_text(encoding="utf-8") + +if not STATIC_BUILD: + sentry_sdk.init( + dsn=env('SITE_DSN'), + integrations=[DjangoIntegration()], + send_default_pii=True, + release=f"site@{GIT_SHA}" + ) # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) @@ -91,7 +104,7 @@ else: NON_STATIC_APPS = [ 'pydis_site.apps.api', 'pydis_site.apps.staff', -] if not env("STATIC_BUILD") else [] +] if not STATIC_BUILD else [] INSTALLED_APPS = [ *NON_STATIC_APPS, @@ -120,25 +133,29 @@ INSTALLED_APPS = [ if not env("BUILDING_DOCKER"): INSTALLED_APPS.append("django_prometheus") -NON_STATIC_MIDDLEWARE = [ - 'django_prometheus.middleware.PrometheusBeforeMiddleware', -] if not env("STATIC_BUILD") else [] - -# Ensure that Prometheus middlewares are first and last here. 
-MIDDLEWARE = [ - *NON_STATIC_MIDDLEWARE, - - 'django.middleware.security.SecurityMiddleware', - 'whitenoise.middleware.WhiteNoiseMiddleware', - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', - - 'django_prometheus.middleware.PrometheusAfterMiddleware' -] +if STATIC_BUILD: + # The only middleware required during static builds + MIDDLEWARE = [ + 'django.contrib.sessions.middleware.SessionMiddleware', + 'django.contrib.auth.middleware.AuthenticationMiddleware', + 'django.contrib.messages.middleware.MessageMiddleware', + ] +else: + # Ensure that Prometheus middlewares are first and last here. + MIDDLEWARE = [ + 'django_prometheus.middleware.PrometheusBeforeMiddleware', + + 'django.middleware.security.SecurityMiddleware', + 'whitenoise.middleware.WhiteNoiseMiddleware', + 'django.contrib.sessions.middleware.SessionMiddleware', + 'django.middleware.common.CommonMiddleware', + 'django.middleware.csrf.CsrfViewMiddleware', + 'django.contrib.auth.middleware.AuthenticationMiddleware', + 'django.contrib.messages.middleware.MessageMiddleware', + 'django.middleware.clickjacking.XFrameOptionsMiddleware', + + 'django_prometheus.middleware.PrometheusAfterMiddleware' + ] ROOT_URLCONF = 'pydis_site.urls' @@ -167,7 +184,7 @@ WSGI_APPLICATION = 'pydis_site.wsgi.application' DATABASES = { 'default': env.db(), 'metricity': env.db('METRICITY_DB_URL'), -} if not env("STATIC_BUILD") else {} +} if not STATIC_BUILD else {} # Password validation # https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators @@ -192,7 +209,6 @@ AUTH_PASSWORD_VALIDATORS = [ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True -USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) @@ -219,6 +235,9 @@ if DEBUG: else: PARENT_HOST = env('PARENT_HOST', default='pythondiscord.com') +# Django Model Configuration +DEFAULT_AUTO_FIELD = "django.db.models.AutoField" + # Django REST framework # https://www.django-rest-framework.org REST_FRAMEWORK = { diff --git a/pydis_site/static/css/content/color.css b/pydis_site/static/css/content/color.css new file mode 100644 index 00000000..f4801c28 --- /dev/null +++ b/pydis_site/static/css/content/color.css @@ -0,0 +1,7 @@ +.content .fa-github { + color: black; +} + +.content .fa-github:hover { + color: #7289DA; +} diff --git a/pydis_site/static/css/content/tag.css b/pydis_site/static/css/content/tag.css new file mode 100644 index 00000000..79795f9e --- /dev/null +++ b/pydis_site/static/css/content/tag.css @@ -0,0 +1,13 @@ +.content a * { + /* This is the original color, but propagated down the chain */ + /* which allows for elements inside links, such as codeblocks */ + color: #7289DA; +} + +.content a *:hover { + color: dimgray; +} + +span.update-time { + text-decoration: black underline dotted; +} diff --git a/pydis_site/static/images/content/fix-ssl-certificate/pem.png b/pydis_site/static/images/content/fix-ssl-certificate/pem.png Binary files differnew file mode 100644 index 00000000..face520f --- /dev/null +++ b/pydis_site/static/images/content/fix-ssl-certificate/pem.png diff --git a/pydis_site/static/js/content/listing.js b/pydis_site/static/js/content/listing.js new file mode 100644 index 00000000..4b722632 --- /dev/null +++ b/pydis_site/static/js/content/listing.js @@ -0,0 +1,41 @@ 
+/** + * Trim a tag listing to only show a few lines of content. + */ +function trimTag() { + const containers = document.getElementsByClassName("tag-container"); + for (const container of containers) { + if (container.textContent.startsWith("Contains the following tags:")) { + // Tag group, no need to trim + continue; + } + + // Remove every element after the first two paragraphs + while (container.children.length > 2) { + container.removeChild(container.lastChild); + } + + // Trim down the elements if they are too long + const containerLength = container.textContent.length; + if (containerLength > 300) { + if (containerLength - container.firstChild.textContent.length > 300) { + // The first element alone takes up more than 300 characters + container.removeChild(container.lastChild); + } + + let last = container.lastChild.lastChild; + while (container.textContent.length > 300 && container.lastChild.childNodes.length > 0) { + last = container.lastChild.lastChild; + last.remove(); + } + + if (container.textContent.length > 300 && (last instanceof HTMLElement && last.tagName !== "CODE")) { + // Add back the final element (up to a period if possible) + const stop = last.textContent.indexOf("."); + last.textContent = last.textContent.slice(0, stop > 0 ? stop + 1: null); + container.lastChild.appendChild(last); + } + } + } +} + +trimTag(); diff --git a/pydis_site/templates/base/navbar.html b/pydis_site/templates/base/navbar.html index d7fb4f4c..931693c8 100644 --- a/pydis_site/templates/base/navbar.html +++ b/pydis_site/templates/base/navbar.html @@ -67,6 +67,9 @@ <a class="navbar-item" href="{% url "resources:index" %}"> Resources </a> + <a class="navbar-item" href="{% url "content:pages" %}"> + Content + </a> <a class="navbar-item" href="{% url "events:index" %}"> Events </a> diff --git a/pydis_site/templates/content/base.html b/pydis_site/templates/content/base.html index 4a19a275..2fd721a3 100644 --- a/pydis_site/templates/content/base.html +++ b/pydis_site/templates/content/base.html @@ -8,6 +8,10 @@ <meta property="og:description" content="{{ page_description }}" /> <link rel="stylesheet" href="{% static "css/content/page.css" %}"> <link rel="stylesheet" href="{% static "css/collapsibles.css" %}"> + <link rel="stylesheet" + href="//cdnjs.cloudflare.com/ajax/libs/highlight.js/10.7.1/styles/atom-one-dark.min.css"> + <script src="//cdnjs.cloudflare.com/ajax/libs/highlight.js/10.7.1/highlight.min.js"></script> + <script>hljs.highlightAll();</script> <script src="{% static "js/collapsibles.js" %}"></script> {% endblock %} @@ -35,7 +39,7 @@ <section class="section"> <div class="container"> <div class="content"> - <h1 class="title">{{ page_title }}</h1> + <h1 class="title">{% block title_element %}{{ page_title }}{% endblock %}</h1> {% block page_content %}{% endblock %} </div> </div> diff --git a/pydis_site/templates/content/listing.html b/pydis_site/templates/content/listing.html index ef0ef919..934b95f6 100644 --- a/pydis_site/templates/content/listing.html +++ b/pydis_site/templates/content/listing.html @@ -1,6 +1,22 @@ +{# Base navigation screen for resources #} {% extends 'content/base.html' %} +{% load static %} + +{# Show a GitHub button on tag pages #} +{% block title_element %} +{% if is_tag_listing %} + <link rel="stylesheet" href="{% static "css/content/color.css" %}"> + <div class="level"> + <div class="level-left">{{ block.super }}</div> + <div class="level-right"> + <a class="level-item fab fa-github" href="{{ tag_url }}"></a> + </div> + </div> +{% endif %} +{% endblock %} {% 
{% block page_content %}
+    {# Nested Categories #}
    {% for category, data in categories.items %}
        <div class="box" style="max-width: 800px;">
            <span class="icon is-size-4 is-medium">
@@ -13,15 +29,22 @@
            <p class="is-italic">{{ data.description }}</p>
        </div>
    {% endfor %}
+
+    {# Single Pages #}
    {% for page, data in pages.items %}
        <div class="box" style="max-width: 800px;">
            <span class="icon is-size-4 is-medium">
                <i class="{{ data.icon|default:"fab fa-python" }} is-size-3 is-black has-icon-padding" aria-hidden="true"></i>
            </span>
-           <a href="{% url "content:page_category" location=path|add:page %}">
+           <a href="{% url app_name location=path|add:page %}">
                <span class="is-size-4 has-text-weight-bold">{{ data.title }}</span>
            </a>
-           <p class="is-italic">{{ data.description }}</p>
+           {% if is_tag_listing %}
+               <div class="tag-container">{{ data.description | safe }}</div>
+           {% else %}
+               <p class="is-italic">{{ data.description }}</p>
+           {% endif %}
        </div>
    {% endfor %}
+    <script src="{% static 'js/content/listing.js' %}"></script>
{% endblock %}
diff --git a/pydis_site/templates/content/page.html b/pydis_site/templates/content/page.html
index 759286f6..679ecec6 100644
--- a/pydis_site/templates/content/page.html
+++ b/pydis_site/templates/content/page.html
@@ -1,13 +1,5 @@
{% extends 'content/base.html' %}

-{% block head %}
-    {{ block.super }}
-    <link rel="stylesheet"
-          href="//cdnjs.cloudflare.com/ajax/libs/highlight.js/10.7.1/styles/atom-one-dark.min.css">
-    <script src="//cdnjs.cloudflare.com/ajax/libs/highlight.js/10.7.1/highlight.min.js"></script>
-    <script>hljs.initHighlightingOnLoad();</script>
-{% endblock %}
-
{% block page_content %}
    {% if relevant_links or toc %}
        <div class="columns is-variable is-8">
diff --git a/pydis_site/templates/content/tag.html b/pydis_site/templates/content/tag.html
new file mode 100644 index 00000000..fa9e44f5
--- /dev/null
+++ b/pydis_site/templates/content/tag.html
@@ -0,0 +1,40 @@
+{% extends "content/page.html" %}
+{% load static %}
+
+{% block head %}
+    {{ block.super }}
+    <link rel="stylesheet" href="{% static 'css/content/color.css' %}"/>
+    <link rel="stylesheet" href="{% static 'css/content/tag.css' %}"/>
+    <title>{{ tag.name }}</title>
+{% endblock %}
+
+{% block title_element %}
+    <div class="level mb-2">
+        <div class="level-left">{{ block.super }}</div>
+        <div class="level-right">
+            <a class="level-item fab fa-github" href="{{ tag.url }}"></a>
+        </div>
+    </div>
+
+    <div class="dropdown is-size-6 is-hoverable">
+        <div class="dropdown-trigger">
+            <a aria-haspopup="menu" href="{{ tag.last_commit.url }}">
+                <span class="update-time">
+                    Last Updated: {{ tag.last_commit.date | date:"F j, Y g:i A e" }}
+                </span>
+            </a>
+        </div>
+        <div class="dropdown-menu">
+            <div class="dropdown-content">
+                <div class="dropdown-item">Last edited by:</div>
+                {% for user in tag.last_commit.format_authors %}
+                    <div class="dropdown-item">{{ user }}</div>
+                {% endfor %}
+                <div class="dropdown-divider"></div>
+                {% for line in tag.last_commit.lines %}
+                    <div class="dropdown-item">{{ line }}</div>
+                {% endfor %}
+            </div>
+        </div>
+    </div>
+{% endblock %}
diff --git a/pydis_site/templates/events/index.html b/pydis_site/templates/events/index.html
index db3e32f7..640682d0 100644
--- a/pydis_site/templates/events/index.html
+++ b/pydis_site/templates/events/index.html
@@ -10,8 +10,8 @@
    <div class="box">
        <h2 class="title is-4"><a href="{% url "events:page" path="code-jams" %}">Code Jams</a></h2>
        <div class="notification is-success">
-           The <b>2022 Summer Code Jam</b> is currently underway and you can still enter! <b>The qualifier is open until July 13</b>; check out the details <a href="{% url "events:page" path="code-jams/9" %}">here</a>.
-       </div>
+           <a href="{% url "events:page" path="code-jams/9" %}">The <b>2022 Summer Code Jam</b> is underway!</a>
+       </div>
        <p>Every year we hold a community-wide Summer Code Jam. For this event, members of our community are assigned to teams to collaborate and create something amazing using a technology we picked for them. One such technology that was picked for the Summer 2021 Code Jam was text user interfaces (TUIs), where teams could pick from a pre-approved list of frameworks.</p>
        <p>To help fuel the creative process, we provide a specific theme, like <strong>Think Inside the Box</strong> or <strong>Early Internet</strong>. At the end of the Code Jam, the projects are judged by Python Discord server staff members and guest judges from the larger Python community. The judges will consider creativity, code quality, teamwork, and adherence to the theme.</p>
        <p>If you want to read more about Code Jams, visit our <a href="{% url "events:page" path="code-jams" %}">Code Jam info page</a> or watch this video showcasing the best projects created during the <strong>Winter Code Jam 2020: Ancient Technology</strong>:</p>
diff --git a/pydis_site/templates/events/pages/code-jams/9/_index.html b/pydis_site/templates/events/pages/code-jams/9/_index.html
index 7c57b799..ca7c4f90 100644
--- a/pydis_site/templates/events/pages/code-jams/9/_index.html
+++ b/pydis_site/templates/events/pages/code-jams/9/_index.html
@@ -24,18 +24,28 @@
    <ul>
        <li><strike>Saturday, June 18 - Form to submit theme suggestions opens</strike></li>
        <li><strike>Wednesday, June 29 - The Qualifier is released</strike></li>
-       <li>Wednesday, July 6 - Voting for the theme opens</li>
-       <li>Wednesday, July 13 - The Qualifier closes</li>
-       <li>Thursday, July 21 - Code Jam Begins</li>
-       <li>Sunday, July 31 - Coding portion of the jam ends</li>
-       <li>Sunday, August 4 - Code Jam submissions are closed</li>
+       <li><strike>Wednesday, July 6 - Voting for the theme opens</strike></li>
+       <li><strike>Wednesday, July 13 - The Qualifier closes</strike></li>
+       <li><strike>Thursday, July 21 - Code Jam Begins</strike></li>
+       <li><strike>Sunday, July 31 - Coding portion of the jam ends</strike></li>
+       <li><strike>Sunday, August 4 - Code Jam submissions are closed</strike></li>
    </ul>
-   <h3 id="how-to-join"><a href="#how-to-join">How to Join</a></h3>
+
+   <h3 id="qualifier"><a href="#qualifier">The Qualifier</a></h3>
+   <p>
+       The qualifier is a coding challenge that you are required to complete before registering for the code jam.
+       This is meant as a basic assessment of your skills, to ensure you have enough Python knowledge to contribute effectively in a team environment.
+   </p>
+   <p class="has-text-centered"><a class="button is-link" href="https://github.com/python-discord/code-jam-qualifier-9/" target="_blank" rel="noopener">View the Qualifier</a></p>
    <p>
-       Before being able to join the code jam, you must complete a qualifier which tests your knowledge in Python.
-       The qualifier can be found <a href="https://github.com/python-discord/code-jam-qualifier-9/" title="Code Jam 9 qualifier repository" target="_blank" rel="noopener">on our GitHub</a>
-       and once completed you should submit your solution using the <a href="https://forms.pythondiscord.com/form/cj9-qualifier" target="_blank" rel="noopener">sign-up form</a>.
+       Please note the following requirements for the qualifier:
+       <ul>
+           <li>The qualifier must be completed using Python 3.10.</li>
+           <li>No external modules are allowed; only those available through the standard library may be used.</li>
+           <li>The Qualifier must be submitted through the Code Jam sign-up form.</li>
+       </ul>
    </p>
+
    <h3 id="technology"><a href="#technology">Technology</a></h3>
    <p>
        The chosen technology/tech stack for this year is <strong>WebSockets</strong>.
diff --git a/pydis_site/templates/events/pages/code-jams/9/frameworks.html b/pydis_site/templates/events/pages/code-jams/9/frameworks.html
index 15e280aa..b462c733 100644
--- a/pydis_site/templates/events/pages/code-jams/9/frameworks.html
+++ b/pydis_site/templates/events/pages/code-jams/9/frameworks.html
@@ -16,12 +16,49 @@
        Please work with your team to choose a library that everyone can and wants to develop with.
        If there is a library not listed below that you think should be here, you're welcome to discuss it with the Events Team over at <a href="https://discord.gg/HnGd3znxhJ">the server</a>.
    </p>
+
+   <div class="notification is-info is-light">
+       <p>Most of the frameworks below implement what is called the ASGI Specification.
+       This specification documents how the frameworks should interact with ASGI servers.
+       You are also allowed to <strong>work with the ASGI specification directly</strong> without a framework, if your team so chooses.
+       Refer to the <a href="https://asgi.readthedocs.io/en/latest/">specification online</a>.
+       </p>
+   </div>
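The note above explicitly allows teams to target the ASGI specification directly, so a concrete picture may help. Below is a minimal sketch of a framework-free WebSocket echo app, runnable under any ASGI server (e.g. `uvicorn module:app`); this is an editor's illustration, not part of the diff:

```python
# Illustrative only: a bare ASGI WebSocket echo application with no framework.
# An ASGI app is a coroutine taking (scope, receive, send); WebSocket events
# are dicts whose "type" key drives the protocol.
async def app(scope, receive, send):
    assert scope["type"] == "websocket"

    await receive()  # expect {"type": "websocket.connect"}
    await send({"type": "websocket.accept"})

    while True:
        event = await receive()
        if event["type"] == "websocket.disconnect":
            break
        # Echo text frames back; a real app would also handle "bytes" frames.
        await send({"type": "websocket.send", "text": event.get("text") or ""})
```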
+
+   <h3 id="approved-frameworks"><a href="#approved-frameworks">Approved Frameworks</a></h3>
+
+   <div class="card mb-4">
+       <div class="card-content">
+           <div class="content">
+               <p class="subtitle">FastAPI</p>
+               <p>FastAPI is a modern web framework with great WebSocket support; it is based on standard Python type hints, which give it excellent editor support.</p>
+           </div>
+       </div>
+       <div class="card-footer">
+           <a href="https://fastapi.tiangolo.com/advanced/websockets" class="card-footer-item"><i class="fas fa-book"></i> Documentation</a>
+           <a href="https://github.com/tiangolo/fastapi" class="card-footer-item"><i class="fab fa-github"></i> GitHub</a>
+       </div>
+   </div>
+
+   <div class="card mb-4">
+       <div class="card-content">
+           <div class="content">
+               <p class="subtitle">Starlette</p>
+               <p>Starlette is a lightweight ASGI framework/toolkit, which is ideal for building async web services in Python.
+               </p>
+           </div>
+       </div>
+       <div class="card-footer">
+           <a href="https://www.starlette.io/websockets" class="card-footer-item"><i class="fas fa-book"></i> Documentation</a>
+           <a href="https://github.com/encode/starlette" class="card-footer-item"><i class="fab fa-github"></i> GitHub</a>
+       </div>
+   </div>
+
    <div class="card mb-4">
        <div class="card-content">
            <div class="content">
                <p class="subtitle">websockets</p>
-               <p class="is-italic">websockets is a library for building WebSocket servers and clients in Python with a focus on correctness, simplicity, robustness, and performance.
-               Built on top of asyncio, Python’s standard asynchronous I/O framework, it provides an elegant coroutine-based API.
+               <p>websockets is a library for building both WebSocket clients and servers, with a focus on simplicity and performance.
                </p>
            </div>
        </div>
@@ -30,25 +67,26 @@
            <a href="https://github.com/aaugustin/websockets" class="card-footer-item"><i class="fab fa-github"></i> GitHub</a>
        </div>
    </div>
+
    <div class="card mb-4">
        <div class="card-content">
            <div class="content">
-               <p class="subtitle">Flask-SocketIO</p>
-               <p class="is-italic">Flask-SocketIO gives Flask applications access to low latency bi-directional communications between the clients and the server.
+               <p class="subtitle">aiohttp</p>
+               <p>aiohttp provides both client- and server-side WebSocket implementations while avoiding callback hell.
                </p>
            </div>
        </div>
        <div class="card-footer">
-           <a href="https://flask-socketio.readthedocs.io/en/latest" class="card-footer-item"><i class="fas fa-book"></i> Documentation</a>
-           <a href="https://github.com/miguelgrinberg/flask-socketio" class="card-footer-item"><i class="fab fa-github"></i> GitHub</a>
+           <a href="https://docs.aiohttp.org/en/stable/client_quickstart.html#websockets" class="card-footer-item"><i class="fas fa-book"></i> Documentation</a>
+           <a href="https://github.com/aio-libs/aiohttp" class="card-footer-item"><i class="fab fa-github"></i> GitHub</a>
        </div>
    </div>
+
    <div class="card mb-4">
        <div class="card-content">
            <div class="content">
                <p class="subtitle">Django Channels</p>
-               <p class="is-italic">Channels is a project that takes Django and extends its abilities beyond HTTP
-               to handle WebSockets, chat protocols, IoT protocols, and more.
-               It’s built on a Python specification called ASGI.
+               <p>Django Channels adds WebSocket support to Django and, like most of the frameworks here, is built on ASGI.
                </p>
            </div>
        </div>
@@ -57,45 +95,46 @@
            <a href="https://github.com/django/channels" class="card-footer-item"><i class="fab fa-github"></i> GitHub</a>
        </div>
    </div>
+
    <div class="card mb-4">
        <div class="card-content">
            <div class="content">
-               <p class="subtitle">wsproto</p>
-               <p class="is-italic">wsproto is a WebSocket protocol stack written to be as flexible as possible.
-               To that end it is written in pure Python and performs no I/O of its own.
-               Instead it relies on the user to provide a bridge between it and whichever I/O mechanism is in use, allowing it to be used in single-threaded, multi-threaded or event-driven code.
+               <p class="subtitle">Starlite</p>
+               <p>Starlite is a light and flexible ASGI API framework, using Starlette and Pydantic as foundations.
                </p>
            </div>
        </div>
        <div class="card-footer">
-           <a href="https://python-hyper.org/projects/wsproto/en/stable" class="card-footer-item"><i class="fas fa-book"></i> Documentation</a>
-           <a href="https://github.com/python-hyper/wsproto" class="card-footer-item"><i class="fab fa-github"></i> GitHub</a>
+           <a href="https://starlite-api.github.io/starlite" class="card-footer-item"><i class="fas fa-book"></i> Documentation</a>
+           <a href="https://github.com/starlite-api/starlite" class="card-footer-item"><i class="fab fa-github"></i> GitHub</a>
        </div>
    </div>
+
    <div class="card mb-4">
        <div class="card-content">
            <div class="content">
-               <p class="subtitle">Starlette</p>
-               <p class="is-italic">Starlette is a lightweight ASGI framework/toolkit, which is ideal for building async web services in Python.
+               <p class="subtitle">Sanic</p>
+               <p>Sanic is an ASGI-compliant web framework designed for speed and simplicity.
                </p>
            </div>
        </div>
        <div class="card-footer">
-           <a href="https://www.starlette.io/websockets" class="card-footer-item"><i class="fas fa-book"></i> Documentation</a>
-           <a href="https://github.com/encode/starlette" class="card-footer-item"><i class="fab fa-github"></i> GitHub</a>
+           <a href="https://sanic.dev/en/guide/advanced/websockets.html" class="card-footer-item"><i class="fas fa-book"></i> Documentation</a>
+           <a href="https://github.com/sanic-org/sanic" class="card-footer-item"><i class="fab fa-github"></i> GitHub</a>
        </div>
    </div>
-   <div class="card mb">
+
+   <div class="card mb-4">
        <div class="card-content">
            <div class="content">
-               <p class="subtitle">FastAPI</p>
-               <p class="is-italic">FastAPI is a modern, fast (high-performance), web framework for building APIs with Python 3.6+ based on standard Python type hints.
+               <p class="subtitle">wsproto</p>
+               <p>wsproto is a pure-Python WebSocket protocol stack written to be as flexible as possible by leaving the bridge to the I/O layer up to the user.
                </p>
            </div>
        </div>
        <div class="card-footer">
-           <a href="https://fastapi.tiangolo.com/advanced/websockets" class="card-footer-item"><i class="fas fa-book"></i> Documentation</a>
-           <a href="https://github.com/tiangolo/fastapi" class="card-footer-item"><i class="fab fa-github"></i> GitHub</a>
+           <a href="https://python-hyper.org/projects/wsproto/en/stable" class="card-footer-item"><i class="fas fa-book"></i> Documentation</a>
+           <a href="https://github.com/python-hyper/wsproto" class="card-footer-item"><i class="fab fa-github"></i> GitHub</a>
        </div>
    </div>
diff --git a/pydis_site/templates/events/pages/code-jams/9/rules.html b/pydis_site/templates/events/pages/code-jams/9/rules.html
index 72c0372e..9a28852f 100644
--- a/pydis_site/templates/events/pages/code-jams/9/rules.html
+++ b/pydis_site/templates/events/pages/code-jams/9/rules.html
@@ -11,7 +11,18 @@
{% block event_content %}
    <ol>
-       <li><p>Your solution must use one of the approved frameworks (a list will be released soon). It is not permitted to circumvent this rule by e.g. using the approved framework as a wrapper for another framework.</p></li>
+       <li><p>Your solution must use one of the approved frameworks. It is not permitted to circumvent this rule by e.g. using the approved framework as a wrapper for another framework.</p></li>
+       <li>
+           <p>
+               <strong>The core of your project must use WebSockets as its communication protocol.</strong>
+               You are allowed to use other methods of communication where WebSockets cannot be implemented; however, these should make up only an insignificant portion of your project.
+               For example, serving a website's static files cannot be done over WebSockets and does not constitute a significant portion of a project, so it is allowed.
+           </p>
+
+           <p>This rule does not apply to databases and files when used for <i>storage purposes</i>, even though storage may be a significant portion of your project. Working with subprocesses (through stdin/stdout or <code>multiprocessing.Pool()</code>/<code>concurrent.futures.ProcessPoolExecutor()</code>) is also exempt from this rule.</p>
+
+           <p>If you're unsure about your use of non-WebSocket communication, please reach out to the events team.</p>
+       </li>
+       <li><p>Your solution should be platform-agnostic. For example, if you use filepaths in your submission, use <code>pathlib</code> to create platform-agnostic Path objects instead of hardcoding the paths (a short <code>pathlib</code> sketch follows after the diffs below).</p></li>
        <li>
            <p>
diff --git a/pydis_site/templates/events/pages/code-jams/_index.html b/pydis_site/templates/events/pages/code-jams/_index.html
index 74efcfaa..c7975679 100644
--- a/pydis_site/templates/events/pages/code-jams/_index.html
+++ b/pydis_site/templates/events/pages/code-jams/_index.html
@@ -8,12 +8,6 @@
{% block title %}Code Jams{% endblock %}

{% block event_content %}
-   <div class="block">
-       <div class="notification is-success">
-           The <b>2022 Summer Code Jam</b> is currently underway and you can still enter! <b>The qualifier is open until July 13</b>; check out the details <a href="{% url "events:page" path="code-jams/9" %}">here</a>.
-       </div>
-   </div>
-
    <p>
        If you've been around the server for a while, or you just happened to join at the right time, you may have heard of something known as a Code Jam.
diff --git a/pydis_site/templates/events/sidebar/code-jams/previous-code-jams.html b/pydis_site/templates/events/sidebar/code-jams/previous-code-jams.html
index 21b2ccb4..28412c53 100644
--- a/pydis_site/templates/events/sidebar/code-jams/previous-code-jams.html
+++ b/pydis_site/templates/events/sidebar/code-jams/previous-code-jams.html
@@ -1,6 +1,7 @@
<div class="box">
    <p class="menu-label">Previous Code Jams</p>
    <ul class="menu-list">
+       <li><a class="has-text-link" href="{% url "events:page" path="code-jams/9" %}">Code Jam 9: It's Not A Bug, It's A Feature</a></li>
        <li><a class="has-text-link" href="{% url "events:page" path="code-jams/8" %}">Code Jam 8: Think Inside the Box</a></li>
        <li><a class="has-text-link" href="{% url "events:page" path="code-jams/7" %}">Code Jam 7: Early Internet</a></li>
        <li><a class="has-text-link" href="{% url "events:page" path="code-jams/6" %}">Code Jam 6: Ancient Technology</a></li>
diff --git a/pydis_site/templates/home/index.html b/pydis_site/templates/home/index.html
index cdbac830..cf6ff8cd 100644
--- a/pydis_site/templates/home/index.html
+++ b/pydis_site/templates/home/index.html
@@ -12,7 +12,7 @@
    <!-- Mobile-only Code Jam Banner -->
    <section id="mobile-notice" class="is-primary is-hidden-tablet">
        <a href="/events/code-jams/9/">
-           <img src="{% static "images/events/summer_code_jam_2022/front_page_banners/sign_up.png" %}" alt="Summer Code Jam 2022">
+           <img src="{% static "images/events/summer_code_jam_2022/site_banner.png" %}" alt="Summer Code Jam 2022">
        </a>
    </section>
@@ -48,7 +48,7 @@
    {# Code Jam Banner #}
    <div id="wave-hero-right" class="column is-half">
        <a href="/events/code-jams/9/">
-           <img src="{% static "images/events/summer_code_jam_2022/front_page_banners/sign_up.png" %}" alt="Summer Code Jam 2022">
+           <img src="{% static "images/events/summer_code_jam_2022/site_banner.png" %}" alt="Summer Code Jam 2022">
        </a>
    </div>
</div>
diff --git a/pydis_site/urls.py b/pydis_site/urls.py
index 6cd31f26..0f2f6aeb 100644
--- a/pydis_site/urls.py
+++ b/pydis_site/urls.py
@@ -12,7 +12,7 @@ NON_STATIC_PATTERNS = [
    path('pydis-api/', include('pydis_site.apps.api.urls', namespace='internal_api')),
    path('', include('django_prometheus.urls')),
-] if not settings.env("STATIC_BUILD") else []
+] if not settings.STATIC_BUILD else []

urlpatterns = (
@@ -29,7 +29,7 @@ urlpatterns = (
)

-if not settings.env("STATIC_BUILD"):
+if not settings.STATIC_BUILD:
    urlpatterns += (
        path('staff/', include('pydis_site.apps.staff.urls', namespace='staff')),
    )
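As referenced in the rules diff above, here is a minimal sketch of the platform-agnostic path handling that rule asks for; all file names are purely illustrative:

```python
# Illustrative only: build paths with pathlib instead of hardcoding separators.
from pathlib import Path

# Portable: pathlib inserts the correct separator on every OS.
assets_dir = Path("assets") / "images"
banner = assets_dir / "banner.png"

# Non-portable equivalent the rule warns against:
# banner = "assets\\images\\banner.png"

print(banner)  # assets/images/banner.png on POSIX, assets\images\banner.png on Windows
```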
diff --git a/pyproject.toml b/pyproject.toml
index b350836e..79f2ecc0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -6,41 +6,41 @@
authors = ["Python Discord <[email protected]>"]
license = "MIT"

[tool.poetry.dependencies]
-python = "3.9.*"
-django = "~=3.1.14"
-django-environ = "~=0.4.5"
-django-filter = "~=21.1"
-djangorestframework = "~=3.12.0"
-psycopg2-binary = "~=2.8.0"
-django-simple-bulma = "~=2.4"
-whitenoise = "~=5.0"
-requests = "~=2.21"
-pyyaml = "~=5.1"
-gunicorn = "~=20.0.4"
-sentry-sdk = "~=0.19"
-markdown = "~=3.3.4"
-python-frontmatter = "~=1.0"
-django-prometheus = "~=2.1"
-django-distill = "~=2.9.0"
+python = "3.10.*"
+django = "4.1.3"
+django-environ = "0.9.0"
+django-filter = "22.1"
+djangorestframework = "3.14.0"
+psycopg2-binary = "2.9.5"
+django-simple-bulma = "2.5.0"
+whitenoise = "6.2.0"
+httpx = "0.23.0"
+pyyaml = "6.0"
+gunicorn = "20.1.0"
+sentry-sdk = "1.11.0"
+markdown = "3.4.1"
+python-frontmatter = "1.0.0"
+django-prometheus = "2.2.0"
+django-distill = "3.0.1"
+PyJWT = {version = "2.6.0", extras = ["crypto"]}
+pymdown-extensions = "9.8"

[tool.poetry.dev-dependencies]
-coverage = "~=5.0"
-flake8 = "~=3.7"
-flake8-annotations = "~=2.0"
-flake8-bandit = "~=2.1"
-flake8-bugbear = "~=20.1"
-flake8-docstrings = "~=1.5"
-flake8-import-order = "~=0.18"
-flake8-string-format = "~=0.3"
-flake8-tidy-imports = "~=4.0"
-flake8-todo = "~=0.7"
-mccabe = "~=0.6.1"
-pep8-naming = "~=0.9"
-pre-commit = "~=2.1"
-pyfakefs = "~=4.5"
-coveralls = "~=2.1"
-taskipy = "~=1.7.0"
-python-dotenv = "~=0.17.1"
+coverage = "6.5.0"
+flake8 = "5.0.4"
+flake8-annotations = "2.9.1"
+flake8-bandit = "4.1.1"
+flake8-bugbear = "22.10.27"
+flake8-docstrings = "1.6.0"
+flake8-import-order = "0.18.1"
+flake8-tidy-imports = "4.8.0"
+flake8-string-format = "0.3.0"
+flake8-todo = "0.7"
+pep8-naming = "0.13.2"
+pre-commit = "2.20.0"
+pyfakefs = "5.0.0"
+taskipy = "1.10.3"
+python-dotenv = "0.21.0"

[build-system]
requires = ["poetry-core>=1.0.0"]
@@ -51,7 +51,7 @@
start = "python manage.py run --debug"
makemigrations = "python manage.py makemigrations"
django_shell = "python manage.py shell"
test = "coverage run manage.py test"
-coverage = "coverage run manage.py test --no-input; coverage report -m"
+coverage = "coverage run manage.py test --no-input && coverage report -m"
report = "coverage report -m"
lint = "pre-commit run --all-files"
precommit = "pre-commit install"
diff --git a/static-builds/README.md b/static-builds/README.md
index 9b86ed08..a3c7962b 100644
--- a/static-builds/README.md
+++ b/static-builds/README.md
@@ -27,16 +27,29 @@
Alternatively, you can use the [Dockerfile](/Dockerfile) and extract the build.
Both output their builds to a `build/` directory.

### Deploying To Netlify
-To deploy to netlify, link your site GitHub repository to a netlify site, and use the following settings:
+To deploy to Netlify, link your site's GitHub repository to a Netlify site, and use the settings below.
+The Netlify build script uses the site API to fetch and download the artifact, using a GitHub app that
+can access the repo. The app must have the `actions` and `artifacts` scopes enabled.
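The settings section below lists the variables the build script reads. As a sketch of the request contract those variables imply (inferred from `netlify_build.py` at the end of this diff, not from any documented API), the script polls a single endpoint until the artifact is ready:

```python
# Sketch inferred from static-builds/netlify_build.py below: while the workflow
# is still running, the endpoint answers 202 with a JSON {"error": ...} body;
# once it succeeds, it answers with a JSON {"url": ...} download link.
# "<commit-sha>" is a placeholder, not a real ref.
import httpx

resp = httpx.get(
    "https://pythondiscord.com/api/github/artifact/python-discord/site"
    "/<commit-sha>/Build%20%26%20Publish%20Static%20Preview/static-build",
    follow_redirects=True,
)
if resp.status_code == 202:
    ...  # still pending: wait, then poll the same URL again
else:
    resp.raise_for_status()
    artifact_zip = httpx.get(resp.json()["url"], follow_redirects=True).content
```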
+### Netlify Settings
+
Build Command:
-`python -m pip install httpx==0.19.0 && python static-builds/netlify_build.py`
+`python -m pip install httpx==0.23.0 && python static-builds/netlify_build.py`

Publish Directory:
`build`

-Environment Variables:
-- PYTHON_VERSION: 3.8
+**Environment Variables**
+
+| Name | Value | Description |
+|----------------|--------------------------------|-------------------------------------------------------------------------------------------|
+| PYTHON_VERSION | 3.8 | The Python version. Supported options are defined by Netlify [here][netlify build image]. |
+| API_URL | https://pythondiscord.com/ | The link to the API, which will be used to fetch the build artifacts. |
+| ACTION_NAME | Build & Publish Static Preview | The name of the workflow which will be used to find the artifact. |
+| ARTIFACT_NAME | static-build | The name of the artifact to download. |
+
+
+[netlify build image]: https://github.com/netlify/build-image/tree/focal
+
Note that at this time, if you are deploying to Netlify yourself, you won't have access to the
@@ -45,6 +58,3 @@
You can either update the pack to one which will work on your domain, or you'll

> Warning: If you are modifying the [build script](./netlify_build.py), make sure it is compatible with Python 3.8.
-
-Note: The build script uses [nightly.link](https://github.com/oprypin/nightly.link)
-to fetch the artifact with no authentication.
diff --git a/static-builds/netlify_build.py b/static-builds/netlify_build.py
index 4e1e6106..36520c28 100644
--- a/static-builds/netlify_build.py
+++ b/static-builds/netlify_build.py
@@ -4,106 +4,60 @@
# This script performs all the actions required to build and deploy our project on netlify
# It depends on the following packages, which are set in the netlify UI:
-# httpx == 0.19.0
+# httpx == 0.23.0

+import json
import os
import time
-import typing
import zipfile
from pathlib import Path
from urllib import parse

import httpx

-API_URL = "https://api.github.com"
-NIGHTLY_URL = "https://nightly.link"
-OWNER, REPO = parse.urlparse(os.getenv("REPOSITORY_URL")).path.lstrip("/").split("/")[0:2]

+def raise_response(response: httpx.Response) -> None:
+    """Raise an exception from a response if necessary."""
+    if response.status_code // 100 != 2:
+        try:
+            print(response.json())
+        except json.JSONDecodeError:
+            pass

-def get_build_artifact() -> typing.Tuple[int, str]:
-    """
-    Search for a build artifact, and return the result.
+        response.raise_for_status()

-    The return is a tuple of the check suite ID, and the URL to the artifacts.
-    """
-    print("Fetching build URL.")
-    if os.getenv("PULL_REQUEST").lower() == "true":
-        print(f"Fetching data for PR #{os.getenv('REVIEW_ID')}")
-
-        pull_url = f"{API_URL}/repos/{OWNER}/{REPO}/pulls/{os.getenv('REVIEW_ID')}"
-        pull_request = httpx.get(pull_url)
-        pull_request.raise_for_status()
-
-        commit_sha = pull_request.json()["head"]["sha"]
-
-        workflows_params = parse.urlencode({
-            "event": "pull_request",
-            "per_page": 100
-        })
-
-    else:
-        commit_sha = os.getenv("COMMIT_REF")
-
-        workflows_params = parse.urlencode({
-            "event": "push",
-            "per_page": 100
-        })
-
-    print(f"Fetching action data for commit {commit_sha}")
-
-    workflows = httpx.get(f"{API_URL}/repos/{OWNER}/{REPO}/actions/runs?{workflows_params}")
-    workflows.raise_for_status()
-
-    for run in workflows.json()["workflow_runs"]:
-        if run["name"] == "Build & Publish Static Preview" and commit_sha == run["head_sha"]:
-            print(f"Found action for this commit: {run['id']}\n{run['html_url']}")
-            break
-    else:
-        raise Exception("Could not find the workflow run for this event.")
-
-    polls = 0
-    while polls <= 20:
-        if run["status"] != "completed":
-            print("Action isn't ready, sleeping for 10 seconds.")
-            polls += 1
-            time.sleep(10)
-
-        elif run["conclusion"] != "success":
-            print("Aborting build due to a failure in a previous CI step.")
-            exit(0)
-
-        else:
-            print(f"Found artifact URL:\n{run['artifacts_url']}")
-            return run["check_suite_id"], run["artifacts_url"]
-
-        _run = httpx.get(run["url"])
-        _run.raise_for_status()
-        run = _run.json()
-
-    raise Exception("Polled for the artifact workflow, but it was not ready in time.")
-
-
-def download_artifact(suite_id: int, url: str) -> None:
-    """Download a build artifact from `url`, and unzip the content."""
-    print("Fetching artifact data.")
-
-    artifacts = httpx.get(url)
-    artifacts.raise_for_status()
-    artifacts = artifacts.json()
-
-    if artifacts["total_count"] == "0":
-        raise Exception(f"No artifacts were found for this build, aborting.\n{url}")
-
-    for artifact in artifacts["artifacts"]:
-        if artifact["name"] == "static-build":
-            print("Found artifact with build.")
-            break
-    else:
-        raise Exception("Could not find an artifact with the expected name.")
-
-    artifact_url = f"{NIGHTLY_URL}/{OWNER}/{REPO}/suites/{suite_id}/artifacts/{artifact['id']}"
-    zipped_content = httpx.get(artifact_url)
+if __name__ == "__main__":
+    client = httpx.Client(
+        follow_redirects=True,
+        timeout=3 * 60,
+    )
+
+    owner, repo = parse.urlparse(os.getenv("REPOSITORY_URL")).path.lstrip("/").split("/")[0:2]
+
+    download_url = "/".join([
+        os.getenv("API_URL").rstrip("/"),
+        "api/github/artifact",
+        owner,
+        repo,
+        os.getenv("COMMIT_REF"),
+        parse.quote(os.getenv("ACTION_NAME")),
+        os.getenv("ARTIFACT_NAME"),
+    ])
+    print(f"Fetching download URL from {download_url}")
+    response = client.get(download_url)
+    raise_response(response)
+
+    # The workflow is still pending; retry in a bit
+    while response.status_code == 202:
+        print(f"{response.json()['error']}. Retrying in 10 seconds.")
+        time.sleep(10)
+        response = client.get(download_url)
+
+    raise_response(response)
+    url = response.json()["url"]
+    print(f"Downloading build from {url}")
+    zipped_content = client.get(url)
    zipped_content.raise_for_status()

    zip_file = Path("temp.zip")
@@ -115,8 +69,3 @@
    zip_file.unlink(missing_ok=True)

    print("Wrote artifact content to target directory.")
-
-
-if __name__ == "__main__":
-    print("Build started")
-    download_artifact(*get_build_artifact())
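Finally, a hedged sketch of how the rewritten script might be exercised locally, assuming the same environment variables the README table above documents; every value below is illustrative:

```python
# Illustrative dry run only: provide the variables netlify_build.py expects
# (normally set by the Netlify build environment), then invoke the script.
import os
import subprocess

os.environ.update({
    "REPOSITORY_URL": "https://github.com/python-discord/site",
    "COMMIT_REF": "<commit-sha>",  # placeholder: a commit whose workflow finished
    "API_URL": "https://pythondiscord.com/",
    "ACTION_NAME": "Build & Publish Static Preview",
    "ARTIFACT_NAME": "static-build",
})

# The script downloads temp.zip and unpacks the artifact, so run it
# from a scratch directory.
subprocess.run(["python", "static-builds/netlify_build.py"], check=True)
```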