From 0404e0040a0a6fc4f4520bfdc14b800390800382 Mon Sep 17 00:00:00 2001
From: Hassan Abouelela
Date: Tue, 12 Jul 2022 09:52:06 +0400
Subject: Update Netlify Build Script

Update the netlify build script to use the
artifacts API from the site.

Signed-off-by: Hassan Abouelela
---
 static-builds/netlify_build.py | 125 +++++++++-------------------------
 1 file changed, 28 insertions(+), 97 deletions(-)

(limited to 'static-builds/netlify_build.py')

diff --git a/static-builds/netlify_build.py b/static-builds/netlify_build.py
index 4e1e6106..13cd0279 100644
--- a/static-builds/netlify_build.py
+++ b/static-builds/netlify_build.py
@@ -4,106 +4,42 @@
 # This script performs all the actions required to build and deploy our project on netlify
 # It depends on the following packages, which are set in the netlify UI:
-# httpx == 0.19.0
+# httpx == 0.23.0
 
+import json
 import os
-import time
-import typing
 import zipfile
 from pathlib import Path
 from urllib import parse
 
 import httpx
 
-API_URL = "https://api.github.com"
-NIGHTLY_URL = "https://nightly.link"
-OWNER, REPO = parse.urlparse(os.getenv("REPOSITORY_URL")).path.lstrip("/").split("/")[0:2]
-
-
-def get_build_artifact() -> typing.Tuple[int, str]:
-    """
-    Search for a build artifact, and return the result.
-
-    The return is a tuple of the check suite ID, and the URL to the artifacts.
-    """
-    print("Fetching build URL.")
-
-    if os.getenv("PULL_REQUEST").lower() == "true":
-        print(f"Fetching data for PR #{os.getenv('REVIEW_ID')}")
-
-        pull_url = f"{API_URL}/repos/{OWNER}/{REPO}/pulls/{os.getenv('REVIEW_ID')}"
-        pull_request = httpx.get(pull_url)
-        pull_request.raise_for_status()
-
-        commit_sha = pull_request.json()["head"]["sha"]
-
-        workflows_params = parse.urlencode({
-            "event": "pull_request",
-            "per_page": 100
-        })
-
-    else:
-        commit_sha = os.getenv("COMMIT_REF")
-
-        workflows_params = parse.urlencode({
-            "event": "push",
-            "per_page": 100
-        })
-
-    print(f"Fetching action data for commit {commit_sha}")
-
-    workflows = httpx.get(f"{API_URL}/repos/{OWNER}/{REPO}/actions/runs?{workflows_params}")
-    workflows.raise_for_status()
-
-    for run in workflows.json()["workflow_runs"]:
-        if run["name"] == "Build & Publish Static Preview" and commit_sha == run["head_sha"]:
-            print(f"Found action for this commit: {run['id']}\n{run['html_url']}")
-            break
-    else:
-        raise Exception("Could not find the workflow run for this event.")
-
-    polls = 0
-    while polls <= 20:
-        if run["status"] != "completed":
-            print("Action isn't ready, sleeping for 10 seconds.")
-            polls += 1
-            time.sleep(10)
-
-        elif run["conclusion"] != "success":
-            print("Aborting build due to a failure in a previous CI step.")
-            exit(0)
-
-        else:
-            print(f"Found artifact URL:\n{run['artifacts_url']}")
-            return run["check_suite_id"], run["artifacts_url"]
-
-        _run = httpx.get(run["url"])
-        _run.raise_for_status()
-        run = _run.json()
-
-    raise Exception("Polled for the artifact workflow, but it was not ready in time.")
-
-
-def download_artifact(suite_id: int, url: str) -> None:
-    """Download a build artifact from `url`, and unzip the content."""
-    print("Fetching artifact data.")
-
-    artifacts = httpx.get(url)
-    artifacts.raise_for_status()
-    artifacts = artifacts.json()
-
-    if artifacts["total_count"] == "0":
-        raise Exception(f"No artifacts were found for this build, aborting.\n{url}")
-
-    for artifact in artifacts["artifacts"]:
-        if artifact["name"] == "static-build":
-            print("Found artifact with build.")
-            break
-    else:
-        raise Exception("Could not find an artifact with the expected name.")
-
-    artifact_url = f"{NIGHTLY_URL}/{OWNER}/{REPO}/suites/{suite_id}/artifacts/{artifact['id']}"
-    zipped_content = httpx.get(artifact_url)
+if __name__ == "__main__":
+    owner, repo = parse.urlparse(os.getenv("REPOSITORY_URL")).path.lstrip("/").split("/")[0:2]
+
+    download_url = "/".join([
+        os.getenv("API_URL").rstrip("/"),
+        "api/github/artifact",
+        owner,
+        repo,
+        os.getenv("COMMIT_REF"),
+        parse.quote(os.getenv("ACTION_NAME")),
+        os.getenv("ARTIFACT_NAME"),
+    ])
+    print(f"Fetching download URL from {download_url}")
+    response = httpx.get(download_url, follow_redirects=True)
+
+    if response.status_code != 200:
+        try:
+            print(response.json())
+        except json.JSONDecodeError:
+            pass
+
+        response.raise_for_status()
+
+    url = response.json()["url"]
+    print(f"Downloading build from {url}")
+    zipped_content = httpx.get(url, follow_redirects=True)
     zipped_content.raise_for_status()
 
     zip_file = Path("temp.zip")
@@ -115,8 +51,3 @@ def download_artifact(suite_id: int, url: str) -> None:
     zip_file.unlink(missing_ok=True)
 
     print("Wrote artifact content to target directory.")
-
-
-if __name__ == "__main__":
-    print("Build started")
-    download_artifact(*get_build_artifact())
--
cgit v1.2.3


From 26a3c19b53883015e8ba87db2a668c3eece2ce20 Mon Sep 17 00:00:00 2001
From: Hassan Abouelela
Date: Tue, 12 Jul 2022 14:45:22 +0400
Subject: Make Awaiting Workflow Run A User Responsibility

Moves the responsibility of re-requesting a workflow run from the
API to the user. This makes the requests much shorter-lived, and
allows the client to control how they want to handle sleeping and
retrying. This also has the benefit of removing the only real piece
of async code, so now the view is completely sync once again.

Signed-off-by: Hassan Abouelela
---
 pydis_site/apps/api/github_utils.py            |  79 +++++------
 pydis_site/apps/api/tests/test_github_utils.py | 174 ++++++++++++-------------
 static-builds/netlify_build.py                 |   9 +-
 3 files changed, 131 insertions(+), 131 deletions(-)

(limited to 'static-builds/netlify_build.py')

diff --git a/pydis_site/apps/api/github_utils.py b/pydis_site/apps/api/github_utils.py
index 70dccdff..707b36e5 100644
--- a/pydis_site/apps/api/github_utils.py
+++ b/pydis_site/apps/api/github_utils.py
@@ -1,17 +1,17 @@
 """Utilities for working with the GitHub API."""
-import asyncio
 import datetime
 import math
 
 import httpx
 import jwt
-from asgiref.sync import async_to_sync
 
 from pydis_site import settings
 
-MAX_POLLS = 20
-"""The maximum number of attempts at fetching a workflow run."""
+MAX_RUN_TIME = datetime.timedelta(minutes=3)
+"""The maximum time allowed before an action is declared timed out."""
+ISO_FORMAT_STRING = "%Y-%m-%dT%H:%M:%SZ"
+"""The datetime string format GitHub uses."""
 
 
 class ArtifactProcessingError(Exception):
@@ -44,6 +44,12 @@ class RunTimeoutError(ArtifactProcessingError):
     status = 408
 
 
+class RunPendingError(ArtifactProcessingError):
+    """The requested workflow run is still pending, try again later."""
+
+    status = 202
+
+
 def generate_token() -> str:
     """
     Generate a JWT token to access the GitHub API.
@@ -66,7 +72,7 @@ def generate_token() -> str:
     )
 
 
-async def authorize(owner: str, repo: str) -> httpx.AsyncClient:
+def authorize(owner: str, repo: str) -> httpx.Client:
     """
     Get an access token for the requested repository.
 
@@ -75,7 +81,7 @@ async def authorize(owner: str, repo: str) -> httpx.AsyncClient:
     - POST to get a token to access the given app
     - GET installation/repositories and check if the requested one is part of those
     """
-    client = httpx.AsyncClient(
+    client = httpx.Client(
         base_url=settings.GITHUB_API,
         headers={"Authorization": f"bearer {generate_token()}"},
         timeout=settings.TIMEOUT_PERIOD,
@@ -83,7 +89,7 @@ async def authorize(owner: str, repo: str) -> httpx.AsyncClient:
 
     try:
         # Get a list of app installations we have access to
-        apps = await client.get("app/installations")
+        apps = client.get("app/installations")
         apps.raise_for_status()
 
         for app in apps.json():
@@ -92,11 +98,11 @@ async def authorize(owner: str, repo: str) -> httpx.AsyncClient:
             continue
 
         # Get the repositories of the specified owner
-        app_token = await client.post(app["access_tokens_url"])
+        app_token = client.post(app["access_tokens_url"])
         app_token.raise_for_status()
 
         client.headers["Authorization"] = f"bearer {app_token.json()['token']}"
-        repos = await client.get("installation/repositories")
+        repos = client.get("installation/repositories")
         repos.raise_for_status()
 
         # Search for the requested repository
@@ -111,44 +117,39 @@ async def authorize(owner: str, repo: str) -> httpx.AsyncClient:
 
     except BaseException as e:
         # Close the client if we encountered an unexpected exception
-        await client.aclose()
+        client.close()
         raise e
 
 
-async def wait_for_run(client: httpx.AsyncClient, run: dict) -> str:
-    """Wait for the provided `run` to finish, and return the URL to its artifacts."""
-    polls = 0
-    while polls <= MAX_POLLS:
-        if run["status"] != "completed":
-            # The action is still processing, wait a bit longer
-            polls += 1
-            await asyncio.sleep(10)
-
-        elif run["conclusion"] != "success":
-            # The action failed, or did not run
-            raise ActionFailedError(f"The requested workflow ended with: {run['conclusion']}")
+def check_run_status(run: dict) -> str:
+    """Check if the provided run has been completed, otherwise raise an exception."""
+    created_at = datetime.datetime.strptime(run["created_at"], ISO_FORMAT_STRING)
+    run_time = datetime.datetime.now() - created_at
+
+    if run["status"] != "completed":
+        if run_time <= MAX_RUN_TIME:
+            raise RunPendingError(
+                f"The requested run is still pending. It was created "
+                f"{run_time.seconds // 60}:{run_time.seconds % 60 :>02} minutes ago."
+            )
         else:
-            # The desired action was found, and it ended successfully
-            return run["artifacts_url"]
+            raise RunTimeoutError("The requested workflow was not ready in time.")
 
-        run = await client.get(run["url"])
-        run.raise_for_status()
-        run = run.json()
+    if run["conclusion"] != "success":
+        # The action failed, or did not run
+        raise ActionFailedError(f"The requested workflow ended with: {run['conclusion']}")
 
-    raise RunTimeoutError("The requested workflow was not ready in time.")
+    # The requested action is ready
+    return run["artifacts_url"]
 
 
-@async_to_sync
-async def get_artifact(
-    owner: str, repo: str, sha: str, action_name: str, artifact_name: str
-) -> str:
+def get_artifact(owner: str, repo: str, sha: str, action_name: str, artifact_name: str) -> str:
     """Get a download URL for a build artifact."""
-    client = await authorize(owner, repo)
+    client = authorize(owner, repo)
 
     try:
         # Get the workflow runs for this repository
-        runs = await client.get(f"/repos/{owner}/{repo}/actions/runs", params={"per_page": 100})
+        runs = client.get(f"/repos/{owner}/{repo}/actions/runs", params={"per_page": 100})
         runs.raise_for_status()
         runs = runs.json()
 
@@ -161,16 +162,16 @@ async def get_artifact(
             "Could not find a run matching the provided settings in the previous hundred runs."
         )
 
-    # Wait for the workflow to finish
-    url = await wait_for_run(client, run)
+    # Check the workflow status
+    url = check_run_status(run)
 
     # Filter the artifacts, and return the download URL
-    artifacts = await client.get(url)
+    artifacts = client.get(url)
     artifacts.raise_for_status()
 
     for artifact in artifacts.json()["artifacts"]:
         if artifact["name"] == artifact_name:
-            data = await client.get(artifact["archive_download_url"])
+            data = client.get(artifact["archive_download_url"])
 
             if data.status_code == 302:
                 return str(data.next_request.url)
 
@@ -180,4 +181,4 @@ async def get_artifact(
     raise NotFoundError("Could not find an artifact matching the provided name.")
 
     finally:
-        await client.aclose()
+        client.close()
diff --git a/pydis_site/apps/api/tests/test_github_utils.py b/pydis_site/apps/api/tests/test_github_utils.py
index dc17d609..78f2979d 100644
--- a/pydis_site/apps/api/tests/test_github_utils.py
+++ b/pydis_site/apps/api/tests/test_github_utils.py
@@ -1,6 +1,4 @@
-import asyncio
 import datetime
-import random
 import unittest
 from unittest import mock
 
@@ -14,16 +12,6 @@
 from django.urls import reverse
 
 from .. import github_utils
 
 
-def patched_raise_for_status(response: httpx.Response):
-    """Fake implementation of raise_for_status which does not need a request to be set."""
-    if response.status_code // 100 != 2:  # pragma: no cover
-        raise httpx.HTTPStatusError(
-            f"Non 2xx response code: {response.status_code}",
-            request=getattr(response, "_request", httpx.Request("GET", "")),
-            response=response
-        )
-
-
 class GeneralUtilityTests(unittest.TestCase):
     """Test the utility methods which do not fit in another class."""
 
@@ -51,53 +39,50 @@ class GeneralUtilityTests(unittest.TestCase):
         self.assertLess(decoded["exp"], (datetime.datetime.now() + delta).timestamp())
 
 
-@mock.patch("httpx.AsyncClient", autospec=True)
-@mock.patch("asyncio.sleep", new=mock.AsyncMock(return_value=asyncio.Future))
-@mock.patch("httpx.Response.raise_for_status", new=patched_raise_for_status)
-class WaitForTests(unittest.IsolatedAsyncioTestCase):
-    """Tests the wait_for utility."""
-
-    async def test_wait_for_successful_run(self, client_mock: mock.Mock):
-        """Test that the wait_for method handles successfully runs."""
-        final_url = "some_url" + str(random.randint(0, 10))
-
-        client_mock.get.side_effect = responses = [
-            httpx.Response(200, json={"status": "queued", "url": ""}),
-            httpx.Response(200, json={"status": "pending", "url": ""}),
-            httpx.Response(200, json={
-                "status": "completed",
-                "conclusion": "success",
-                "url": "",
-                "artifacts_url": final_url
-            })
-        ]
+class WaitForTests(unittest.TestCase):
+    """Tests the check_run_status utility."""
 
-        result = await github_utils.wait_for_run(client_mock, responses[0].json())
-        self.assertEqual(final_url, result)
+    def test_completed_run(self):
+        final_url = "some_url_string_1234"
 
-    async def test_wait_for_failed_run(self, client_mock: mock.Mock):
-        """Test that the wait_for method handles failed runs."""
-        client_mock.get.return_value = httpx.Response(200, json={
+        result = github_utils.check_run_status({
             "status": "completed",
-            "conclusion": "failed",
+            "conclusion": "success",
+            "created_at": datetime.datetime.now().strftime(github_utils.ISO_FORMAT_STRING),
+            "artifacts_url": final_url,
         })
+        self.assertEqual(final_url, result)
 
-        with self.assertRaises(github_utils.ActionFailedError):
-            await github_utils.wait_for_run(client_mock, {"status": "pending", "url": ""})
+    def test_pending_run(self):
+        """Test that a pending run raises the proper exception."""
+        with self.assertRaises(github_utils.RunPendingError):
+            github_utils.check_run_status({
+                "status": "pending",
+                "created_at": datetime.datetime.now().strftime(github_utils.ISO_FORMAT_STRING),
+            })
 
-    async def test_wait_for_timeout(self, client_mock: mock.Mock):
-        """Test that the wait_for method quits after a few attempts."""
-        client_mock.get.side_effect = responses = [
-            httpx.Response(200, json={"status": "pending", "url": ""})
-        ] * (github_utils.MAX_POLLS + 5)
+    def test_timeout_error(self):
+        """Test that a timeout is declared after a certain duration."""
+        # Set the creation time to well before the MAX_RUN_TIME
+        # to guarantee the right conclusion
+        created = (
+            datetime.datetime.now() - github_utils.MAX_RUN_TIME - datetime.timedelta(minutes=10)
+        ).strftime(github_utils.ISO_FORMAT_STRING)
 
         with self.assertRaises(github_utils.RunTimeoutError):
-            await github_utils.wait_for_run(client_mock, responses[0].json())
+            github_utils.check_run_status({"status": "pending", "created_at": created})
+
+    def test_failed_run(self):
+        """Test that a failed run raises the proper exception."""
+        with self.assertRaises(github_utils.ActionFailedError):
+            github_utils.check_run_status({
+                "status": "completed",
+                "conclusion": "failed",
+                "created_at": datetime.datetime.now().strftime(github_utils.ISO_FORMAT_STRING),
+            })
 
 
-async def get_response_authorize(
-    _: httpx.AsyncClient, request: httpx.Request, **__
-) -> httpx.Response:
+def get_response_authorize(_: httpx.Client, request: httpx.Request, **__) -> httpx.Response:
     """
     Helper method for the authorize tests.
 
@@ -141,76 +126,83 @@ async def get_response_authorize(
     return httpx.Response(500, request=request)  # pragma: no cover
 
 
-@mock.patch("httpx.AsyncClient.send", new=get_response_authorize)
+@mock.patch("httpx.Client.send", new=get_response_authorize)
 @mock.patch.object(github_utils, "generate_token", new=mock.Mock(return_value="JWT initial token"))
-class AuthorizeTests(unittest.IsolatedAsyncioTestCase):
+class AuthorizeTests(unittest.TestCase):
     """Test the authorize utility."""
 
-    async def test_invalid_apps_auth(self):
+    def test_invalid_apps_auth(self):
         """Test that an exception is raised if authorization was attempted with an invalid token."""
         with mock.patch.object(github_utils, "generate_token", return_value="Invalid token"):
             with self.assertRaises(httpx.HTTPStatusError) as error:
-                await github_utils.authorize("VALID_OWNER", "VALID_REPO")
+                github_utils.authorize("VALID_OWNER", "VALID_REPO")
 
         exception: httpx.HTTPStatusError = error.exception
         self.assertEqual(401, exception.response.status_code)
         self.assertEqual("auth app/installations", exception.response.json()["error"])
 
-    async def test_missing_repo(self):
+    def test_missing_repo(self):
         """Test that an exception is raised when the selected owner or repo are not available."""
         with self.assertRaises(github_utils.NotFoundError):
-            await github_utils.authorize("INVALID_OWNER", "VALID_REPO")
+            github_utils.authorize("INVALID_OWNER", "VALID_REPO")
         with self.assertRaises(github_utils.NotFoundError):
-            await github_utils.authorize("VALID_OWNER", "INVALID_REPO")
+            github_utils.authorize("VALID_OWNER", "INVALID_REPO")
 
-    async def test_valid_authorization(self):
+    def test_valid_authorization(self):
         """Test that an accessible repository can be accessed."""
-        client = await github_utils.authorize("VALID_OWNER", "VALID_REPO")
+        client = github_utils.authorize("VALID_OWNER", "VALID_REPO")
         self.assertEqual("bearer app access token", client.headers.get("Authorization"))
 
 
-async def get_response_get_artifact(request: httpx.Request, **_) -> httpx.Response:
-    """
-    Helper method for the get_artifact tests.
+class ArtifactFetcherTests(unittest.TestCase):
+    """Test the get_artifact utility."""
 
-    Requests are intercepted before being sent out, and the appropriate responses are returned.
-    """
-    path = request.url.path
+    @staticmethod
+    def get_response_get_artifact(request: httpx.Request, **_) -> httpx.Response:
+        """
+        Helper method for the get_artifact tests.
 
-    if "force_error" in path:
-        return httpx.Response(404, request=request)
+        Requests are intercepted before being sent out, and the appropriate responses are returned.
+ """ + path = request.url.path - if request.method == "GET": - if path == "/repos/owner/repo/actions/runs": - return httpx.Response(200, request=request, json={"workflow_runs": [{ - "name": "action_name", - "head_sha": "action_sha" - }]}) - elif path == "/artifact_url": - return httpx.Response(200, request=request, json={"artifacts": [{ - "name": "artifact_name", - "archive_download_url": "artifact_download_url" - }]}) - elif path == "/artifact_download_url": - response = httpx.Response(302, request=request) - response.next_request = httpx.Request("GET", httpx.URL("https://final_download.url")) - return response - - # Reaching this point means something has gone wrong - return httpx.Response(500, request=request) # pragma: no cover + if "force_error" in path: + return httpx.Response(404, request=request) + if request.method == "GET": + if path == "/repos/owner/repo/actions/runs": + return httpx.Response( + 200, request=request, json={"workflow_runs": [{ + "name": "action_name", + "head_sha": "action_sha" + }]} + ) + elif path == "/artifact_url": + return httpx.Response( + 200, request=request, json={"artifacts": [{ + "name": "artifact_name", + "archive_download_url": "artifact_download_url" + }]} + ) + elif path == "/artifact_download_url": + response = httpx.Response(302, request=request) + response.next_request = httpx.Request( + "GET", + httpx.URL("https://final_download.url") + ) + return response -class ArtifactFetcherTests(unittest.IsolatedAsyncioTestCase): - """Test the get_artifact utility.""" + # Reaching this point means something has gone wrong + return httpx.Response(500, request=request) # pragma: no cover def setUp(self) -> None: self.call_args = ["owner", "repo", "action_sha", "action_name", "artifact_name"] - self.client = httpx.AsyncClient(base_url="https://example.com") + self.client = httpx.Client(base_url="https://example.com") self.patchers = [ - mock.patch.object(self.client, "send", new=get_response_get_artifact), + mock.patch.object(self.client, "send", new=self.get_response_get_artifact), mock.patch.object(github_utils, "authorize", return_value=self.client), - mock.patch.object(github_utils, "wait_for_run", return_value="artifact_url"), + mock.patch.object(github_utils, "check_run_status", return_value="artifact_url"), ] for patcher in self.patchers: @@ -266,7 +258,7 @@ class GitHubArtifactViewTests(django.test.TestCase): } cls.url = reverse("api:github-artifacts", kwargs=cls.kwargs) - async def test_successful(self, artifact_mock: mock.Mock): + def test_successful(self, artifact_mock: mock.Mock): """Test a proper response is returned with proper input.""" artifact_mock.return_value = "final download url" result = self.client.get(self.url) @@ -274,7 +266,7 @@ class GitHubArtifactViewTests(django.test.TestCase): self.assertIsInstance(result, rest_framework.response.Response) self.assertEqual({"url": artifact_mock.return_value}, result.data) - async def test_failed_fetch(self, artifact_mock: mock.Mock): + def test_failed_fetch(self, artifact_mock: mock.Mock): """Test that a proper error is returned when the request fails.""" artifact_mock.side_effect = github_utils.NotFoundError("Test error message") result = self.client.get(self.url) diff --git a/static-builds/netlify_build.py b/static-builds/netlify_build.py index 13cd0279..a473bd91 100644 --- a/static-builds/netlify_build.py +++ b/static-builds/netlify_build.py @@ -8,6 +8,7 @@ import json import os +import time import zipfile from pathlib import Path from urllib import parse @@ -29,7 +30,7 @@ if __name__ == 
"__main__": print(f"Fetching download URL from {download_url}") response = httpx.get(download_url, follow_redirects=True) - if response.status_code != 200: + if response.status_code // 100 != 2: try: print(response.json()) except json.JSONDecodeError: @@ -37,6 +38,12 @@ if __name__ == "__main__": response.raise_for_status() + # The workflow is still pending, retry in a bit + while response.status_code == 202: + print(f"{response.json()['error']}. Retrying in 10 seconds.") + time.sleep(10) + response = httpx.get(download_url, follow_redirects=True) + url = response.json()["url"] print(f"Downloading build from {url}") zipped_content = httpx.get(url, follow_redirects=True) -- cgit v1.2.3 From ff9633d189ba37aa8c80cd8d168cc259f5ba21b3 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Fri, 12 Aug 2022 17:46:27 +0200 Subject: Raise Static Build After Waiting Adds a missing raise in the static build for situations where everything was successful, but the build time passed the deadline specified by the site API. Signed-off-by: Hassan Abouelela --- static-builds/netlify_build.py | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) (limited to 'static-builds/netlify_build.py') diff --git a/static-builds/netlify_build.py b/static-builds/netlify_build.py index a473bd91..7143a19a 100644 --- a/static-builds/netlify_build.py +++ b/static-builds/netlify_build.py @@ -15,6 +15,18 @@ from urllib import parse import httpx + +def raise_response(response: httpx.Response) -> None: + """Raise an exception from a response if necessary.""" + if response.status_code // 100 != 2: + try: + print(response.json()) + except json.JSONDecodeError: + pass + + response.raise_for_status() + + if __name__ == "__main__": owner, repo = parse.urlparse(os.getenv("REPOSITORY_URL")).path.lstrip("/").split("/")[0:2] @@ -29,14 +41,7 @@ if __name__ == "__main__": ]) print(f"Fetching download URL from {download_url}") response = httpx.get(download_url, follow_redirects=True) - - if response.status_code // 100 != 2: - try: - print(response.json()) - except json.JSONDecodeError: - pass - - response.raise_for_status() + raise_response(response) # The workflow is still pending, retry in a bit while response.status_code == 202: @@ -44,6 +49,7 @@ if __name__ == "__main__": time.sleep(10) response = httpx.get(download_url, follow_redirects=True) + raise_response(response) url = response.json()["url"] print(f"Downloading build from {url}") zipped_content = httpx.get(url, follow_redirects=True) -- cgit v1.2.3 From 3335cc6c710f028e0e2303a7fa9dae5bd4220d6e Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Fri, 12 Aug 2022 22:56:51 +0200 Subject: Increase Timeout For Artifact Download Sets the timeout of downloads to 3 minutes (from a default of 5 seconds) since some artifacts can be quite large. 
Signed-off-by: Hassan Abouelela
---
 static-builds/netlify_build.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

(limited to 'static-builds/netlify_build.py')

diff --git a/static-builds/netlify_build.py b/static-builds/netlify_build.py
index 7143a19a..f3a53f72 100644
--- a/static-builds/netlify_build.py
+++ b/static-builds/netlify_build.py
@@ -52,7 +52,7 @@ if __name__ == "__main__":
     raise_response(response)
 
     url = response.json()["url"]
     print(f"Downloading build from {url}")
-    zipped_content = httpx.get(url, follow_redirects=True)
+    zipped_content = httpx.get(url, follow_redirects=True, timeout=3 * 60)
     zipped_content.raise_for_status()
 
     zip_file = Path("temp.zip")
--
cgit v1.2.3


From 1d47dc4cc435131934883a0ccb2a359d2ac177d8 Mon Sep 17 00:00:00 2001
From: Chris Lovering
Date: Wed, 17 Aug 2022 00:05:33 +0100
Subject: Use a httpx.Client for netlify static builds

This allows for setting the config in one place and reusing it.
This also solves an issue where the requests to fetch the download URL
would time out after the (default) 5 seconds.
---
 static-builds/netlify_build.py | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)

(limited to 'static-builds/netlify_build.py')

diff --git a/static-builds/netlify_build.py b/static-builds/netlify_build.py
index f3a53f72..36520c28 100644
--- a/static-builds/netlify_build.py
+++ b/static-builds/netlify_build.py
@@ -28,6 +28,11 @@ def raise_response(response: httpx.Response) -> None:
 
 
 if __name__ == "__main__":
+    client = httpx.Client(
+        follow_redirects=True,
+        timeout=3 * 60,
+    )
+
     owner, repo = parse.urlparse(os.getenv("REPOSITORY_URL")).path.lstrip("/").split("/")[0:2]
 
     download_url = "/".join([
@@ -40,19 +45,19 @@ if __name__ == "__main__":
         os.getenv("ARTIFACT_NAME"),
     ])
     print(f"Fetching download URL from {download_url}")
-    response = httpx.get(download_url, follow_redirects=True)
+    response = client.get(download_url)
     raise_response(response)
 
     # The workflow is still pending, retry in a bit
     while response.status_code == 202:
         print(f"{response.json()['error']}. Retrying in 10 seconds.")
         time.sleep(10)
-        response = httpx.get(download_url, follow_redirects=True)
+        response = client.get(download_url)
         raise_response(response)
 
     url = response.json()["url"]
     print(f"Downloading build from {url}")
-    zipped_content = httpx.get(url, follow_redirects=True, timeout=3 * 60)
+    zipped_content = client.get(url)
     zipped_content.raise_for_status()
 
     zip_file = Path("temp.zip")
--
cgit v1.2.3


From 07855963a1eedd80c410ab2dd51fcae1200c9cee Mon Sep 17 00:00:00 2001
From: Johannes Christ
Date: Wed, 10 May 2023 12:30:57 +0200
Subject: Switch to ruff for linting

---
 .github/workflows/lint-test.yaml                   |  17 +-
 .pre-commit-config.yaml                            |  12 +-
 poetry.lock                                        | 700 ++++++---------------
 pydis_site/apps/api/admin.py                       |  29 +-
 pydis_site/apps/api/github_utils.py                |  13 +-
 pydis_site/apps/api/models/bot/message.py          |   7 +-
 pydis_site/apps/api/models/bot/metricity.py        |  19 +-
 pydis_site/apps/api/tests/base.py                  |   3 +-
 pydis_site/apps/api/tests/test_bumped_threads.py   |   2 +-
 pydis_site/apps/api/tests/test_deleted_messages.py |  11 +-
 .../apps/api/tests/test_documentation_links.py     |   2 +-
 pydis_site/apps/api/tests/test_filters.py          |  12 +-
 pydis_site/apps/api/tests/test_github_utils.py     |  41 +-
 pydis_site/apps/api/tests/test_infractions.py      | 110 ++--
 pydis_site/apps/api/tests/test_models.py           |   4 +-
 pydis_site/apps/api/tests/test_nominations.py      |   2 +-
 .../apps/api/tests/test_off_topic_channel_names.py |   2 +-
 .../apps/api/tests/test_offensive_message.py       |  10 +-
 pydis_site/apps/api/tests/test_reminders.py        |   4 +-
 pydis_site/apps/api/tests/test_roles.py            |   2 +-
 pydis_site/apps/api/tests/test_rules.py            |   2 +-
 pydis_site/apps/api/tests/test_users.py            |  29 +-
 pydis_site/apps/api/tests/test_validators.py       |   4 +-
 pydis_site/apps/api/views.py                       |   4 +-
 pydis_site/apps/api/viewsets/bot/filters.py        |   4 +-
 .../api/viewsets/bot/off_topic_channel_name.py     |   7 +-
 pydis_site/apps/api/viewsets/bot/user.py           |   5 +-
 pydis_site/apps/content/models/tag.py              |   3 +-
 .../guides/pydis-guides/contributing/linting.md    |   2 +-
 pydis_site/apps/content/tests/helpers.py           |  20 +-
 pydis_site/apps/content/urls.py                    |   2 +-
 pydis_site/apps/content/utils.py                   |  16 +-
 pydis_site/apps/content/views/tags.py              |   3 +-
 pydis_site/apps/events/urls.py                     |   2 +-
 pydis_site/apps/events/views/page.py               |   3 +-
 .../apps/home/tests/test_repodata_helpers.py       |   2 +-
 pydis_site/apps/home/views.py                      |  13 +-
 pydis_site/apps/redirect/urls.py                   |  25 +-
 pydis_site/apps/redirect/views.py                  |   3 +-
 pydis_site/apps/resources/views.py                 |   7 +-
 .../staff/templatetags/deletedmessage_filters.py   |   3 +-
 .../staff/tests/test_deletedmessage_filters.py     |   2 +-
 pyproject.toml                                     |  39 +-
 static-builds/netlify_build.py                     |   6 +-
 44 files changed, 445 insertions(+), 763 deletions(-)

(limited to 'static-builds/netlify_build.py')

diff --git a/.github/workflows/lint-test.yaml b/.github/workflows/lint-test.yaml
index 1c744470..a12fb650 100644
--- a/.github/workflows/lint-test.yaml
+++ b/.github/workflows/lint-test.yaml
@@ -24,20 +24,11 @@ jobs:
       # We will not run `flake8` here, as we will use a separate flake8
       # action.
       - name: Run pre-commit hooks
-        run: SKIP=flake8 pre-commit run --all-files
+        run: SKIP=ruff pre-commit run --all-files
 
-      # Run flake8 and have it format the linting errors in the format of
-      # the GitHub Workflow command to register error annotations. This
-      # means that our flake8 output is automatically added as an error
-      # annotation to both the run result and in the "Files" tab of a
-      # pull request.
- # - # Format used: - # ::error file={filename},line={line},col={col}::{message} - - name: Run flake8 - run: "flake8 \ - --format='::error file=%(path)s,line=%(row)d,col=%(col)d::\ - [flake8] %(code)s: %(text)s'" + # Run `ruff` using github formatting to enable automatic inline annotations. + - name: Run ruff + run: "ruff check --format=github ." - name: Migrations and run tests with coverage.py run: | diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b2a03559..700dd0a0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -10,15 +10,11 @@ repos: args: [--fix=lf] - id: trailing-whitespace args: [--markdown-linebreak-ext=md] - - repo: https://github.com/pre-commit/pygrep-hooks - rev: v1.5.1 - hooks: - - id: python-check-blanket-noqa - repo: local hooks: - - id: flake8 - name: Flake8 - description: This hook runs flake8 within our project's environment. - entry: poetry run flake8 + - id: ruff + name: ruff + description: This hook runs ruff within our project's environment. + entry: poetry run ruff language: system types: [python] diff --git a/poetry.lock b/poetry.lock index d6e09cbe..4d9a87de 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. [[package]] name = "anyio" @@ -36,58 +36,16 @@ files = [ [package.extras] tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] -[[package]] -name = "attrs" -version = "22.2.0" -description = "Classes Without Boilerplate" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, - {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, -] - -[package.extras] -cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] -tests = ["attrs[tests-no-zope]", "zope.interface"] -tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] - -[[package]] -name = "bandit" -version = "1.7.4" -description = "Security oriented static analyser for python code." -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"}, - {file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"}, -] - -[package.dependencies] -colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} -GitPython = ">=1.0.1" -PyYAML = ">=5.3.1" -stevedore = ">=1.20.0" - -[package.extras] -test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml"] -toml = ["toml"] -yaml = ["PyYAML"] - [[package]] name = "certifi" -version = "2022.12.7" +version = "2023.5.7" description = "Python package for providing Mozilla's CA Bundle." 
category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, - {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, + {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, + {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, ] [[package]] @@ -181,100 +139,87 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.0.1" +version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false -python-versions = "*" -files = [ - {file = "charset-normalizer-3.0.1.tar.gz", hash = "sha256:ebea339af930f8ca5d7a699b921106c6e29c617fe9606fa7baa043c1cdae326f"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88600c72ef7587fe1708fd242b385b6ed4b8904976d5da0893e31df8b3480cb6"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c75ffc45f25324e68ab238cb4b5c0a38cd1c3d7f1fb1f72b5541de469e2247db"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db72b07027db150f468fbada4d85b3b2729a3db39178abf5c543b784c1254539"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62595ab75873d50d57323a91dd03e6966eb79c41fa834b7a1661ed043b2d404d"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:772b87914ff1152b92a197ef4ea40efe27a378606c39446ded52c8f80f79702e"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70990b9c51340e4044cfc394a81f614f3f90d41397104d226f21e66de668730d"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:292d5e8ba896bbfd6334b096e34bffb56161c81408d6d036a7dfa6929cff8783"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2edb64ee7bf1ed524a1da60cdcd2e1f6e2b4f66ef7c077680739f1641f62f555"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:31a9ddf4718d10ae04d9b18801bd776693487cbb57d74cc3458a7673f6f34639"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:44ba614de5361b3e5278e1241fda3dc1838deed864b50a10d7ce92983797fa76"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:12db3b2c533c23ab812c2b25934f60383361f8a376ae272665f8e48b88e8e1c6"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c512accbd6ff0270939b9ac214b84fb5ada5f0409c44298361b2f5e13f9aed9e"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-win32.whl", hash = "sha256:502218f52498a36d6bf5ea77081844017bf7982cdbe521ad85e64cabee1b608b"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:601f36512f9e28f029d9481bdaf8e89e5148ac5d89cffd3b05cd533eeb423b59"}, - {file = 
"charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0298eafff88c99982a4cf66ba2efa1128e4ddaca0b05eec4c456bbc7db691d8d"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8d0fc946c784ff7f7c3742310cc8a57c5c6dc31631269876a88b809dbeff3d3"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:87701167f2a5c930b403e9756fab1d31d4d4da52856143b609e30a1ce7160f3c"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e76c0f23218b8f46c4d87018ca2e441535aed3632ca134b10239dfb6dadd6b"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c0a590235ccd933d9892c627dec5bc7511ce6ad6c1011fdf5b11363022746c1"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c7fe7afa480e3e82eed58e0ca89f751cd14d767638e2550c77a92a9e749c317"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79909e27e8e4fcc9db4addea88aa63f6423ebb171db091fb4373e3312cb6d603"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ac7b6a045b814cf0c47f3623d21ebd88b3e8cf216a14790b455ea7ff0135d18"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:72966d1b297c741541ca8cf1223ff262a6febe52481af742036a0b296e35fa5a"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5995f0164fa7df59db4746112fec3f49c461dd6b31b841873443bdb077c13cfc"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4a8fcf28c05c1f6d7e177a9a46a1c52798bfe2ad80681d275b10dcf317deaf0b"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:761e8904c07ad053d285670f36dd94e1b6ab7f16ce62b9805c475b7aa1cffde6"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-win32.whl", hash = "sha256:71140351489970dfe5e60fc621ada3e0f41104a5eddaca47a7acb3c1b851d6d3"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ab77acb98eba3fd2a85cd160851816bfce6871d944d885febf012713f06659c"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:84c3990934bae40ea69a82034912ffe5a62c60bbf6ec5bc9691419641d7d5c9a"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74292fc76c905c0ef095fe11e188a32ebd03bc38f3f3e9bcb85e4e6db177b7ea"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c95a03c79bbe30eec3ec2b7f076074f4281526724c8685a42872974ef4d36b72"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c39b0e3eac288fedc2b43055cfc2ca7a60362d0e5e87a637beac5d801ef478"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df2c707231459e8a4028eabcd3cfc827befd635b3ef72eada84ab13b52e1574d"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:93ad6d87ac18e2a90b0fe89df7c65263b9a99a0eb98f0a3d2e079f12a0735837"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:59e5686dd847347e55dffcc191a96622f016bc0ad89105e24c14e0d6305acbc6"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:cd6056167405314a4dc3c173943f11249fa0f1b204f8b51ed4bde1a9cd1834dc"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:083c8d17153ecb403e5e1eb76a7ef4babfc2c48d58899c98fcaa04833e7a2f9a"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:f5057856d21e7586765171eac8b9fc3f7d44ef39425f85dbcccb13b3ebea806c"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:7eb33a30d75562222b64f569c642ff3dc6689e09adda43a082208397f016c39a"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-win32.whl", hash = "sha256:95dea361dd73757c6f1c0a1480ac499952c16ac83f7f5f4f84f0658a01b8ef41"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:eaa379fcd227ca235d04152ca6704c7cb55564116f8bc52545ff357628e10602"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3e45867f1f2ab0711d60c6c71746ac53537f1684baa699f4f668d4c6f6ce8e14"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cadaeaba78750d58d3cc6ac4d1fd867da6fc73c88156b7a3212a3cd4819d679d"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:911d8a40b2bef5b8bbae2e36a0b103f142ac53557ab421dc16ac4aafee6f53dc"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:503e65837c71b875ecdd733877d852adbc465bd82c768a067badd953bf1bc5a3"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a60332922359f920193b1d4826953c507a877b523b2395ad7bc716ddd386d866"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16a8663d6e281208d78806dbe14ee9903715361cf81f6d4309944e4d1e59ac5b"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a16418ecf1329f71df119e8a65f3aa68004a3f9383821edcb20f0702934d8087"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d9153257a3f70d5f69edf2325357251ed20f772b12e593f3b3377b5f78e7ef8"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:02a51034802cbf38db3f89c66fb5d2ec57e6fe7ef2f4a44d070a593c3688667b"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:2e396d70bc4ef5325b72b593a72c8979999aa52fb8bcf03f701c1b03e1166918"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:11b53acf2411c3b09e6af37e4b9005cba376c872503c8f28218c7243582df45d"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:0bf2dae5291758b6f84cf923bfaa285632816007db0330002fa1de38bfcb7154"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2c03cc56021a4bd59be889c2b9257dae13bf55041a3372d3295416f86b295fb5"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:024e606be3ed92216e2b6952ed859d86b4cfa52cd5bc5f050e7dc28f9b43ec42"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:4b0d02d7102dd0f997580b51edc4cebcf2ab6397a7edf89f1c73b586c614272c"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:358a7c4cb8ba9b46c453b1dd8d9e431452d5249072e4f56cfda3149f6ab1405e"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81d6741ab457d14fdedc215516665050f3822d3e56508921cc7239f8c8e66a58"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b8af03d2e37866d023ad0ddea594edefc31e827fee64f8de5611a1dbc373174"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9cf4e8ad252f7c38dd1f676b46514f92dc0ebeb0db5552f5f403509705e24753"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e696f0dd336161fca9adbb846875d40752e6eba585843c768935ba5c9960722b"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c22d3fe05ce11d3671297dc8973267daa0f938b93ec716e12e0f6dee81591dc1"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:109487860ef6a328f3eec66f2bf78b0b72400280d8f8ea05f69c51644ba6521a"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:37f8febc8ec50c14f3ec9637505f28e58d4f66752207ea177c1d67df25da5aed"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f97e83fa6c25693c7a35de154681fcc257c1c41b38beb0304b9c4d2d9e164479"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a152f5f33d64a6be73f1d30c9cc82dfc73cec6477ec268e7c6e4c7d23c2d2291"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:39049da0ffb96c8cbb65cbf5c5f3ca3168990adf3551bd1dee10c48fce8ae820"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-win32.whl", hash = "sha256:4457ea6774b5611f4bed5eaa5df55f70abde42364d498c5134b7ef4c6958e20e"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:e62164b50f84e20601c1ff8eb55620d2ad25fb81b59e3cd776a1902527a788af"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8eade758719add78ec36dc13201483f8e9b5d940329285edcd5f70c0a9edbd7f"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8499ca8f4502af841f68135133d8258f7b32a53a1d594aa98cc52013fff55678"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3fc1c4a2ffd64890aebdb3f97e1278b0cc72579a08ca4de8cd2c04799a3a22be"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d3ffdaafe92a5dc603cb9bd5111aaa36dfa187c8285c543be562e61b755f6b"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2ac1b08635a8cd4e0cbeaf6f5e922085908d48eb05d44c5ae9eabab148512ca"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6f45710b4459401609ebebdbcfb34515da4fc2aa886f95107f556ac69a9147e"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ae1de54a77dc0d6d5fcf623290af4266412a7c4be0b1ff7444394f03f5c54e3"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3b590df687e3c5ee0deef9fc8c547d81986d9a1b56073d82de008744452d6541"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab5de034a886f616a5668aa5d098af2b5385ed70142090e2a31bcbd0af0fdb3d"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9cb3032517f1627cc012dbc80a8ec976ae76d93ea2b5feaa9d2a5b8882597579"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:608862a7bf6957f2333fc54ab4399e405baad0163dc9f8d99cb236816db169d4"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0f438ae3532723fb6ead77e7c604be7c8374094ef4ee2c5e03a3a17f1fca256c"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:356541bf4381fa35856dafa6a965916e54bed415ad8a24ee6de6e37deccf2786"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-win32.whl", hash = "sha256:39cf9ed17fe3b1bc81f33c9ceb6ce67683ee7526e65fde1447c772afc54a1bb8"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:0a11e971ed097d24c534c037d298ad32c6ce81a45736d31e0ff0ad37ab437d59"}, - {file = "charset_normalizer-3.0.1-py3-none-any.whl", hash = "sha256:7e189e2e1d3ed2f4aebabd2d5b0f931e883676e51c7624826e0a4e5fe8a0bf24"}, +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, + {file = 
"charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, + 
{file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, + {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, ] [[package]] @@ -355,35 +300,31 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "39.0.1" +version = "40.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "cryptography-39.0.1-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965"}, - {file = "cryptography-39.0.1-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106"}, - {file = "cryptography-39.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c"}, - {file = "cryptography-39.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4"}, - {file = "cryptography-39.0.1-cp36-abi3-win32.whl", hash = "sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8"}, - {file = "cryptography-39.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac"}, - {file = "cryptography-39.0.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad"}, - {file = "cryptography-39.0.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c5caeb8188c24888c90b5108a441c106f7faa4c4c075a2bcae438c6e8ca73cef"}, - {file = "cryptography-39.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4789d1e3e257965e960232345002262ede4d094d1a19f4d3b52e48d4d8f3b885"}, - {file = "cryptography-39.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a"}, - {file = "cryptography-39.0.1.tar.gz", hash = "sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695"}, + {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:8f79b5ff5ad9d3218afb1e7e20ea74da5f76943ee5edb7f76e56ec5161ec782b"}, + {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:05dc219433b14046c476f6f09d7636b92a1c3e5808b9a6536adf4932b3b2c440"}, + {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4df2af28d7bedc84fe45bd49bc35d710aede676e2a4cb7fc6d103a2adc8afe4d"}, + {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dcca15d3a19a66e63662dc8d30f8036b07be851a8680eda92d079868f106288"}, + {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:a04386fb7bc85fab9cd51b6308633a3c271e3d0d3eae917eebab2fac6219b6d2"}, + {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:adc0d980fd2760c9e5de537c28935cc32b9353baaf28e0814df417619c6c8c3b"}, + {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d5a1bd0e9e2031465761dfa920c16b0065ad77321d8a8c1f5ee331021fda65e9"}, + {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a95f4802d49faa6a674242e25bfeea6fc2acd915b5e5e29ac90a32b1139cae1c"}, + {file = "cryptography-40.0.2-cp36-abi3-win32.whl", hash = "sha256:aecbb1592b0188e030cb01f82d12556cf72e218280f621deed7d806afd2113f9"}, + {file = "cryptography-40.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:b12794f01d4cacfbd3177b9042198f3af1c856eedd0a98f10f141385c809a14b"}, + {file = "cryptography-40.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:142bae539ef28a1c76794cca7f49729e7c54423f615cfd9b0b1fa90ebe53244b"}, + {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:956ba8701b4ffe91ba59665ed170a2ebbdc6fc0e40de5f6059195d9f2b33ca0e"}, + {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4f01c9863da784558165f5d4d916093737a75203a5c5286fde60e503e4276c7a"}, + {file = "cryptography-40.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3daf9b114213f8ba460b829a02896789751626a2a4e7a43a28ee77c04b5e4958"}, + {file = "cryptography-40.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48f388d0d153350f378c7f7b41497a54ff1513c816bcbbcafe5b829e59b9ce5b"}, + {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c0764e72b36a3dc065c155e5b22f93df465da9c39af65516fe04ed3c68c92636"}, + {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:cbaba590180cba88cb99a5f76f90808a624f18b169b90a4abb40c1fd8c19420e"}, + {file = "cryptography-40.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7a38250f433cd41df7fcb763caa3ee9362777fdb4dc642b9a349721d2bf47404"}, + {file = "cryptography-40.0.2.tar.gz", hash = "sha256:c33c0d32b8594fa647d2e01dbccc303478e16fdd7cf98652d5b3ed11aa5e5c99"}, ] [package.dependencies] @@ -392,10 +333,10 @@ cffi = ">=1.12" [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -pep8test = ["black", "check-manifest", "mypy", "ruff", "types-pytz", "types-requests"] +pep8test = ["black", "check-manifest", "mypy", "ruff"] sdist = ["setuptools-rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] -test = 
["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist", "pytz"] +test = ["iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist"] test-randomorder = ["pytest-randomly"] tox = ["tox"] @@ -536,193 +477,19 @@ pytz = "*" [[package]] name = "filelock" -version = "3.9.0" +version = "3.12.0" description = "A platform independent file lock." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "filelock-3.9.0-py3-none-any.whl", hash = "sha256:f58d535af89bb9ad5cd4df046f741f8553a418c01a7856bf0d173bbc9f6bd16d"}, - {file = "filelock-3.9.0.tar.gz", hash = "sha256:7b319f24340b51f55a2bf7a12ac0755a9b03e718311dac567a0f4f7fabd2f5de"}, + {file = "filelock-3.12.0-py3-none-any.whl", hash = "sha256:ad98852315c2ab702aeb628412cbf7e95b7ce8c3bf9565670b4eaecf1db370a9"}, + {file = "filelock-3.12.0.tar.gz", hash = "sha256:fc03ae43288c013d2ea83c8597001b1129db351aad9c57fe2409327916b8e718"}, ] [package.extras] -docs = ["furo (>=2022.12.7)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] -testing = ["covdefaults (>=2.2.2)", "coverage (>=7.0.1)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] - -[[package]] -name = "flake8" -version = "6.0.0" -description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" -optional = false -python-versions = ">=3.8.1" -files = [ - {file = "flake8-6.0.0-py2.py3-none-any.whl", hash = "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7"}, - {file = "flake8-6.0.0.tar.gz", hash = "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"}, -] - -[package.dependencies] -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.10.0,<2.11.0" -pyflakes = ">=3.0.0,<3.1.0" - -[[package]] -name = "flake8-annotations" -version = "3.0.1" -description = "Flake8 Type Annotation Checks" -category = "dev" -optional = false -python-versions = ">=3.8.1" -files = [ - {file = "flake8_annotations-3.0.1-py3-none-any.whl", hash = "sha256:af78e3216ad800d7e144745ece6df706c81b3255290cbf870e54879d495e8ade"}, - {file = "flake8_annotations-3.0.1.tar.gz", hash = "sha256:ff37375e71e3b83f2a5a04d443c41e2c407de557a884f3300a7fa32f3c41cb0a"}, -] - -[package.dependencies] -attrs = ">=21.4" -flake8 = ">=5.0" - -[[package]] -name = "flake8-bandit" -version = "4.1.1" -description = "Automated security testing with bandit and flake8." -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "flake8_bandit-4.1.1-py3-none-any.whl", hash = "sha256:4c8a53eb48f23d4ef1e59293657181a3c989d0077c9952717e98a0eace43e06d"}, - {file = "flake8_bandit-4.1.1.tar.gz", hash = "sha256:068e09287189cbfd7f986e92605adea2067630b75380c6b5733dab7d87f9a84e"}, -] - -[package.dependencies] -bandit = ">=1.7.3" -flake8 = ">=5.0.0" - -[[package]] -name = "flake8-bugbear" -version = "23.5.9" -description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." 
-category = "dev" -optional = false -python-versions = ">=3.8.1" -files = [ - {file = "flake8-bugbear-23.5.9.tar.gz", hash = "sha256:695c84a5d7da54eb35d79a7354dbaf3aaba80de32250608868aa1c85534b2a86"}, - {file = "flake8_bugbear-23.5.9-py3-none-any.whl", hash = "sha256:631fa927fbc799e8ca636b849dd7dfc304812287137b6ecb3277821f028bee40"}, -] - -[package.dependencies] -attrs = ">=19.2.0" -flake8 = ">=6.0.0" - -[package.extras] -dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "pytest", "tox"] - -[[package]] -name = "flake8-docstrings" -version = "1.7.0" -description = "Extension for flake8 which uses pydocstyle to check docstrings" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75"}, - {file = "flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af"}, -] - -[package.dependencies] -flake8 = ">=3" -pydocstyle = ">=2.1" - -[[package]] -name = "flake8-import-order" -version = "0.18.2" -description = "Flake8 and pylama plugin that checks the ordering of import statements." -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "flake8-import-order-0.18.2.tar.gz", hash = "sha256:e23941f892da3e0c09d711babbb0c73bc735242e9b216b726616758a920d900e"}, - {file = "flake8_import_order-0.18.2-py2.py3-none-any.whl", hash = "sha256:82ed59f1083b629b030ee9d3928d9e06b6213eb196fe745b3a7d4af2168130df"}, -] - -[package.dependencies] -pycodestyle = "*" -setuptools = "*" - -[[package]] -name = "flake8-string-format" -version = "0.3.0" -description = "string format checker, plugin for flake8" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "flake8-string-format-0.3.0.tar.gz", hash = "sha256:65f3da786a1461ef77fca3780b314edb2853c377f2e35069723348c8917deaa2"}, - {file = "flake8_string_format-0.3.0-py2.py3-none-any.whl", hash = "sha256:812ff431f10576a74c89be4e85b8e075a705be39bc40c4b4278b5b13e2afa9af"}, -] - -[package.dependencies] -flake8 = "*" - -[[package]] -name = "flake8-tidy-imports" -version = "4.8.0" -description = "A flake8 plugin that helps you write tidier imports." 
-category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "flake8-tidy-imports-4.8.0.tar.gz", hash = "sha256:df44f9c841b5dfb3a7a1f0da8546b319d772c2a816a1afefcce43e167a593d83"}, - {file = "flake8_tidy_imports-4.8.0-py3-none-any.whl", hash = "sha256:25bd9799358edefa0e010ce2c587b093c3aba942e96aeaa99b6d0500ae1bf09c"}, -] - -[package.dependencies] -flake8 = ">=3.8.0" - -[[package]] -name = "flake8-todo" -version = "0.7" -description = "TODO notes checker, plugin for flake8" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "flake8-todo-0.7.tar.gz", hash = "sha256:6e4c5491ff838c06fe5a771b0e95ee15fc005ca57196011011280fc834a85915"}, -] - -[package.dependencies] -pycodestyle = ">=2.0.0,<3.0.0" - -[[package]] -name = "gitdb" -version = "4.0.10" -description = "Git Object Database" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, - {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, -] - -[package.dependencies] -smmap = ">=3.0.1,<6" - -[[package]] -name = "gitpython" -version = "3.1.31" -description = "GitPython is a Python library used to interact with Git repositories" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "GitPython-3.1.31-py3-none-any.whl", hash = "sha256:f04893614f6aa713a60cbbe1e6a97403ef633103cdd0ef5eb6efe0deb98dbe8d"}, - {file = "GitPython-3.1.31.tar.gz", hash = "sha256:8ce3bcf69adfdf7c7d503e78fd3b1c492af782d58893b650adb2ac8912ddd573"}, -] - -[package.dependencies] -gitdb = ">=4.0.1,<5" +docs = ["furo (>=2023.3.27)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] [[package]] name = "gunicorn" @@ -759,14 +526,14 @@ files = [ [[package]] name = "httpcore" -version = "0.16.3" +version = "0.17.0" description = "A minimal low-level HTTP client." 
category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "httpcore-0.16.3-py3-none-any.whl", hash = "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0"}, - {file = "httpcore-0.16.3.tar.gz", hash = "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb"}, + {file = "httpcore-0.17.0-py3-none-any.whl", hash = "sha256:0fdfea45e94f0c9fd96eab9286077f9ff788dd186635ae61b312693e4d943599"}, + {file = "httpcore-0.17.0.tar.gz", hash = "sha256:cc045a3241afbf60ce056202301b4d8b6af08845e3294055eb26b09913ef903c"}, ] [package.dependencies] @@ -805,14 +572,14 @@ socks = ["socksio (>=1.0.0,<2.0.0)"] [[package]] name = "identify" -version = "2.5.18" +version = "2.5.24" description = "File identification library for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "identify-2.5.18-py2.py3-none-any.whl", hash = "sha256:93aac7ecf2f6abf879b8f29a8002d3c6de7086b8c28d88e1ad15045a15ab63f9"}, - {file = "identify-2.5.18.tar.gz", hash = "sha256:89e144fa560cc4cffb6ef2ab5e9fb18ed9f9b3cb054384bab4b95c12f6c309fe"}, + {file = "identify-2.5.24-py2.py3-none-any.whl", hash = "sha256:986dbfb38b1140e763e413e6feb44cd731faf72d1909543178aa79b0e258265d"}, + {file = "identify-2.5.24.tar.gz", hash = "sha256:0aac67d5b4812498056d28a9a512a483f5085cc28640b02b258a59dac34301d4"}, ] [package.extras] @@ -860,18 +627,6 @@ files = [ [package.extras] testing = ["coverage", "pyyaml"] -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] - [[package]] name = "mslex" version = "0.3.0" @@ -899,48 +654,21 @@ files = [ [package.dependencies] setuptools = "*" -[[package]] -name = "pbr" -version = "5.11.1" -description = "Python Build Reasonableness" -category = "dev" -optional = false -python-versions = ">=2.6" -files = [ - {file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"}, - {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"}, -] - -[[package]] -name = "pep8-naming" -version = "0.13.3" -description = "Check PEP-8 naming conventions, plugin for flake8" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pep8-naming-0.13.3.tar.gz", hash = "sha256:1705f046dfcd851378aac3be1cd1551c7c1e5ff363bacad707d43007877fa971"}, - {file = "pep8_naming-0.13.3-py3-none-any.whl", hash = "sha256:1a86b8c71a03337c97181917e2b472f0f5e4ccb06844a0d6f0a33522549e7a80"}, -] - -[package.dependencies] -flake8 = ">=5.0.0" - [[package]] name = "platformdirs" -version = "3.0.0" +version = "3.5.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.0.0-py3-none-any.whl", hash = "sha256:b1d5eb14f221506f50d6604a561f4c5786d9e80355219694a1b244bcd96f4567"}, - {file = "platformdirs-3.0.0.tar.gz", hash = "sha256:8a1228abb1ef82d788f74139988b137e78692984ec7b08eaa6c65f1723af28f9"}, + {file = "platformdirs-3.5.0-py3-none-any.whl", hash = "sha256:47692bc24c1958e8b0f13dd727307cff1db103fca36399f457da8e05f222fdc4"}, + {file = "platformdirs-3.5.0.tar.gz", hash = "sha256:7954a68d0ba23558d753f73437c55f89027cf8f5108c19844d4b82e5af396335"}, ] [package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] [[package]] name = "pre-commit" @@ -978,26 +706,26 @@ twisted = ["twisted"] [[package]] name = "psutil" -version = "5.9.4" +version = "5.9.5" description = "Cross-platform lib for process and system monitoring in Python." category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ - {file = "psutil-5.9.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8"}, - {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:68908971daf802203f3d37e78d3f8831b6d1014864d7a85937941bb35f09aefe"}, - {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ff89f9b835100a825b14c2808a106b6fdcc4b15483141482a12c725e7f78549"}, - {file = "psutil-5.9.4-cp27-cp27m-win32.whl", hash = "sha256:852dd5d9f8a47169fe62fd4a971aa07859476c2ba22c2254d4a1baa4e10b95ad"}, - {file = "psutil-5.9.4-cp27-cp27m-win_amd64.whl", hash = "sha256:9120cd39dca5c5e1c54b59a41d205023d436799b1c8c4d3ff71af18535728e94"}, - {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6b92c532979bafc2df23ddc785ed116fced1f492ad90a6830cf24f4d1ea27d24"}, - {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:efeae04f9516907be44904cc7ce08defb6b665128992a56957abc9b61dca94b7"}, - {file = "psutil-5.9.4-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:54d5b184728298f2ca8567bf83c422b706200bcbbfafdc06718264f9393cfeb7"}, - {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16653106f3b59386ffe10e0bad3bb6299e169d5327d3f187614b1cb8f24cf2e1"}, - {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54c0d3d8e0078b7666984e11b12b88af2db11d11249a8ac8920dd5ef68a66e08"}, - {file = "psutil-5.9.4-cp36-abi3-win32.whl", hash = "sha256:149555f59a69b33f056ba1c4eb22bb7bf24332ce631c44a319cec09f876aaeff"}, - {file = "psutil-5.9.4-cp36-abi3-win_amd64.whl", hash = "sha256:fd8522436a6ada7b4aad6638662966de0d61d241cb821239b2ae7013d41a43d4"}, - {file = "psutil-5.9.4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6001c809253a29599bc0dfd5179d9f8a5779f9dffea1da0f13c53ee568115e1e"}, - {file = "psutil-5.9.4.tar.gz", hash = "sha256:3d7f9739eb435d4b1338944abe23f49584bde5395f27487d2ee25ad9a8774a62"}, + {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = 
"sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"}, + {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab8ed1a1d77c95453db1ae00a3f9c50227ebd955437bcf2a574ba8adbf6a74d5"}, + {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4aef137f3345082a3d3232187aeb4ac4ef959ba3d7c10c33dd73763fbc063da4"}, + {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48"}, + {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:acf2aef9391710afded549ff602b5887d7a2349831ae4c26be7c807c0a39fac4"}, + {file = "psutil-5.9.5-cp27-none-win32.whl", hash = "sha256:5b9b8cb93f507e8dbaf22af6a2fd0ccbe8244bf30b1baad6b3954e935157ae3f"}, + {file = "psutil-5.9.5-cp27-none-win_amd64.whl", hash = "sha256:8c5f7c5a052d1d567db4ddd231a9d27a74e8e4a9c3f44b1032762bd7b9fdcd42"}, + {file = "psutil-5.9.5-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3c6f686f4225553615612f6d9bc21f1c0e305f75d7d8454f9b46e901778e7217"}, + {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a7dd9997128a0d928ed4fb2c2d57e5102bb6089027939f3b722f3a210f9a8da"}, + {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89518112647f1276b03ca97b65cc7f64ca587b1eb0278383017c2a0dcc26cbe4"}, + {file = "psutil-5.9.5-cp36-abi3-win32.whl", hash = "sha256:104a5cc0e31baa2bcf67900be36acde157756b9c44017b86b2c049f11957887d"}, + {file = "psutil-5.9.5-cp36-abi3-win_amd64.whl", hash = "sha256:b258c0c1c9d145a1d5ceffab1134441c4c5113b2417fafff7315a917a026c3c9"}, + {file = "psutil-5.9.5-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30"}, + {file = "psutil-5.9.5.tar.gz", hash = "sha256:5410638e4df39c54d957fc51ce03048acd8e6d60abc0f5107af51e5fb566eb3c"}, ] [package.extras] @@ -1075,18 +803,6 @@ files = [ {file = "psycopg2_binary-2.9.6-cp39-cp39-win_amd64.whl", hash = "sha256:f6a88f384335bb27812293fdb11ac6aee2ca3f51d3c7820fe03de0a304ab6249"}, ] -[[package]] -name = "pycodestyle" -version = "2.10.0" -description = "Python style guide checker" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pycodestyle-2.10.0-py2.py3-none-any.whl", hash = "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"}, - {file = "pycodestyle-2.10.0.tar.gz", hash = "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053"}, -] - [[package]] name = "pycparser" version = "2.21" @@ -1099,36 +815,6 @@ files = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] -[[package]] -name = "pydocstyle" -version = "6.3.0" -description = "Python docstring style checker" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, - {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, -] - -[package.dependencies] -snowballstemmer = ">=2.2.0" - -[package.extras] -toml = ["tomli (>=1.2.3)"] - -[[package]] -name = "pyflakes" -version = "3.0.1" -description = "passive checker of Python programs" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = 
"pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"}, - {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"}, -] - [[package]] name = "pyjwt" version = "2.7.0" @@ -1202,14 +888,14 @@ test = ["pyaml", "pytest", "toml"] [[package]] name = "pytz" -version = "2022.7.1" +version = "2023.3" description = "World timezone definitions, modern and historical" category = "main" optional = false python-versions = "*" files = [ - {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"}, - {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"}, + {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, + {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, ] [[package]] @@ -1264,26 +950,53 @@ files = [ [[package]] name = "requests" -version = "2.28.2" +version = "2.30.0" description = "Python HTTP for Humans." category = "main" optional = false -python-versions = ">=3.7, <4" +python-versions = ">=3.7" files = [ - {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, - {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, + {file = "requests-2.30.0-py3-none-any.whl", hash = "sha256:10e94cc4f3121ee6da529d358cdaeaff2f1c409cd377dbc72b825852f2f7e294"}, + {file = "requests-2.30.0.tar.gz", hash = "sha256:239d7d4458afcb28a692cdd298d87542235f4ca8d36d03a15bfc128a6559a2f4"}, ] [package.dependencies] certifi = ">=2017.4.17" charset-normalizer = ">=2,<4" idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" +urllib3 = ">=1.21.1,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "ruff" +version = "0.0.265" +description = "An extremely fast Python linter, written in Rust." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.0.265-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:30ddfe22de6ce4eb1260408f4480bbbce998f954dbf470228a21a9b2c45955e4"}, + {file = "ruff-0.0.265-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:a11bd0889e88d3342e7bc514554bb4461bf6cc30ec115821c2425cfaac0b1b6a"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a9b38bdb40a998cbc677db55b6225a6c4fadcf8819eb30695e1b8470942426b"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a8b44a245b60512403a6a03a5b5212da274d33862225c5eed3bcf12037eb19bb"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b279fa55ea175ef953208a6d8bfbcdcffac1c39b38cdb8c2bfafe9222add70bb"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5028950f7af9b119d43d91b215d5044976e43b96a0d1458d193ef0dd3c587bf8"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4057eb539a1d88eb84e9f6a36e0a999e0f261ed850ae5d5817e68968e7b89ed9"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d586e69ab5cbf521a1910b733412a5735936f6a610d805b89d35b6647e2a66aa"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa17b13cd3f29fc57d06bf34c31f21d043735cc9a681203d634549b0e41047d1"}, + {file = "ruff-0.0.265-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:9ac13b11d9ad3001de9d637974ec5402a67cefdf9fffc3929ab44c2fcbb850a1"}, + {file = "ruff-0.0.265-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:62a9578b48cfd292c64ea3d28681dc16b1aa7445b7a7709a2884510fc0822118"}, + {file = "ruff-0.0.265-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d0f9967f84da42d28e3d9d9354cc1575f96ed69e6e40a7d4b780a7a0418d9409"}, + {file = "ruff-0.0.265-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1d5a8de2fbaf91ea5699451a06f4074e7a312accfa774ad9327cde3e4fda2081"}, + {file = "ruff-0.0.265-py3-none-win32.whl", hash = "sha256:9e9db5ccb810742d621f93272e3cc23b5f277d8d00c4a79668835d26ccbe48dd"}, + {file = "ruff-0.0.265-py3-none-win_amd64.whl", hash = "sha256:f54facf286103006171a00ce20388d88ed1d6732db3b49c11feb9bf3d46f90e9"}, + {file = "ruff-0.0.265-py3-none-win_arm64.whl", hash = "sha256:c78470656e33d32ddc54e8482b1b0fc6de58f1195586731e5ff1405d74421499"}, + {file = "ruff-0.0.265.tar.gz", hash = "sha256:53c17f0dab19ddc22b254b087d1381b601b155acfa8feed514f0d6a413d0ab3a"}, +] + [[package]] name = "sentry-sdk" version = "1.22.2" @@ -1328,14 +1041,14 @@ tornado = ["tornado (>=5)"] [[package]] name = "setuptools" -version = "67.4.0" +version = "67.7.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-67.4.0-py3-none-any.whl", hash = "sha256:f106dee1b506dee5102cc3f3e9e68137bbad6d47b616be7991714b0c62204251"}, - {file = "setuptools-67.4.0.tar.gz", hash = "sha256:e5fd0a713141a4a105412233c63dc4e17ba0090c8e8334594ac790ec97792330"}, + {file = "setuptools-67.7.2-py3-none-any.whl", hash = "sha256:23aaf86b85ca52ceb801d32703f12d77517b2556af839621c641fca11287952b"}, + {file = "setuptools-67.7.2.tar.gz", hash = "sha256:f104fa03692a2602fa0fec6c6a9e63b6c8a968de13e17c026957dd1f53d80990"}, ] [package.extras] @@ -1343,18 +1056,6 @@ docs = ["furo", "jaraco.packaging (>=9)", 
"jaraco.tidelift (>=1.4)", "pygments-g testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] -[[package]] -name = "smmap" -version = "5.0.0" -description = "A pure Python implementation of a sliding window memory map manager" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, - {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, -] - [[package]] name = "sniffio" version = "1.3.0" @@ -1367,18 +1068,6 @@ files = [ {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, ] -[[package]] -name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, -] - [[package]] name = "sqlparse" version = "0.4.4" @@ -1396,21 +1085,6 @@ dev = ["build", "flake8"] doc = ["sphinx"] test = ["pytest", "pytest-cov"] -[[package]] -name = "stevedore" -version = "5.0.0" -description = "Manage dynamic plugins for Python applications" -category = "dev" -optional = false -python-versions = ">=3.8" -files = [ - {file = "stevedore-5.0.0-py3-none-any.whl", hash = "sha256:bd5a71ff5e5e5f5ea983880e4a1dd1bb47f8feebbb3d95b592398e2f02194771"}, - {file = "stevedore-5.0.0.tar.gz", hash = "sha256:2c428d2338976279e8eb2196f7a94910960d9f7ba2f41f3988511e95ca447021"}, -] - -[package.dependencies] -pbr = ">=2.0.0,<2.1.0 || >2.1.0" - [[package]] name = "taskipy" version = "1.10.4" @@ -1443,26 +1117,26 @@ files = [ [[package]] name = "tzdata" -version = "2022.7" +version = "2023.3" description = "Provider of IANA time zone data" category = "main" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2022.7-py2.py3-none-any.whl", hash = "sha256:2b88858b0e3120792a3c0635c23daf36a7d7eeeca657c323da299d2094402a0d"}, - {file = "tzdata-2022.7.tar.gz", hash = "sha256:fe5f866eddd8b96e9fcba978f8e503c909b19ea7efda11e52e39494bad3a7bfa"}, + {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, + {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, ] [[package]] name = "urllib3" -version = "1.26.14" +version = "1.26.15" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-1.26.14-py2.py3-none-any.whl", hash = "sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1"}, - {file = "urllib3-1.26.14.tar.gz", hash = "sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72"}, + {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, + {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, ] [package.extras] @@ -1472,24 +1146,24 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "virtualenv" -version = "20.19.0" +version = "20.23.0" description = "Virtual Python Environment builder" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.19.0-py3-none-any.whl", hash = "sha256:54eb59e7352b573aa04d53f80fc9736ed0ad5143af445a1e539aada6eb947dd1"}, - {file = "virtualenv-20.19.0.tar.gz", hash = "sha256:37a640ba82ed40b226599c522d411e4be5edb339a0c0de030c0dc7b646d61590"}, + {file = "virtualenv-20.23.0-py3-none-any.whl", hash = "sha256:6abec7670e5802a528357fdc75b26b9f57d5d92f29c5462ba0fbe45feacc685e"}, + {file = "virtualenv-20.23.0.tar.gz", hash = "sha256:a85caa554ced0c0afbd0d638e7e2d7b5f92d23478d05d17a76daeac8f279f924"}, ] [package.dependencies] distlib = ">=0.3.6,<1" -filelock = ">=3.4.1,<4" -platformdirs = ">=2.4,<4" +filelock = ">=3.11,<4" +platformdirs = ">=3.2,<4" [package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] -test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23)", "pytest (>=7.2.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=67.7.1)", "time-machine (>=2.9)"] [[package]] name = "whitenoise" @@ -1509,4 +1183,4 @@ brotli = ["Brotli"] [metadata] lock-version = "2.0" python-versions = "3.10.*" -content-hash = "a89b60823f9d7717b78c1071654c0c1f237768669a8e37e76175730de209e804" +content-hash = "f9076a1d72b610e77d0c389a4992c814a4c81028232a1ec4f060f77f2cb9125c" diff --git a/pydis_site/apps/api/admin.py b/pydis_site/apps/api/admin.py index e123d150..f3cc0405 100644 --- a/pydis_site/apps/api/admin.py +++ b/pydis_site/apps/api/admin.py @@ -1,7 +1,7 @@ from __future__ import annotations import json -from typing import Iterable, Optional, Tuple +from collections.abc import Iterable from django import urls from django.contrib import admin @@ -62,16 +62,16 @@ class InfractionActorFilter(admin.SimpleListFilter): title = "Actor" parameter_name = "actor" - def lookups(self, request: HttpRequest, model: NominationAdmin) -> Iterable[Tuple[int, str]]: + def lookups(self, request: HttpRequest, model: NominationAdmin) -> Iterable[tuple[int, str]]: """Selectable values for viewer to filter by.""" 
         actor_ids = Infraction.objects.order_by().values_list("actor").distinct()
         actors = User.objects.filter(id__in=actor_ids)
         return ((a.id, a.username) for a in actors)
 
-    def queryset(self, request: HttpRequest, queryset: QuerySet) -> Optional[QuerySet]:
+    def queryset(self, request: HttpRequest, queryset: QuerySet) -> QuerySet | None:
         """Query to filter the list of Users against."""
         if not self.value():
-            return
+            return None
 
         return queryset.filter(actor__id=self.value())
 
@@ -149,7 +149,7 @@ class DeletedMessageAdmin(admin.ModelAdmin):
 
     list_display = ("id", "author", "channel_id")
 
-    def embed_data(self, message: DeletedMessage) -> Optional[str]:
+    def embed_data(self, message: DeletedMessage) -> str | None:
         """Format embed data in a code block for better readability."""
         if message.embeds:
             return format_html(
@@ -157,6 +157,7 @@ class DeletedMessageAdmin(admin.ModelAdmin):
                 "{0}",
                 json.dumps(message.embeds, indent=4)
             )
+        return None
 
     embed_data.short_description = "Embeds"
 
@@ -229,16 +230,16 @@ class NominationActorFilter(admin.SimpleListFilter):
     title = "Actor"
     parameter_name = "actor"
 
-    def lookups(self, request: HttpRequest, model: NominationAdmin) -> Iterable[Tuple[int, str]]:
+    def lookups(self, request: HttpRequest, model: NominationAdmin) -> Iterable[tuple[int, str]]:
         """Selectable values for viewer to filter by."""
         actor_ids = NominationEntry.objects.order_by().values_list("actor").distinct()
         actors = User.objects.filter(id__in=actor_ids)
         return ((a.id, a.username) for a in actors)
 
-    def queryset(self, request: HttpRequest, queryset: QuerySet) -> Optional[QuerySet]:
+    def queryset(self, request: HttpRequest, queryset: QuerySet) -> QuerySet | None:
         """Query to filter the list of Users against."""
         if not self.value():
-            return
+            return None
         nomination_ids = NominationEntry.objects.filter(
             actor__id=self.value()
         ).values_list("nomination_id").distinct()
@@ -292,16 +293,16 @@ class NominationEntryActorFilter(admin.SimpleListFilter):
     title = "Actor"
     parameter_name = "actor"
 
-    def lookups(self, request: HttpRequest, model: NominationAdmin) -> Iterable[Tuple[int, str]]:
+    def lookups(self, request: HttpRequest, model: NominationAdmin) -> Iterable[tuple[int, str]]:
         """Selectable values for viewer to filter by."""
         actor_ids = NominationEntry.objects.order_by().values_list("actor").distinct()
         actors = User.objects.filter(id__in=actor_ids)
         return ((a.id, a.username) for a in actors)
 
-    def queryset(self, request: HttpRequest, queryset: QuerySet) -> Optional[QuerySet]:
+    def queryset(self, request: HttpRequest, queryset: QuerySet) -> QuerySet | None:
         """Query to filter the list of Users against."""
         if not self.value():
-            return
+            return None
 
         return queryset.filter(actor__id=self.value())
 
@@ -425,15 +426,15 @@ class UserRoleFilter(admin.SimpleListFilter):
     title = "Role"
     parameter_name = "role"
 
-    def lookups(self, request: HttpRequest, model: UserAdmin) -> Iterable[Tuple[str, str]]:
+    def lookups(self, request: HttpRequest, model: UserAdmin) -> Iterable[tuple[str, str]]:
         """Selectable values for viewer to filter by."""
         roles = Role.objects.all()
         return ((r.name, r.name) for r in roles)
 
-    def queryset(self, request: HttpRequest, queryset: QuerySet) -> Optional[QuerySet]:
+    def queryset(self, request: HttpRequest, queryset: QuerySet) -> QuerySet | None:
         """Query to filter the list of Users against."""
         if not self.value():
-            return
+            return None
         role = Role.objects.get(name=self.value())
         return queryset.filter(roles__contains=[role.id])
diff --git a/pydis_site/apps/api/github_utils.py b/pydis_site/apps/api/github_utils.py
index 44c571c3..af659195 100644
--- a/pydis_site/apps/api/github_utils.py
+++ b/pydis_site/apps/api/github_utils.py
@@ -82,7 +82,7 @@ def generate_token() -> str:
     Refer to:
     https://docs.github.com/en/developers/apps/building-github-apps/authenticating-with-github-apps#authenticating-as-a-github-app
     """
-    now = datetime.datetime.now()
+    now = datetime.datetime.now(tz=datetime.timezone.utc)
     return jwt.encode(
         {
             "iat": math.floor((now - datetime.timedelta(seconds=60)).timestamp()),  # Issued at
@@ -145,8 +145,12 @@ def authorize(owner: str, repo: str) -> httpx.Client:
 
 def check_run_status(run: WorkflowRun) -> str:
     """Check if the provided run has been completed, otherwise raise an exception."""
-    created_at = datetime.datetime.strptime(run.created_at, settings.GITHUB_TIMESTAMP_FORMAT)
-    run_time = datetime.datetime.utcnow() - created_at
+    created_at = (
+        datetime.datetime
+        .strptime(run.created_at, settings.GITHUB_TIMESTAMP_FORMAT)
+        .replace(tzinfo=datetime.timezone.utc)
+    )
+    run_time = datetime.datetime.now(tz=datetime.timezone.utc) - created_at
 
     if run.status != "completed":
         if run_time <= MAX_RUN_TIME:
@@ -154,8 +158,7 @@ def check_run_status(run: WorkflowRun) -> str:
             f"The requested run is still pending. It was created "
             f"{run_time.seconds // 60}:{run_time.seconds % 60 :>02} minutes ago."
         )
-        else:
-            raise RunTimeoutError("The requested workflow was not ready in time.")
+        raise RunTimeoutError("The requested workflow was not ready in time.")
 
     if run.conclusion != "success":
         # The action failed, or did not run
diff --git a/pydis_site/apps/api/models/bot/message.py b/pydis_site/apps/api/models/bot/message.py
index 89ae27e4..d8147cd4 100644
--- a/pydis_site/apps/api/models/bot/message.py
+++ b/pydis_site/apps/api/models/bot/message.py
@@ -61,9 +61,10 @@ class Message(ModelReprMixin, models.Model):
     @property
     def timestamp(self) -> datetime.datetime:
         """Attribute that represents the message timestamp as derived from the snowflake id."""
-        return datetime.datetime.utcfromtimestamp(
-            ((self.id >> 22) + 1420070400000) / 1000
-        ).replace(tzinfo=datetime.timezone.utc)
+        return datetime.datetime.fromtimestamp(
+            ((self.id >> 22) + 1420070400000) / 1000,
+            tz=datetime.timezone.utc,
+        )
 
     class Meta:
         """Metadata provided for Django's ORM."""
diff --git a/pydis_site/apps/api/models/bot/metricity.py b/pydis_site/apps/api/models/bot/metricity.py
index f53dd33c..a55f5e5b 100644
--- a/pydis_site/apps/api/models/bot/metricity.py
+++ b/pydis_site/apps/api/models/bot/metricity.py
@@ -1,4 +1,3 @@
-from typing import List, Tuple
 
 from django.db import connections
 
@@ -10,10 +9,9 @@ EXCLUDE_CHANNELS = (
 )
 
 
-class NotFoundError(Exception):  # noqa: N818
+class NotFoundError(Exception):
     """Raised when an entity cannot be found."""
 
-    pass
 
 
 class Metricity:
@@ -31,15 +29,14 @@ class Metricity:
     def user(self, user_id: str) -> dict:
         """Query a user's data."""
         # TODO: Swap this back to some sort of verified at date
-        columns = ["joined_at"]
-        query = f"SELECT {','.join(columns)} FROM users WHERE id = '%s'"
+        query = "SELECT joined_at FROM users WHERE id = '%s'"
         self.cursor.execute(query, [user_id])
         values = self.cursor.fetchone()
 
         if not values:
-            raise NotFoundError()
+            raise NotFoundError
 
-        return dict(zip(columns, values))
+        return {'joined_at': values[0]}
 
     def total_messages(self, user_id: str) -> int:
         """Query total number of messages for a user."""
@@ -58,7 +55,7 @@ class Metricity:
         values = self.cursor.fetchone()
 
         if not values:
-            raise NotFoundError()
+            raise NotFoundError
 
         return values[0]
@@ -88,11 +85,11 @@ class Metricity:
         values = self.cursor.fetchone()
 
         if not values:
-            raise NotFoundError()
+            raise NotFoundError
 
         return values[0]
 
-    def top_channel_activity(self, user_id: str) -> List[Tuple[str, int]]:
+    def top_channel_activity(self, user_id: str) -> list[tuple[str, int]]:
         """
         Query the top three channels in which the user is most active.
 
@@ -127,7 +124,7 @@ class Metricity:
         values = self.cursor.fetchall()
 
         if not values:
-            raise NotFoundError()
+            raise NotFoundError
 
         return values
diff --git a/pydis_site/apps/api/tests/base.py b/pydis_site/apps/api/tests/base.py
index c9f3cb7e..704b22cf 100644
--- a/pydis_site/apps/api/tests/base.py
+++ b/pydis_site/apps/api/tests/base.py
@@ -61,6 +61,7 @@ class AuthenticatedAPITestCase(APITestCase):
     ...     self.assertEqual(response.status_code, 200)
     """
 
-    def setUp(self):
+    def setUp(self) -> None:
+        """Bootstrap the user and authenticate it."""
         super().setUp()
         self.client.force_authenticate(test_user)
diff --git a/pydis_site/apps/api/tests/test_bumped_threads.py b/pydis_site/apps/api/tests/test_bumped_threads.py
index 316e3f0b..2e3892c7 100644
--- a/pydis_site/apps/api/tests/test_bumped_threads.py
+++ b/pydis_site/apps/api/tests/test_bumped_threads.py
@@ -1,7 +1,7 @@
 from django.urls import reverse
 
 from .base import AuthenticatedAPITestCase
-from ..models import BumpedThread
+from pydis_site.apps.api.models import BumpedThread
 
 
 class UnauthedBumpedThreadAPITests(AuthenticatedAPITestCase):
diff --git a/pydis_site/apps/api/tests/test_deleted_messages.py b/pydis_site/apps/api/tests/test_deleted_messages.py
index 1eb535d8..62d17e58 100644
--- a/pydis_site/apps/api/tests/test_deleted_messages.py
+++ b/pydis_site/apps/api/tests/test_deleted_messages.py
@@ -1,10 +1,9 @@
-from datetime import datetime
+from datetime import datetime, timezone
 
 from django.urls import reverse
-from django.utils import timezone
 
 from .base import AuthenticatedAPITestCase
-from ..models import MessageDeletionContext, User
+from pydis_site.apps.api.models import MessageDeletionContext, User
 
 
 class DeletedMessagesWithoutActorTests(AuthenticatedAPITestCase):
@@ -18,7 +17,7 @@ class DeletedMessagesWithoutActorTests(AuthenticatedAPITestCase):
 
         cls.data = {
             'actor': None,
-            'creation': datetime.utcnow().isoformat(),
+            'creation': datetime.now(tz=timezone.utc).isoformat(),
             'deletedmessage_set': [
                 {
                     'author': cls.author.id,
@@ -58,7 +57,7 @@ class DeletedMessagesWithActorTests(AuthenticatedAPITestCase):
 
         cls.data = {
             'actor': cls.actor.id,
-            'creation': datetime.utcnow().isoformat(),
+            'creation': datetime.now(tz=timezone.utc).isoformat(),
             'deletedmessage_set': [
                 {
                     'author': cls.author.id,
@@ -90,7 +89,7 @@ class DeletedMessagesLogURLTests(AuthenticatedAPITestCase):
 
         cls.deletion_context = MessageDeletionContext.objects.create(
             actor=cls.actor,
-            creation=timezone.now()
+            creation=datetime.now(tz=timezone.utc),
         )
 
     def test_valid_log_url(self):
diff --git a/pydis_site/apps/api/tests/test_documentation_links.py b/pydis_site/apps/api/tests/test_documentation_links.py
index 4e238cbb..f4a332cb 100644
--- a/pydis_site/apps/api/tests/test_documentation_links.py
+++ b/pydis_site/apps/api/tests/test_documentation_links.py
@@ -1,7 +1,7 @@
 from django.urls import reverse
 
 from .base import AuthenticatedAPITestCase
-from ..models import DocumentationLink
+from pydis_site.apps.api.models import DocumentationLink
 
 
 class UnauthedDocumentationLinkAPITests(AuthenticatedAPITestCase):
diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py
index 5059d651..4cef1c8f 100644
--- a/pydis_site/apps/api/tests/test_filters.py
+++ b/pydis_site/apps/api/tests/test_filters.py
@@ -1,7 +1,7 @@
 import contextlib
 from dataclasses import dataclass
 from datetime import timedelta
-from typing import Any, Dict, Tuple, Type
+from typing import Any
 
 from django.db.models import Model
 from django.urls import reverse
@@ -12,22 +12,22 @@ from pydis_site.apps.api.tests.base import AuthenticatedAPITestCase
 
 @dataclass()
 class TestSequence:
-    model: Type[Model]
+    model: type[Model]
     route: str
-    object: Dict[str, Any]
-    ignored_fields: Tuple[str, ...] = ()
+    object: dict[str, Any]
+    ignored_fields: tuple[str, ...] = ()
 
     def url(self, detail: bool = False) -> str:
        return reverse(f'api:bot:{self.route}-{"detail" if detail else "list"}')
 
 
-FK_FIELDS: Dict[Type[Model], Tuple[str, ...]] = {
+FK_FIELDS: dict[type[Model], tuple[str, ...]] = {
     FilterList: (),
     Filter: ("filter_list",),
 }
 
 
-def get_test_sequences() -> Dict[str, TestSequence]:
+def get_test_sequences() -> dict[str, TestSequence]:
     filter_list1_deny_dict = {
         "name": "testname",
         "list_type": 0,
diff --git a/pydis_site/apps/api/tests/test_github_utils.py b/pydis_site/apps/api/tests/test_github_utils.py
index 95bafec0..34fae875 100644
--- a/pydis_site/apps/api/tests/test_github_utils.py
+++ b/pydis_site/apps/api/tests/test_github_utils.py
@@ -12,7 +12,7 @@ import rest_framework.test
 from django.urls import reverse
 
 from pydis_site import settings
-from .. import github_utils
+from pydis_site.apps.api import github_utils
 
 
 class GeneralUtilityTests(unittest.TestCase):
@@ -39,7 +39,8 @@ class GeneralUtilityTests(unittest.TestCase):
         delta = datetime.timedelta(minutes=10)
         self.assertAlmostEqual(decoded["exp"] - decoded["iat"], delta.total_seconds())
-        self.assertLess(decoded["exp"], (datetime.datetime.now() + delta).timestamp())
+        then = datetime.datetime.now(tz=datetime.timezone.utc) + delta
+        self.assertLess(decoded["exp"], then.timestamp())
 
 
 class CheckRunTests(unittest.TestCase):
@@ -50,7 +51,7 @@ class CheckRunTests(unittest.TestCase):
         "head_sha": "sha",
         "status": "completed",
         "conclusion": "success",
-        "created_at": datetime.datetime.utcnow().strftime(settings.GITHUB_TIMESTAMP_FORMAT),
+        "created_at": datetime.datetime.now(tz=datetime.timezone.utc).strftime(settings.GITHUB_TIMESTAMP_FORMAT),
         "artifacts_url": "url",
     }
 
@@ -74,7 +75,8 @@ class CheckRunTests(unittest.TestCase):
         # Set the creation time to well before the MAX_RUN_TIME
         # to guarantee the right conclusion
         kwargs["created_at"] = (
-            datetime.datetime.utcnow() - github_utils.MAX_RUN_TIME - datetime.timedelta(minutes=10)
+            datetime.datetime.now(tz=datetime.timezone.utc)
+            - github_utils.MAX_RUN_TIME - datetime.timedelta(minutes=10)
         ).strftime(settings.GITHUB_TIMESTAMP_FORMAT)
 
         with self.assertRaises(github_utils.RunTimeoutError):
@@ -103,29 +105,26 @@ def get_response_authorize(_: httpx.Client, request: httpx.Request, **__) -> htt
                     "account": {"login": "VALID_OWNER"},
                     "access_tokens_url": "https://example.com/ACCESS_TOKEN_URL"
                 }])
-            else:
-                return httpx.Response(
-                    401, json={"error": "auth app/installations"}, request=request
-                )
+            return httpx.Response(
+                401, json={"error": "auth app/installations"}, request=request
+            )
 
-        elif path == "/installation/repositories":
+        elif path == "/installation/repositories":  # noqa: RET505
             if auth == "bearer app access token":
                 return httpx.Response(200, request=request, json={
                     "repositories": [{
                         "name": "VALID_REPO"
                     }]
                 })
-            else:  # pragma: no cover
-                return httpx.Response(
-                    401, json={"error": "auth installation/repositories"}, request=request
-                )
+            return httpx.Response(  # pragma: no cover
+                401, json={"error": "auth installation/repositories"}, request=request
+            )
 
-    elif request.method == "POST":
+    elif request.method == "POST":  # noqa: RET505
         if path == "/ACCESS_TOKEN_URL":
             if auth == "bearer JWT initial token":
                 return httpx.Response(200, request=request, json={"token": "app access token"})
-            else:  # pragma: no cover
-                return httpx.Response(401, json={"error": "auth access_token"}, request=request)
+            return httpx.Response(401, json={"error": "auth access_token"}, request=request)  # pragma: no cover
 
     # Reaching this point means something has gone wrong
     return httpx.Response(500, request=request)  # pragma: no cover
@@ -138,7 +137,7 @@ class AuthorizeTests(unittest.TestCase):
 
     def test_invalid_apps_auth(self):
         """Test that an exception is raised if authorization was attempted with an invalid token."""
-        with mock.patch.object(github_utils, "generate_token", return_value="Invalid token"):
+        with mock.patch.object(github_utils, "generate_token", return_value="Invalid token"):  # noqa: SIM117
             with self.assertRaises(httpx.HTTPStatusError) as error:
                 github_utils.authorize("VALID_OWNER", "VALID_REPO")
 
@@ -179,7 +178,11 @@ class ArtifactFetcherTests(unittest.TestCase):
             run = github_utils.WorkflowRun(
                 name="action_name",
                 head_sha="action_sha",
-                created_at=datetime.datetime.now().strftime(settings.GITHUB_TIMESTAMP_FORMAT),
+                created_at=(
+                    datetime.datetime
+                    .now(tz=datetime.timezone.utc)
+                    .strftime(settings.GITHUB_TIMESTAMP_FORMAT)
+                ),
                 status="completed",
                 conclusion="success",
                 artifacts_url="artifacts_url"
@@ -187,7 +190,7 @@
             return httpx.Response(
                 200, request=request, json={"workflow_runs": [dataclasses.asdict(run)]}
             )
-        elif path == "/artifact_url":
+        elif path == "/artifact_url":  # noqa: RET505
             return httpx.Response(
                 200, request=request, json={"artifacts": [{
                     "name": "artifact_name",
diff --git a/pydis_site/apps/api/tests/test_infractions.py b/pydis_site/apps/api/tests/test_infractions.py
index ceb5591b..71611ee9 100644
--- a/pydis_site/apps/api/tests/test_infractions.py
+++ b/pydis_site/apps/api/tests/test_infractions.py
@@ -8,8 +8,8 @@ from django.db.utils import IntegrityError
 from django.urls import reverse
 
 from .base import AuthenticatedAPITestCase
-from ..models import Infraction, User
-from ..serializers import InfractionSerializer
+from pydis_site.apps.api.models import Infraction, User
+from pydis_site.apps.api.serializers import InfractionSerializer
 
 
 class UnauthenticatedTests(AuthenticatedAPITestCase):
@@ -152,8 +152,8 @@ class InfractionTests(AuthenticatedAPITestCase):
 
     def test_filter_after(self):
         url = reverse('api:bot:infraction-list')
-        target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=5)
-        response = self.client.get(f'{url}?type=superstar&expires_after={target_time.isoformat()}')
+        target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=5)
+        response = self.client.get(url, {'type': 'superstar', 'expires_after': target_time.isoformat()})
 
         self.assertEqual(response.status_code, 200)
         infractions = response.json()
@@ -161,8 +161,8 @@ class InfractionTests(AuthenticatedAPITestCase):
 
     def test_filter_before(self):
         url = reverse('api:bot:infraction-list')
-        target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=5)
-        response = self.client.get(f'{url}?type=superstar&expires_before={target_time.isoformat()}')
+        target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=5)
+        response = self.client.get(url, {'type': 'superstar', 'expires_before': target_time.isoformat()})
 
         self.assertEqual(response.status_code, 200)
         infractions = response.json()
@@ -185,11 +185,12 @@ class InfractionTests(AuthenticatedAPITestCase):
 
     def test_after_before_before(self):
         url = reverse('api:bot:infraction-list')
-        target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=4)
-        target_time_late = datetime.datetime.utcnow() + datetime.timedelta(hours=6)
+        target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=4)
+        target_time_late = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=6)
         response = self.client.get(
-            f'{url}?expires_before={target_time_late.isoformat()}'
-            f'&expires_after={target_time.isoformat()}'
+            url,
+            {'expires_before': target_time_late.isoformat(),
+             'expires_after': target_time.isoformat()},
         )
 
         self.assertEqual(response.status_code, 200)
@@ -198,11 +199,12 @@ class InfractionTests(AuthenticatedAPITestCase):
 
     def test_after_after_before_invalid(self):
         url = reverse('api:bot:infraction-list')
-        target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=5)
-        target_time_late = datetime.datetime.utcnow() + datetime.timedelta(hours=9)
+        target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=5)
+        target_time_late = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=9)
         response = self.client.get(
-            f'{url}?expires_before={target_time.isoformat()}'
-            f'&expires_after={target_time_late.isoformat()}'
+            url,
+            {'expires_before': target_time.isoformat(),
+             'expires_after': target_time_late.isoformat()},
         )
 
         self.assertEqual(response.status_code, 400)
@@ -212,8 +214,11 @@ class InfractionTests(AuthenticatedAPITestCase):
 
     def test_permanent_after_invalid(self):
         url = reverse('api:bot:infraction-list')
-        target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=5)
-        response = self.client.get(f'{url}?permanent=true&expires_after={target_time.isoformat()}')
+        target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=5)
+        response = self.client.get(
+            url,
+            {'permanent': 'true', 'expires_after': target_time.isoformat()},
+        )
 
         self.assertEqual(response.status_code, 400)
         errors = list(response.json())
@@ -221,8 +226,11 @@ class InfractionTests(AuthenticatedAPITestCase):
 
     def test_permanent_before_invalid(self):
         url = reverse('api:bot:infraction-list')
-        target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=5)
-        response = self.client.get(f'{url}?permanent=true&expires_before={target_time.isoformat()}')
+        target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=5)
+        response = self.client.get(
+            url,
+            {'permanent': 'true', 'expires_before': target_time.isoformat()},
+        )
 
         self.assertEqual(response.status_code, 400)
         errors = list(response.json())
@@ -230,9 +238,10 @@ class InfractionTests(AuthenticatedAPITestCase):
 
     def test_nonpermanent_before(self):
         url = reverse('api:bot:infraction-list')
-        target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=6)
+        target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=6)
         response = self.client.get(
-            f'{url}?permanent=false&expires_before={target_time.isoformat()}'
+            url,
+            {'permanent': 'false', 'expires_before': target_time.isoformat()},
        )
 
         self.assertEqual(response.status_code, 200)
@@ -522,39 +531,38 @@ class CreationTests(AuthenticatedAPITestCase):
 
         active_infraction_types = ('timeout', 'ban', 'superstar')
         for infraction_type in active_infraction_types:
-            with self.subTest(infraction_type=infraction_type):
-                with transaction.atomic():
-                    first_active_infraction = {
-                        'user': self.user.id,
-                        'actor': self.user.id,
-                        'type': infraction_type,
-                        'reason': 'Take me on!',
-                        'active': True,
-                        'expires_at': '2019-10-04T12:52:00+00:00'
-                    }
+            with self.subTest(infraction_type=infraction_type), transaction.atomic():
+                first_active_infraction = {
+                    'user': self.user.id,
+                    'actor': self.user.id,
+                    'type': infraction_type,
+                    'reason': 'Take me on!',
+                    'active': True,
+                    'expires_at': '2019-10-04T12:52:00+00:00'
+                }
+
+                # Post the first active infraction of a type and confirm it's accepted.
+                first_response = self.client.post(url, data=first_active_infraction)
+                self.assertEqual(first_response.status_code, 201)
 
-                    # Post the first active infraction of a type and confirm it's accepted.
-                    first_response = self.client.post(url, data=first_active_infraction)
-                    self.assertEqual(first_response.status_code, 201)
-
-                    second_active_infraction = {
-                        'user': self.user.id,
-                        'actor': self.user.id,
-                        'type': infraction_type,
-                        'reason': 'Take on me!',
-                        'active': True,
-                        'expires_at': '2019-10-04T12:52:00+00:00'
+                second_active_infraction = {
+                    'user': self.user.id,
+                    'actor': self.user.id,
+                    'type': infraction_type,
+                    'reason': 'Take on me!',
+                    'active': True,
+                    'expires_at': '2019-10-04T12:52:00+00:00'
+                }
+                second_response = self.client.post(url, data=second_active_infraction)
+                self.assertEqual(second_response.status_code, 400)
+                self.assertEqual(
+                    second_response.json(),
+                    {
+                        'non_field_errors': [
+                            'This user already has an active infraction of this type.'
+                        ]
                     }
-                    second_response = self.client.post(url, data=second_active_infraction)
-                    self.assertEqual(second_response.status_code, 400)
-                    self.assertEqual(
-                        second_response.json(),
-                        {
-                            'non_field_errors': [
-                                'This user already has an active infraction of this type.'
- ] - } - ) + ) def test_returns_201_for_second_active_infraction_of_different_type(self): """Test if the API accepts a second active infraction of a different type than the first.""" diff --git a/pydis_site/apps/api/tests/test_models.py b/pydis_site/apps/api/tests/test_models.py index d3341b35..1cca133d 100644 --- a/pydis_site/apps/api/tests/test_models.py +++ b/pydis_site/apps/api/tests/test_models.py @@ -118,7 +118,7 @@ class StringDunderMethodTests(SimpleTestCase): OffensiveMessage( id=602951077675139072, channel_id=291284109232308226, - delete_date=dt(3000, 1, 1) + delete_date=dt(3000, 1, 1, tzinfo=timezone.utc) ), OffTopicChannelName(name='bob-the-builders-playground'), Role( @@ -132,7 +132,7 @@ class StringDunderMethodTests(SimpleTestCase): name='shawn', discriminator=555, ), - creation=dt.utcnow() + creation=dt.now(tz=timezone.utc) ), User( id=5, diff --git a/pydis_site/apps/api/tests/test_nominations.py b/pydis_site/apps/api/tests/test_nominations.py index b3742cdd..ee6b1fbd 100644 --- a/pydis_site/apps/api/tests/test_nominations.py +++ b/pydis_site/apps/api/tests/test_nominations.py @@ -3,7 +3,7 @@ from datetime import datetime as dt, timedelta, timezone from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..models import Nomination, NominationEntry, User +from pydis_site.apps.api.models import Nomination, NominationEntry, User class CreationTests(AuthenticatedAPITestCase): diff --git a/pydis_site/apps/api/tests/test_off_topic_channel_names.py b/pydis_site/apps/api/tests/test_off_topic_channel_names.py index 34098c92..315f707d 100644 --- a/pydis_site/apps/api/tests/test_off_topic_channel_names.py +++ b/pydis_site/apps/api/tests/test_off_topic_channel_names.py @@ -1,7 +1,7 @@ from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..models import OffTopicChannelName +from pydis_site.apps.api.models import OffTopicChannelName class UnauthenticatedTests(AuthenticatedAPITestCase): diff --git a/pydis_site/apps/api/tests/test_offensive_message.py b/pydis_site/apps/api/tests/test_offensive_message.py index 3cf95b75..53f9cb48 100644 --- a/pydis_site/apps/api/tests/test_offensive_message.py +++ b/pydis_site/apps/api/tests/test_offensive_message.py @@ -3,13 +3,13 @@ import datetime from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..models import OffensiveMessage +from pydis_site.apps.api.models import OffensiveMessage class CreationTests(AuthenticatedAPITestCase): def test_accept_valid_data(self): url = reverse('api:bot:offensivemessage-list') - delete_at = datetime.datetime.now() + datetime.timedelta(days=1) + delete_at = datetime.datetime.now() + datetime.timedelta(days=1) # noqa: DTZ005 data = { 'id': '602951077675139072', 'channel_id': '291284109232308226', @@ -32,7 +32,7 @@ class CreationTests(AuthenticatedAPITestCase): def test_returns_400_on_non_future_date(self): url = reverse('api:bot:offensivemessage-list') - delete_at = datetime.datetime.now() - datetime.timedelta(days=1) + delete_at = datetime.datetime.now() - datetime.timedelta(days=1) # noqa: DTZ005 data = { 'id': '602951077675139072', 'channel_id': '291284109232308226', @@ -46,7 +46,7 @@ class CreationTests(AuthenticatedAPITestCase): def test_returns_400_on_negative_id_or_channel_id(self): url = reverse('api:bot:offensivemessage-list') - delete_at = datetime.datetime.now() + datetime.timedelta(days=1) + delete_at = datetime.datetime.now() + datetime.timedelta(days=1) # noqa: DTZ005 data = { 'id': '602951077675139072', 'channel_id': 
'291284109232308226', @@ -72,7 +72,7 @@ class CreationTests(AuthenticatedAPITestCase): class ListTests(AuthenticatedAPITestCase): @classmethod def setUpTestData(cls): - delete_at = datetime.datetime.now() + datetime.timedelta(days=1) + delete_at = datetime.datetime.now() + datetime.timedelta(days=1) # noqa: DTZ005 aware_delete_at = delete_at.replace(tzinfo=datetime.timezone.utc) cls.messages = [ diff --git a/pydis_site/apps/api/tests/test_reminders.py b/pydis_site/apps/api/tests/test_reminders.py index e17569f0..9bb5fe4d 100644 --- a/pydis_site/apps/api/tests/test_reminders.py +++ b/pydis_site/apps/api/tests/test_reminders.py @@ -4,7 +4,7 @@ from django.forms.models import model_to_dict from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..models import Reminder, User +from pydis_site.apps.api.models import Reminder, User class UnauthedReminderAPITests(AuthenticatedAPITestCase): @@ -59,7 +59,7 @@ class ReminderCreationTests(AuthenticatedAPITestCase): data = { 'author': self.author.id, 'content': 'Remember to...wait what was it again?', - 'expiration': datetime.utcnow().isoformat(), + 'expiration': datetime.now(tz=timezone.utc).isoformat(), 'jump_url': "https://www.google.com", 'channel_id': 123, 'mentions': [8888, 9999], diff --git a/pydis_site/apps/api/tests/test_roles.py b/pydis_site/apps/api/tests/test_roles.py index 73c80c77..d3031990 100644 --- a/pydis_site/apps/api/tests/test_roles.py +++ b/pydis_site/apps/api/tests/test_roles.py @@ -1,7 +1,7 @@ from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..models import Role, User +from pydis_site.apps.api.models import Role, User class CreationTests(AuthenticatedAPITestCase): diff --git a/pydis_site/apps/api/tests/test_rules.py b/pydis_site/apps/api/tests/test_rules.py index 3ee2d4e0..662fb8e9 100644 --- a/pydis_site/apps/api/tests/test_rules.py +++ b/pydis_site/apps/api/tests/test_rules.py @@ -5,7 +5,7 @@ from pathlib import Path from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..views import RulesView +from pydis_site.apps.api.views import RulesView class RuleAPITests(AuthenticatedAPITestCase): diff --git a/pydis_site/apps/api/tests/test_users.py b/pydis_site/apps/api/tests/test_users.py index d86e80bb..cff4a825 100644 --- a/pydis_site/apps/api/tests/test_users.py +++ b/pydis_site/apps/api/tests/test_users.py @@ -4,9 +4,9 @@ from unittest.mock import Mock, patch from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..models import Infraction, Role, User -from ..models.bot.metricity import NotFoundError -from ..viewsets.bot.user import UserListPagination +from pydis_site.apps.api.models import Infraction, Role, User +from pydis_site.apps.api.models.bot.metricity import NotFoundError +from pydis_site.apps.api.viewsets.bot.user import UserListPagination class UnauthedUserAPITests(AuthenticatedAPITestCase): @@ -469,18 +469,17 @@ class UserMetricityTests(AuthenticatedAPITestCase): with self.subTest( voice_infractions=case['voice_infractions'], voice_gate_blocked=case['voice_gate_blocked'] - ): - with patch("pydis_site.apps.api.viewsets.bot.user.Infraction.objects.filter") as p: - p.return_value = case['voice_infractions'] - - url = reverse('api:bot:user-metricity-data', args=[0]) - response = self.client.get(url) - - self.assertEqual(response.status_code, 200) - self.assertEqual( - response.json()["voice_gate_blocked"], - case["voice_gate_blocked"] - ) + ), patch("pydis_site.apps.api.viewsets.bot.user.Infraction.objects.filter") 
as p: + p.return_value = case['voice_infractions'] + + url = reverse('api:bot:user-metricity-data', args=[0]) + response = self.client.get(url) + + self.assertEqual(response.status_code, 200) + self.assertEqual( + response.json()["voice_gate_blocked"], + case["voice_gate_blocked"] + ) def test_metricity_review_data(self): # Given diff --git a/pydis_site/apps/api/tests/test_validators.py b/pydis_site/apps/api/tests/test_validators.py index 8c46fcbc..a7ec6e38 100644 --- a/pydis_site/apps/api/tests/test_validators.py +++ b/pydis_site/apps/api/tests/test_validators.py @@ -3,8 +3,8 @@ from datetime import datetime, timezone from django.core.exceptions import ValidationError from django.test import TestCase -from ..models.bot.bot_setting import validate_bot_setting_name -from ..models.bot.offensive_message import future_date_validator +from pydis_site.apps.api.models.bot.bot_setting import validate_bot_setting_name +from pydis_site.apps.api.models.bot.offensive_message import future_date_validator REQUIRED_KEYS = ( diff --git a/pydis_site/apps/api/views.py b/pydis_site/apps/api/views.py index b1b7dc0f..32f41667 100644 --- a/pydis_site/apps/api/views.py +++ b/pydis_site/apps/api/views.py @@ -93,7 +93,7 @@ class RulesView(APIView): """ if target == 'html': return f'{description}' - elif target == 'md': + elif target == 'md': # noqa: RET505 return f'[{description}]({link})' else: raise ValueError( @@ -101,7 +101,7 @@ class RulesView(APIView): ) # `format` here is the result format, we have a link format here instead. - def get(self, request, format=None): # noqa: D102,ANN001,ANN201 + def get(self, request, format=None): # noqa: ANN001, ANN201 """ Returns a list of our community rules coupled with their keywords. diff --git a/pydis_site/apps/api/viewsets/bot/filters.py b/pydis_site/apps/api/viewsets/bot/filters.py index d6c2d18c..9c9e8338 100644 --- a/pydis_site/apps/api/viewsets/bot/filters.py +++ b/pydis_site/apps/api/viewsets/bot/filters.py @@ -1,10 +1,10 @@ from rest_framework.viewsets import ModelViewSet -from pydis_site.apps.api.models.bot.filters import ( # noqa: I101 - Preserving the filter order +from pydis_site.apps.api.models.bot.filters import ( # - Preserving the filter order FilterList, Filter ) -from pydis_site.apps.api.serializers import ( # noqa: I101 - Preserving the filter order +from pydis_site.apps.api.serializers import ( # - Preserving the filter order FilterListSerializer, FilterSerializer, ) diff --git a/pydis_site/apps/api/viewsets/bot/off_topic_channel_name.py b/pydis_site/apps/api/viewsets/bot/off_topic_channel_name.py index d0519e86..1774004c 100644 --- a/pydis_site/apps/api/viewsets/bot/off_topic_channel_name.py +++ b/pydis_site/apps/api/viewsets/bot/off_topic_channel_name.py @@ -85,10 +85,9 @@ class OffTopicChannelNameViewSet(ModelViewSet): serializer.save() return Response(create_data, status=HTTP_201_CREATED) - else: - raise ParseError(detail={ - 'name': ["This query parameter is required."] - }) + raise ParseError(detail={ + 'name': ["This query parameter is required."] + }) def list(self, request: Request, *args, **kwargs) -> Response: """ diff --git a/pydis_site/apps/api/viewsets/bot/user.py b/pydis_site/apps/api/viewsets/bot/user.py index db73a83c..88fa3415 100644 --- a/pydis_site/apps/api/viewsets/bot/user.py +++ b/pydis_site/apps/api/viewsets/bot/user.py @@ -1,4 +1,3 @@ -import typing from collections import OrderedDict from django.db.models import Q @@ -24,14 +23,14 @@ class UserListPagination(PageNumberPagination): page_size = 2500 page_size_query_param = 
"page_size" - def get_next_page_number(self) -> typing.Optional[int]: + def get_next_page_number(self) -> int | None: """Get the next page number.""" if not self.page.has_next(): return None page_number = self.page.next_page_number() return page_number - def get_previous_page_number(self) -> typing.Optional[int]: + def get_previous_page_number(self) -> int | None: """Get the previous page number.""" if not self.page.has_previous(): return None diff --git a/pydis_site/apps/content/models/tag.py b/pydis_site/apps/content/models/tag.py index 1a20d775..7c49902f 100644 --- a/pydis_site/apps/content/models/tag.py +++ b/pydis_site/apps/content/models/tag.py @@ -30,8 +30,7 @@ class Commit(models.Model): def lines(self) -> collections.abc.Iterable[str]: """Return each line in the commit message.""" - for line in self.message.split("\n"): - yield line + yield from self.message.split("\n") def format_authors(self) -> collections.abc.Iterable[str]: """Return a nice representation of the author(s)' name and email.""" diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/linting.md b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/linting.md index f6f8a5f2..b634f513 100644 --- a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/linting.md +++ b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/linting.md @@ -4,7 +4,7 @@ description: A guide for linting and setting up pre-commit. --- Your commit will be rejected by the build server if it fails to lint. -On most of our projects, we use `flake8` and `pre-commit` to ensure that the code style is consistent across the code base. +On most of our projects, we use `ruff` and `pre-commit` to ensure that the code style is consistent across the code base. `pre-commit` is a powerful tool that helps you automatically lint before you commit. If the linter complains, the commit is aborted so that you can fix the linting errors before committing again. diff --git a/pydis_site/apps/content/tests/helpers.py b/pydis_site/apps/content/tests/helpers.py index fad91050..0e7562e8 100644 --- a/pydis_site/apps/content/tests/helpers.py +++ b/pydis_site/apps/content/tests/helpers.py @@ -62,19 +62,19 @@ class MockPagesTestCase(TestCase): ├── not_a_page.md ├── tmp.md ├── tmp - |   ├── _info.yml - |   └── category - |    ├── _info.yml - |      └── subcategory_without_info + | ├── _info.yml + | └── category + | ├── _info.yml + | └── subcategory_without_info └── category -    ├── _info.yml -    ├── with_metadata.md -    └── subcategory -    ├── with_metadata.md -       └── without_metadata.md + ├── _info.yml + ├── with_metadata.md + └── subcategory + ├── with_metadata.md + └── without_metadata.md """ - def setUp(self): + def setUp(self) -> None: """Create the fake filesystem.""" Path(f"{BASE_PATH}/_info.yml").write_text(CATEGORY_INFO) Path(f"{BASE_PATH}/root.md").write_text(MARKDOWN_WITH_METADATA) diff --git a/pydis_site/apps/content/urls.py b/pydis_site/apps/content/urls.py index a7695a27..baae154d 100644 --- a/pydis_site/apps/content/urls.py +++ b/pydis_site/apps/content/urls.py @@ -8,7 +8,7 @@ from . 
import utils, views app_name = "content" -def __get_all_files(root: Path, folder: typing.Optional[Path] = None) -> list[str]: +def __get_all_files(root: Path, folder: Path | None = None) -> list[str]: """Find all folders and markdown files recursively starting from `root`.""" if not folder: folder = root diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py index c12893ef..347640dd 100644 --- a/pydis_site/apps/content/utils.py +++ b/pydis_site/apps/content/utils.py @@ -151,8 +151,11 @@ def set_tag_commit(tag: Tag) -> None: commit = data["commit"] author, committer = commit["author"], commit["committer"] - date = datetime.datetime.strptime(committer["date"], settings.GITHUB_TIMESTAMP_FORMAT) - date = date.replace(tzinfo=datetime.timezone.utc) + date = ( + datetime.datetime + .strptime(committer["date"], settings.GITHUB_TIMESTAMP_FORMAT) + .replace(tzinfo=datetime.timezone.utc) + ) if author["email"] == committer["email"]: authors = [author] @@ -212,9 +215,8 @@ def get_tags() -> list[Tag]: record_tags(tags) return tags - else: - # Get tags from database - return list(Tag.objects.all()) + + return list(Tag.objects.all()) def get_tag(path: str, *, skip_sync: bool = False) -> Tag | list[Tag]: @@ -242,13 +244,13 @@ def get_tag(path: str, *, skip_sync: bool = False) -> Tag | list[Tag]: if tag.last_commit is None and not skip_sync: set_tag_commit(tag) return tag - elif tag.group == name and group is None: + elif tag.group == name and group is None: # noqa: RET505 matches.append(tag) if matches: return matches - raise Tag.DoesNotExist() + raise Tag.DoesNotExist def get_tag_category(tags: list[Tag] | None = None, *, collapse_groups: bool) -> dict[str, dict]: diff --git a/pydis_site/apps/content/views/tags.py b/pydis_site/apps/content/views/tags.py index 4f4bb5a2..8d3e3321 100644 --- a/pydis_site/apps/content/views/tags.py +++ b/pydis_site/apps/content/views/tags.py @@ -1,5 +1,4 @@ import re -import typing import frontmatter import markdown @@ -22,7 +21,7 @@ COMMAND_REGEX = re.compile(r"`*!tags? 
(?P[\w-]+)(?P [\w-]+)?`*") class TagView(TemplateView): """Handles tag pages.""" - tag: typing.Union[Tag, list[Tag]] + tag: Tag | list[Tag] is_group: bool def setup(self, *args, **kwargs) -> None: diff --git a/pydis_site/apps/events/urls.py b/pydis_site/apps/events/urls.py index 7ea65a31..6121d264 100644 --- a/pydis_site/apps/events/urls.py +++ b/pydis_site/apps/events/urls.py @@ -8,7 +8,7 @@ from pydis_site.apps.events.views import IndexView, PageView app_name = "events" -def __get_all_files(root: Path, folder: typing.Optional[Path] = None) -> list[str]: +def __get_all_files(root: Path, folder: Path | None = None) -> list[str]: """Find all folders and HTML files recursively starting from `root`.""" if not folder: folder = root diff --git a/pydis_site/apps/events/views/page.py b/pydis_site/apps/events/views/page.py index 1622ad70..adf9e952 100644 --- a/pydis_site/apps/events/views/page.py +++ b/pydis_site/apps/events/views/page.py @@ -1,4 +1,3 @@ -from typing import List from django.conf import settings from django.http import Http404 @@ -8,7 +7,7 @@ from django.views.generic import TemplateView class PageView(TemplateView): """Handles event pages showing.""" - def get_template_names(self) -> List[str]: + def get_template_names(self) -> list[str]: """Get specific template names.""" path: str = self.kwargs['path'] page_path = settings.EVENTS_PAGES_PATH / path diff --git a/pydis_site/apps/home/tests/test_repodata_helpers.py b/pydis_site/apps/home/tests/test_repodata_helpers.py index a963f733..acf4a817 100644 --- a/pydis_site/apps/home/tests/test_repodata_helpers.py +++ b/pydis_site/apps/home/tests/test_repodata_helpers.py @@ -22,7 +22,7 @@ def mocked_requests_get(*args, **kwargs) -> "MockResponse": # noqa: F821 if args[0] == HomeView.github_api: json_path = Path(__file__).resolve().parent / "mock_github_api_response.json" - with open(json_path, 'r') as json_file: + with open(json_path) as json_file: mock_data = json.load(json_file) return MockResponse(mock_data, 200) diff --git a/pydis_site/apps/home/views.py b/pydis_site/apps/home/views.py index 8a165682..bfa9e02d 100644 --- a/pydis_site/apps/home/views.py +++ b/pydis_site/apps/home/views.py @@ -1,5 +1,4 @@ import logging -from typing import Dict, List import httpx from django.core.handlers.wsgi import WSGIRequest @@ -45,7 +44,7 @@ class HomeView(View): else: self.headers = {} - def _get_api_data(self) -> Dict[str, Dict[str, str]]: + def _get_api_data(self) -> dict[str, dict[str, str]]: """ Call the GitHub API and get information about our repos. @@ -54,7 +53,7 @@ class HomeView(View): repo_dict = {} try: # Fetch the data from the GitHub API - api_data: List[dict] = httpx.get( + api_data: list[dict] = httpx.get( self.github_api, headers=self.headers, timeout=settings.TIMEOUT_PERIOD @@ -89,7 +88,7 @@ class HomeView(View): return repo_dict - def _get_repo_data(self) -> List[RepositoryMetadata]: + def _get_repo_data(self) -> list[RepositoryMetadata]: """Build a list of RepositoryMetadata objects that we can use to populate the front page.""" # First off, load the timestamp of the least recently updated entry. if settings.STATIC_BUILD: @@ -121,8 +120,7 @@ class HomeView(View): if settings.STATIC_BUILD: return data - else: - return RepositoryMetadata.objects.bulk_create(data) + return RepositoryMetadata.objects.bulk_create(data) # If the data is stale, we should refresh it. 
if (timezone.now() - last_update).seconds > self.repository_cache_ttl: @@ -149,8 +147,7 @@ class HomeView(View): return database_repositories # Otherwise, if the data is fresher than 2 minutes old, we should just return it. - else: - return RepositoryMetadata.objects.all() + return RepositoryMetadata.objects.all() def get(self, request: WSGIRequest) -> HttpResponse: """Collect repo data and render the homepage view.""" diff --git a/pydis_site/apps/redirect/urls.py b/pydis_site/apps/redirect/urls.py index 067cccc3..a221ea12 100644 --- a/pydis_site/apps/redirect/urls.py +++ b/pydis_site/apps/redirect/urls.py @@ -83,22 +83,21 @@ def map_redirect(name: str, data: Redirect) -> list[URLPattern]: return paths + redirect_path_name = "pages" if new_app_name == "content" else new_app_name + if len(data.redirect_arguments) > 0: + redirect_arg = data.redirect_arguments[0] else: - redirect_path_name = "pages" if new_app_name == "content" else new_app_name - if len(data.redirect_arguments) > 0: - redirect_arg = data.redirect_arguments[0] - else: - redirect_arg = "resources/" - new_redirect = f"/{redirect_path_name}/{redirect_arg}" + redirect_arg = "resources/" + new_redirect = f"/{redirect_path_name}/{redirect_arg}" - if new_redirect == "/resources/resources/": - new_redirect = "/resources/" + if new_redirect == "/resources/resources/": + new_redirect = "/resources/" - return [distill_path( - data.original_path, - lambda *args: HttpResponse(REDIRECT_TEMPLATE.format(url=new_redirect)), - name=name, - )] + return [distill_path( + data.original_path, + lambda *args: HttpResponse(REDIRECT_TEMPLATE.format(url=new_redirect)), + name=name, + )] urlpatterns = [] diff --git a/pydis_site/apps/redirect/views.py b/pydis_site/apps/redirect/views.py index 21180cdf..374daf2b 100644 --- a/pydis_site/apps/redirect/views.py +++ b/pydis_site/apps/redirect/views.py @@ -1,4 +1,3 @@ -import typing as t from django.views.generic import RedirectView @@ -15,7 +14,7 @@ class CustomRedirectView(RedirectView): """Overwrites original as_view to add static args.""" return super().as_view(**initkwargs) - def get_redirect_url(self, *args, **kwargs) -> t.Optional[str]: + def get_redirect_url(self, *args, **kwargs) -> str | None: """Extends default behaviour to use static args.""" args = self.static_args + args + tuple(kwargs.values()) if self.prefix_redirect: diff --git a/pydis_site/apps/resources/views.py b/pydis_site/apps/resources/views.py index 2375f722..a2cd8d0c 100644 --- a/pydis_site/apps/resources/views.py +++ b/pydis_site/apps/resources/views.py @@ -1,5 +1,4 @@ import json -import typing as t from pathlib import Path import yaml @@ -22,7 +21,7 @@ class ResourceView(View): """Sort a tuple by its key alphabetically, disregarding 'the' as a prefix.""" name, resource = tuple_ name = name.casefold() - if name.startswith("the ") or name.startswith("the_"): + if name.startswith(("the ", "the_")): return name[4:] return name @@ -48,7 +47,7 @@ class ResourceView(View): } for resource_name, resource in self.resources.items(): css_classes = [] - for tag_type in resource_tags.keys(): + for tag_type in resource_tags: # Store the tags into `resource_tags` tags = resource.get("tags", {}).get(tag_type, []) for tag in tags: @@ -102,7 +101,7 @@ class ResourceView(View): "difficulty": [to_kebabcase(tier) for tier in self.filters["Difficulty"]["filters"]], } - def get(self, request: WSGIRequest, resource_type: t.Optional[str] = None) -> HttpResponse: + def get(self, request: WSGIRequest, resource_type: str | None = None) -> HttpResponse: """List 
out all the resources, and any filtering options from the URL.""" # Add type filtering if the request is made to somewhere like /resources/video. # We also convert all spaces to dashes, so they'll correspond with the filters. diff --git a/pydis_site/apps/staff/templatetags/deletedmessage_filters.py b/pydis_site/apps/staff/templatetags/deletedmessage_filters.py index 9d8f1819..c6638a3b 100644 --- a/pydis_site/apps/staff/templatetags/deletedmessage_filters.py +++ b/pydis_site/apps/staff/templatetags/deletedmessage_filters.py @@ -1,5 +1,4 @@ from datetime import datetime -from typing import Union from django import template @@ -7,7 +6,7 @@ register = template.Library() @register.filter -def hex_colour(colour: Union[str, int]) -> str: +def hex_colour(colour: str | int) -> str: """ Converts the given representation of a colour to its RGB hex string. diff --git a/pydis_site/apps/staff/tests/test_deletedmessage_filters.py b/pydis_site/apps/staff/tests/test_deletedmessage_filters.py index 31215784..5e49f103 100644 --- a/pydis_site/apps/staff/tests/test_deletedmessage_filters.py +++ b/pydis_site/apps/staff/tests/test_deletedmessage_filters.py @@ -3,7 +3,7 @@ import enum from django.test import TestCase from django.utils import timezone -from ..templatetags import deletedmessage_filters +from pydis_site.apps.staff.templatetags import deletedmessage_filters class Colour(enum.IntEnum): diff --git a/pyproject.toml b/pyproject.toml index 40461f0b..9019efb0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,18 +29,9 @@ whitenoise = "6.4.0" [tool.poetry.group.dev.dependencies] python-dotenv = "1.0.0" taskipy = "1.10.4" +ruff = "^0.0.265" [tool.poetry.group.lint.dependencies] -flake8 = "6.0.0" -flake8-annotations = "3.0.1" -flake8-bandit = "4.1.1" -flake8-bugbear = "23.5.9" -flake8-docstrings = "1.7.0" -flake8-import-order = "0.18.2" -flake8-tidy-imports = "4.8.0" -flake8-string-format = "0.3.0" -flake8-todo = "0.7" -pep8-naming = "0.13.3" pre-commit = "3.3.1" [tool.poetry.group.test.dependencies] @@ -50,6 +41,34 @@ coverage = "7.2.5" requires = ["poetry-core>=1.2.0"] build-backend = "poetry.core.masonry.api" +[tool.ruff] +target-version = "py310" +extend-exclude = [".cache"] +ignore = [ + "ANN002", "ANN003", "ANN101", "ANN102", "ANN204", "ANN206", "ANN401", + "B904", + "C401", "C408", + "D100", "D104", "D105", "D107", "D203", "D212", "D214", "D215", "D301", + "D400", "D401", "D402", "D404", "D405", "D406", "D407", "D408", "D409", "D410", "D411", "D412", "D413", "D414", "D416", "D417", + "E731", + "RET504", + "RUF005", + "S311", + "SIM102", "SIM108", +] +line-length = 120 +select = ["ANN", "B", "C4", "D", "DTZ", "E", "F", "ISC", "INT", "N", "PGH", "PIE", "RET", "RSE", "RUF", "S", "SIM", "T20", "TID", "UP", "W"] + +[tool.ruff.per-file-ignores] +"pydis_site/apps/**/migrations/*.py" = ["ALL"] +"manage.py" = ["T201"] +"pydis_site/apps/api/tests/base.py" = ["S106"] +"pydis_site/apps/**/tests/test_*.py" = ["ANN", "D"] +"static-builds/netlify_build.py" = ["T201"] +"pydis_site/apps/api/tests/test_off_topic_channel_names.py" = ["RUF001"] +"gunicorn.conf.py" = ["ANN", "D"] +"pydis_site/apps/api/models/bot/off_topic_channel_name.py" = ["RUF001"] + [tool.taskipy.tasks] start = "python manage.py run --debug" makemigrations = "python manage.py makemigrations" diff --git a/static-builds/netlify_build.py b/static-builds/netlify_build.py index 36520c28..2d311a11 100644 --- a/static-builds/netlify_build.py +++ b/static-builds/netlify_build.py @@ -14,15 +14,15 @@ from pathlib import Path from urllib import parse import 
httpx
+import contextlib


 def raise_response(response: httpx.Response) -> None:
     """Raise an exception from a response if necessary."""
     if response.status_code // 100 != 2:
-        try:
+        with contextlib.suppress(json.JSONDecodeError):
             print(response.json())
-        except json.JSONDecodeError:
-            pass
+
         response.raise_for_status()
-- cgit v1.2.3

From 45855fe2e3906614490f3577cb50e530423c35b7 Mon Sep 17 00:00:00 2001
From: jchristgit
Date: Wed, 2 Aug 2023 11:06:02 +0200
Subject: Capitalize Python outside of tags (#1046)

Related to #1044.

Co-authored-by: Xithrius <15021300+Xithrius@users.noreply.github.com>
---
 .../pydis-guides/contributing/bot-extended-configuration-options.md | 2 +-
 .../content/resources/guides/python-guides/docker-hosting-guide.md  | 2 +-
 pydis_site/templates/events/pages/code-jams/8/_index.html           | 4 ++--
 pydis_site/templates/events/pages/code-jams/8/frameworks.html       | 2 +-
 pydis_site/templates/events/pages/code-jams/9/_index.html           | 2 +-
 static-builds/README.md                                             | 2 +-
 static-builds/netlify_build.py                                      | 2 +-
 7 files changed, 8 insertions(+), 8 deletions(-)
(limited to 'static-builds/netlify_build.py')

diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/bot-extended-configuration-options.md b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/bot-extended-configuration-options.md
index a35d79a9..f5425d88 100644
--- a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/bot-extended-configuration-options.md
+++ b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/bot-extended-configuration-options.md
@@ -73,7 +73,7 @@ If you wish to set all values in your `env.server` for your testing server, you

 **Note**: This is only required when you're not configuring the bot [automatically](#automatic-configuration)

-If you will be working on a feature that includes the python help forum, you will need to use `Forum Channels`.
+If you will be working on a feature that includes the Python help forum, you will need to use `Forum Channels`.
 Forum channels cannot be included in a template, which is why this needs to be done by hand for the time being.
diff --git a/pydis_site/apps/content/resources/guides/python-guides/docker-hosting-guide.md b/pydis_site/apps/content/resources/guides/python-guides/docker-hosting-guide.md
index 57d86e99..7af6a0bb 100644
--- a/pydis_site/apps/content/resources/guides/python-guides/docker-hosting-guide.md
+++ b/pydis_site/apps/content/resources/guides/python-guides/docker-hosting-guide.md
@@ -26,7 +26,7 @@ description: This guide shows how to host a bot with Docker and GitHub Actions o

 ## Introduction

-Let's say you have got a nice discord bot written in python and you have a VPS to host it on. Now the only question is
+Let's say you have got a nice discord bot written in Python and you have a VPS to host it on. Now the only question is
 how to run it 24/7. You might have been suggested to use *screen multiplexer*, but it has some disadvantages:

 1. Every time you update the bot you have to SSH to your server, attach to screen, shutdown the bot, run `git pull` and
diff --git a/pydis_site/templates/events/pages/code-jams/8/_index.html b/pydis_site/templates/events/pages/code-jams/8/_index.html
index 628a2c22..74e0ad4a 100644
--- a/pydis_site/templates/events/pages/code-jams/8/_index.html
+++ b/pydis_site/templates/events/pages/code-jams/8/_index.html
@@ -26,7 +26,7 @@

Perceptive Porcupines: WTPython!?

VV, Poppinawhile, ethansocal, Jeff Z, Cohan, ¯\_(ツ)_/¯

- What the Python (wtpython) is a simple terminal user interface that allows you to explore relevant answers on Stackoverflow without leaving your terminal or IDE. When you get an error, all you have to do is swap python for wtpython. When your code hits an error, you'll see a textual interface for exploring relevant answers allowing you to stay focused and ship faster!
+ What the Python (wtpython) is a simple terminal user interface that allows you to explore relevant answers on Stackoverflow without leaving your terminal or IDE. When you get an error, all you have to do is swap Python for wtpython. When your code hits an error, you'll see a textual interface for exploring relevant answers allowing you to stay focused and ship faster!

Demo video
@@ -89,7 +89,7 @@

The Qualifier

The qualifier is a coding challenge that you are required to complete before registering for the code jam.
- This is meant as a basic assessment of your skills to ensure you have enough python knowledge to effectively contribute in a team environment.
+ This is meant as a basic assessment of your skills to ensure you have enough Python knowledge to effectively contribute in a team environment.

View the Qualifier

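A recurring change in the ruff-migration patch earlier in this series swaps naive `datetime.datetime.utcnow()` calls for timezone-aware `datetime.datetime.now(tz=datetime.timezone.utc)`, which is what ruff's DTZ rules enforce. A minimal sketch of why the naive form is a problem — the variable names are illustrative, not taken from the patch:

```python
import datetime

# Naive: utcnow() attaches no tzinfo, so the value is ambiguous and cannot
# be compared against timezone-aware datetimes.
naive = datetime.datetime.utcnow()
print(naive.tzinfo)  # None

# Aware: the replacement used throughout the patch.
aware = datetime.datetime.now(tz=datetime.timezone.utc)
print(aware.tzinfo)       # UTC
print(aware.isoformat())  # e.g. 2023-08-02T09:06:02.123456+00:00

# aware > naive  # TypeError: can't compare offset-naive and offset-aware datetimes
```

Aware values serialize with an explicit offset, which is why query parameters such as `expires_after` in the infraction tests compare unambiguously after the change.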
diff --git a/pydis_site/templates/events/pages/code-jams/8/frameworks.html b/pydis_site/templates/events/pages/code-jams/8/frameworks.html
index 1c02e38a..d8fbe96f 100644
--- a/pydis_site/templates/events/pages/code-jams/8/frameworks.html
+++ b/pydis_site/templates/events/pages/code-jams/8/frameworks.html
@@ -103,7 +103,7 @@

  • Documentation Link
  • Supports: Linux, Mac, and Windows
- • Pure python library
+ • Pure Python library
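Several hunks in the ruff-migration patch collapse nested `with` blocks — `subTest` plus `transaction.atomic()` in the infraction tests, `subTest` plus `mock.patch` in the user metricity tests — into a single statement, following ruff's SIM117 rule. A runnable sketch under assumed names (`CombinedContextTests` is not part of the site's test suite):

```python
import os
import unittest
from unittest import mock


class CombinedContextTests(unittest.TestCase):
    """Hypothetical test illustrating the single-with style applied in the patch."""

    def test_combined_managers(self) -> None:
        for case in ("a", "b"):
            # One with-statement with comma-separated managers (SIM117)
            # instead of two nested blocks and an extra indentation level.
            with self.subTest(case=case), mock.patch.dict(os.environ, {"CASE": case}):
                self.assertEqual(os.environ["CASE"], case)


if __name__ == "__main__":
    unittest.main()
```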
diff --git a/pydis_site/templates/events/pages/code-jams/9/_index.html b/pydis_site/templates/events/pages/code-jams/9/_index.html
index 3e60c387..5c094b22 100644
--- a/pydis_site/templates/events/pages/code-jams/9/_index.html
+++ b/pydis_site/templates/events/pages/code-jams/9/_index.html
@@ -75,7 +75,7 @@

The Qualifier

The qualifier is a coding challenge that you are required to complete before registering for the code jam.
- This is meant as a basic assessment of your skills to ensure you have enough python knowledge to effectively contribute in a team environment.
+ This is meant as a basic assessment of your skills to ensure you have enough Python knowledge to effectively contribute in a team environment.

View the Qualifier

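The scattered `# noqa: RET505` comments in the ruff-migration patch mark places where ruff's RET505 rule would otherwise flag an `elif` or `else` that follows a `return`. Both functions below behave identically; `render` and `render_flat` are hypothetical names, with the branch logic modeled on the `RulesView` hunk:

```python
def render(target: str, description: str, link: str) -> str:
    """Branch chain as RET505 flags it: the elif/else after a return is redundant."""
    if target == "html":
        return description
    elif target == "md":  # noqa: RET505
        return f"[{description}]({link})"
    else:
        raise ValueError(f"Unknown target: {target!r}")


def render_flat(target: str, description: str, link: str) -> str:
    """Flat form RET505 prefers: each early return removes a nesting level."""
    if target == "html":
        return description
    if target == "md":
        return f"[{description}]({link})"
    raise ValueError(f"Unknown target: {target!r}")
```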
diff --git a/static-builds/README.md b/static-builds/README.md
index a3c7962b..afd34bb0 100644
--- a/static-builds/README.md
+++ b/static-builds/README.md
@@ -42,7 +42,7 @@ Publish Directory:

 | Name           | Value                          | Description                                                                                |
 |----------------|--------------------------------|-------------------------------------------------------------------------------------------|
-| PYTHON_VERSION | 3.8                            | The python version. Supported options are defined by netlify [here][netlify build image]. |
+| PYTHON_VERSION | 3.8                            | The Python version. Supported options are defined by netlify [here][netlify build image]. |
 | API_URL        | https://pythondiscord.com/     | The link to the API, which will be used to fetch the build artifacts.                      |
 | ACTION_NAME    | Build & Publish Static Preview | The name of the workflow which will be used to find the artifact.                         |
 | ARTIFACT_NAME  | static-build                   | The name of the artifact to download.                                                     |
diff --git a/static-builds/netlify_build.py b/static-builds/netlify_build.py
index 2d311a11..4d4a613d 100644
--- a/static-builds/netlify_build.py
+++ b/static-builds/netlify_build.py
@@ -1,6 +1,6 @@
 """Build script to deploy project on netlify."""

-# WARNING: This file must remain compatible with python 3.8
+# WARNING: This file must remain compatible with Python 3.8

 # This script performs all the actions required to build and deploy our project on netlify
 # It depends on the following packages, which are set in the netlify UI:
-- cgit v1.2.3
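For reference, the `raise_response` helper reads as follows once the `contextlib.suppress` hunk from the ruff patch is applied; only the imports are filled in here to make the snippet self-contained:

```python
import contextlib
import json

import httpx


def raise_response(response: httpx.Response) -> None:
    """Raise an exception from a response if necessary."""
    if response.status_code // 100 != 2:
        # suppress() replaces a try/except whose only handler body was `pass`:
        # a body that is not valid JSON is simply not printed.
        with contextlib.suppress(json.JSONDecodeError):
            print(response.json())
        response.raise_for_status()
```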