From d50028d6b92909a39139007f0f3bcd7c90a88420 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Sat, 13 Aug 2022 06:08:22 +0200 Subject: Add Tags To Content Listings Adds bot tags to the content page, as well as a model to go along with it. Signed-off-by: Hassan Abouelela --- pydis_site/apps/content/utils.py | 125 +++++++++++++++++++++++++++++++++++++-- 1 file changed, 120 insertions(+), 5 deletions(-) (limited to 'pydis_site/apps/content/utils.py') diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py index d3f270ff..a4252284 100644 --- a/pydis_site/apps/content/utils.py +++ b/pydis_site/apps/content/utils.py @@ -1,14 +1,26 @@ +import datetime +import functools +import tarfile +import tempfile +from io import BytesIO from pathlib import Path -from typing import Dict, Tuple import frontmatter +import httpx import markdown import yaml from django.http import Http404 +from django.utils import timezone from markdown.extensions.toc import TocExtension +from pydis_site import settings +from .models.tag import Tag -def get_category(path: Path) -> Dict[str, str]: +TAG_URL_BASE = "https://github.com/python-discord/bot/tree/main/bot/resources/tags" +TAG_CACHE_TTL = datetime.timedelta(hours=1) + + +def get_category(path: Path) -> dict[str, str]: """Load category information by name from _info.yml.""" if not path.is_dir(): raise Http404("Category not found.") @@ -16,7 +28,7 @@ def get_category(path: Path) -> Dict[str, str]: return yaml.safe_load(path.joinpath("_info.yml").read_text(encoding="utf-8")) -def get_categories(path: Path) -> Dict[str, Dict]: +def get_categories(path: Path) -> dict[str, dict]: """Get information for all categories.""" categories = {} @@ -27,8 +39,111 @@ def get_categories(path: Path) -> Dict[str, Dict]: return categories -def get_category_pages(path: Path) -> Dict[str, Dict]: +@functools.cache +def get_tags_static() -> list[Tag]: + """ + Fetch tag information in static builds. + + This will return a cached value, so it should only be used for static builds. + """ + return fetch_tags() + + +def fetch_tags() -> list[Tag]: + """ + Fetch tag data from the GitHub API. + + The entire repository is downloaded and extracted locally because + getting file content would require one request per file, and can get rate-limited. 
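As a rough, self-contained sketch of the trade-off described above (the tarball endpoint and repository name are taken from the code itself; the rest is illustrative):

    import io
    import tarfile

    import httpx

    # One request fetches every tag file. Fetching each file through the
    # contents API instead would cost one request per tag, which adds up
    # quickly against GitHub's rate limits.
    response = httpx.get(
        "https://api.github.com/repos/python-discord/bot/tarball",
        follow_redirects=True,  # GitHub redirects tarball downloads to codeload
    )
    response.raise_for_status()

    with tarfile.open(fileobj=io.BytesIO(response.content)) as archive:
        tag_files = [
            member for member in archive.getmembers()
            if "/bot/resources/tags" in member.name
        ]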
+ """ + if settings.GITHUB_TOKEN: + headers = {"Authorization": f"token {settings.GITHUB_TOKEN}"} + else: + headers = {} + + tar_file = httpx.get( + f"{settings.GITHUB_API}/repos/python-discord/bot/tarball", + follow_redirects=True, + timeout=settings.TIMEOUT_PERIOD, + headers=headers, + ) + tar_file.raise_for_status() + + tags = [] + with tempfile.TemporaryDirectory() as folder: + with tarfile.open(fileobj=BytesIO(tar_file.content)) as repo: + included = [] + for file in repo.getmembers(): + if "/bot/resources/tags" in file.path: + included.append(file) + repo.extractall(folder, included) + + for tag_file in Path(folder).rglob("*.md"): + tags.append(Tag( + name=tag_file.name.removesuffix(".md"), + body=tag_file.read_text(encoding="utf-8"), + url=f"{TAG_URL_BASE}/{tag_file.name}" + )) + + return tags + + +def get_tags() -> list[Tag]: + """Return a list of all tags visible to the application, from the cache or API.""" + if settings.STATIC_BUILD: + last_update = None + else: + last_update = ( + Tag.objects.values_list("last_updated", flat=True) + .order_by("last_updated").first() + ) + + if last_update is None or timezone.now() >= (last_update + TAG_CACHE_TTL): + # Stale or empty cache + if settings.STATIC_BUILD: + tags = get_tags_static() + else: + tags = fetch_tags() + Tag.objects.exclude(name__in=[tag.name for tag in tags]).delete() + for tag in tags: + tag.save() + + return tags + else: + # Get tags from database + return Tag.objects.all() + + +def get_tag(name: str) -> Tag: + """Return a tag by name.""" + tags = get_tags() + for tag in tags: + if tag.name == name: + return tag + + raise Tag.DoesNotExist() + + +def get_category_pages(path: Path) -> dict[str, dict]: """Get all page names and their metadata at a category path.""" + # Special handling for tags + if path == Path(__file__).parent / "resources/tags": + tags = {} + for tag in get_tags(): + content = frontmatter.parse(tag.body)[1] + if len(content) > 100: + # Trim the preview to a maximum of 100 visible characters + # This causes some markdown to break, but we ignore that + content = content[:100] + "..." 
+ + tags[tag.name] = { + "title": tag.name, + "description": markdown.markdown(content), + "icon": "fas fa-tag" + } + + return {name: tags[name] for name in sorted(tags)} + pages = {} for item in path.glob("*.md"): @@ -39,7 +154,7 @@ def get_category_pages(path: Path) -> Dict[str, Dict]: return pages -def get_page(path: Path) -> Tuple[str, Dict]: +def get_page(path: Path) -> tuple[str, dict]: """Get one specific page.""" if not path.is_file(): raise Http404("Page not found.") -- cgit v1.2.3 From 733d5c084b1cba91a76f495b37ef0a391e5f9900 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Sat, 13 Aug 2022 06:43:05 +0200 Subject: Export Tag Model As Top Level Model Object Signed-off-by: Hassan Abouelela --- pydis_site/apps/content/models/__init__.py | 3 +++ pydis_site/apps/content/utils.py | 2 +- pydis_site/apps/content/views/tags.py | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) (limited to 'pydis_site/apps/content/utils.py') diff --git a/pydis_site/apps/content/models/__init__.py b/pydis_site/apps/content/models/__init__.py index e69de29b..2718ce94 100644 --- a/pydis_site/apps/content/models/__init__.py +++ b/pydis_site/apps/content/models/__init__.py @@ -0,0 +1,3 @@ +from .tag import Tag + +__all__ = ["Tag"] diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py index a4252284..de609596 100644 --- a/pydis_site/apps/content/utils.py +++ b/pydis_site/apps/content/utils.py @@ -14,7 +14,7 @@ from django.utils import timezone from markdown.extensions.toc import TocExtension from pydis_site import settings -from .models.tag import Tag +from .models import Tag TAG_URL_BASE = "https://github.com/python-discord/bot/tree/main/bot/resources/tags" TAG_CACHE_TTL = datetime.timedelta(hours=1) diff --git a/pydis_site/apps/content/views/tags.py b/pydis_site/apps/content/views/tags.py index 12e311dc..e2cfb488 100644 --- a/pydis_site/apps/content/views/tags.py +++ b/pydis_site/apps/content/views/tags.py @@ -8,7 +8,7 @@ from django.urls import reverse from django.views.generic import TemplateView from pydis_site.apps.content import utils -from pydis_site.apps.content.models.tag import Tag +from pydis_site.apps.content.models import Tag COMMAND_REGEX = re.compile(r"`*!tags? 
(?P[\w\d-]+)`*") -- cgit v1.2.3 From a8fd8b823748ced96bb8ea34e8dcd8bd0dd57671 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Sat, 13 Aug 2022 08:47:29 +0200 Subject: Add Tag View Tests Signed-off-by: Hassan Abouelela --- pydis_site/apps/content/tests/test_utils.py | 103 +++++++++++++++++++++- pydis_site/apps/content/tests/test_views.py | 128 ++++++++++++++++++++++++++++ pydis_site/apps/content/utils.py | 6 +- 3 files changed, 233 insertions(+), 4 deletions(-) (limited to 'pydis_site/apps/content/utils.py') diff --git a/pydis_site/apps/content/tests/test_utils.py b/pydis_site/apps/content/tests/test_utils.py index be5ea897..89ef81c4 100644 --- a/pydis_site/apps/content/tests/test_utils.py +++ b/pydis_site/apps/content/tests/test_utils.py @@ -1,8 +1,16 @@ +import tarfile +import tempfile +import textwrap from pathlib import Path +from unittest import mock +import httpx +import markdown from django.http import Http404 +from django.test import TestCase -from pydis_site.apps.content import utils +from pydis_site import settings +from pydis_site.apps.content import models, utils from pydis_site.apps.content.tests.helpers import ( BASE_PATH, MockPagesTestCase, PARSED_CATEGORY_INFO, PARSED_HTML, PARSED_METADATA ) @@ -96,3 +104,96 @@ class GetPageTests(MockPagesTestCase): def test_get_nonexistent_page_returns_404(self): with self.assertRaises(Http404): utils.get_page(Path(BASE_PATH, "invalid")) + + +class TagUtilsTests(TestCase): + """Tests for the tag-related utilities.""" + + @mock.patch.object(utils, "fetch_tags") + def test_static_fetch(self, fetch_mock: mock.Mock): + """Test that the static fetch function is only called at most once during static builds.""" + tags = [models.Tag(name="Name", body="body", url="url")] + fetch_mock.return_value = tags + result = utils.get_tags_static() + second_result = utils.get_tags_static() + + fetch_mock.assert_called_once() + self.assertEqual(tags, result) + self.assertEqual(tags, second_result) + + @mock.patch("httpx.get") + def test_mocked_fetch(self, get_mock: mock.Mock): + """Test that proper data is returned from fetch, but with a mocked API response.""" + bodies = ( + "This is the first tag!", + textwrap.dedent(""" + --- + frontmatter: empty + --- + This tag has frontmatter! 
+ """), + ) + + # Generate a tar archive with a few tags + with tempfile.TemporaryDirectory() as tar_folder: + tar_folder = Path(tar_folder) + with tempfile.TemporaryDirectory() as folder: + folder = Path(folder) + (folder / "ignored_file.md").write_text("This is an ignored file.") + tags_folder = folder / "bot/resources/tags" + tags_folder.mkdir(parents=True) + + (tags_folder / "first_tag.md").write_text(bodies[0]) + (tags_folder / "second_tag.md").write_text(bodies[1]) + + with tarfile.open(tar_folder / "temp.tar", "w") as file: + file.add(folder, recursive=True) + + body = (tar_folder / "temp.tar").read_bytes() + + get_mock.return_value = httpx.Response( + status_code=200, + content=body, + request=httpx.Request("GET", "https://google.com"), + ) + + result = utils.fetch_tags() + self.assertEqual([ + models.Tag(name="first_tag", body=bodies[0], url=f"{utils.TAG_URL_BASE}/first_tag.md"), + models.Tag(name="second_tag", body=bodies[1], url=f"{utils.TAG_URL_BASE}/first_tag.md"), + ], sorted(result, key=lambda tag: tag.name)) + + def test_get_real_tag(self): + """Test that a single tag is returned if it exists.""" + tag = models.Tag.objects.create(name="real-tag") + result = utils.get_tag("real-tag") + + self.assertEqual(tag, result) + + def test_get_tag_404(self): + """Test that an error is raised when we fetch a non-existing tag.""" + models.Tag.objects.create(name="real-tag") + with self.assertRaises(models.Tag.DoesNotExist): + utils.get_tag("fake") + + def test_category_pages(self): + """Test that the category pages function returns the correct records for tags.""" + models.Tag.objects.create(name="second-tag", body="Normal body") + models.Tag.objects.create(name="first-tag", body="Normal body") + tag_body = {"description": markdown.markdown("Normal body"), "icon": "fas fa-tag"} + + result = utils.get_category_pages(settings.CONTENT_PAGES_PATH / "tags") + self.assertDictEqual({ + "first-tag": {**tag_body, "title": "first-tag"}, + "second-tag": {**tag_body, "title": "second-tag"}, + }, result) + + def test_trimmed_tag_content(self): + """Test a tag with a long body that requires trimming.""" + tag = models.Tag.objects.create(name="long-tag", body="E" * 300) + result = utils.get_category_pages(settings.CONTENT_PAGES_PATH / "tags") + self.assertDictEqual({"long-tag": { + "title": "long-tag", + "description": markdown.markdown(tag.body[:100] + "..."), + "icon": "fas fa-tag", + }}, result) diff --git a/pydis_site/apps/content/tests/test_views.py b/pydis_site/apps/content/tests/test_views.py index eadad7e3..a5867260 100644 --- a/pydis_site/apps/content/tests/test_views.py +++ b/pydis_site/apps/content/tests/test_views.py @@ -1,9 +1,14 @@ +import textwrap from pathlib import Path from unittest import TestCase +import django.test +import markdown from django.http import Http404 from django.test import RequestFactory, SimpleTestCase, override_settings +from django.urls import reverse +from pydis_site.apps.content.models import Tag from pydis_site.apps.content.tests.helpers import ( BASE_PATH, MockPagesTestCase, PARSED_CATEGORY_INFO, PARSED_HTML, PARSED_METADATA ) @@ -180,3 +185,126 @@ class PageOrCategoryViewTests(MockPagesTestCase, SimpleTestCase, TestCase): {"name": PARSED_CATEGORY_INFO["title"], "path": Path("category/subcategory")}, ] ) + + +class TagViewTests(django.test.TestCase): + """Tests for the TagView class.""" + + def setUp(self): + """Set test helpers, then set up fake filesystem.""" + super().setUp() + + def test_valid_tag_returns_200(self): + """Test that a page is returned for a 
valid tag.""" + Tag.objects.create(name="example", body="This is the tag body.", url="URL") + response = self.client.get("/pages/tags/example/") + self.assertEqual(200, response.status_code) + self.assertIn("This is the tag body", response.content.decode("utf-8")) + self.assertTemplateUsed(response, "content/tag.html") + + def test_invalid_tag_404(self): + """Test that a tag which doesn't exist raises a 404.""" + response = self.client.get("/pages/tags/non-existent/") + self.assertEqual(404, response.status_code) + + def test_context(self): + """Check that the context contains all the necessary data.""" + body = textwrap.dedent(""" + --- + unused: frontmatter + ---- + Tag content here. + """) + + tag = Tag.objects.create(name="example", body=body, url="URL") + response = self.client.get("/pages/tags/example/") + expected = { + "page_title": "example", + "page": markdown.markdown("Tag content here."), + "tag": tag, + } + for key in expected: + self.assertEqual( + expected[key], response.context.get(key), f"context.{key} did not match" + ) + + def test_markdown(self): + """Test that markdown content is rendered properly.""" + body = textwrap.dedent(""" + ```py + Hello world! + ``` + + **This text is in bold** + """) + + Tag.objects.create(name="example", body=body, url="URL") + response = self.client.get("/pages/tags/example/") + content = response.content.decode("utf-8") + + self.assertInHTML('Hello world!', content) + self.assertInHTML("This text is in bold", content) + + def test_embed(self): + """Test that an embed from the frontmatter is treated correctly.""" + body = textwrap.dedent(""" + --- + embed: + title: Embed title + image: + url: https://google.com + --- + Tag body. + """) + + Tag.objects.create(name="example", body=body, url="URL") + response = self.client.get("/pages/tags/example/") + content = response.content.decode("utf-8") + + self.assertInHTML('Embed title', content) + self.assertInHTML("
<p>Tag body.</p>
", content) + + def test_embed_title(self): + """Test that the page title gets set to the embed title.""" + body = textwrap.dedent(""" + --- + embed: + title: Embed title + --- + """) + + Tag.objects.create(name="example", body=body, url="URL") + response = self.client.get("/pages/tags/example/") + self.assertEqual( + "Embed title", + response.context.get("page_title"), + "The page title must match the embed title." + ) + + def test_hyperlinked_item(self): + """Test hyperlinking of tags works as intended.""" + filler_before, filler_after = "empty filler text\n\n", "more\nfiller" + body = filler_before + "`!tags return`" + filler_after + Tag.objects.create(name="example", body=body, url="URL") + + other_url = reverse("content:tag", kwargs={"name": "return"}) + response = self.client.get("/pages/tags/example/") + self.assertEqual( + markdown.markdown(filler_before + f"[`!tags return`]({other_url})" + filler_after), + response.context.get("page") + ) + + def test_tag_root_page(self): + """Test the root tag page which lists all tags.""" + Tag.objects.create(name="tag-1") + Tag.objects.create(name="tag-2") + Tag.objects.create(name="tag-3") + + response = self.client.get("/pages/tags/") + content = response.content.decode("utf-8") + + self.assertTemplateUsed(response, "content/listing.html") + self.assertInHTML('
<h1 class="title">Tags</h1>
', content) + + for tag_number in range(1, 4): + self.assertIn(f"tag-{tag_number}", content) diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py index de609596..76437593 100644 --- a/pydis_site/apps/content/utils.py +++ b/pydis_site/apps/content/utils.py @@ -56,7 +56,7 @@ def fetch_tags() -> list[Tag]: The entire repository is downloaded and extracted locally because getting file content would require one request per file, and can get rate-limited. """ - if settings.GITHUB_TOKEN: + if settings.GITHUB_TOKEN: # pragma: no cover headers = {"Authorization": f"token {settings.GITHUB_TOKEN}"} else: headers = {} @@ -90,7 +90,7 @@ def fetch_tags() -> list[Tag]: def get_tags() -> list[Tag]: """Return a list of all tags visible to the application, from the cache or API.""" - if settings.STATIC_BUILD: + if settings.STATIC_BUILD: # pragma: no cover last_update = None else: last_update = ( @@ -100,7 +100,7 @@ def get_tags() -> list[Tag]: if last_update is None or timezone.now() >= (last_update + TAG_CACHE_TTL): # Stale or empty cache - if settings.STATIC_BUILD: + if settings.STATIC_BUILD: # pragma: no cover tags = get_tags_static() else: tags = fetch_tags() -- cgit v1.2.3 From f2ad3eed8ef8872713666f69ec783f59006d3d81 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Sat, 13 Aug 2022 22:53:50 +0200 Subject: Improve Tag Cropping Move the tag cropping logic to the frontend, which makes it easier to crop without crossing boundaries such as link or code block boundaries. Signed-off-by: Hassan Abouelela --- pydis_site/apps/content/utils.py | 6 +----- pydis_site/static/js/content/listing.js | 36 +++++++++++++++++++++++++++++++ pydis_site/templates/content/listing.html | 4 +++- 3 files changed, 40 insertions(+), 6 deletions(-) create mode 100644 pydis_site/static/js/content/listing.js (limited to 'pydis_site/apps/content/utils.py') diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py index 76437593..cc08f81f 100644 --- a/pydis_site/apps/content/utils.py +++ b/pydis_site/apps/content/utils.py @@ -131,14 +131,10 @@ def get_category_pages(path: Path) -> dict[str, dict]: tags = {} for tag in get_tags(): content = frontmatter.parse(tag.body)[1] - if len(content) > 100: - # Trim the preview to a maximum of 100 visible characters - # This causes some markdown to break, but we ignore that - content = content[:100] + "..." tags[tag.name] = { "title": tag.name, - "description": markdown.markdown(content), + "description": markdown.markdown(content, extensions=["pymdownx.superfences"]), "icon": "fas fa-tag" } diff --git a/pydis_site/static/js/content/listing.js b/pydis_site/static/js/content/listing.js new file mode 100644 index 00000000..3502cb2a --- /dev/null +++ b/pydis_site/static/js/content/listing.js @@ -0,0 +1,36 @@ +/** + * Trim a tag listing to only show a few lines of content. 
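 *
 * Children are removed from the end of the container until the visible text
 * fits the preview budget (300 characters in the code below), so links and
 * code blocks are dropped whole instead of being cut mid-element.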
+ */ +function trimTag() { + const containers = document.getElementsByClassName("tag-container"); + for (const container of containers) { + // Remove every element after the first two paragraphs + while (container.children.length > 2) { + container.removeChild(container.lastChild); + } + + // Trim down the elements if they are too long + const containerLength = container.textContent.length; + if (containerLength > 300) { + if (containerLength - container.firstChild.textContent.length > 300) { + // The first element alone takes up more than 300 characters + container.removeChild(container.lastChild); + } + + let last = container.lastChild.lastChild; + while (container.textContent.length > 300 && container.lastChild.childNodes.length > 0) { + last = container.lastChild.lastChild; + last.remove(); + } + + if (container.textContent.length > 300 && (last instanceof HTMLElement && last.tagName !== "CODE")) { + // Add back the final element (up to a period if possible) + const stop = last.textContent.indexOf("."); + last.textContent = last.textContent.slice(0, stop > 0 ? stop + 1: null); + container.lastChild.appendChild(last); + } + } + } +} + +trimTag(); diff --git a/pydis_site/templates/content/listing.html b/pydis_site/templates/content/listing.html index eeb6b5e2..098f4237 100644 --- a/pydis_site/templates/content/listing.html +++ b/pydis_site/templates/content/listing.html @@ -1,5 +1,6 @@ {# Base navigation screen for resources #} {% extends 'content/base.html' %} +{% load static %} {% block page_content %} {# Nested Categories #} @@ -26,10 +27,11 @@ {{ data.title }} {% if "tags" in location %} -
<p>{{ data.description | safe }}</p>
+                        <div class="tag-container">{{ data.description | safe }}</div>
                     {% else %}
                         <p>{{ data.description }}</p>
{% endif %} {% endfor %} + {% endblock %} -- cgit v1.2.3 From 45cdb27a82297ede18d7bd908213dde54fef06a9 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Sun, 14 Aug 2022 05:34:27 +0200 Subject: Add Tag Group Support Adds support for tag groups in content. This involves some modification to the routing, and templating. Signed-off-by: Hassan Abouelela --- pydis_site/apps/content/urls.py | 12 ++- pydis_site/apps/content/utils.py | 101 ++++++++++++++++++++----- pydis_site/apps/content/views/page_category.py | 5 +- pydis_site/apps/content/views/tags.py | 79 +++++++++++++++---- pydis_site/static/css/content/color.css | 7 ++ pydis_site/static/css/content/tag.css | 8 -- pydis_site/static/js/content/listing.js | 5 ++ pydis_site/templates/content/listing.html | 17 ++++- pydis_site/templates/content/tag.html | 5 +- 9 files changed, 187 insertions(+), 52 deletions(-) create mode 100644 pydis_site/static/css/content/color.css (limited to 'pydis_site/apps/content/utils.py') diff --git a/pydis_site/apps/content/urls.py b/pydis_site/apps/content/urls.py index b4ffc07d..03c0015a 100644 --- a/pydis_site/apps/content/urls.py +++ b/pydis_site/apps/content/urls.py @@ -39,15 +39,21 @@ def get_all_pages() -> DISTILL_RETURN: def get_all_tags() -> DISTILL_RETURN: - """Return all tag names in the repository in static builds.""" + """Return all tag names and groups in static builds.""" + groups = {None} for tag in utils.get_tags_static(): - yield {"name": tag.name} + groups.add(tag.group) + yield {"location": (f"{tag.group}/" if tag.group else "") + tag.name} + + groups.remove(None) + for group in groups: + yield {"location": group} urlpatterns = [ distill_path("", views.PageOrCategoryView.as_view(), name='pages'), distill_path( - "tags//", + "tags//", views.TagView.as_view(), name="tag", distill_func=get_all_tags diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py index cc08f81f..da6a024d 100644 --- a/pydis_site/apps/content/utils.py +++ b/pydis_site/apps/content/utils.py @@ -2,6 +2,7 @@ import datetime import functools import tarfile import tempfile +import typing from io import BytesIO from pathlib import Path @@ -16,7 +17,6 @@ from markdown.extensions.toc import TocExtension from pydis_site import settings from .models import Tag -TAG_URL_BASE = "https://github.com/python-discord/bot/tree/main/bot/resources/tags" TAG_CACHE_TTL = datetime.timedelta(hours=1) @@ -44,9 +44,13 @@ def get_tags_static() -> list[Tag]: """ Fetch tag information in static builds. + This also includes some fake tags to preview the tag groups feature. This will return a cached value, so it should only be used for static builds. """ - return fetch_tags() + tags = fetch_tags() + for tag in tags[3:5]: + tag.group = "very-cool-group" + return tags def fetch_tags() -> list[Tag]: @@ -79,10 +83,15 @@ def fetch_tags() -> list[Tag]: repo.extractall(folder, included) for tag_file in Path(folder).rglob("*.md"): + group = None + if tag_file.parent.name != "tags": + # Tags in sub-folders are considered part of a group + group = tag_file.parent.name + tags.append(Tag( name=tag_file.name.removesuffix(".md"), + group=group, body=tag_file.read_text(encoding="utf-8"), - url=f"{TAG_URL_BASE}/{tag_file.name}" )) return tags @@ -114,31 +123,85 @@ def get_tags() -> list[Tag]: return Tag.objects.all() -def get_tag(name: str) -> Tag: - """Return a tag by name.""" - tags = get_tags() - for tag in tags: - if tag.name == name: +def get_tag(path: str) -> typing.Union[Tag, list[Tag]]: + """ + Return a tag based on the search location. 
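As a usage sketch, assuming a hypothetical ungrouped tag "codeblock" and a tag "microbit" inside an "embedded" group:

    get_tag("codeblock")          # -> Tag(name="codeblock", group=None)
    get_tag("embedded/microbit")  # -> Tag(name="microbit", group="embedded")
    get_tag("embedded")           # -> [Tag, ...] for every tag in the group
    get_tag("missing")            # -> raises Tag.DoesNotExist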
+ + The tag name and group must match. If only one argument is provided in the path, + it's assumed to either be a group name, or a no-group tag name. + + If it's a group name, a list of tags which belong to it is returned. + """ + path = path.split("/") + if len(path) == 2: + group, name = path[0], path[1] + else: + name = path[0] + group = None + + matches = [] + for tag in get_tags(): + if tag.name == name and tag.group == group: return tag + elif tag.group == name and group is None: + matches.append(tag) + + if matches: + return matches raise Tag.DoesNotExist() -def get_category_pages(path: Path) -> dict[str, dict]: - """Get all page names and their metadata at a category path.""" - # Special handling for tags - if path == Path(__file__).parent / "resources/tags": - tags = {} - for tag in get_tags(): - content = frontmatter.parse(tag.body)[1] +def get_tag_category( + tags: typing.Optional[list[Tag]] = None, *, collapse_groups: bool +) -> dict[str, dict]: + """ + Generate context data for `tags`, or all tags if None. + + If `tags` is None, `get_tag` is used to populate the data. + If `collapse_groups` is True, tags with parent groups are not included in the list, + and instead the parent itself is included as a single entry with it's sub-tags + in the description. + """ + if not tags: + tags = get_tags() + + data = [] + groups = {} - tags[tag.name] = { + # Create all the metadata for the tags + for tag in tags: + if tag.group is None or not collapse_groups: + content = frontmatter.parse(tag.body)[1] + data.append({ "title": tag.name, "description": markdown.markdown(content, extensions=["pymdownx.superfences"]), - "icon": "fas fa-tag" - } + "icon": "fas fa-tag", + }) + else: + if tag.group not in groups: + groups[tag.group] = { + "title": tag.group, + "description": [tag.name], + "icon": "fas fa-tags", + } + else: + groups[tag.group]["description"].append(tag.name) - return {name: tags[name] for name in sorted(tags)} + # Flatten group description into a single string + for group in groups.values(): + group["description"] = "Contains the following tags: " + ", ".join(group["description"]) + data.append(group) + + # Sort the tags, and return them in the proper format + return {tag["title"]: tag for tag in sorted(data, key=lambda tag: tag["title"].lower())} + + +def get_category_pages(path: Path) -> dict[str, dict]: + """Get all page names and their metadata at a category path.""" + # Special handling for tags + if path == Path(__file__).parent / "resources/tags": + return get_tag_category(collapse_groups=True) pages = {} diff --git a/pydis_site/apps/content/views/page_category.py b/pydis_site/apps/content/views/page_category.py index 01ce8402..062c2bc1 100644 --- a/pydis_site/apps/content/views/page_category.py +++ b/pydis_site/apps/content/views/page_category.py @@ -5,7 +5,7 @@ from django.conf import settings from django.http import Http404, HttpRequest, HttpResponse from django.views.generic import TemplateView -from pydis_site.apps.content import utils +from pydis_site.apps.content import models, utils class PageOrCategoryView(TemplateView): @@ -91,4 +91,7 @@ class PageOrCategoryView(TemplateView): "page_title": category["title"], "page_description": category["description"], "icon": category.get("icon"), + "app_name": "content:page_category", + "is_tag_listing": "/resources/tags" in path.as_posix(), + "tag_url": models.Tag.URL_BASE, } diff --git a/pydis_site/apps/content/views/tags.py b/pydis_site/apps/content/views/tags.py index 5295537d..a8df65db 100644 --- 
a/pydis_site/apps/content/views/tags.py +++ b/pydis_site/apps/content/views/tags.py @@ -1,4 +1,5 @@ import re +import typing import frontmatter import markdown @@ -16,23 +17,65 @@ COMMAND_REGEX = re.compile(r"`*!tags? (?P[\w\d-]+)`*") class TagView(TemplateView): """Handles tag pages.""" - template_name = "content/tag.html" + tag: typing.Union[Tag, list[Tag]] + is_group: bool + + def setup(self, *args, **kwargs) -> None: + """Look for a tag, and configure the view.""" + super().setup(*args, **kwargs) - def get_context_data(self, **kwargs) -> dict: - """Get the relevant context for this tag page.""" try: - tag = utils.get_tag(kwargs.get("name")) + self.tag = utils.get_tag(kwargs.get("location")) + self.is_group = isinstance(self.tag, list) except Tag.DoesNotExist: raise Http404 + def get_template_names(self) -> list[str]: + """Either return the tag page template, or the listing.""" + if self.is_group: + template_name = "content/listing.html" + else: + template_name = "content/tag.html" + + return [template_name] + + def get_context_data(self, **kwargs) -> dict: + """Get the relevant context for this tag page or group.""" context = super().get_context_data(**kwargs) - context["page_title"] = tag.name + context["breadcrumb_items"] = [{ + "name": utils.get_category(settings.CONTENT_PAGES_PATH / location)["title"], + "path": location, + } for location in (".", "tags")] + + if self.is_group: + self._set_group_context(context, self.tag) + else: + self._set_tag_context(context, self.tag) + + return context + + @staticmethod + def _set_tag_context(context: dict[str, any], tag: Tag) -> None: + """Update the context with the information for a tag page.""" + context.update({ + "page_title": tag.name, + "tag": tag, + }) + + if tag.group: + # Add group names to the breadcrumbs + context["breadcrumb_items"].append({ + "name": tag.group, + "path": f"tags/{tag.group}", + }) + + # Clean up tag body body = frontmatter.parse(tag.body) content = body[1] # Check for tags which can be hyperlinked def sub(match: re.Match) -> str: - link = reverse("content:tag", kwargs={"name": match.group("name")}) + link = reverse("content:tag", kwargs={"location": match.group("name")}) return f"[{match.group()}]({link})" content = COMMAND_REGEX.sub(sub, content) @@ -42,14 +85,20 @@ class TagView(TemplateView): if image := embed.get("image"): content = f"![{embed['title']}]({image['url']})\n\n" + content + # Insert the content + context["page"] = markdown.markdown(content, extensions=["pymdownx.superfences"]) + + @staticmethod + def _set_group_context(context: dict[str, any], tags: list[Tag]) -> None: + """Update the context with the information for a group of tags.""" + group = tags[0].group context.update({ - "page": markdown.markdown(content, extensions=["pymdownx.superfences"]), - "tag": tag, + "categories": {}, + "pages": utils.get_tag_category(tags, collapse_groups=False), + "page_title": group, + "icon": "fab fa-tags", + "is_tag_listing": True, + "app_name": "content:tag", + "path": f"{group}/", + "tag_url": f"{tags[0].URL_BASE}/{group}" }) - - context["breadcrumb_items"] = [{ - "name": utils.get_category(settings.CONTENT_PAGES_PATH / location)["title"], - "path": str(location) - } for location in [".", "tags"]] - - return context diff --git a/pydis_site/static/css/content/color.css b/pydis_site/static/css/content/color.css new file mode 100644 index 00000000..f4801c28 --- /dev/null +++ b/pydis_site/static/css/content/color.css @@ -0,0 +1,7 @@ +.content .fa-github { + color: black; +} + +.content .fa-github:hover { + 
color: #7289DA; +} diff --git a/pydis_site/static/css/content/tag.css b/pydis_site/static/css/content/tag.css index a3db046c..ec45bfc7 100644 --- a/pydis_site/static/css/content/tag.css +++ b/pydis_site/static/css/content/tag.css @@ -1,11 +1,3 @@ -h1.title a { - color: black; -} - -h1.title a:hover { - color: #7289DA; -} - .content a * { /* This is the original color, but propagated down the chain */ /* which allows for elements inside links, such as codeblocks */ diff --git a/pydis_site/static/js/content/listing.js b/pydis_site/static/js/content/listing.js index 3502cb2a..4b722632 100644 --- a/pydis_site/static/js/content/listing.js +++ b/pydis_site/static/js/content/listing.js @@ -4,6 +4,11 @@ function trimTag() { const containers = document.getElementsByClassName("tag-container"); for (const container of containers) { + if (container.textContent.startsWith("Contains the following tags:")) { + // Tag group, no need to trim + continue; + } + // Remove every element after the first two paragraphs while (container.children.length > 2) { container.removeChild(container.lastChild); diff --git a/pydis_site/templates/content/listing.html b/pydis_site/templates/content/listing.html index 098f4237..934b95f6 100644 --- a/pydis_site/templates/content/listing.html +++ b/pydis_site/templates/content/listing.html @@ -2,6 +2,19 @@ {% extends 'content/base.html' %} {% load static %} +{# Show a GitHub button on tag pages #} +{% block title_element %} +{% if is_tag_listing %} + +
<div class="level">
+        <div class="level-left">
+            {{ block.super }}
+        </div>
+        <a class="level-right fab fa-github" href="{{ tag_url }}"></a>
+    </div>
+{% endif %} +{% endblock %} + {% block page_content %} {# Nested Categories #} {% for category, data in categories.items %} @@ -23,10 +36,10 @@ - + {{ data.title }} - {% if "tags" in location %} + {% if is_tag_listing %}
<div class="tag-container">{{ data.description | safe }}</div>
                     {% else %}
                         <p>{{ data.description }}</p>
diff --git a/pydis_site/templates/content/tag.html b/pydis_site/templates/content/tag.html index 264f63d0..9bd65744 100644 --- a/pydis_site/templates/content/tag.html +++ b/pydis_site/templates/content/tag.html @@ -3,6 +3,7 @@ {% block head %} {{ block.super }} + {{ tag.name }} {% endblock %} @@ -15,7 +16,3 @@ {% endblock %} - -{% block page_content %} - {{ block.super }} -{% endblock %} -- cgit v1.2.3 From 5aeffa5ab4dd8b251d2ae742d1a1e2bf3ba461c7 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Sun, 14 Aug 2022 07:14:50 +0200 Subject: Update Tests For Tag Groups Signed-off-by: Hassan Abouelela --- pydis_site/apps/content/tests/test_utils.py | 110 +++++++++++++++++++++++----- pydis_site/apps/content/tests/test_views.py | 68 ++++++++++++++++- pydis_site/apps/content/utils.py | 3 +- 3 files changed, 157 insertions(+), 24 deletions(-) (limited to 'pydis_site/apps/content/utils.py') diff --git a/pydis_site/apps/content/tests/test_utils.py b/pydis_site/apps/content/tests/test_utils.py index a5d5dcb4..556f633c 100644 --- a/pydis_site/apps/content/tests/test_utils.py +++ b/pydis_site/apps/content/tests/test_utils.py @@ -132,6 +132,7 @@ class TagUtilsTests(TestCase): --- This tag has frontmatter! """), + "This is a grouped tag!", ) # Generate a tar archive with a few tags @@ -146,6 +147,10 @@ class TagUtilsTests(TestCase): (tags_folder / "first_tag.md").write_text(bodies[0]) (tags_folder / "second_tag.md").write_text(bodies[1]) + group_folder = tags_folder / "some_group" + group_folder.mkdir() + (group_folder / "grouped_tag.md").write_text(bodies[2]) + with tarfile.open(tar_folder / "temp.tar", "w") as file: file.add(folder, recursive=True) @@ -158,10 +163,15 @@ class TagUtilsTests(TestCase): ) result = utils.fetch_tags() - self.assertEqual([ + + def sort(_tag: models.Tag) -> str: + return _tag.name + + self.assertEqual(sorted([ models.Tag(name="first_tag", body=bodies[0]), models.Tag(name="second_tag", body=bodies[1]), - ], sorted(result, key=lambda tag: tag.name)) + models.Tag(name="grouped_tag", body=bodies[2], group=group_folder.name), + ], key=sort), sorted(result, key=sort)) def test_get_real_tag(self): """Test that a single tag is returned if it exists.""" @@ -170,30 +180,92 @@ class TagUtilsTests(TestCase): self.assertEqual(tag, result) + def test_get_grouped_tag(self): + """Test fetching a tag from a group.""" + tag = models.Tag.objects.create(name="real-tag", group="real-group") + result = utils.get_tag("real-group/real-tag") + + self.assertEqual(tag, result) + + def test_get_group(self): + """Test fetching a group of tags.""" + included = [ + models.Tag.objects.create(name="tag-1", group="real-group"), + models.Tag.objects.create(name="tag-2", group="real-group"), + models.Tag.objects.create(name="tag-3", group="real-group"), + ] + + models.Tag.objects.create(name="not-included-1") + models.Tag.objects.create(name="not-included-2", group="other-group") + + result = utils.get_tag("real-group") + self.assertListEqual(included, result) + def test_get_tag_404(self): """Test that an error is raised when we fetch a non-existing tag.""" models.Tag.objects.create(name="real-tag") with self.assertRaises(models.Tag.DoesNotExist): utils.get_tag("fake") - def test_category_pages(self): - """Test that the category pages function returns the correct records for tags.""" - models.Tag.objects.create(name="second-tag", body="Normal body") - models.Tag.objects.create(name="first-tag", body="Normal body") - tag_body = {"description": markdown.markdown("Normal body"), "icon": "fas fa-tag"} - + 
@mock.patch.object(utils, "get_tag_category") + def test_category_pages(self, get_mock: mock.Mock): + """Test that the category pages function calls the correct method for tags.""" + tag = models.Tag.objects.create(name="tag") + get_mock.return_value = tag result = utils.get_category_pages(settings.CONTENT_PAGES_PATH / "tags") + self.assertEqual(tag, result) + get_mock.assert_called_once_with(collapse_groups=True) + + def test_get_category_root(self): + """Test that all tags are returned and formatted properly for the tag root page.""" + body = "normal body" + base = {"description": markdown.markdown(body), "icon": "fas fa-tag"} + + models.Tag.objects.create(name="tag-1", body=body), + models.Tag.objects.create(name="tag-2", body=body), + models.Tag.objects.create(name="tag-3", body=body), + + models.Tag.objects.create(name="tag-4", body=body, group="tag-group") + models.Tag.objects.create(name="tag-5", body=body, group="tag-group") + + result = utils.get_tag_category(collapse_groups=True) + self.assertDictEqual({ - "first-tag": {**tag_body, "title": "first-tag"}, - "second-tag": {**tag_body, "title": "second-tag"}, + "tag-1": {**base, "title": "tag-1"}, + "tag-2": {**base, "title": "tag-2"}, + "tag-3": {**base, "title": "tag-3"}, + "tag-group": { + "title": "tag-group", + "description": "Contains the following tags: tag-4, tag-5", + "icon": "fas fa-tags" + } }, result) - def test_trimmed_tag_content(self): - """Test a tag with a long body that requires trimming.""" - tag = models.Tag.objects.create(name="long-tag", body="E" * 300) - result = utils.get_category_pages(settings.CONTENT_PAGES_PATH / "tags") - self.assertDictEqual({"long-tag": { - "title": "long-tag", - "description": markdown.markdown(tag.body[:100] + "..."), - "icon": "fas fa-tag", - }}, result) + def test_get_category_group(self): + """Test the function for a group root page.""" + body = "normal body" + base = {"description": markdown.markdown(body), "icon": "fas fa-tag"} + + included = [ + models.Tag.objects.create(name="tag-1", body=body, group="group"), + models.Tag.objects.create(name="tag-2", body=body, group="group"), + ] + models.Tag.objects.create(name="not-included", body=body) + + result = utils.get_tag_category(included, collapse_groups=False) + self.assertDictEqual({ + "tag-1": {**base, "title": "tag-1"}, + "tag-2": {**base, "title": "tag-2"}, + }, result) + + def test_tag_url(self): + """Test that tag URLs are generated correctly.""" + cases = [ + ({"name": "tag"}, f"{models.Tag.URL_BASE}/tag.md"), + ({"name": "grouped", "group": "abc"}, f"{models.Tag.URL_BASE}/abc/grouped.md"), + ] + + for options, url in cases: + tag = models.Tag(**options) + with self.subTest(tag=tag): + self.assertEqual(url, tag.url) diff --git a/pydis_site/apps/content/tests/test_views.py b/pydis_site/apps/content/tests/test_views.py index c4d3474e..c5c25be4 100644 --- a/pydis_site/apps/content/tests/test_views.py +++ b/pydis_site/apps/content/tests/test_views.py @@ -194,6 +194,23 @@ class TagViewTests(django.test.TestCase): """Set test helpers, then set up fake filesystem.""" super().setUp() + def test_routing(self): + """Test that the correct template is returned for each route.""" + Tag.objects.create(name="example") + Tag.objects.create(name="grouped-tag", group="group-name") + + cases = [ + ("/pages/tags/example/", "content/tag.html"), + ("/pages/tags/group-name/", "content/listing.html"), + ("/pages/tags/group-name/grouped-tag/", "content/tag.html"), + ] + + for url, template in cases: + with self.subTest(url=url): + response = 
self.client.get(url) + self.assertEqual(200, response.status_code) + self.assertTemplateUsed(response, template) + def test_valid_tag_returns_200(self): """Test that a page is returned for a valid tag.""" Tag.objects.create(name="example", body="This is the tag body.") @@ -207,8 +224,8 @@ class TagViewTests(django.test.TestCase): response = self.client.get("/pages/tags/non-existent/") self.assertEqual(404, response.status_code) - def test_context(self): - """Check that the context contains all the necessary data.""" + def test_context_tag(self): + """Test that the context contains the required data for a tag.""" body = textwrap.dedent(""" --- unused: frontmatter @@ -222,12 +239,55 @@ class TagViewTests(django.test.TestCase): "page_title": "example", "page": markdown.markdown("Tag content here."), "tag": tag, + "breadcrumb_items": [ + {"name": "Pages", "path": "."}, + {"name": "Tags", "path": "tags"}, + ] } for key in expected: self.assertEqual( expected[key], response.context.get(key), f"context.{key} did not match" ) + def test_context_grouped_tag(self): + """ + Test the context for a tag in a group. + + The only difference between this and a regular tag are the breadcrumbs, + so only those are checked. + """ + Tag.objects.create(name="example", body="Body text", group="group-name") + response = self.client.get("/pages/tags/group-name/example/") + self.assertListEqual([ + {"name": "Pages", "path": "."}, + {"name": "Tags", "path": "tags"}, + {"name": "group-name", "path": "tags/group-name"}, + ], response.context.get("breadcrumb_items")) + + def test_group_page(self): + """Test rendering of a group's root page.""" + Tag.objects.create(name="tag-1", body="Body 1", group="group-name") + Tag.objects.create(name="tag-2", body="Body 2", group="group-name") + Tag.objects.create(name="not-included") + + response = self.client.get("/pages/tags/group-name/") + content = response.content.decode("utf-8") + + self.assertInHTML("
<h1 class='title'>group-name</h1>
", content) + self.assertInHTML( + f"", + content + ) + self.assertIn(">tag-1", content) + self.assertIn(">tag-2", content) + self.assertNotIn( + ">not-included", + content, + "Tags not in this group shouldn't be rendered." + ) + + self.assertInHTML("
<p>Body 1</p>
", content) + def test_markdown(self): """Test that markdown content is rendered properly.""" body = textwrap.dedent(""" @@ -287,7 +347,7 @@ class TagViewTests(django.test.TestCase): body = filler_before + "`!tags return`" + filler_after Tag.objects.create(name="example", body=body) - other_url = reverse("content:tag", kwargs={"name": "return"}) + other_url = reverse("content:tag", kwargs={"location": "return"}) response = self.client.get("/pages/tags/example/") self.assertEqual( markdown.markdown(filler_before + f"[`!tags return`]({other_url})" + filler_after), @@ -304,7 +364,7 @@ class TagViewTests(django.test.TestCase): content = response.content.decode("utf-8") self.assertTemplateUsed(response, "content/listing.html") - self.assertInHTML('
<h1 class="title">Tags</h1>
', content) + self.assertInHTML('
<div class="level-left"><h1 class="title">Tags</h1></div>
', content) for tag_number in range(1, 4): self.assertIn(f"tag-{tag_number}", content) diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py index da6a024d..11100ba5 100644 --- a/pydis_site/apps/content/utils.py +++ b/pydis_site/apps/content/utils.py @@ -48,7 +48,7 @@ def get_tags_static() -> list[Tag]: This will return a cached value, so it should only be used for static builds. """ tags = fetch_tags() - for tag in tags[3:5]: + for tag in tags[3:5]: # pragma: no cover tag.group = "very-cool-group" return tags @@ -190,6 +190,7 @@ def get_tag_category( # Flatten group description into a single string for group in groups.values(): + # If the following string is updated, make sure to update it in the frontend JS as well group["description"] = "Contains the following tags: " + ", ".join(group["description"]) data.append(group) -- cgit v1.2.3 From 04babac2f281487adcddbf1e92d9d028896e086e Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Tue, 16 Aug 2022 21:21:59 +0400 Subject: Add Tag Metadata Uses the commit API to obtain tag metadata such as when it was last edited, and by whom. Signed-off-by: Hassan Abouelela --- .../apps/content/migrations/0001_add_tags.py | 5 +- pydis_site/apps/content/models/tag.py | 14 ++- pydis_site/apps/content/utils.py | 124 ++++++++++++++++++--- pydis_site/static/css/content/tag.css | 6 +- pydis_site/templates/content/tag.html | 22 +++- 5 files changed, 146 insertions(+), 25 deletions(-) (limited to 'pydis_site/apps/content/utils.py') diff --git a/pydis_site/apps/content/migrations/0001_add_tags.py b/pydis_site/apps/content/migrations/0001_add_tags.py index 2e9d8c45..73525243 100644 --- a/pydis_site/apps/content/migrations/0001_add_tags.py +++ b/pydis_site/apps/content/migrations/0001_add_tags.py @@ -1,4 +1,4 @@ -# Generated by Django 4.0.6 on 2022-08-16 16:17 +# Generated by Django 4.0.6 on 2022-08-16 17:38 import django.db.models.deletion from django.db import migrations, models @@ -25,10 +25,11 @@ class Migration(migrations.Migration): name='Tag', fields=[ ('last_updated', models.DateTimeField(auto_now=True, help_text='The date and time this data was last fetched.')), + ('sha', models.CharField(help_text="The tag's hash, as calculated by GitHub.", max_length=40)), ('name', models.CharField(help_text="The tag's name.", max_length=50, primary_key=True, serialize=False)), ('group', models.CharField(help_text='The group the tag belongs to.', max_length=50, null=True)), ('body', models.TextField(help_text='The content of the tag.')), - ('last_commit', models.OneToOneField(help_text='The commit this file was last touched in.', null=True, on_delete=django.db.models.deletion.CASCADE, to='content.commit')), + ('last_commit', models.ForeignKey(help_text='The commit this file was last touched in.', null=True, on_delete=django.db.models.deletion.CASCADE, to='content.commit')), ], ), ] diff --git a/pydis_site/apps/content/models/tag.py b/pydis_site/apps/content/models/tag.py index 1c89fe1e..3c729768 100644 --- a/pydis_site/apps/content/models/tag.py +++ b/pydis_site/apps/content/models/tag.py @@ -1,3 +1,4 @@ +import collections.abc import json from django.db import models @@ -22,13 +23,10 @@ class Commit(models.Model): """The URL to the commit on GitHub.""" return self.URL_BASE + self.sha - @property - def format_users(self) -> str: + def format_users(self) -> collections.abc.Iterable[str]: """Return a nice representation of the user(s)' name and email.""" - authors = [] for author in json.loads(self.author): - authors.append(f"{author['name']} 
<{author['email']}>") - return ", ".join(authors) + yield f"{author['name']} <{author['email']}>" class Tag(models.Model): @@ -40,7 +38,11 @@ class Tag(models.Model): help_text="The date and time this data was last fetched.", auto_now=True, ) - last_commit = models.OneToOneField( + sha = models.CharField( + help_text="The tag's hash, as calculated by GitHub.", + max_length=40, + ) + last_commit = models.ForeignKey( Commit, help_text="The commit this file was last touched in.", null=True, diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py index 11100ba5..7b078de6 100644 --- a/pydis_site/apps/content/utils.py +++ b/pydis_site/apps/content/utils.py @@ -1,5 +1,6 @@ import datetime import functools +import json import tarfile import tempfile import typing @@ -15,11 +16,26 @@ from django.utils import timezone from markdown.extensions.toc import TocExtension from pydis_site import settings -from .models import Tag +from .models import Commit, Tag TAG_CACHE_TTL = datetime.timedelta(hours=1) +def github_client(**kwargs) -> httpx.Client: + """Get a client to access the GitHub API with important settings pre-configured.""" + client = httpx.Client( + base_url=settings.GITHUB_API, + follow_redirects=True, + timeout=settings.TIMEOUT_PERIOD, + **kwargs + ) + if settings.GITHUB_TOKEN: # pragma: no cover + if not client.headers.get("Authorization"): + client.headers = {"Authorization": f"token {settings.GITHUB_TOKEN}"} + + return client + + def get_category(path: Path) -> dict[str, str]: """Load category information by name from _info.yml.""" if not path.is_dir(): @@ -60,19 +76,31 @@ def fetch_tags() -> list[Tag]: The entire repository is downloaded and extracted locally because getting file content would require one request per file, and can get rate-limited. 
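Simplified to a flat tags directory, the metadata pass described here amounts to the following (field names are as returned by GitHub's contents API):

    with github_client() as client:
        listing = client.get("/repos/python-discord/bot/contents/bot/resources/tags")
        listing.raise_for_status()

        # Each entry carries the blob SHA GitHub computed for the file; the
        # sync logic further down compares these hashes to decide whether a
        # fresh commit lookup is needed.
        hashes = {
            entry["name"]: entry["sha"]
            for entry in listing.json()
            if entry["type"] == "file"
        }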
""" - if settings.GITHUB_TOKEN: # pragma: no cover - headers = {"Authorization": f"token {settings.GITHUB_TOKEN}"} - else: - headers = {} + client = github_client() + + # Grab metadata + metadata = client.get("/repos/python-discord/bot/contents/bot/resources") + metadata.raise_for_status() + + hashes = {} + for entry in metadata.json(): + if entry["type"] == "dir": + # Tag group + files = client.get(entry["url"]) + files.raise_for_status() + files = files.json() + else: + files = [entry] - tar_file = httpx.get( - f"{settings.GITHUB_API}/repos/python-discord/bot/tarball", - follow_redirects=True, - timeout=settings.TIMEOUT_PERIOD, - headers=headers, - ) + for file in files: + hashes[file["name"]] = file["sha"] + + # Download the files + tar_file = client.get("/repos/python-discord/bot/tarball") tar_file.raise_for_status() + client.close() + tags = [] with tempfile.TemporaryDirectory() as folder: with tarfile.open(fileobj=BytesIO(tar_file.content)) as repo: @@ -83,20 +111,83 @@ def fetch_tags() -> list[Tag]: repo.extractall(folder, included) for tag_file in Path(folder).rglob("*.md"): + name = tag_file.name group = None if tag_file.parent.name != "tags": # Tags in sub-folders are considered part of a group group = tag_file.parent.name tags.append(Tag( - name=tag_file.name.removesuffix(".md"), + name=name.removesuffix(".md"), + sha=hashes[name], group=group, body=tag_file.read_text(encoding="utf-8"), + last_commit=None, )) return tags +def set_tag_commit(tag: Tag) -> Tag: + """Fetch commit information from the API, and save it for the tag.""" + path = "/bot/resources/tags" + if tag.group: + path += f"/{tag.group}" + path += f"/{tag.name}.md" + + # Fetch and set the commit + with github_client() as client: + data = client.get("/repos/python-discord/bot/commits", params={"path": path}) + data.raise_for_status() + data = data.json()[0] + + commit = data["commit"] + author, committer = commit["author"], commit["committer"] + + date = datetime.datetime.strptime(committer["date"], settings.GITHUB_TIMESTAMP_FORMAT) + date = date.replace(tzinfo=datetime.timezone.utc) + + if author["email"] == committer["email"]: + commit_author = [author] + else: + commit_author = [author, committer] + + commit_obj, _ = Commit.objects.get_or_create( + sha=data["sha"], + message=commit["message"], + date=date, + author=json.dumps(commit_author), + ) + tag.last_commit = commit_obj + tag.save() + + return tag + + +def record_tags(tags: list[Tag]) -> None: + """Sync the database with an updated set of tags.""" + # Remove entries which no longer exist + Tag.objects.exclude(name__in=[tag.name for tag in tags]).delete() + + # Insert/update the tags + for tag in tags: + try: + old_tag = Tag.objects.get(name=tag.name) + except Tag.DoesNotExist: + # The tag is not in the database yet, + # pretend it's previous state is the current state + old_tag = tag + + if old_tag.sha == tag.sha and old_tag.last_commit is not None: + # We still have an up-to-date commit entry + tag.last_commit = old_tag.last_commit + + tag.save() + + # Drop old, unused commits + Commit.objects.filter(tag__isnull=True).delete() + + def get_tags() -> list[Tag]: """Return a list of all tags visible to the application, from the cache or API.""" if settings.STATIC_BUILD: # pragma: no cover @@ -113,9 +204,7 @@ def get_tags() -> list[Tag]: tags = get_tags_static() else: tags = fetch_tags() - Tag.objects.exclude(name__in=[tag.name for tag in tags]).delete() - for tag in tags: - tag.save() + record_tags(tags) return tags else: @@ -127,6 +216,9 @@ def get_tag(path: 
str) -> typing.Union[Tag, list[Tag]]: """ Return a tag based on the search location. + If certain tag data is out of sync (for instance a commit date is missing), + an extra request will be made to sync the information. + The tag name and group must match. If only one argument is provided in the path, it's assumed to either be a group name, or a no-group tag name. @@ -142,6 +234,8 @@ def get_tag(path: str) -> typing.Union[Tag, list[Tag]]: matches = [] for tag in get_tags(): if tag.name == name and tag.group == group: + if tag.last_commit is None: + set_tag_commit(tag) return tag elif tag.group == name and group is None: matches.append(tag) diff --git a/pydis_site/static/css/content/tag.css b/pydis_site/static/css/content/tag.css index 32a605a8..79795f9e 100644 --- a/pydis_site/static/css/content/tag.css +++ b/pydis_site/static/css/content/tag.css @@ -5,5 +5,9 @@ } .content a *:hover { - color: black; + color: dimgray; +} + +span.update-time { + text-decoration: black underline dotted; } diff --git a/pydis_site/templates/content/tag.html b/pydis_site/templates/content/tag.html index 9bd65744..513009da 100644 --- a/pydis_site/templates/content/tag.html +++ b/pydis_site/templates/content/tag.html @@ -9,10 +9,30 @@ {% endblock %} {% block title_element %} -
+ + + {% endblock %} -- cgit v1.2.3 From 89b0853245fdf5ba7f1f386d7ea7ab1548b538da Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Tue, 16 Aug 2022 22:09:14 +0400 Subject: Fix Tag Metadata For Static Builds Signed-off-by: Hassan Abouelela --- pydis_site/apps/content/utils.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) (limited to 'pydis_site/apps/content/utils.py') diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py index 7b078de6..e4a24a73 100644 --- a/pydis_site/apps/content/utils.py +++ b/pydis_site/apps/content/utils.py @@ -128,8 +128,19 @@ def fetch_tags() -> list[Tag]: return tags -def set_tag_commit(tag: Tag) -> Tag: +def set_tag_commit(tag: Tag) -> None: """Fetch commit information from the API, and save it for the tag.""" + if settings.STATIC_BUILD: + # Static builds request every page during build, which can ratelimit it. + # Instead, we return some fake data. + tag.last_commit = Commit( + sha="68da80efc00d9932a209d5cccd8d344cec0f09ea", + message="Initial Commit\n\nTHIS IS FAKE DEMO DATA", + date=datetime.datetime(2018, 2, 3, 12, 20, 26, tzinfo=datetime.timezone.utc), + author=json.dumps([{"name": "Joseph", "email": "joseph@josephbanks.me"}]), + ) + return + path = "/bot/resources/tags" if tag.group: path += f"/{tag.group}" @@ -161,8 +172,6 @@ def set_tag_commit(tag: Tag) -> Tag: tag.last_commit = commit_obj tag.save() - return tag - def record_tags(tags: list[Tag]) -> None: """Sync the database with an updated set of tags.""" -- cgit v1.2.3 From 92a42694b6ad1a29e5a21e0b3e57639528837113 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Tue, 16 Aug 2022 23:45:25 +0400 Subject: Fix Tests For Tag Metadata Signed-off-by: Hassan Abouelela --- pydis_site/apps/api/tests/test_github_utils.py | 7 +- pydis_site/apps/content/tests/test_utils.py | 132 +++++++++++++++++++++++-- pydis_site/apps/content/tests/test_views.py | 36 ++++--- pydis_site/apps/content/utils.py | 2 +- 4 files changed, 148 insertions(+), 29 deletions(-) (limited to 'pydis_site/apps/content/utils.py') diff --git a/pydis_site/apps/api/tests/test_github_utils.py b/pydis_site/apps/api/tests/test_github_utils.py index f642f689..6e25bc80 100644 --- a/pydis_site/apps/api/tests/test_github_utils.py +++ b/pydis_site/apps/api/tests/test_github_utils.py @@ -11,6 +11,7 @@ import rest_framework.response import rest_framework.test from django.urls import reverse +from pydis_site import settings from .. 
import github_utils @@ -49,7 +50,7 @@ class CheckRunTests(unittest.TestCase): "head_sha": "sha", "status": "completed", "conclusion": "success", - "created_at": datetime.datetime.utcnow().strftime(github_utils.ISO_FORMAT_STRING), + "created_at": datetime.datetime.utcnow().strftime(settings.GITHUB_TIMESTAMP_FORMAT), "artifacts_url": "url", } @@ -74,7 +75,7 @@ class CheckRunTests(unittest.TestCase): # to guarantee the right conclusion kwargs["created_at"] = ( datetime.datetime.utcnow() - github_utils.MAX_RUN_TIME - datetime.timedelta(minutes=10) - ).strftime(github_utils.ISO_FORMAT_STRING) + ).strftime(settings.GITHUB_TIMESTAMP_FORMAT) with self.assertRaises(github_utils.RunTimeoutError): github_utils.check_run_status(github_utils.WorkflowRun(**kwargs)) @@ -178,7 +179,7 @@ class ArtifactFetcherTests(unittest.TestCase): run = github_utils.WorkflowRun( name="action_name", head_sha="action_sha", - created_at=datetime.datetime.now().strftime(github_utils.ISO_FORMAT_STRING), + created_at=datetime.datetime.now().strftime(settings.GITHUB_TIMESTAMP_FORMAT), status="completed", conclusion="success", artifacts_url="artifacts_url" diff --git a/pydis_site/apps/content/tests/test_utils.py b/pydis_site/apps/content/tests/test_utils.py index 556f633c..2ef033e4 100644 --- a/pydis_site/apps/content/tests/test_utils.py +++ b/pydis_site/apps/content/tests/test_utils.py @@ -1,3 +1,5 @@ +import datetime +import json import tarfile import tempfile import textwrap @@ -15,6 +17,18 @@ from pydis_site.apps.content.tests.helpers import ( BASE_PATH, MockPagesTestCase, PARSED_CATEGORY_INFO, PARSED_HTML, PARSED_METADATA ) +_time = datetime.datetime(2022, 10, 10, 10, 10, 10, tzinfo=datetime.timezone.utc) +_time_str = _time.strftime(settings.GITHUB_TIMESTAMP_FORMAT) +TEST_COMMIT_KWARGS = { + "sha": "123", + "message": "Hello world\n\nThis is a commit message", + "date": _time, + "author": json.dumps([ + {"name": "Author 1", "email": "mail1@example.com", "date": _time_str}, + {"name": "Author 2", "email": "mail2@example.com", "date": _time_str}, + ]), +} + class GetCategoryTests(MockPagesTestCase): """Tests for the get_category function.""" @@ -109,6 +123,10 @@ class GetPageTests(MockPagesTestCase): class TagUtilsTests(TestCase): """Tests for the tag-related utilities.""" + def setUp(self) -> None: + super().setUp() + self.commit = models.Commit.objects.create(**TEST_COMMIT_KWARGS) + @mock.patch.object(utils, "fetch_tags") def test_static_fetch(self, fetch_mock: mock.Mock): """Test that the static fetch function is only called at most once during static builds.""" @@ -121,9 +139,27 @@ class TagUtilsTests(TestCase): self.assertEqual(tags, result) self.assertEqual(tags, second_result) - @mock.patch("httpx.get") + @mock.patch("httpx.Client.get") def test_mocked_fetch(self, get_mock: mock.Mock): """Test that proper data is returned from fetch, but with a mocked API response.""" + fake_request = httpx.Request("GET", "https://google.com") + + # Metadata requests + returns = [httpx.Response( + request=fake_request, + status_code=200, + json=[ + {"type": "file", "name": "first_tag.md", "sha": "123"}, + {"type": "file", "name": "second_tag.md", "sha": "456"}, + {"type": "dir", "name": "some_group", "sha": "789", "url": "/some_group"}, + ] + ), httpx.Response( + request=fake_request, + status_code=200, + json=[{"type": "file", "name": "grouped_tag.md", "sha": "789123"}] + )] + + # Main content request bodies = ( "This is the first tag!", textwrap.dedent(""" @@ -156,33 +192,36 @@ class TagUtilsTests(TestCase): body = (tar_folder / 
"temp.tar").read_bytes() - get_mock.return_value = httpx.Response( + returns.append(httpx.Response( status_code=200, content=body, - request=httpx.Request("GET", "https://google.com"), - ) + request=fake_request, + )) + get_mock.side_effect = returns result = utils.fetch_tags() def sort(_tag: models.Tag) -> str: return _tag.name self.assertEqual(sorted([ - models.Tag(name="first_tag", body=bodies[0]), - models.Tag(name="second_tag", body=bodies[1]), - models.Tag(name="grouped_tag", body=bodies[2], group=group_folder.name), + models.Tag(name="first_tag", body=bodies[0], sha="123"), + models.Tag(name="second_tag", body=bodies[1], sha="245"), + models.Tag(name="grouped_tag", body=bodies[2], group=group_folder.name, sha="789123"), ], key=sort), sorted(result, key=sort)) def test_get_real_tag(self): """Test that a single tag is returned if it exists.""" - tag = models.Tag.objects.create(name="real-tag") + tag = models.Tag.objects.create(name="real-tag", last_commit=self.commit) result = utils.get_tag("real-tag") self.assertEqual(tag, result) def test_get_grouped_tag(self): """Test fetching a tag from a group.""" - tag = models.Tag.objects.create(name="real-tag", group="real-group") + tag = models.Tag.objects.create( + name="real-tag", group="real-group", last_commit=self.commit + ) result = utils.get_tag("real-group/real-tag") self.assertEqual(tag, result) @@ -269,3 +308,78 @@ class TagUtilsTests(TestCase): tag = models.Tag(**options) with self.subTest(tag=tag): self.assertEqual(url, tag.url) + + @mock.patch("httpx.Client.get") + def test_get_tag_commit(self, get_mock: mock.Mock): + """Test the get commit function with a normal tag.""" + tag = models.Tag.objects.create(name="example") + + authors = json.loads(self.commit.author) + + get_mock.return_value = httpx.Response( + request=httpx.Request("GET", "https://google.com"), + status_code=200, + json=[{ + "sha": self.commit.sha, + "commit": { + "message": self.commit.message, + "author": authors[0], + "committer": authors[1], + } + }] + ) + + result = utils.get_tag(tag.name) + self.assertEqual(tag, result) + + get_mock.assert_called_once() + call_params = get_mock.call_args[1]["params"] + + self.assertEqual({"path": "/bot/resources/tags/example.md"}, call_params) + self.assertEqual(self.commit, models.Tag.objects.get(name=tag.name).last_commit) + + @mock.patch("httpx.Client.get") + def test_get_group_tag_commit(self, get_mock: mock.Mock): + """Test the get commit function with a group tag.""" + tag = models.Tag.objects.create(name="example", group="group-name") + + authors = json.loads(self.commit.author) + authors.pop() + self.commit.author = json.dumps(authors) + self.commit.save() + + get_mock.return_value = httpx.Response( + request=httpx.Request("GET", "https://google.com"), + status_code=200, + json=[{ + "sha": self.commit.sha, + "commit": { + "message": self.commit.message, + "author": authors[0], + "committer": authors[0], + } + }] + ) + + utils.set_tag_commit(tag) + + get_mock.assert_called_once() + call_params = get_mock.call_args[1]["params"] + + self.assertEqual({"path": "/bot/resources/tags/group-name/example.md"}, call_params) + self.assertEqual(self.commit, models.Tag.objects.get(name=tag.name).last_commit) + + @mock.patch.object(utils, "set_tag_commit") + def test_exiting_commit(self, set_commit_mock: mock.Mock): + """Test that a commit is saved when the data has not changed.""" + tag = models.Tag.objects.create(name="tag-name", body="old body", last_commit=self.commit) + + # This is only applied to the object, not to the 
database + tag.last_commit = None + + utils.record_tags([tag]) + self.assertEqual(self.commit, tag.last_commit) + + result = utils.get_tag("tag-name") + self.assertEqual(tag, result) + set_commit_mock.assert_not_called() diff --git a/pydis_site/apps/content/tests/test_views.py b/pydis_site/apps/content/tests/test_views.py index c5c25be4..658ac2cc 100644 --- a/pydis_site/apps/content/tests/test_views.py +++ b/pydis_site/apps/content/tests/test_views.py @@ -8,10 +8,11 @@ from django.http import Http404 from django.test import RequestFactory, SimpleTestCase, override_settings from django.urls import reverse -from pydis_site.apps.content.models import Tag +from pydis_site.apps.content.models import Commit, Tag from pydis_site.apps.content.tests.helpers import ( BASE_PATH, MockPagesTestCase, PARSED_CATEGORY_INFO, PARSED_HTML, PARSED_METADATA ) +from pydis_site.apps.content.tests.test_utils import TEST_COMMIT_KWARGS from pydis_site.apps.content.views import PageOrCategoryView @@ -193,11 +194,12 @@ class TagViewTests(django.test.TestCase): def setUp(self): """Set test helpers, then set up fake filesystem.""" super().setUp() + self.commit = Commit.objects.create(**TEST_COMMIT_KWARGS) def test_routing(self): """Test that the correct template is returned for each route.""" - Tag.objects.create(name="example") - Tag.objects.create(name="grouped-tag", group="group-name") + Tag.objects.create(name="example", last_commit=self.commit) + Tag.objects.create(name="grouped-tag", group="group-name", last_commit=self.commit) cases = [ ("/pages/tags/example/", "content/tag.html"), @@ -213,7 +215,7 @@ class TagViewTests(django.test.TestCase): def test_valid_tag_returns_200(self): """Test that a page is returned for a valid tag.""" - Tag.objects.create(name="example", body="This is the tag body.") + Tag.objects.create(name="example", body="This is the tag body.", last_commit=self.commit) response = self.client.get("/pages/tags/example/") self.assertEqual(200, response.status_code) self.assertIn("This is the tag body", response.content.decode("utf-8")) @@ -233,7 +235,7 @@ class TagViewTests(django.test.TestCase): Tag content here. """) - tag = Tag.objects.create(name="example", body=body) + tag = Tag.objects.create(name="example", body=body, last_commit=self.commit) response = self.client.get("/pages/tags/example/") expected = { "page_title": "example", @@ -256,7 +258,9 @@ class TagViewTests(django.test.TestCase): The only difference between this and a regular tag are the breadcrumbs, so only those are checked. 
""" - Tag.objects.create(name="example", body="Body text", group="group-name") + Tag.objects.create( + name="example", body="Body text", group="group-name", last_commit=self.commit + ) response = self.client.get("/pages/tags/group-name/example/") self.assertListEqual([ {"name": "Pages", "path": "."}, @@ -266,9 +270,9 @@ class TagViewTests(django.test.TestCase): def test_group_page(self): """Test rendering of a group's root page.""" - Tag.objects.create(name="tag-1", body="Body 1", group="group-name") - Tag.objects.create(name="tag-2", body="Body 2", group="group-name") - Tag.objects.create(name="not-included") + Tag.objects.create(name="tag-1", body="Body 1", group="group-name", last_commit=self.commit) + Tag.objects.create(name="tag-2", body="Body 2", group="group-name", last_commit=self.commit) + Tag.objects.create(name="not-included", last_commit=self.commit) response = self.client.get("/pages/tags/group-name/") content = response.content.decode("utf-8") @@ -298,7 +302,7 @@ class TagViewTests(django.test.TestCase): **This text is in bold** """) - Tag.objects.create(name="example", body=body) + Tag.objects.create(name="example", body=body, last_commit=self.commit) response = self.client.get("/pages/tags/example/") content = response.content.decode("utf-8") @@ -317,7 +321,7 @@ class TagViewTests(django.test.TestCase): Tag body. """) - Tag.objects.create(name="example", body=body) + Tag.objects.create(name="example", body=body, last_commit=self.commit) response = self.client.get("/pages/tags/example/") content = response.content.decode("utf-8") @@ -333,7 +337,7 @@ class TagViewTests(django.test.TestCase): --- """) - Tag.objects.create(name="example", body=body) + Tag.objects.create(name="example", body=body, last_commit=self.commit) response = self.client.get("/pages/tags/example/") self.assertEqual( "Embed title", @@ -345,7 +349,7 @@ class TagViewTests(django.test.TestCase): """Test hyperlinking of tags works as intended.""" filler_before, filler_after = "empty filler text\n\n", "more\nfiller" body = filler_before + "`!tags return`" + filler_after - Tag.objects.create(name="example", body=body) + Tag.objects.create(name="example", body=body, last_commit=self.commit) other_url = reverse("content:tag", kwargs={"location": "return"}) response = self.client.get("/pages/tags/example/") @@ -356,9 +360,9 @@ class TagViewTests(django.test.TestCase): def test_tag_root_page(self): """Test the root tag page which lists all tags.""" - Tag.objects.create(name="tag-1") - Tag.objects.create(name="tag-2") - Tag.objects.create(name="tag-3") + Tag.objects.create(name="tag-1", last_commit=self.commit) + Tag.objects.create(name="tag-2", last_commit=self.commit) + Tag.objects.create(name="tag-3", last_commit=self.commit) response = self.client.get("/pages/tags/") content = response.content.decode("utf-8") diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py index e4a24a73..63f1c41c 100644 --- a/pydis_site/apps/content/utils.py +++ b/pydis_site/apps/content/utils.py @@ -130,7 +130,7 @@ def fetch_tags() -> list[Tag]: def set_tag_commit(tag: Tag) -> None: """Fetch commit information from the API, and save it for the tag.""" - if settings.STATIC_BUILD: + if settings.STATIC_BUILD: # pragma: no cover # Static builds request every page during build, which can ratelimit it. # Instead, we return some fake data. 
tag.last_commit = Commit( -- cgit v1.2.3 From efe784fe8a6c23248c6698aefab8bf54c5c85900 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Tue, 23 Aug 2022 12:54:16 +0400 Subject: Support Hyperlinked Tag Group Replacement Signed-off-by: Hassan Abouelela --- pydis_site/apps/content/tests/test_views.py | 28 ++++++++++++++++++++++++++++ pydis_site/apps/content/utils.py | 4 ++-- pydis_site/apps/content/views/tags.py | 26 +++++++++++++++++++++++--- 3 files changed, 53 insertions(+), 5 deletions(-) (limited to 'pydis_site/apps/content/utils.py') diff --git a/pydis_site/apps/content/tests/test_views.py b/pydis_site/apps/content/tests/test_views.py index 658ac2cc..da06fc80 100644 --- a/pydis_site/apps/content/tests/test_views.py +++ b/pydis_site/apps/content/tests/test_views.py @@ -358,6 +358,34 @@ class TagViewTests(django.test.TestCase): response.context.get("page") ) + def test_hyperlinked_group(self): + """Test hyperlinking with a group works as intended.""" + Tag.objects.create( + name="example", body="!tags group-name grouped-tag", last_commit=self.commit + ) + Tag.objects.create(name="grouped-tag", group="group-name") + + other_url = reverse("content:tag", kwargs={"location": "group-name/grouped-tag"}) + response = self.client.get("/pages/tags/example/") + self.assertEqual( + markdown.markdown(f"[!tags group-name grouped-tag]({other_url})"), + response.context.get("page") + ) + + def test_hyperlinked_extra_text(self): + """Test hyperlinking when a tag is followed by extra, unrelated text.""" + Tag.objects.create( + name="example", body="!tags other unrelated text", last_commit=self.commit + ) + Tag.objects.create(name="other") + + other_url = reverse("content:tag", kwargs={"location": "other"}) + response = self.client.get("/pages/tags/example/") + self.assertEqual( + markdown.markdown(f"[!tags other]({other_url}) unrelated text"), + response.context.get("page") + ) + def test_tag_root_page(self): """Test the root tag page which lists all tags.""" Tag.objects.create(name="tag-1", last_commit=self.commit) diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py index 63f1c41c..32d3d638 100644 --- a/pydis_site/apps/content/utils.py +++ b/pydis_site/apps/content/utils.py @@ -221,7 +221,7 @@ def get_tags() -> list[Tag]: return Tag.objects.all() -def get_tag(path: str) -> typing.Union[Tag, list[Tag]]: +def get_tag(path: str, *, skip_sync: bool = False) -> typing.Union[Tag, list[Tag]]: """ Return a tag based on the search location. @@ -243,7 +243,7 @@ def get_tag(path: str) -> typing.Union[Tag, list[Tag]]: matches = [] for tag in get_tags(): if tag.name == name and tag.group == group: - if tag.last_commit is None: + if tag.last_commit is None and not skip_sync: set_tag_commit(tag) return tag elif tag.group == name and group is None: diff --git a/pydis_site/apps/content/views/tags.py b/pydis_site/apps/content/views/tags.py index a8df65db..4f4bb5a2 100644 --- a/pydis_site/apps/content/views/tags.py +++ b/pydis_site/apps/content/views/tags.py @@ -11,7 +11,12 @@ from django.views.generic import TemplateView from pydis_site.apps.content import utils from pydis_site.apps.content.models import Tag -COMMAND_REGEX = re.compile(r"`*!tags? 
(?P<name>[\w\d-]+)`*")
+# The following regex tries to parse a tag command
+# It'll read up to two words separated by spaces
+# If the command does not include a group, the tag name will be in the `first` group
+# If there's a second word after the command, or if there's a tag group, extra logic
+# is necessary to determine whether it's a tag with a group, or a tag with text after it
+COMMAND_REGEX = re.compile(r"`*!tags? (?P<first>[\w-]+)(?P<second> [\w-]+)?`*")


 class TagView(TemplateView):
@@ -75,8 +80,23 @@ class TagView(TemplateView):

         # Check for tags which can be hyperlinked
         def sub(match: re.Match) -> str:
-            link = reverse("content:tag", kwargs={"location": match.group("name")})
-            return f"[{match.group()}]({link})"
+            first, second = match.groups()
+            location = first
+            text, extra = match.group(), ""
+
+            if second is not None:
+                # Possibly a tag group
+                try:
+                    new_location = f"{first}/{second.strip()}"
+                    utils.get_tag(new_location, skip_sync=True)
+                    location = new_location
+                except Tag.DoesNotExist:
+                    # Not a group, remove the second argument from the link
+                    extra = text[text.find(second):]
+                    text = text[:text.find(second)]
+
+            link = reverse("content:tag", kwargs={"location": location})
+            return f"[{text}]({link}){extra}"

         content = COMMAND_REGEX.sub(sub, content)

         # Add support for some embed elements
--
cgit v1.2.3


From 2343f7ab5c738b5b28554c766e78eb6a663bd064 Mon Sep 17 00:00:00 2001
From: Hassan Abouelela
Date: Tue, 23 Aug 2022 13:11:35 +0400
Subject: Rename Tag Model Author Field

Renames the tag model `author` field to `authors` to better indicate
what's contained in the field, and updates the documentation.

Signed-off-by: Hassan Abouelela
---
 pydis_site/apps/content/migrations/0001_add_tags.py | 4 ++--
 pydis_site/apps/content/models/tag.py | 9 +++++++--
 pydis_site/apps/content/tests/test_utils.py | 8 ++++----
 pydis_site/apps/content/utils.py | 8 ++++----
 4 files changed, 17 insertions(+), 12 deletions(-)

(limited to 'pydis_site/apps/content/utils.py')

diff --git a/pydis_site/apps/content/migrations/0001_add_tags.py b/pydis_site/apps/content/migrations/0001_add_tags.py
index 73525243..2c31e4c1 100644
--- a/pydis_site/apps/content/migrations/0001_add_tags.py
+++ b/pydis_site/apps/content/migrations/0001_add_tags.py
@@ -1,4 +1,4 @@
-# Generated by Django 4.0.6 on 2022-08-16 17:38
+# Generated by Django 4.0.6 on 2022-08-23 09:06

 import django.db.models.deletion
 from django.db import migrations, models
@@ -18,7 +18,7 @@ class Migration(migrations.Migration):
                 ('sha', models.CharField(help_text='The SHA hash of this commit.', max_length=40, primary_key=True, serialize=False)),
                 ('message', models.TextField(help_text='The commit message.')),
                 ('date', models.DateTimeField(help_text='The date and time the commit was created.')),
-                ('author', models.TextField(help_text='The person(s) who created the commit.')),
+                ('authors', models.TextField(help_text='The person(s) who created the commit. This is a serialized JSON object. Refer to the GitHub documentation on the commit endpoint (schema/commit.author & schema/commit.committer) for more info. 
https://docs.github.com/en/rest/commits/commits#get-a-commit')), ], ), migrations.CreateModel( diff --git a/pydis_site/apps/content/models/tag.py b/pydis_site/apps/content/models/tag.py index e06ce067..1a20d775 100644 --- a/pydis_site/apps/content/models/tag.py +++ b/pydis_site/apps/content/models/tag.py @@ -16,7 +16,12 @@ class Commit(models.Model): ) message = models.TextField(help_text="The commit message.") date = models.DateTimeField(help_text="The date and time the commit was created.") - author = models.TextField(help_text="The person(s) who created the commit.") + authors = models.TextField(help_text=( + "The person(s) who created the commit. This is a serialized JSON object. " + "Refer to the GitHub documentation on the commit endpoint " + "(schema/commit.author & schema/commit.committer) for more info. " + "https://docs.github.com/en/rest/commits/commits#get-a-commit" + )) @property def url(self) -> str: @@ -30,7 +35,7 @@ class Commit(models.Model): def format_authors(self) -> collections.abc.Iterable[str]: """Return a nice representation of the author(s)' name and email.""" - for author in json.loads(self.author): + for author in json.loads(self.authors): yield f"{author['name']} <{author['email']}>" diff --git a/pydis_site/apps/content/tests/test_utils.py b/pydis_site/apps/content/tests/test_utils.py index 2ef033e4..462818b5 100644 --- a/pydis_site/apps/content/tests/test_utils.py +++ b/pydis_site/apps/content/tests/test_utils.py @@ -23,7 +23,7 @@ TEST_COMMIT_KWARGS = { "sha": "123", "message": "Hello world\n\nThis is a commit message", "date": _time, - "author": json.dumps([ + "authors": json.dumps([ {"name": "Author 1", "email": "mail1@example.com", "date": _time_str}, {"name": "Author 2", "email": "mail2@example.com", "date": _time_str}, ]), @@ -314,7 +314,7 @@ class TagUtilsTests(TestCase): """Test the get commit function with a normal tag.""" tag = models.Tag.objects.create(name="example") - authors = json.loads(self.commit.author) + authors = json.loads(self.commit.authors) get_mock.return_value = httpx.Response( request=httpx.Request("GET", "https://google.com"), @@ -343,9 +343,9 @@ class TagUtilsTests(TestCase): """Test the get commit function with a group tag.""" tag = models.Tag.objects.create(name="example", group="group-name") - authors = json.loads(self.commit.author) + authors = json.loads(self.commit.authors) authors.pop() - self.commit.author = json.dumps(authors) + self.commit.authors = json.dumps(authors) self.commit.save() get_mock.return_value = httpx.Response( diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py index 32d3d638..a1171a45 100644 --- a/pydis_site/apps/content/utils.py +++ b/pydis_site/apps/content/utils.py @@ -137,7 +137,7 @@ def set_tag_commit(tag: Tag) -> None: sha="68da80efc00d9932a209d5cccd8d344cec0f09ea", message="Initial Commit\n\nTHIS IS FAKE DEMO DATA", date=datetime.datetime(2018, 2, 3, 12, 20, 26, tzinfo=datetime.timezone.utc), - author=json.dumps([{"name": "Joseph", "email": "joseph@josephbanks.me"}]), + authors=json.dumps([{"name": "Joseph", "email": "joseph@josephbanks.me"}]), ) return @@ -159,15 +159,15 @@ def set_tag_commit(tag: Tag) -> None: date = date.replace(tzinfo=datetime.timezone.utc) if author["email"] == committer["email"]: - commit_author = [author] + authors = [author] else: - commit_author = [author, committer] + authors = [author, committer] commit_obj, _ = Commit.objects.get_or_create( sha=data["sha"], message=commit["message"], date=date, - author=json.dumps(commit_author), + 
authors=json.dumps(authors), ) tag.last_commit = commit_obj tag.save() -- cgit v1.2.3 From 1e538704f5f3518b805720eb38eaa1743b18e2f1 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Fri, 28 Oct 2022 01:23:38 +0300 Subject: Update pydis_site/apps/content/utils.py Co-authored-by: Johannes Christ --- pydis_site/apps/content/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'pydis_site/apps/content/utils.py') diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py index a1171a45..d533d95b 100644 --- a/pydis_site/apps/content/utils.py +++ b/pydis_site/apps/content/utils.py @@ -235,7 +235,7 @@ def get_tag(path: str, *, skip_sync: bool = False) -> typing.Union[Tag, list[Tag """ path = path.split("/") if len(path) == 2: - group, name = path[0], path[1] + group, name = path else: name = path[0] group = None -- cgit v1.2.3 From 153059c051e9b14913c0f6cfba40d8b3ae067555 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Fri, 28 Oct 2022 02:26:10 +0400 Subject: Clean Up Typing Signed-off-by: Hassan Abouelela --- pydis_site/apps/content/utils.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) (limited to 'pydis_site/apps/content/utils.py') diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py index d533d95b..8e3b0bd7 100644 --- a/pydis_site/apps/content/utils.py +++ b/pydis_site/apps/content/utils.py @@ -3,7 +3,6 @@ import functools import json import tarfile import tempfile -import typing from io import BytesIO from pathlib import Path @@ -218,10 +217,10 @@ def get_tags() -> list[Tag]: return tags else: # Get tags from database - return Tag.objects.all() + return list(Tag.objects.all()) -def get_tag(path: str, *, skip_sync: bool = False) -> typing.Union[Tag, list[Tag]]: +def get_tag(path: str, *, skip_sync: bool = False) -> Tag | list[Tag]: """ Return a tag based on the search location. @@ -255,9 +254,7 @@ def get_tag(path: str, *, skip_sync: bool = False) -> typing.Union[Tag, list[Tag raise Tag.DoesNotExist() -def get_tag_category( - tags: typing.Optional[list[Tag]] = None, *, collapse_groups: bool -) -> dict[str, dict]: +def get_tag_category(tags: list[Tag] | None = None, *, collapse_groups: bool) -> dict[str, dict]: """ Generate context data for `tags`, or all tags if None. 
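(As a side-by-side sketch of what this typing cleanup changes, with `Tag` stubbed so the snippet stands alone:)

    from typing import Optional, Union

    class Tag: ...  # stub standing in for the Django model

    # Before this commit: explicit helpers from `typing`
    def get_tag_old(path: str) -> Union[Tag, list[Tag]]: ...
    def get_tag_category_old(tags: Optional[list[Tag]] = None) -> dict[str, dict]: ...

    # After: equivalent PEP 604 unions on Python 3.10+, no `typing` import needed
    def get_tag_new(path: str) -> Tag | list[Tag]: ...
    def get_tag_category_new(tags: list[Tag] | None = None) -> dict[str, dict]: ...
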
-- cgit v1.2.3 From 426fc71b25e3ba0572d8a294ef98c2302528a661 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Fri, 28 Oct 2022 02:27:26 +0400 Subject: Use `casefold` Instead Of `lower` Signed-off-by: Hassan Abouelela --- pydis_site/apps/content/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'pydis_site/apps/content/utils.py') diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py index 8e3b0bd7..5fa6353c 100644 --- a/pydis_site/apps/content/utils.py +++ b/pydis_site/apps/content/utils.py @@ -295,7 +295,7 @@ def get_tag_category(tags: list[Tag] | None = None, *, collapse_groups: bool) -> data.append(group) # Sort the tags, and return them in the proper format - return {tag["title"]: tag for tag in sorted(data, key=lambda tag: tag["title"].lower())} + return {tag["title"]: tag for tag in sorted(data, key=lambda tag: tag["title"].casefold())} def get_category_pages(path: Path) -> dict[str, dict]: -- cgit v1.2.3 From e8b3745c916ec78f4b7a6232ebb55682a06ef8f0 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Fri, 28 Oct 2022 02:29:56 +0400 Subject: Implicitly Close Client Using `with` Signed-off-by: Hassan Abouelela --- pydis_site/apps/content/utils.py | 41 +++++++++++++++++++--------------------- 1 file changed, 19 insertions(+), 22 deletions(-) (limited to 'pydis_site/apps/content/utils.py') diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py index 5fa6353c..8c9ad036 100644 --- a/pydis_site/apps/content/utils.py +++ b/pydis_site/apps/content/utils.py @@ -75,30 +75,27 @@ def fetch_tags() -> list[Tag]: The entire repository is downloaded and extracted locally because getting file content would require one request per file, and can get rate-limited. """ - client = github_client() - - # Grab metadata - metadata = client.get("/repos/python-discord/bot/contents/bot/resources") - metadata.raise_for_status() - - hashes = {} - for entry in metadata.json(): - if entry["type"] == "dir": - # Tag group - files = client.get(entry["url"]) - files.raise_for_status() - files = files.json() - else: - files = [entry] - - for file in files: - hashes[file["name"]] = file["sha"] + with github_client() as client: + # Grab metadata + metadata = client.get("/repos/python-discord/bot/contents/bot/resources") + metadata.raise_for_status() + + hashes = {} + for entry in metadata.json(): + if entry["type"] == "dir": + # Tag group + files = client.get(entry["url"]) + files.raise_for_status() + files = files.json() + else: + files = [entry] - # Download the files - tar_file = client.get("/repos/python-discord/bot/tarball") - tar_file.raise_for_status() + for file in files: + hashes[file["name"]] = file["sha"] - client.close() + # Download the files + tar_file = client.get("/repos/python-discord/bot/tarball") + tar_file.raise_for_status() tags = [] with tempfile.TemporaryDirectory() as folder: -- cgit v1.2.3 From 3940e34b89b67f1d45076bb2645f701e40d3357a Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Fri, 28 Oct 2022 02:26:10 +0400 Subject: Clarify Code Intentions Signed-off-by: Hassan Abouelela --- pydis_site/apps/content/urls.py | 2 ++ pydis_site/apps/content/utils.py | 12 ++++++------ 2 files changed, 8 insertions(+), 6 deletions(-) (limited to 'pydis_site/apps/content/utils.py') diff --git a/pydis_site/apps/content/urls.py b/pydis_site/apps/content/urls.py index 163d05bc..a7695a27 100644 --- a/pydis_site/apps/content/urls.py +++ b/pydis_site/apps/content/urls.py @@ -40,6 +40,8 @@ def get_all_pages() -> DISTILL_RETURN: def 
get_all_tags() -> DISTILL_RETURN:
     """Return all tag names and groups in static builds."""
+    # We instantiate the set with None here to make filtering it out later easier,
+    # whether it was added in the loop or not
     groups = {None}
     for tag in utils.get_tags_static():
         groups.add(tag.group)
diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py
index 8c9ad036..c12893ef 100644
--- a/pydis_site/apps/content/utils.py
+++ b/pydis_site/apps/content/utils.py
@@ -175,19 +175,19 @@ def record_tags(tags: list[Tag]) -> None:
     Tag.objects.exclude(name__in=[tag.name for tag in tags]).delete()

     # Insert/update the tags
-    for tag in tags:
+    for new_tag in tags:
         try:
-            old_tag = Tag.objects.get(name=tag.name)
+            old_tag = Tag.objects.get(name=new_tag.name)
         except Tag.DoesNotExist:
             # The tag is not in the database yet,
             # pretend its previous state is the current state
-            old_tag = tag
+            old_tag = new_tag

-        if old_tag.sha == tag.sha and old_tag.last_commit is not None:
+        if old_tag.sha == new_tag.sha and old_tag.last_commit is not None:
             # We still have an up-to-date commit entry
-            tag.last_commit = old_tag.last_commit
+            new_tag.last_commit = old_tag.last_commit

-        tag.save()
+        new_tag.save()

     # Drop old, unused commits
     Commit.objects.filter(tag__isnull=True).delete()
--
cgit v1.2.3


From 07855963a1eedd80c410ab2dd51fcae1200c9cee Mon Sep 17 00:00:00 2001
From: Johannes Christ
Date: Wed, 10 May 2023 12:30:57 +0200
Subject: Switch to ruff for linting

---
 .github/workflows/lint-test.yaml | 17 +-
 .pre-commit-config.yaml | 12 +-
 poetry.lock | 700 ++++++---------------
 pydis_site/apps/api/admin.py | 29 +-
 pydis_site/apps/api/github_utils.py | 13 +-
 pydis_site/apps/api/models/bot/message.py | 7 +-
 pydis_site/apps/api/models/bot/metricity.py | 19 +-
 pydis_site/apps/api/tests/base.py | 3 +-
 pydis_site/apps/api/tests/test_bumped_threads.py | 2 +-
 pydis_site/apps/api/tests/test_deleted_messages.py | 11 +-
 .../apps/api/tests/test_documentation_links.py | 2 +-
 pydis_site/apps/api/tests/test_filters.py | 12 +-
 pydis_site/apps/api/tests/test_github_utils.py | 41 +-
 pydis_site/apps/api/tests/test_infractions.py | 110 ++--
 pydis_site/apps/api/tests/test_models.py | 4 +-
 pydis_site/apps/api/tests/test_nominations.py | 2 +-
 .../apps/api/tests/test_off_topic_channel_names.py | 2 +-
 .../apps/api/tests/test_offensive_message.py | 10 +-
 pydis_site/apps/api/tests/test_reminders.py | 4 +-
 pydis_site/apps/api/tests/test_roles.py | 2 +-
 pydis_site/apps/api/tests/test_rules.py | 2 +-
 pydis_site/apps/api/tests/test_users.py | 29 +-
 pydis_site/apps/api/tests/test_validators.py | 4 +-
 pydis_site/apps/api/views.py | 4 +-
 pydis_site/apps/api/viewsets/bot/filters.py | 4 +-
 .../api/viewsets/bot/off_topic_channel_name.py | 7 +-
 pydis_site/apps/api/viewsets/bot/user.py | 5 +-
 pydis_site/apps/content/models/tag.py | 3 +-
 .../guides/pydis-guides/contributing/linting.md | 2 +-
 pydis_site/apps/content/tests/helpers.py | 20 +-
 pydis_site/apps/content/urls.py | 2 +-
 pydis_site/apps/content/utils.py | 16 +-
 pydis_site/apps/content/views/tags.py | 3 +-
 pydis_site/apps/events/urls.py | 2 +-
 pydis_site/apps/events/views/page.py | 3 +-
 .../apps/home/tests/test_repodata_helpers.py | 2 +-
 pydis_site/apps/home/views.py | 13 +-
 pydis_site/apps/redirect/urls.py | 25 +-
 pydis_site/apps/redirect/views.py | 3 +-
 pydis_site/apps/resources/views.py | 7 +-
 .../staff/templatetags/deletedmessage_filters.py | 3 +-
 .../staff/tests/test_deletedmessage_filters.py | 2 +-
 pyproject.toml | 39 +-
 static-builds/netlify_build.py | 6 +-
 44 files changed, 445 
insertions(+), 763 deletions(-) (limited to 'pydis_site/apps/content/utils.py') diff --git a/.github/workflows/lint-test.yaml b/.github/workflows/lint-test.yaml index 1c744470..a12fb650 100644 --- a/.github/workflows/lint-test.yaml +++ b/.github/workflows/lint-test.yaml @@ -24,20 +24,11 @@ jobs: # We will not run `flake8` here, as we will use a separate flake8 # action. - name: Run pre-commit hooks - run: SKIP=flake8 pre-commit run --all-files + run: SKIP=ruff pre-commit run --all-files - # Run flake8 and have it format the linting errors in the format of - # the GitHub Workflow command to register error annotations. This - # means that our flake8 output is automatically added as an error - # annotation to both the run result and in the "Files" tab of a - # pull request. - # - # Format used: - # ::error file={filename},line={line},col={col}::{message} - - name: Run flake8 - run: "flake8 \ - --format='::error file=%(path)s,line=%(row)d,col=%(col)d::\ - [flake8] %(code)s: %(text)s'" + # Run `ruff` using github formatting to enable automatic inline annotations. + - name: Run ruff + run: "ruff check --format=github ." - name: Migrations and run tests with coverage.py run: | diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b2a03559..700dd0a0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -10,15 +10,11 @@ repos: args: [--fix=lf] - id: trailing-whitespace args: [--markdown-linebreak-ext=md] - - repo: https://github.com/pre-commit/pygrep-hooks - rev: v1.5.1 - hooks: - - id: python-check-blanket-noqa - repo: local hooks: - - id: flake8 - name: Flake8 - description: This hook runs flake8 within our project's environment. - entry: poetry run flake8 + - id: ruff + name: ruff + description: This hook runs ruff within our project's environment. + entry: poetry run ruff language: system types: [python] diff --git a/poetry.lock b/poetry.lock index d6e09cbe..4d9a87de 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. [[package]] name = "anyio" @@ -36,58 +36,16 @@ files = [ [package.extras] tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] -[[package]] -name = "attrs" -version = "22.2.0" -description = "Classes Without Boilerplate" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, - {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, -] - -[package.extras] -cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] -tests = ["attrs[tests-no-zope]", "zope.interface"] -tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] - -[[package]] -name = "bandit" -version = "1.7.4" -description = "Security oriented static analyser for python code." 
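(A side note on the lint-test.yaml change above: the old flake8 step built GitHub's `::error` workflow commands by hand, while `ruff check --format=github` emits them natively. A minimal sketch of what either approach prints; the file, positions, and message below are made-up example values:)

    def annotate(path: str, row: int, col: int, code: str, message: str) -> None:
        # GitHub turns a stdout line in this form into an inline PR annotation.
        print(f"::error file={path},line={row},col={col}::[lint] {code}: {message}")

    annotate("pydis_site/apps/content/utils.py", 1, 1, "E501", "line too long")
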
-category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"}, - {file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"}, -] - -[package.dependencies] -colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} -GitPython = ">=1.0.1" -PyYAML = ">=5.3.1" -stevedore = ">=1.20.0" - -[package.extras] -test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml"] -toml = ["toml"] -yaml = ["PyYAML"] - [[package]] name = "certifi" -version = "2022.12.7" +version = "2023.5.7" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, - {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, + {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, + {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, ] [[package]] @@ -181,100 +139,87 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.0.1" +version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false -python-versions = "*" -files = [ - {file = "charset-normalizer-3.0.1.tar.gz", hash = "sha256:ebea339af930f8ca5d7a699b921106c6e29c617fe9606fa7baa043c1cdae326f"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88600c72ef7587fe1708fd242b385b6ed4b8904976d5da0893e31df8b3480cb6"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c75ffc45f25324e68ab238cb4b5c0a38cd1c3d7f1fb1f72b5541de469e2247db"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db72b07027db150f468fbada4d85b3b2729a3db39178abf5c543b784c1254539"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62595ab75873d50d57323a91dd03e6966eb79c41fa834b7a1661ed043b2d404d"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:772b87914ff1152b92a197ef4ea40efe27a378606c39446ded52c8f80f79702e"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70990b9c51340e4044cfc394a81f614f3f90d41397104d226f21e66de668730d"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:292d5e8ba896bbfd6334b096e34bffb56161c81408d6d036a7dfa6929cff8783"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2edb64ee7bf1ed524a1da60cdcd2e1f6e2b4f66ef7c077680739f1641f62f555"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:31a9ddf4718d10ae04d9b18801bd776693487cbb57d74cc3458a7673f6f34639"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:44ba614de5361b3e5278e1241fda3dc1838deed864b50a10d7ce92983797fa76"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:12db3b2c533c23ab812c2b25934f60383361f8a376ae272665f8e48b88e8e1c6"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c512accbd6ff0270939b9ac214b84fb5ada5f0409c44298361b2f5e13f9aed9e"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-win32.whl", hash = "sha256:502218f52498a36d6bf5ea77081844017bf7982cdbe521ad85e64cabee1b608b"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:601f36512f9e28f029d9481bdaf8e89e5148ac5d89cffd3b05cd533eeb423b59"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0298eafff88c99982a4cf66ba2efa1128e4ddaca0b05eec4c456bbc7db691d8d"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8d0fc946c784ff7f7c3742310cc8a57c5c6dc31631269876a88b809dbeff3d3"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:87701167f2a5c930b403e9756fab1d31d4d4da52856143b609e30a1ce7160f3c"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e76c0f23218b8f46c4d87018ca2e441535aed3632ca134b10239dfb6dadd6b"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c0a590235ccd933d9892c627dec5bc7511ce6ad6c1011fdf5b11363022746c1"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c7fe7afa480e3e82eed58e0ca89f751cd14d767638e2550c77a92a9e749c317"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79909e27e8e4fcc9db4addea88aa63f6423ebb171db091fb4373e3312cb6d603"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ac7b6a045b814cf0c47f3623d21ebd88b3e8cf216a14790b455ea7ff0135d18"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:72966d1b297c741541ca8cf1223ff262a6febe52481af742036a0b296e35fa5a"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5995f0164fa7df59db4746112fec3f49c461dd6b31b841873443bdb077c13cfc"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4a8fcf28c05c1f6d7e177a9a46a1c52798bfe2ad80681d275b10dcf317deaf0b"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:761e8904c07ad053d285670f36dd94e1b6ab7f16ce62b9805c475b7aa1cffde6"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-win32.whl", hash = "sha256:71140351489970dfe5e60fc621ada3e0f41104a5eddaca47a7acb3c1b851d6d3"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ab77acb98eba3fd2a85cd160851816bfce6871d944d885febf012713f06659c"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:84c3990934bae40ea69a82034912ffe5a62c60bbf6ec5bc9691419641d7d5c9a"}, - {file = 
"charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74292fc76c905c0ef095fe11e188a32ebd03bc38f3f3e9bcb85e4e6db177b7ea"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c95a03c79bbe30eec3ec2b7f076074f4281526724c8685a42872974ef4d36b72"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c39b0e3eac288fedc2b43055cfc2ca7a60362d0e5e87a637beac5d801ef478"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df2c707231459e8a4028eabcd3cfc827befd635b3ef72eada84ab13b52e1574d"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93ad6d87ac18e2a90b0fe89df7c65263b9a99a0eb98f0a3d2e079f12a0735837"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:59e5686dd847347e55dffcc191a96622f016bc0ad89105e24c14e0d6305acbc6"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:cd6056167405314a4dc3c173943f11249fa0f1b204f8b51ed4bde1a9cd1834dc"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:083c8d17153ecb403e5e1eb76a7ef4babfc2c48d58899c98fcaa04833e7a2f9a"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:f5057856d21e7586765171eac8b9fc3f7d44ef39425f85dbcccb13b3ebea806c"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:7eb33a30d75562222b64f569c642ff3dc6689e09adda43a082208397f016c39a"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-win32.whl", hash = "sha256:95dea361dd73757c6f1c0a1480ac499952c16ac83f7f5f4f84f0658a01b8ef41"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:eaa379fcd227ca235d04152ca6704c7cb55564116f8bc52545ff357628e10602"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3e45867f1f2ab0711d60c6c71746ac53537f1684baa699f4f668d4c6f6ce8e14"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cadaeaba78750d58d3cc6ac4d1fd867da6fc73c88156b7a3212a3cd4819d679d"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:911d8a40b2bef5b8bbae2e36a0b103f142ac53557ab421dc16ac4aafee6f53dc"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:503e65837c71b875ecdd733877d852adbc465bd82c768a067badd953bf1bc5a3"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a60332922359f920193b1d4826953c507a877b523b2395ad7bc716ddd386d866"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16a8663d6e281208d78806dbe14ee9903715361cf81f6d4309944e4d1e59ac5b"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a16418ecf1329f71df119e8a65f3aa68004a3f9383821edcb20f0702934d8087"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d9153257a3f70d5f69edf2325357251ed20f772b12e593f3b3377b5f78e7ef8"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:02a51034802cbf38db3f89c66fb5d2ec57e6fe7ef2f4a44d070a593c3688667b"}, - {file = 
"charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:2e396d70bc4ef5325b72b593a72c8979999aa52fb8bcf03f701c1b03e1166918"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:11b53acf2411c3b09e6af37e4b9005cba376c872503c8f28218c7243582df45d"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:0bf2dae5291758b6f84cf923bfaa285632816007db0330002fa1de38bfcb7154"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2c03cc56021a4bd59be889c2b9257dae13bf55041a3372d3295416f86b295fb5"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:024e606be3ed92216e2b6952ed859d86b4cfa52cd5bc5f050e7dc28f9b43ec42"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4b0d02d7102dd0f997580b51edc4cebcf2ab6397a7edf89f1c73b586c614272c"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:358a7c4cb8ba9b46c453b1dd8d9e431452d5249072e4f56cfda3149f6ab1405e"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81d6741ab457d14fdedc215516665050f3822d3e56508921cc7239f8c8e66a58"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b8af03d2e37866d023ad0ddea594edefc31e827fee64f8de5611a1dbc373174"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9cf4e8ad252f7c38dd1f676b46514f92dc0ebeb0db5552f5f403509705e24753"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e696f0dd336161fca9adbb846875d40752e6eba585843c768935ba5c9960722b"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c22d3fe05ce11d3671297dc8973267daa0f938b93ec716e12e0f6dee81591dc1"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:109487860ef6a328f3eec66f2bf78b0b72400280d8f8ea05f69c51644ba6521a"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:37f8febc8ec50c14f3ec9637505f28e58d4f66752207ea177c1d67df25da5aed"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f97e83fa6c25693c7a35de154681fcc257c1c41b38beb0304b9c4d2d9e164479"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a152f5f33d64a6be73f1d30c9cc82dfc73cec6477ec268e7c6e4c7d23c2d2291"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:39049da0ffb96c8cbb65cbf5c5f3ca3168990adf3551bd1dee10c48fce8ae820"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-win32.whl", hash = "sha256:4457ea6774b5611f4bed5eaa5df55f70abde42364d498c5134b7ef4c6958e20e"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:e62164b50f84e20601c1ff8eb55620d2ad25fb81b59e3cd776a1902527a788af"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8eade758719add78ec36dc13201483f8e9b5d940329285edcd5f70c0a9edbd7f"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8499ca8f4502af841f68135133d8258f7b32a53a1d594aa98cc52013fff55678"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3fc1c4a2ffd64890aebdb3f97e1278b0cc72579a08ca4de8cd2c04799a3a22be"}, - {file = 
"charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d3ffdaafe92a5dc603cb9bd5111aaa36dfa187c8285c543be562e61b755f6b"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2ac1b08635a8cd4e0cbeaf6f5e922085908d48eb05d44c5ae9eabab148512ca"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6f45710b4459401609ebebdbcfb34515da4fc2aa886f95107f556ac69a9147e"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ae1de54a77dc0d6d5fcf623290af4266412a7c4be0b1ff7444394f03f5c54e3"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b590df687e3c5ee0deef9fc8c547d81986d9a1b56073d82de008744452d6541"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab5de034a886f616a5668aa5d098af2b5385ed70142090e2a31bcbd0af0fdb3d"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9cb3032517f1627cc012dbc80a8ec976ae76d93ea2b5feaa9d2a5b8882597579"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:608862a7bf6957f2333fc54ab4399e405baad0163dc9f8d99cb236816db169d4"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0f438ae3532723fb6ead77e7c604be7c8374094ef4ee2c5e03a3a17f1fca256c"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:356541bf4381fa35856dafa6a965916e54bed415ad8a24ee6de6e37deccf2786"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-win32.whl", hash = "sha256:39cf9ed17fe3b1bc81f33c9ceb6ce67683ee7526e65fde1447c772afc54a1bb8"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:0a11e971ed097d24c534c037d298ad32c6ce81a45736d31e0ff0ad37ab437d59"}, - {file = "charset_normalizer-3.0.1-py3-none-any.whl", hash = "sha256:7e189e2e1d3ed2f4aebabd2d5b0f931e883676e51c7624826e0a4e5fe8a0bf24"}, +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, + {file = 
"charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = 
"sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, + {file = 
"charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, + {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, ] [[package]] @@ -355,35 +300,31 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "39.0.1" +version = "40.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "cryptography-39.0.1-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965"}, - {file = "cryptography-39.0.1-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106"}, - {file = "cryptography-39.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c"}, - {file = "cryptography-39.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4"}, - {file = "cryptography-39.0.1-cp36-abi3-win32.whl", hash = "sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8"}, - {file = "cryptography-39.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac"}, - {file = "cryptography-39.0.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad"}, - {file = "cryptography-39.0.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c5caeb8188c24888c90b5108a441c106f7faa4c4c075a2bcae438c6e8ca73cef"}, - {file = "cryptography-39.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4789d1e3e257965e960232345002262ede4d094d1a19f4d3b52e48d4d8f3b885"}, - {file = "cryptography-39.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = 
"sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a"}, - {file = "cryptography-39.0.1.tar.gz", hash = "sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695"}, + {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:8f79b5ff5ad9d3218afb1e7e20ea74da5f76943ee5edb7f76e56ec5161ec782b"}, + {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:05dc219433b14046c476f6f09d7636b92a1c3e5808b9a6536adf4932b3b2c440"}, + {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4df2af28d7bedc84fe45bd49bc35d710aede676e2a4cb7fc6d103a2adc8afe4d"}, + {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dcca15d3a19a66e63662dc8d30f8036b07be851a8680eda92d079868f106288"}, + {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:a04386fb7bc85fab9cd51b6308633a3c271e3d0d3eae917eebab2fac6219b6d2"}, + {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:adc0d980fd2760c9e5de537c28935cc32b9353baaf28e0814df417619c6c8c3b"}, + {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d5a1bd0e9e2031465761dfa920c16b0065ad77321d8a8c1f5ee331021fda65e9"}, + {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a95f4802d49faa6a674242e25bfeea6fc2acd915b5e5e29ac90a32b1139cae1c"}, + {file = "cryptography-40.0.2-cp36-abi3-win32.whl", hash = "sha256:aecbb1592b0188e030cb01f82d12556cf72e218280f621deed7d806afd2113f9"}, + {file = "cryptography-40.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:b12794f01d4cacfbd3177b9042198f3af1c856eedd0a98f10f141385c809a14b"}, + {file = "cryptography-40.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:142bae539ef28a1c76794cca7f49729e7c54423f615cfd9b0b1fa90ebe53244b"}, + {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:956ba8701b4ffe91ba59665ed170a2ebbdc6fc0e40de5f6059195d9f2b33ca0e"}, + {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4f01c9863da784558165f5d4d916093737a75203a5c5286fde60e503e4276c7a"}, + {file = "cryptography-40.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3daf9b114213f8ba460b829a02896789751626a2a4e7a43a28ee77c04b5e4958"}, + {file = "cryptography-40.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48f388d0d153350f378c7f7b41497a54ff1513c816bcbbcafe5b829e59b9ce5b"}, + {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c0764e72b36a3dc065c155e5b22f93df465da9c39af65516fe04ed3c68c92636"}, + {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:cbaba590180cba88cb99a5f76f90808a624f18b169b90a4abb40c1fd8c19420e"}, + {file = "cryptography-40.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:7a38250f433cd41df7fcb763caa3ee9362777fdb4dc642b9a349721d2bf47404"}, + {file = "cryptography-40.0.2.tar.gz", hash = "sha256:c33c0d32b8594fa647d2e01dbccc303478e16fdd7cf98652d5b3ed11aa5e5c99"}, ] [package.dependencies] @@ -392,10 +333,10 @@ cffi = ">=1.12" [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -pep8test = ["black", "check-manifest", "mypy", "ruff", "types-pytz", "types-requests"] +pep8test = ["black", "check-manifest", "mypy", "ruff"] sdist = ["setuptools-rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist", "pytz"] +test = ["iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist"] test-randomorder = ["pytest-randomly"] tox = ["tox"] @@ -536,193 +477,19 @@ pytz = "*" [[package]] name = "filelock" -version = "3.9.0" +version = "3.12.0" description = "A platform independent file lock." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "filelock-3.9.0-py3-none-any.whl", hash = "sha256:f58d535af89bb9ad5cd4df046f741f8553a418c01a7856bf0d173bbc9f6bd16d"}, - {file = "filelock-3.9.0.tar.gz", hash = "sha256:7b319f24340b51f55a2bf7a12ac0755a9b03e718311dac567a0f4f7fabd2f5de"}, + {file = "filelock-3.12.0-py3-none-any.whl", hash = "sha256:ad98852315c2ab702aeb628412cbf7e95b7ce8c3bf9565670b4eaecf1db370a9"}, + {file = "filelock-3.12.0.tar.gz", hash = "sha256:fc03ae43288c013d2ea83c8597001b1129db351aad9c57fe2409327916b8e718"}, ] [package.extras] -docs = ["furo (>=2022.12.7)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] -testing = ["covdefaults (>=2.2.2)", "coverage (>=7.0.1)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] - -[[package]] -name = "flake8" -version = "6.0.0" -description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" -optional = false -python-versions = ">=3.8.1" -files = [ - {file = "flake8-6.0.0-py2.py3-none-any.whl", hash = "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7"}, - {file = "flake8-6.0.0.tar.gz", hash = "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"}, -] - -[package.dependencies] -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.10.0,<2.11.0" -pyflakes = ">=3.0.0,<3.1.0" - -[[package]] -name = "flake8-annotations" -version = "3.0.1" -description = "Flake8 Type Annotation Checks" -category = "dev" -optional = false -python-versions = ">=3.8.1" -files = [ - {file = "flake8_annotations-3.0.1-py3-none-any.whl", hash = "sha256:af78e3216ad800d7e144745ece6df706c81b3255290cbf870e54879d495e8ade"}, - {file = "flake8_annotations-3.0.1.tar.gz", hash = "sha256:ff37375e71e3b83f2a5a04d443c41e2c407de557a884f3300a7fa32f3c41cb0a"}, -] - -[package.dependencies] -attrs = ">=21.4" -flake8 = ">=5.0" - -[[package]] -name = "flake8-bandit" -version = "4.1.1" -description = "Automated security testing with bandit and flake8." 
-category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "flake8_bandit-4.1.1-py3-none-any.whl", hash = "sha256:4c8a53eb48f23d4ef1e59293657181a3c989d0077c9952717e98a0eace43e06d"}, - {file = "flake8_bandit-4.1.1.tar.gz", hash = "sha256:068e09287189cbfd7f986e92605adea2067630b75380c6b5733dab7d87f9a84e"}, -] - -[package.dependencies] -bandit = ">=1.7.3" -flake8 = ">=5.0.0" - -[[package]] -name = "flake8-bugbear" -version = "23.5.9" -description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." -category = "dev" -optional = false -python-versions = ">=3.8.1" -files = [ - {file = "flake8-bugbear-23.5.9.tar.gz", hash = "sha256:695c84a5d7da54eb35d79a7354dbaf3aaba80de32250608868aa1c85534b2a86"}, - {file = "flake8_bugbear-23.5.9-py3-none-any.whl", hash = "sha256:631fa927fbc799e8ca636b849dd7dfc304812287137b6ecb3277821f028bee40"}, -] - -[package.dependencies] -attrs = ">=19.2.0" -flake8 = ">=6.0.0" - -[package.extras] -dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "pytest", "tox"] - -[[package]] -name = "flake8-docstrings" -version = "1.7.0" -description = "Extension for flake8 which uses pydocstyle to check docstrings" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75"}, - {file = "flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af"}, -] - -[package.dependencies] -flake8 = ">=3" -pydocstyle = ">=2.1" - -[[package]] -name = "flake8-import-order" -version = "0.18.2" -description = "Flake8 and pylama plugin that checks the ordering of import statements." -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "flake8-import-order-0.18.2.tar.gz", hash = "sha256:e23941f892da3e0c09d711babbb0c73bc735242e9b216b726616758a920d900e"}, - {file = "flake8_import_order-0.18.2-py2.py3-none-any.whl", hash = "sha256:82ed59f1083b629b030ee9d3928d9e06b6213eb196fe745b3a7d4af2168130df"}, -] - -[package.dependencies] -pycodestyle = "*" -setuptools = "*" - -[[package]] -name = "flake8-string-format" -version = "0.3.0" -description = "string format checker, plugin for flake8" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "flake8-string-format-0.3.0.tar.gz", hash = "sha256:65f3da786a1461ef77fca3780b314edb2853c377f2e35069723348c8917deaa2"}, - {file = "flake8_string_format-0.3.0-py2.py3-none-any.whl", hash = "sha256:812ff431f10576a74c89be4e85b8e075a705be39bc40c4b4278b5b13e2afa9af"}, -] - -[package.dependencies] -flake8 = "*" - -[[package]] -name = "flake8-tidy-imports" -version = "4.8.0" -description = "A flake8 plugin that helps you write tidier imports." 
-category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "flake8-tidy-imports-4.8.0.tar.gz", hash = "sha256:df44f9c841b5dfb3a7a1f0da8546b319d772c2a816a1afefcce43e167a593d83"}, - {file = "flake8_tidy_imports-4.8.0-py3-none-any.whl", hash = "sha256:25bd9799358edefa0e010ce2c587b093c3aba942e96aeaa99b6d0500ae1bf09c"}, -] - -[package.dependencies] -flake8 = ">=3.8.0" - -[[package]] -name = "flake8-todo" -version = "0.7" -description = "TODO notes checker, plugin for flake8" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "flake8-todo-0.7.tar.gz", hash = "sha256:6e4c5491ff838c06fe5a771b0e95ee15fc005ca57196011011280fc834a85915"}, -] - -[package.dependencies] -pycodestyle = ">=2.0.0,<3.0.0" - -[[package]] -name = "gitdb" -version = "4.0.10" -description = "Git Object Database" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, - {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, -] - -[package.dependencies] -smmap = ">=3.0.1,<6" - -[[package]] -name = "gitpython" -version = "3.1.31" -description = "GitPython is a Python library used to interact with Git repositories" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "GitPython-3.1.31-py3-none-any.whl", hash = "sha256:f04893614f6aa713a60cbbe1e6a97403ef633103cdd0ef5eb6efe0deb98dbe8d"}, - {file = "GitPython-3.1.31.tar.gz", hash = "sha256:8ce3bcf69adfdf7c7d503e78fd3b1c492af782d58893b650adb2ac8912ddd573"}, -] - -[package.dependencies] -gitdb = ">=4.0.1,<5" +docs = ["furo (>=2023.3.27)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] [[package]] name = "gunicorn" @@ -759,14 +526,14 @@ files = [ [[package]] name = "httpcore" -version = "0.16.3" +version = "0.17.0" description = "A minimal low-level HTTP client." 
category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "httpcore-0.16.3-py3-none-any.whl", hash = "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0"}, - {file = "httpcore-0.16.3.tar.gz", hash = "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb"}, + {file = "httpcore-0.17.0-py3-none-any.whl", hash = "sha256:0fdfea45e94f0c9fd96eab9286077f9ff788dd186635ae61b312693e4d943599"}, + {file = "httpcore-0.17.0.tar.gz", hash = "sha256:cc045a3241afbf60ce056202301b4d8b6af08845e3294055eb26b09913ef903c"}, ] [package.dependencies] @@ -805,14 +572,14 @@ socks = ["socksio (>=1.0.0,<2.0.0)"] [[package]] name = "identify" -version = "2.5.18" +version = "2.5.24" description = "File identification library for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "identify-2.5.18-py2.py3-none-any.whl", hash = "sha256:93aac7ecf2f6abf879b8f29a8002d3c6de7086b8c28d88e1ad15045a15ab63f9"}, - {file = "identify-2.5.18.tar.gz", hash = "sha256:89e144fa560cc4cffb6ef2ab5e9fb18ed9f9b3cb054384bab4b95c12f6c309fe"}, + {file = "identify-2.5.24-py2.py3-none-any.whl", hash = "sha256:986dbfb38b1140e763e413e6feb44cd731faf72d1909543178aa79b0e258265d"}, + {file = "identify-2.5.24.tar.gz", hash = "sha256:0aac67d5b4812498056d28a9a512a483f5085cc28640b02b258a59dac34301d4"}, ] [package.extras] @@ -860,18 +627,6 @@ files = [ [package.extras] testing = ["coverage", "pyyaml"] -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] - [[package]] name = "mslex" version = "0.3.0" @@ -899,48 +654,21 @@ files = [ [package.dependencies] setuptools = "*" -[[package]] -name = "pbr" -version = "5.11.1" -description = "Python Build Reasonableness" -category = "dev" -optional = false -python-versions = ">=2.6" -files = [ - {file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"}, - {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"}, -] - -[[package]] -name = "pep8-naming" -version = "0.13.3" -description = "Check PEP-8 naming conventions, plugin for flake8" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pep8-naming-0.13.3.tar.gz", hash = "sha256:1705f046dfcd851378aac3be1cd1551c7c1e5ff363bacad707d43007877fa971"}, - {file = "pep8_naming-0.13.3-py3-none-any.whl", hash = "sha256:1a86b8c71a03337c97181917e2b472f0f5e4ccb06844a0d6f0a33522549e7a80"}, -] - -[package.dependencies] -flake8 = ">=5.0.0" - [[package]] name = "platformdirs" -version = "3.0.0" +version = "3.5.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.0.0-py3-none-any.whl", hash = "sha256:b1d5eb14f221506f50d6604a561f4c5786d9e80355219694a1b244bcd96f4567"}, - {file = "platformdirs-3.0.0.tar.gz", hash = "sha256:8a1228abb1ef82d788f74139988b137e78692984ec7b08eaa6c65f1723af28f9"}, + {file = "platformdirs-3.5.0-py3-none-any.whl", hash = "sha256:47692bc24c1958e8b0f13dd727307cff1db103fca36399f457da8e05f222fdc4"}, + {file = "platformdirs-3.5.0.tar.gz", hash = "sha256:7954a68d0ba23558d753f73437c55f89027cf8f5108c19844d4b82e5af396335"}, ] [package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] [[package]] name = "pre-commit" @@ -978,26 +706,26 @@ twisted = ["twisted"] [[package]] name = "psutil" -version = "5.9.4" +version = "5.9.5" description = "Cross-platform lib for process and system monitoring in Python." category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ - {file = "psutil-5.9.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8"}, - {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:68908971daf802203f3d37e78d3f8831b6d1014864d7a85937941bb35f09aefe"}, - {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ff89f9b835100a825b14c2808a106b6fdcc4b15483141482a12c725e7f78549"}, - {file = "psutil-5.9.4-cp27-cp27m-win32.whl", hash = "sha256:852dd5d9f8a47169fe62fd4a971aa07859476c2ba22c2254d4a1baa4e10b95ad"}, - {file = "psutil-5.9.4-cp27-cp27m-win_amd64.whl", hash = "sha256:9120cd39dca5c5e1c54b59a41d205023d436799b1c8c4d3ff71af18535728e94"}, - {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6b92c532979bafc2df23ddc785ed116fced1f492ad90a6830cf24f4d1ea27d24"}, - {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:efeae04f9516907be44904cc7ce08defb6b665128992a56957abc9b61dca94b7"}, - {file = "psutil-5.9.4-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:54d5b184728298f2ca8567bf83c422b706200bcbbfafdc06718264f9393cfeb7"}, - {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16653106f3b59386ffe10e0bad3bb6299e169d5327d3f187614b1cb8f24cf2e1"}, - {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54c0d3d8e0078b7666984e11b12b88af2db11d11249a8ac8920dd5ef68a66e08"}, - {file = "psutil-5.9.4-cp36-abi3-win32.whl", hash = "sha256:149555f59a69b33f056ba1c4eb22bb7bf24332ce631c44a319cec09f876aaeff"}, - {file = "psutil-5.9.4-cp36-abi3-win_amd64.whl", hash = "sha256:fd8522436a6ada7b4aad6638662966de0d61d241cb821239b2ae7013d41a43d4"}, - {file = "psutil-5.9.4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6001c809253a29599bc0dfd5179d9f8a5779f9dffea1da0f13c53ee568115e1e"}, - {file = "psutil-5.9.4.tar.gz", hash = "sha256:3d7f9739eb435d4b1338944abe23f49584bde5395f27487d2ee25ad9a8774a62"}, + {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = 
"sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"}, + {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab8ed1a1d77c95453db1ae00a3f9c50227ebd955437bcf2a574ba8adbf6a74d5"}, + {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4aef137f3345082a3d3232187aeb4ac4ef959ba3d7c10c33dd73763fbc063da4"}, + {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48"}, + {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:acf2aef9391710afded549ff602b5887d7a2349831ae4c26be7c807c0a39fac4"}, + {file = "psutil-5.9.5-cp27-none-win32.whl", hash = "sha256:5b9b8cb93f507e8dbaf22af6a2fd0ccbe8244bf30b1baad6b3954e935157ae3f"}, + {file = "psutil-5.9.5-cp27-none-win_amd64.whl", hash = "sha256:8c5f7c5a052d1d567db4ddd231a9d27a74e8e4a9c3f44b1032762bd7b9fdcd42"}, + {file = "psutil-5.9.5-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3c6f686f4225553615612f6d9bc21f1c0e305f75d7d8454f9b46e901778e7217"}, + {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a7dd9997128a0d928ed4fb2c2d57e5102bb6089027939f3b722f3a210f9a8da"}, + {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89518112647f1276b03ca97b65cc7f64ca587b1eb0278383017c2a0dcc26cbe4"}, + {file = "psutil-5.9.5-cp36-abi3-win32.whl", hash = "sha256:104a5cc0e31baa2bcf67900be36acde157756b9c44017b86b2c049f11957887d"}, + {file = "psutil-5.9.5-cp36-abi3-win_amd64.whl", hash = "sha256:b258c0c1c9d145a1d5ceffab1134441c4c5113b2417fafff7315a917a026c3c9"}, + {file = "psutil-5.9.5-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30"}, + {file = "psutil-5.9.5.tar.gz", hash = "sha256:5410638e4df39c54d957fc51ce03048acd8e6d60abc0f5107af51e5fb566eb3c"}, ] [package.extras] @@ -1075,18 +803,6 @@ files = [ {file = "psycopg2_binary-2.9.6-cp39-cp39-win_amd64.whl", hash = "sha256:f6a88f384335bb27812293fdb11ac6aee2ca3f51d3c7820fe03de0a304ab6249"}, ] -[[package]] -name = "pycodestyle" -version = "2.10.0" -description = "Python style guide checker" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pycodestyle-2.10.0-py2.py3-none-any.whl", hash = "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"}, - {file = "pycodestyle-2.10.0.tar.gz", hash = "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053"}, -] - [[package]] name = "pycparser" version = "2.21" @@ -1099,36 +815,6 @@ files = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] -[[package]] -name = "pydocstyle" -version = "6.3.0" -description = "Python docstring style checker" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, - {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, -] - -[package.dependencies] -snowballstemmer = ">=2.2.0" - -[package.extras] -toml = ["tomli (>=1.2.3)"] - -[[package]] -name = "pyflakes" -version = "3.0.1" -description = "passive checker of Python programs" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = 
"pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"}, - {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"}, -] - [[package]] name = "pyjwt" version = "2.7.0" @@ -1202,14 +888,14 @@ test = ["pyaml", "pytest", "toml"] [[package]] name = "pytz" -version = "2022.7.1" +version = "2023.3" description = "World timezone definitions, modern and historical" category = "main" optional = false python-versions = "*" files = [ - {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"}, - {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"}, + {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, + {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, ] [[package]] @@ -1264,26 +950,53 @@ files = [ [[package]] name = "requests" -version = "2.28.2" +version = "2.30.0" description = "Python HTTP for Humans." category = "main" optional = false -python-versions = ">=3.7, <4" +python-versions = ">=3.7" files = [ - {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, - {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, + {file = "requests-2.30.0-py3-none-any.whl", hash = "sha256:10e94cc4f3121ee6da529d358cdaeaff2f1c409cd377dbc72b825852f2f7e294"}, + {file = "requests-2.30.0.tar.gz", hash = "sha256:239d7d4458afcb28a692cdd298d87542235f4ca8d36d03a15bfc128a6559a2f4"}, ] [package.dependencies] certifi = ">=2017.4.17" charset-normalizer = ">=2,<4" idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" +urllib3 = ">=1.21.1,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "ruff" +version = "0.0.265" +description = "An extremely fast Python linter, written in Rust." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.0.265-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:30ddfe22de6ce4eb1260408f4480bbbce998f954dbf470228a21a9b2c45955e4"}, + {file = "ruff-0.0.265-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:a11bd0889e88d3342e7bc514554bb4461bf6cc30ec115821c2425cfaac0b1b6a"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a9b38bdb40a998cbc677db55b6225a6c4fadcf8819eb30695e1b8470942426b"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a8b44a245b60512403a6a03a5b5212da274d33862225c5eed3bcf12037eb19bb"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b279fa55ea175ef953208a6d8bfbcdcffac1c39b38cdb8c2bfafe9222add70bb"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5028950f7af9b119d43d91b215d5044976e43b96a0d1458d193ef0dd3c587bf8"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4057eb539a1d88eb84e9f6a36e0a999e0f261ed850ae5d5817e68968e7b89ed9"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d586e69ab5cbf521a1910b733412a5735936f6a610d805b89d35b6647e2a66aa"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa17b13cd3f29fc57d06bf34c31f21d043735cc9a681203d634549b0e41047d1"}, + {file = "ruff-0.0.265-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:9ac13b11d9ad3001de9d637974ec5402a67cefdf9fffc3929ab44c2fcbb850a1"}, + {file = "ruff-0.0.265-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:62a9578b48cfd292c64ea3d28681dc16b1aa7445b7a7709a2884510fc0822118"}, + {file = "ruff-0.0.265-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d0f9967f84da42d28e3d9d9354cc1575f96ed69e6e40a7d4b780a7a0418d9409"}, + {file = "ruff-0.0.265-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1d5a8de2fbaf91ea5699451a06f4074e7a312accfa774ad9327cde3e4fda2081"}, + {file = "ruff-0.0.265-py3-none-win32.whl", hash = "sha256:9e9db5ccb810742d621f93272e3cc23b5f277d8d00c4a79668835d26ccbe48dd"}, + {file = "ruff-0.0.265-py3-none-win_amd64.whl", hash = "sha256:f54facf286103006171a00ce20388d88ed1d6732db3b49c11feb9bf3d46f90e9"}, + {file = "ruff-0.0.265-py3-none-win_arm64.whl", hash = "sha256:c78470656e33d32ddc54e8482b1b0fc6de58f1195586731e5ff1405d74421499"}, + {file = "ruff-0.0.265.tar.gz", hash = "sha256:53c17f0dab19ddc22b254b087d1381b601b155acfa8feed514f0d6a413d0ab3a"}, +] + [[package]] name = "sentry-sdk" version = "1.22.2" @@ -1328,14 +1041,14 @@ tornado = ["tornado (>=5)"] [[package]] name = "setuptools" -version = "67.4.0" +version = "67.7.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-67.4.0-py3-none-any.whl", hash = "sha256:f106dee1b506dee5102cc3f3e9e68137bbad6d47b616be7991714b0c62204251"}, - {file = "setuptools-67.4.0.tar.gz", hash = "sha256:e5fd0a713141a4a105412233c63dc4e17ba0090c8e8334594ac790ec97792330"}, + {file = "setuptools-67.7.2-py3-none-any.whl", hash = "sha256:23aaf86b85ca52ceb801d32703f12d77517b2556af839621c641fca11287952b"}, + {file = "setuptools-67.7.2.tar.gz", hash = "sha256:f104fa03692a2602fa0fec6c6a9e63b6c8a968de13e17c026957dd1f53d80990"}, ] [package.extras] @@ -1343,18 +1056,6 @@ docs = ["furo", "jaraco.packaging (>=9)", 
"jaraco.tidelift (>=1.4)", "pygments-g testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] -[[package]] -name = "smmap" -version = "5.0.0" -description = "A pure Python implementation of a sliding window memory map manager" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, - {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, -] - [[package]] name = "sniffio" version = "1.3.0" @@ -1367,18 +1068,6 @@ files = [ {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, ] -[[package]] -name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, -] - [[package]] name = "sqlparse" version = "0.4.4" @@ -1396,21 +1085,6 @@ dev = ["build", "flake8"] doc = ["sphinx"] test = ["pytest", "pytest-cov"] -[[package]] -name = "stevedore" -version = "5.0.0" -description = "Manage dynamic plugins for Python applications" -category = "dev" -optional = false -python-versions = ">=3.8" -files = [ - {file = "stevedore-5.0.0-py3-none-any.whl", hash = "sha256:bd5a71ff5e5e5f5ea983880e4a1dd1bb47f8feebbb3d95b592398e2f02194771"}, - {file = "stevedore-5.0.0.tar.gz", hash = "sha256:2c428d2338976279e8eb2196f7a94910960d9f7ba2f41f3988511e95ca447021"}, -] - -[package.dependencies] -pbr = ">=2.0.0,<2.1.0 || >2.1.0" - [[package]] name = "taskipy" version = "1.10.4" @@ -1443,26 +1117,26 @@ files = [ [[package]] name = "tzdata" -version = "2022.7" +version = "2023.3" description = "Provider of IANA time zone data" category = "main" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2022.7-py2.py3-none-any.whl", hash = "sha256:2b88858b0e3120792a3c0635c23daf36a7d7eeeca657c323da299d2094402a0d"}, - {file = "tzdata-2022.7.tar.gz", hash = "sha256:fe5f866eddd8b96e9fcba978f8e503c909b19ea7efda11e52e39494bad3a7bfa"}, + {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, + {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, ] [[package]] name = "urllib3" -version = "1.26.14" +version = "1.26.15" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-1.26.14-py2.py3-none-any.whl", hash = "sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1"}, - {file = "urllib3-1.26.14.tar.gz", hash = "sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72"}, + {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, + {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, ] [package.extras] @@ -1472,24 +1146,24 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "virtualenv" -version = "20.19.0" +version = "20.23.0" description = "Virtual Python Environment builder" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.19.0-py3-none-any.whl", hash = "sha256:54eb59e7352b573aa04d53f80fc9736ed0ad5143af445a1e539aada6eb947dd1"}, - {file = "virtualenv-20.19.0.tar.gz", hash = "sha256:37a640ba82ed40b226599c522d411e4be5edb339a0c0de030c0dc7b646d61590"}, + {file = "virtualenv-20.23.0-py3-none-any.whl", hash = "sha256:6abec7670e5802a528357fdc75b26b9f57d5d92f29c5462ba0fbe45feacc685e"}, + {file = "virtualenv-20.23.0.tar.gz", hash = "sha256:a85caa554ced0c0afbd0d638e7e2d7b5f92d23478d05d17a76daeac8f279f924"}, ] [package.dependencies] distlib = ">=0.3.6,<1" -filelock = ">=3.4.1,<4" -platformdirs = ">=2.4,<4" +filelock = ">=3.11,<4" +platformdirs = ">=3.2,<4" [package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] -test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23)", "pytest (>=7.2.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=67.7.1)", "time-machine (>=2.9)"] [[package]] name = "whitenoise" @@ -1509,4 +1183,4 @@ brotli = ["Brotli"] [metadata] lock-version = "2.0" python-versions = "3.10.*" -content-hash = "a89b60823f9d7717b78c1071654c0c1f237768669a8e37e76175730de209e804" +content-hash = "f9076a1d72b610e77d0c389a4992c814a4c81028232a1ec4f060f77f2cb9125c" diff --git a/pydis_site/apps/api/admin.py b/pydis_site/apps/api/admin.py index e123d150..f3cc0405 100644 --- a/pydis_site/apps/api/admin.py +++ b/pydis_site/apps/api/admin.py @@ -1,7 +1,7 @@ from __future__ import annotations import json -from typing import Iterable, Optional, Tuple +from collections.abc import Iterable from django import urls from django.contrib import admin @@ -62,16 +62,16 @@ class InfractionActorFilter(admin.SimpleListFilter): title = "Actor" parameter_name = "actor" - def lookups(self, request: HttpRequest, model: NominationAdmin) -> Iterable[Tuple[int, str]]: + def lookups(self, request: HttpRequest, model: NominationAdmin) -> Iterable[tuple[int, str]]: """Selectable values for viewer to filter by.""" 
actor_ids = Infraction.objects.order_by().values_list("actor").distinct() actors = User.objects.filter(id__in=actor_ids) return ((a.id, a.username) for a in actors) - def queryset(self, request: HttpRequest, queryset: QuerySet) -> Optional[QuerySet]: + def queryset(self, request: HttpRequest, queryset: QuerySet) -> QuerySet | None: """Query to filter the list of Users against.""" if not self.value(): - return + return None return queryset.filter(actor__id=self.value()) @@ -149,7 +149,7 @@ class DeletedMessageAdmin(admin.ModelAdmin): list_display = ("id", "author", "channel_id") - def embed_data(self, message: DeletedMessage) -> Optional[str]: + def embed_data(self, message: DeletedMessage) -> str | None: """Format embed data in a code block for better readability.""" if message.embeds: return format_html( @@ -157,6 +157,7 @@ class DeletedMessageAdmin(admin.ModelAdmin): "{0}", json.dumps(message.embeds, indent=4) ) + return None embed_data.short_description = "Embeds" @@ -229,16 +230,16 @@ class NominationActorFilter(admin.SimpleListFilter): title = "Actor" parameter_name = "actor" - def lookups(self, request: HttpRequest, model: NominationAdmin) -> Iterable[Tuple[int, str]]: + def lookups(self, request: HttpRequest, model: NominationAdmin) -> Iterable[tuple[int, str]]: """Selectable values for viewer to filter by.""" actor_ids = NominationEntry.objects.order_by().values_list("actor").distinct() actors = User.objects.filter(id__in=actor_ids) return ((a.id, a.username) for a in actors) - def queryset(self, request: HttpRequest, queryset: QuerySet) -> Optional[QuerySet]: + def queryset(self, request: HttpRequest, queryset: QuerySet) -> QuerySet | None: """Query to filter the list of Users against.""" if not self.value(): - return + return None nomination_ids = NominationEntry.objects.filter( actor__id=self.value() ).values_list("nomination_id").distinct() @@ -292,16 +293,16 @@ class NominationEntryActorFilter(admin.SimpleListFilter): title = "Actor" parameter_name = "actor" - def lookups(self, request: HttpRequest, model: NominationAdmin) -> Iterable[Tuple[int, str]]: + def lookups(self, request: HttpRequest, model: NominationAdmin) -> Iterable[tuple[int, str]]: """Selectable values for viewer to filter by.""" actor_ids = NominationEntry.objects.order_by().values_list("actor").distinct() actors = User.objects.filter(id__in=actor_ids) return ((a.id, a.username) for a in actors) - def queryset(self, request: HttpRequest, queryset: QuerySet) -> Optional[QuerySet]: + def queryset(self, request: HttpRequest, queryset: QuerySet) -> QuerySet | None: """Query to filter the list of Users against.""" if not self.value(): - return + return None return queryset.filter(actor__id=self.value()) @@ -425,15 +426,15 @@ class UserRoleFilter(admin.SimpleListFilter): title = "Role" parameter_name = "role" - def lookups(self, request: HttpRequest, model: UserAdmin) -> Iterable[Tuple[str, str]]: + def lookups(self, request: HttpRequest, model: UserAdmin) -> Iterable[tuple[str, str]]: """Selectable values for viewer to filter by.""" roles = Role.objects.all() return ((r.name, r.name) for r in roles) - def queryset(self, request: HttpRequest, queryset: QuerySet) -> Optional[QuerySet]: + def queryset(self, request: HttpRequest, queryset: QuerySet) -> QuerySet | None: """Query to filter the list of Users against.""" if not self.value(): - return + return None role = Role.objects.get(name=self.value()) return queryset.filter(roles__contains=[role.id]) diff --git a/pydis_site/apps/api/github_utils.py 
b/pydis_site/apps/api/github_utils.py index 44c571c3..af659195 100644 --- a/pydis_site/apps/api/github_utils.py +++ b/pydis_site/apps/api/github_utils.py @@ -82,7 +82,7 @@ def generate_token() -> str: Refer to: https://docs.github.com/en/developers/apps/building-github-apps/authenticating-with-github-apps#authenticating-as-a-github-app """ - now = datetime.datetime.now() + now = datetime.datetime.now(tz=datetime.timezone.utc) return jwt.encode( { "iat": math.floor((now - datetime.timedelta(seconds=60)).timestamp()), # Issued at @@ -145,8 +145,12 @@ def authorize(owner: str, repo: str) -> httpx.Client: def check_run_status(run: WorkflowRun) -> str: """Check if the provided run has been completed, otherwise raise an exception.""" - created_at = datetime.datetime.strptime(run.created_at, settings.GITHUB_TIMESTAMP_FORMAT) - run_time = datetime.datetime.utcnow() - created_at + created_at = ( + datetime.datetime + .strptime(run.created_at, settings.GITHUB_TIMESTAMP_FORMAT) + .replace(tzinfo=datetime.timezone.utc) + ) + run_time = datetime.datetime.now(tz=datetime.timezone.utc) - created_at if run.status != "completed": if run_time <= MAX_RUN_TIME: @@ -154,8 +158,7 @@ def check_run_status(run: WorkflowRun) -> str: f"The requested run is still pending. It was created " f"{run_time.seconds // 60}:{run_time.seconds % 60 :>02} minutes ago." ) - else: - raise RunTimeoutError("The requested workflow was not ready in time.") + raise RunTimeoutError("The requested workflow was not ready in time.") if run.conclusion != "success": # The action failed, or did not run diff --git a/pydis_site/apps/api/models/bot/message.py b/pydis_site/apps/api/models/bot/message.py index 89ae27e4..d8147cd4 100644 --- a/pydis_site/apps/api/models/bot/message.py +++ b/pydis_site/apps/api/models/bot/message.py @@ -61,9 +61,10 @@ class Message(ModelReprMixin, models.Model): @property def timestamp(self) -> datetime.datetime: """Attribute that represents the message timestamp as derived from the snowflake id.""" - return datetime.datetime.utcfromtimestamp( - ((self.id >> 22) + 1420070400000) / 1000 - ).replace(tzinfo=datetime.timezone.utc) + return datetime.datetime.fromtimestamp( + ((self.id >> 22) + 1420070400000) / 1000, + tz=datetime.timezone.utc, + ) class Meta: """Metadata provided for Django's ORM.""" diff --git a/pydis_site/apps/api/models/bot/metricity.py b/pydis_site/apps/api/models/bot/metricity.py index f53dd33c..a55f5e5b 100644 --- a/pydis_site/apps/api/models/bot/metricity.py +++ b/pydis_site/apps/api/models/bot/metricity.py @@ -1,4 +1,3 @@ -from typing import List, Tuple from django.db import connections @@ -10,10 +9,9 @@ EXCLUDE_CHANNELS = ( ) -class NotFoundError(Exception): # noqa: N818 +class NotFoundError(Exception): """Raised when an entity cannot be found.""" - pass class Metricity: @@ -31,15 +29,14 @@ class Metricity: def user(self, user_id: str) -> dict: """Query a user's data.""" # TODO: Swap this back to some sort of verified at date - columns = ["joined_at"] - query = f"SELECT {','.join(columns)} FROM users WHERE id = '%s'" + query = "SELECT joined_at FROM users WHERE id = '%s'" self.cursor.execute(query, [user_id]) values = self.cursor.fetchone() if not values: - raise NotFoundError() + raise NotFoundError - return dict(zip(columns, values)) + return {'joined_at': values[0]} def total_messages(self, user_id: str) -> int: """Query total number of messages for a user.""" @@ -58,7 +55,7 @@ class Metricity: values = self.cursor.fetchone() if not values: - raise NotFoundError() + raise NotFoundError return 
values[0] @@ -88,11 +85,11 @@ class Metricity: values = self.cursor.fetchone() if not values: - raise NotFoundError() + raise NotFoundError return values[0] - def top_channel_activity(self, user_id: str) -> List[Tuple[str, int]]: + def top_channel_activity(self, user_id: str) -> list[tuple[str, int]]: """ Query the top three channels in which the user is most active. @@ -127,7 +124,7 @@ class Metricity: values = self.cursor.fetchall() if not values: - raise NotFoundError() + raise NotFoundError return values diff --git a/pydis_site/apps/api/tests/base.py b/pydis_site/apps/api/tests/base.py index c9f3cb7e..704b22cf 100644 --- a/pydis_site/apps/api/tests/base.py +++ b/pydis_site/apps/api/tests/base.py @@ -61,6 +61,7 @@ class AuthenticatedAPITestCase(APITestCase): ... self.assertEqual(response.status_code, 200) """ - def setUp(self): + def setUp(self) -> None: + """Bootstrap the user and authenticate it.""" super().setUp() self.client.force_authenticate(test_user) diff --git a/pydis_site/apps/api/tests/test_bumped_threads.py b/pydis_site/apps/api/tests/test_bumped_threads.py index 316e3f0b..2e3892c7 100644 --- a/pydis_site/apps/api/tests/test_bumped_threads.py +++ b/pydis_site/apps/api/tests/test_bumped_threads.py @@ -1,7 +1,7 @@ from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..models import BumpedThread +from pydis_site.apps.api.models import BumpedThread class UnauthedBumpedThreadAPITests(AuthenticatedAPITestCase): diff --git a/pydis_site/apps/api/tests/test_deleted_messages.py b/pydis_site/apps/api/tests/test_deleted_messages.py index 1eb535d8..62d17e58 100644 --- a/pydis_site/apps/api/tests/test_deleted_messages.py +++ b/pydis_site/apps/api/tests/test_deleted_messages.py @@ -1,10 +1,9 @@ -from datetime import datetime +from datetime import datetime, timezone from django.urls import reverse -from django.utils import timezone from .base import AuthenticatedAPITestCase -from ..models import MessageDeletionContext, User +from pydis_site.apps.api.models import MessageDeletionContext, User class DeletedMessagesWithoutActorTests(AuthenticatedAPITestCase): @@ -18,7 +17,7 @@ class DeletedMessagesWithoutActorTests(AuthenticatedAPITestCase): cls.data = { 'actor': None, - 'creation': datetime.utcnow().isoformat(), + 'creation': datetime.now(tz=timezone.utc).isoformat(), 'deletedmessage_set': [ { 'author': cls.author.id, @@ -58,7 +57,7 @@ class DeletedMessagesWithActorTests(AuthenticatedAPITestCase): cls.data = { 'actor': cls.actor.id, - 'creation': datetime.utcnow().isoformat(), + 'creation': datetime.now(tz=timezone.utc).isoformat(), 'deletedmessage_set': [ { 'author': cls.author.id, @@ -90,7 +89,7 @@ class DeletedMessagesLogURLTests(AuthenticatedAPITestCase): cls.deletion_context = MessageDeletionContext.objects.create( actor=cls.actor, - creation=timezone.now() + creation=datetime.now(tz=timezone.utc), ) def test_valid_log_url(self): diff --git a/pydis_site/apps/api/tests/test_documentation_links.py b/pydis_site/apps/api/tests/test_documentation_links.py index 4e238cbb..f4a332cb 100644 --- a/pydis_site/apps/api/tests/test_documentation_links.py +++ b/pydis_site/apps/api/tests/test_documentation_links.py @@ -1,7 +1,7 @@ from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..models import DocumentationLink +from pydis_site.apps.api.models import DocumentationLink class UnauthedDocumentationLinkAPITests(AuthenticatedAPITestCase): diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py index 
5059d651..4cef1c8f 100644 --- a/pydis_site/apps/api/tests/test_filters.py +++ b/pydis_site/apps/api/tests/test_filters.py @@ -1,7 +1,7 @@ import contextlib from dataclasses import dataclass from datetime import timedelta -from typing import Any, Dict, Tuple, Type +from typing import Any from django.db.models import Model from django.urls import reverse @@ -12,22 +12,22 @@ from pydis_site.apps.api.tests.base import AuthenticatedAPITestCase @dataclass() class TestSequence: - model: Type[Model] + model: type[Model] route: str - object: Dict[str, Any] - ignored_fields: Tuple[str, ...] = () + object: dict[str, Any] + ignored_fields: tuple[str, ...] = () def url(self, detail: bool = False) -> str: return reverse(f'api:bot:{self.route}-{"detail" if detail else "list"}') -FK_FIELDS: Dict[Type[Model], Tuple[str, ...]] = { +FK_FIELDS: dict[type[Model], tuple[str, ...]] = { FilterList: (), Filter: ("filter_list",), } -def get_test_sequences() -> Dict[str, TestSequence]: +def get_test_sequences() -> dict[str, TestSequence]: filter_list1_deny_dict = { "name": "testname", "list_type": 0, diff --git a/pydis_site/apps/api/tests/test_github_utils.py b/pydis_site/apps/api/tests/test_github_utils.py index 95bafec0..34fae875 100644 --- a/pydis_site/apps/api/tests/test_github_utils.py +++ b/pydis_site/apps/api/tests/test_github_utils.py @@ -12,7 +12,7 @@ import rest_framework.test from django.urls import reverse from pydis_site import settings -from .. import github_utils +from pydis_site.apps.api import github_utils class GeneralUtilityTests(unittest.TestCase): @@ -39,7 +39,8 @@ class GeneralUtilityTests(unittest.TestCase): delta = datetime.timedelta(minutes=10) self.assertAlmostEqual(decoded["exp"] - decoded["iat"], delta.total_seconds()) - self.assertLess(decoded["exp"], (datetime.datetime.now() + delta).timestamp()) + then = datetime.datetime.now(tz=datetime.timezone.utc) + delta + self.assertLess(decoded["exp"], then.timestamp()) class CheckRunTests(unittest.TestCase): @@ -50,7 +51,7 @@ class CheckRunTests(unittest.TestCase): "head_sha": "sha", "status": "completed", "conclusion": "success", - "created_at": datetime.datetime.utcnow().strftime(settings.GITHUB_TIMESTAMP_FORMAT), + "created_at": datetime.datetime.now(tz=datetime.timezone.utc).strftime(settings.GITHUB_TIMESTAMP_FORMAT), "artifacts_url": "url", } @@ -74,7 +75,8 @@ class CheckRunTests(unittest.TestCase): # Set the creation time to well before the MAX_RUN_TIME # to guarantee the right conclusion kwargs["created_at"] = ( - datetime.datetime.utcnow() - github_utils.MAX_RUN_TIME - datetime.timedelta(minutes=10) + datetime.datetime.now(tz=datetime.timezone.utc) + - github_utils.MAX_RUN_TIME - datetime.timedelta(minutes=10) ).strftime(settings.GITHUB_TIMESTAMP_FORMAT) with self.assertRaises(github_utils.RunTimeoutError): @@ -103,29 +105,26 @@ def get_response_authorize(_: httpx.Client, request: httpx.Request, **__) -> htt "account": {"login": "VALID_OWNER"}, "access_tokens_url": "https://example.com/ACCESS_TOKEN_URL" }]) - else: - return httpx.Response( - 401, json={"error": "auth app/installations"}, request=request - ) + return httpx.Response( + 401, json={"error": "auth app/installations"}, request=request + ) - elif path == "/installation/repositories": + elif path == "/installation/repositories": # noqa: RET505 if auth == "bearer app access token": return httpx.Response(200, request=request, json={ "repositories": [{ "name": "VALID_REPO" }] }) - else: # pragma: no cover - return httpx.Response( - 401, json={"error": "auth 
installation/repositories"}, request=request - ) + return httpx.Response( # pragma: no cover + 401, json={"error": "auth installation/repositories"}, request=request + ) - elif request.method == "POST": + elif request.method == "POST": # noqa: RET505 if path == "/ACCESS_TOKEN_URL": if auth == "bearer JWT initial token": return httpx.Response(200, request=request, json={"token": "app access token"}) - else: # pragma: no cover - return httpx.Response(401, json={"error": "auth access_token"}, request=request) + return httpx.Response(401, json={"error": "auth access_token"}, request=request) # pragma: no cover # Reaching this point means something has gone wrong return httpx.Response(500, request=request) # pragma: no cover @@ -138,7 +137,7 @@ class AuthorizeTests(unittest.TestCase): def test_invalid_apps_auth(self): """Test that an exception is raised if authorization was attempted with an invalid token.""" - with mock.patch.object(github_utils, "generate_token", return_value="Invalid token"): + with mock.patch.object(github_utils, "generate_token", return_value="Invalid token"): # noqa: SIM117 with self.assertRaises(httpx.HTTPStatusError) as error: github_utils.authorize("VALID_OWNER", "VALID_REPO") @@ -179,7 +178,11 @@ class ArtifactFetcherTests(unittest.TestCase): run = github_utils.WorkflowRun( name="action_name", head_sha="action_sha", - created_at=datetime.datetime.now().strftime(settings.GITHUB_TIMESTAMP_FORMAT), + created_at=( + datetime.datetime + .now(tz=datetime.timezone.utc) + .strftime(settings.GITHUB_TIMESTAMP_FORMAT) + ), status="completed", conclusion="success", artifacts_url="artifacts_url" @@ -187,7 +190,7 @@ class ArtifactFetcherTests(unittest.TestCase): return httpx.Response( 200, request=request, json={"workflow_runs": [dataclasses.asdict(run)]} ) - elif path == "/artifact_url": + elif path == "/artifact_url": # noqa: RET505 return httpx.Response( 200, request=request, json={"artifacts": [{ "name": "artifact_name", diff --git a/pydis_site/apps/api/tests/test_infractions.py b/pydis_site/apps/api/tests/test_infractions.py index ceb5591b..71611ee9 100644 --- a/pydis_site/apps/api/tests/test_infractions.py +++ b/pydis_site/apps/api/tests/test_infractions.py @@ -8,8 +8,8 @@ from django.db.utils import IntegrityError from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..models import Infraction, User -from ..serializers import InfractionSerializer +from pydis_site.apps.api.models import Infraction, User +from pydis_site.apps.api.serializers import InfractionSerializer class UnauthenticatedTests(AuthenticatedAPITestCase): @@ -152,8 +152,8 @@ class InfractionTests(AuthenticatedAPITestCase): def test_filter_after(self): url = reverse('api:bot:infraction-list') - target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=5) - response = self.client.get(f'{url}?type=superstar&expires_after={target_time.isoformat()}') + target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=5) + response = self.client.get(url, {'type': 'superstar', 'expires_after': target_time.isoformat()}) self.assertEqual(response.status_code, 200) infractions = response.json() @@ -161,8 +161,8 @@ class InfractionTests(AuthenticatedAPITestCase): def test_filter_before(self): url = reverse('api:bot:infraction-list') - target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=5) - response = self.client.get(f'{url}?type=superstar&expires_before={target_time.isoformat()}') + target_time = datetime.datetime.now(tz=timezone.utc) + 
datetime.timedelta(hours=5) + response = self.client.get(url, {'type': 'superstar', 'expires_before': target_time.isoformat()}) self.assertEqual(response.status_code, 200) infractions = response.json() @@ -185,11 +185,12 @@ class InfractionTests(AuthenticatedAPITestCase): def test_after_before_before(self): url = reverse('api:bot:infraction-list') - target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=4) - target_time_late = datetime.datetime.utcnow() + datetime.timedelta(hours=6) + target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=4) + target_time_late = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=6) response = self.client.get( - f'{url}?expires_before={target_time_late.isoformat()}' - f'&expires_after={target_time.isoformat()}' + url, + {'expires_before': target_time_late.isoformat(), + 'expires_after': target_time.isoformat()}, ) self.assertEqual(response.status_code, 200) @@ -198,11 +199,12 @@ class InfractionTests(AuthenticatedAPITestCase): def test_after_after_before_invalid(self): url = reverse('api:bot:infraction-list') - target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=5) - target_time_late = datetime.datetime.utcnow() + datetime.timedelta(hours=9) + target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=5) + target_time_late = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=9) response = self.client.get( - f'{url}?expires_before={target_time.isoformat()}' - f'&expires_after={target_time_late.isoformat()}' + url, + {'expires_before': target_time.isoformat(), + 'expires_after': target_time_late.isoformat()}, ) self.assertEqual(response.status_code, 400) @@ -212,8 +214,11 @@ class InfractionTests(AuthenticatedAPITestCase): def test_permanent_after_invalid(self): url = reverse('api:bot:infraction-list') - target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=5) - response = self.client.get(f'{url}?permanent=true&expires_after={target_time.isoformat()}') + target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=5) + response = self.client.get( + url, + {'permanent': 'true', 'expires_after': target_time.isoformat()}, + ) self.assertEqual(response.status_code, 400) errors = list(response.json()) @@ -221,8 +226,11 @@ class InfractionTests(AuthenticatedAPITestCase): def test_permanent_before_invalid(self): url = reverse('api:bot:infraction-list') - target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=5) - response = self.client.get(f'{url}?permanent=true&expires_before={target_time.isoformat()}') + target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=5) + response = self.client.get( + url, + {'permanent': 'true', 'expires_before': target_time.isoformat()}, + ) self.assertEqual(response.status_code, 400) errors = list(response.json()) @@ -230,9 +238,10 @@ class InfractionTests(AuthenticatedAPITestCase): def test_nonpermanent_before(self): url = reverse('api:bot:infraction-list') - target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=6) + target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=6) response = self.client.get( - f'{url}?permanent=false&expires_before={target_time.isoformat()}' + url, + {'permanent': 'false', 'expires_before': target_time.isoformat()}, ) self.assertEqual(response.status_code, 200) @@ -522,39 +531,38 @@ class CreationTests(AuthenticatedAPITestCase): active_infraction_types = ('timeout', 'ban', 'superstar') for 
infraction_type in active_infraction_types: - with self.subTest(infraction_type=infraction_type): - with transaction.atomic(): - first_active_infraction = { - 'user': self.user.id, - 'actor': self.user.id, - 'type': infraction_type, - 'reason': 'Take me on!', - 'active': True, - 'expires_at': '2019-10-04T12:52:00+00:00' - } + with self.subTest(infraction_type=infraction_type), transaction.atomic(): + first_active_infraction = { + 'user': self.user.id, + 'actor': self.user.id, + 'type': infraction_type, + 'reason': 'Take me on!', + 'active': True, + 'expires_at': '2019-10-04T12:52:00+00:00' + } + + # Post the first active infraction of a type and confirm it's accepted. + first_response = self.client.post(url, data=first_active_infraction) + self.assertEqual(first_response.status_code, 201) - # Post the first active infraction of a type and confirm it's accepted. - first_response = self.client.post(url, data=first_active_infraction) - self.assertEqual(first_response.status_code, 201) - - second_active_infraction = { - 'user': self.user.id, - 'actor': self.user.id, - 'type': infraction_type, - 'reason': 'Take on me!', - 'active': True, - 'expires_at': '2019-10-04T12:52:00+00:00' + second_active_infraction = { + 'user': self.user.id, + 'actor': self.user.id, + 'type': infraction_type, + 'reason': 'Take on me!', + 'active': True, + 'expires_at': '2019-10-04T12:52:00+00:00' + } + second_response = self.client.post(url, data=second_active_infraction) + self.assertEqual(second_response.status_code, 400) + self.assertEqual( + second_response.json(), + { + 'non_field_errors': [ + 'This user already has an active infraction of this type.' + ] } - second_response = self.client.post(url, data=second_active_infraction) - self.assertEqual(second_response.status_code, 400) - self.assertEqual( - second_response.json(), - { - 'non_field_errors': [ - 'This user already has an active infraction of this type.' 
- ] - } - ) + ) def test_returns_201_for_second_active_infraction_of_different_type(self): """Test if the API accepts a second active infraction of a different type than the first.""" diff --git a/pydis_site/apps/api/tests/test_models.py b/pydis_site/apps/api/tests/test_models.py index d3341b35..1cca133d 100644 --- a/pydis_site/apps/api/tests/test_models.py +++ b/pydis_site/apps/api/tests/test_models.py @@ -118,7 +118,7 @@ class StringDunderMethodTests(SimpleTestCase): OffensiveMessage( id=602951077675139072, channel_id=291284109232308226, - delete_date=dt(3000, 1, 1) + delete_date=dt(3000, 1, 1, tzinfo=timezone.utc) ), OffTopicChannelName(name='bob-the-builders-playground'), Role( @@ -132,7 +132,7 @@ class StringDunderMethodTests(SimpleTestCase): name='shawn', discriminator=555, ), - creation=dt.utcnow() + creation=dt.now(tz=timezone.utc) ), User( id=5, diff --git a/pydis_site/apps/api/tests/test_nominations.py b/pydis_site/apps/api/tests/test_nominations.py index b3742cdd..ee6b1fbd 100644 --- a/pydis_site/apps/api/tests/test_nominations.py +++ b/pydis_site/apps/api/tests/test_nominations.py @@ -3,7 +3,7 @@ from datetime import datetime as dt, timedelta, timezone from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..models import Nomination, NominationEntry, User +from pydis_site.apps.api.models import Nomination, NominationEntry, User class CreationTests(AuthenticatedAPITestCase): diff --git a/pydis_site/apps/api/tests/test_off_topic_channel_names.py b/pydis_site/apps/api/tests/test_off_topic_channel_names.py index 34098c92..315f707d 100644 --- a/pydis_site/apps/api/tests/test_off_topic_channel_names.py +++ b/pydis_site/apps/api/tests/test_off_topic_channel_names.py @@ -1,7 +1,7 @@ from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..models import OffTopicChannelName +from pydis_site.apps.api.models import OffTopicChannelName class UnauthenticatedTests(AuthenticatedAPITestCase): diff --git a/pydis_site/apps/api/tests/test_offensive_message.py b/pydis_site/apps/api/tests/test_offensive_message.py index 3cf95b75..53f9cb48 100644 --- a/pydis_site/apps/api/tests/test_offensive_message.py +++ b/pydis_site/apps/api/tests/test_offensive_message.py @@ -3,13 +3,13 @@ import datetime from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..models import OffensiveMessage +from pydis_site.apps.api.models import OffensiveMessage class CreationTests(AuthenticatedAPITestCase): def test_accept_valid_data(self): url = reverse('api:bot:offensivemessage-list') - delete_at = datetime.datetime.now() + datetime.timedelta(days=1) + delete_at = datetime.datetime.now() + datetime.timedelta(days=1) # noqa: DTZ005 data = { 'id': '602951077675139072', 'channel_id': '291284109232308226', @@ -32,7 +32,7 @@ class CreationTests(AuthenticatedAPITestCase): def test_returns_400_on_non_future_date(self): url = reverse('api:bot:offensivemessage-list') - delete_at = datetime.datetime.now() - datetime.timedelta(days=1) + delete_at = datetime.datetime.now() - datetime.timedelta(days=1) # noqa: DTZ005 data = { 'id': '602951077675139072', 'channel_id': '291284109232308226', @@ -46,7 +46,7 @@ class CreationTests(AuthenticatedAPITestCase): def test_returns_400_on_negative_id_or_channel_id(self): url = reverse('api:bot:offensivemessage-list') - delete_at = datetime.datetime.now() + datetime.timedelta(days=1) + delete_at = datetime.datetime.now() + datetime.timedelta(days=1) # noqa: DTZ005 data = { 'id': '602951077675139072', 'channel_id': 
'291284109232308226', @@ -72,7 +72,7 @@ class CreationTests(AuthenticatedAPITestCase): class ListTests(AuthenticatedAPITestCase): @classmethod def setUpTestData(cls): - delete_at = datetime.datetime.now() + datetime.timedelta(days=1) + delete_at = datetime.datetime.now() + datetime.timedelta(days=1) # noqa: DTZ005 aware_delete_at = delete_at.replace(tzinfo=datetime.timezone.utc) cls.messages = [ diff --git a/pydis_site/apps/api/tests/test_reminders.py b/pydis_site/apps/api/tests/test_reminders.py index e17569f0..9bb5fe4d 100644 --- a/pydis_site/apps/api/tests/test_reminders.py +++ b/pydis_site/apps/api/tests/test_reminders.py @@ -4,7 +4,7 @@ from django.forms.models import model_to_dict from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..models import Reminder, User +from pydis_site.apps.api.models import Reminder, User class UnauthedReminderAPITests(AuthenticatedAPITestCase): @@ -59,7 +59,7 @@ class ReminderCreationTests(AuthenticatedAPITestCase): data = { 'author': self.author.id, 'content': 'Remember to...wait what was it again?', - 'expiration': datetime.utcnow().isoformat(), + 'expiration': datetime.now(tz=timezone.utc).isoformat(), 'jump_url': "https://www.google.com", 'channel_id': 123, 'mentions': [8888, 9999], diff --git a/pydis_site/apps/api/tests/test_roles.py b/pydis_site/apps/api/tests/test_roles.py index 73c80c77..d3031990 100644 --- a/pydis_site/apps/api/tests/test_roles.py +++ b/pydis_site/apps/api/tests/test_roles.py @@ -1,7 +1,7 @@ from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..models import Role, User +from pydis_site.apps.api.models import Role, User class CreationTests(AuthenticatedAPITestCase): diff --git a/pydis_site/apps/api/tests/test_rules.py b/pydis_site/apps/api/tests/test_rules.py index 3ee2d4e0..662fb8e9 100644 --- a/pydis_site/apps/api/tests/test_rules.py +++ b/pydis_site/apps/api/tests/test_rules.py @@ -5,7 +5,7 @@ from pathlib import Path from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..views import RulesView +from pydis_site.apps.api.views import RulesView class RuleAPITests(AuthenticatedAPITestCase): diff --git a/pydis_site/apps/api/tests/test_users.py b/pydis_site/apps/api/tests/test_users.py index d86e80bb..cff4a825 100644 --- a/pydis_site/apps/api/tests/test_users.py +++ b/pydis_site/apps/api/tests/test_users.py @@ -4,9 +4,9 @@ from unittest.mock import Mock, patch from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..models import Infraction, Role, User -from ..models.bot.metricity import NotFoundError -from ..viewsets.bot.user import UserListPagination +from pydis_site.apps.api.models import Infraction, Role, User +from pydis_site.apps.api.models.bot.metricity import NotFoundError +from pydis_site.apps.api.viewsets.bot.user import UserListPagination class UnauthedUserAPITests(AuthenticatedAPITestCase): @@ -469,18 +469,17 @@ class UserMetricityTests(AuthenticatedAPITestCase): with self.subTest( voice_infractions=case['voice_infractions'], voice_gate_blocked=case['voice_gate_blocked'] - ): - with patch("pydis_site.apps.api.viewsets.bot.user.Infraction.objects.filter") as p: - p.return_value = case['voice_infractions'] - - url = reverse('api:bot:user-metricity-data', args=[0]) - response = self.client.get(url) - - self.assertEqual(response.status_code, 200) - self.assertEqual( - response.json()["voice_gate_blocked"], - case["voice_gate_blocked"] - ) + ), patch("pydis_site.apps.api.viewsets.bot.user.Infraction.objects.filter") 
as p: + p.return_value = case['voice_infractions'] + + url = reverse('api:bot:user-metricity-data', args=[0]) + response = self.client.get(url) + + self.assertEqual(response.status_code, 200) + self.assertEqual( + response.json()["voice_gate_blocked"], + case["voice_gate_blocked"] + ) def test_metricity_review_data(self): # Given diff --git a/pydis_site/apps/api/tests/test_validators.py b/pydis_site/apps/api/tests/test_validators.py index 8c46fcbc..a7ec6e38 100644 --- a/pydis_site/apps/api/tests/test_validators.py +++ b/pydis_site/apps/api/tests/test_validators.py @@ -3,8 +3,8 @@ from datetime import datetime, timezone from django.core.exceptions import ValidationError from django.test import TestCase -from ..models.bot.bot_setting import validate_bot_setting_name -from ..models.bot.offensive_message import future_date_validator +from pydis_site.apps.api.models.bot.bot_setting import validate_bot_setting_name +from pydis_site.apps.api.models.bot.offensive_message import future_date_validator REQUIRED_KEYS = ( diff --git a/pydis_site/apps/api/views.py b/pydis_site/apps/api/views.py index b1b7dc0f..32f41667 100644 --- a/pydis_site/apps/api/views.py +++ b/pydis_site/apps/api/views.py @@ -93,7 +93,7 @@ class RulesView(APIView): """ if target == 'html': return f'{description}' - elif target == 'md': + elif target == 'md': # noqa: RET505 return f'[{description}]({link})' else: raise ValueError( @@ -101,7 +101,7 @@ class RulesView(APIView): ) # `format` here is the result format, we have a link format here instead. - def get(self, request, format=None): # noqa: D102,ANN001,ANN201 + def get(self, request, format=None): # noqa: ANN001, ANN201 """ Returns a list of our community rules coupled with their keywords. diff --git a/pydis_site/apps/api/viewsets/bot/filters.py b/pydis_site/apps/api/viewsets/bot/filters.py index d6c2d18c..9c9e8338 100644 --- a/pydis_site/apps/api/viewsets/bot/filters.py +++ b/pydis_site/apps/api/viewsets/bot/filters.py @@ -1,10 +1,10 @@ from rest_framework.viewsets import ModelViewSet -from pydis_site.apps.api.models.bot.filters import ( # noqa: I101 - Preserving the filter order +from pydis_site.apps.api.models.bot.filters import ( # - Preserving the filter order FilterList, Filter ) -from pydis_site.apps.api.serializers import ( # noqa: I101 - Preserving the filter order +from pydis_site.apps.api.serializers import ( # - Preserving the filter order FilterListSerializer, FilterSerializer, ) diff --git a/pydis_site/apps/api/viewsets/bot/off_topic_channel_name.py b/pydis_site/apps/api/viewsets/bot/off_topic_channel_name.py index d0519e86..1774004c 100644 --- a/pydis_site/apps/api/viewsets/bot/off_topic_channel_name.py +++ b/pydis_site/apps/api/viewsets/bot/off_topic_channel_name.py @@ -85,10 +85,9 @@ class OffTopicChannelNameViewSet(ModelViewSet): serializer.save() return Response(create_data, status=HTTP_201_CREATED) - else: - raise ParseError(detail={ - 'name': ["This query parameter is required."] - }) + raise ParseError(detail={ + 'name': ["This query parameter is required."] + }) def list(self, request: Request, *args, **kwargs) -> Response: """ diff --git a/pydis_site/apps/api/viewsets/bot/user.py b/pydis_site/apps/api/viewsets/bot/user.py index db73a83c..88fa3415 100644 --- a/pydis_site/apps/api/viewsets/bot/user.py +++ b/pydis_site/apps/api/viewsets/bot/user.py @@ -1,4 +1,3 @@ -import typing from collections import OrderedDict from django.db.models import Q @@ -24,14 +23,14 @@ class UserListPagination(PageNumberPagination): page_size = 2500 page_size_query_param = 
"page_size" - def get_next_page_number(self) -> typing.Optional[int]: + def get_next_page_number(self) -> int | None: """Get the next page number.""" if not self.page.has_next(): return None page_number = self.page.next_page_number() return page_number - def get_previous_page_number(self) -> typing.Optional[int]: + def get_previous_page_number(self) -> int | None: """Get the previous page number.""" if not self.page.has_previous(): return None diff --git a/pydis_site/apps/content/models/tag.py b/pydis_site/apps/content/models/tag.py index 1a20d775..7c49902f 100644 --- a/pydis_site/apps/content/models/tag.py +++ b/pydis_site/apps/content/models/tag.py @@ -30,8 +30,7 @@ class Commit(models.Model): def lines(self) -> collections.abc.Iterable[str]: """Return each line in the commit message.""" - for line in self.message.split("\n"): - yield line + yield from self.message.split("\n") def format_authors(self) -> collections.abc.Iterable[str]: """Return a nice representation of the author(s)' name and email.""" diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/linting.md b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/linting.md index f6f8a5f2..b634f513 100644 --- a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/linting.md +++ b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/linting.md @@ -4,7 +4,7 @@ description: A guide for linting and setting up pre-commit. --- Your commit will be rejected by the build server if it fails to lint. -On most of our projects, we use `flake8` and `pre-commit` to ensure that the code style is consistent across the code base. +On most of our projects, we use `ruff` and `pre-commit` to ensure that the code style is consistent across the code base. `pre-commit` is a powerful tool that helps you automatically lint before you commit. If the linter complains, the commit is aborted so that you can fix the linting errors before committing again. diff --git a/pydis_site/apps/content/tests/helpers.py b/pydis_site/apps/content/tests/helpers.py index fad91050..0e7562e8 100644 --- a/pydis_site/apps/content/tests/helpers.py +++ b/pydis_site/apps/content/tests/helpers.py @@ -62,19 +62,19 @@ class MockPagesTestCase(TestCase): ├── not_a_page.md ├── tmp.md ├── tmp - |   ├── _info.yml - |   └── category - |    ├── _info.yml - |      └── subcategory_without_info + | ├── _info.yml + | └── category + | ├── _info.yml + | └── subcategory_without_info └── category -    ├── _info.yml -    ├── with_metadata.md -    └── subcategory -    ├── with_metadata.md -       └── without_metadata.md + ├── _info.yml + ├── with_metadata.md + └── subcategory + ├── with_metadata.md + └── without_metadata.md """ - def setUp(self): + def setUp(self) -> None: """Create the fake filesystem.""" Path(f"{BASE_PATH}/_info.yml").write_text(CATEGORY_INFO) Path(f"{BASE_PATH}/root.md").write_text(MARKDOWN_WITH_METADATA) diff --git a/pydis_site/apps/content/urls.py b/pydis_site/apps/content/urls.py index a7695a27..baae154d 100644 --- a/pydis_site/apps/content/urls.py +++ b/pydis_site/apps/content/urls.py @@ -8,7 +8,7 @@ from . 
import utils, views app_name = "content" -def __get_all_files(root: Path, folder: typing.Optional[Path] = None) -> list[str]: +def __get_all_files(root: Path, folder: Path | None = None) -> list[str]: """Find all folders and markdown files recursively starting from `root`.""" if not folder: folder = root diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py index c12893ef..347640dd 100644 --- a/pydis_site/apps/content/utils.py +++ b/pydis_site/apps/content/utils.py @@ -151,8 +151,11 @@ def set_tag_commit(tag: Tag) -> None: commit = data["commit"] author, committer = commit["author"], commit["committer"] - date = datetime.datetime.strptime(committer["date"], settings.GITHUB_TIMESTAMP_FORMAT) - date = date.replace(tzinfo=datetime.timezone.utc) + date = ( + datetime.datetime + .strptime(committer["date"], settings.GITHUB_TIMESTAMP_FORMAT) + .replace(tzinfo=datetime.timezone.utc) + ) if author["email"] == committer["email"]: authors = [author] @@ -212,9 +215,8 @@ def get_tags() -> list[Tag]: record_tags(tags) return tags - else: - # Get tags from database - return list(Tag.objects.all()) + + return list(Tag.objects.all()) def get_tag(path: str, *, skip_sync: bool = False) -> Tag | list[Tag]: @@ -242,13 +244,13 @@ def get_tag(path: str, *, skip_sync: bool = False) -> Tag | list[Tag]: if tag.last_commit is None and not skip_sync: set_tag_commit(tag) return tag - elif tag.group == name and group is None: + elif tag.group == name and group is None: # noqa: RET505 matches.append(tag) if matches: return matches - raise Tag.DoesNotExist() + raise Tag.DoesNotExist def get_tag_category(tags: list[Tag] | None = None, *, collapse_groups: bool) -> dict[str, dict]: diff --git a/pydis_site/apps/content/views/tags.py b/pydis_site/apps/content/views/tags.py index 4f4bb5a2..8d3e3321 100644 --- a/pydis_site/apps/content/views/tags.py +++ b/pydis_site/apps/content/views/tags.py @@ -1,5 +1,4 @@ import re -import typing import frontmatter import markdown @@ -22,7 +21,7 @@ COMMAND_REGEX = re.compile(r"`*!tags? 
(?P<first>[\w-]+)(?P<second> [\w-]+)?`*") class TagView(TemplateView): """Handles tag pages.""" - tag: typing.Union[Tag, list[Tag]] + tag: Tag | list[Tag] is_group: bool def setup(self, *args, **kwargs) -> None: diff --git a/pydis_site/apps/events/urls.py b/pydis_site/apps/events/urls.py index 7ea65a31..6121d264 100644 --- a/pydis_site/apps/events/urls.py +++ b/pydis_site/apps/events/urls.py @@ -8,7 +8,7 @@ from pydis_site.apps.events.views import IndexView, PageView app_name = "events" -def __get_all_files(root: Path, folder: typing.Optional[Path] = None) -> list[str]: +def __get_all_files(root: Path, folder: Path | None = None) -> list[str]: """Find all folders and HTML files recursively starting from `root`.""" if not folder: folder = root diff --git a/pydis_site/apps/events/views/page.py b/pydis_site/apps/events/views/page.py index 1622ad70..adf9e952 100644 --- a/pydis_site/apps/events/views/page.py +++ b/pydis_site/apps/events/views/page.py @@ -1,4 +1,3 @@ -from typing import List from django.conf import settings from django.http import Http404 @@ -8,7 +7,7 @@ from django.views.generic import TemplateView class PageView(TemplateView): """Handles event pages showing.""" - def get_template_names(self) -> List[str]: + def get_template_names(self) -> list[str]: """Get specific template names.""" path: str = self.kwargs['path'] page_path = settings.EVENTS_PAGES_PATH / path diff --git a/pydis_site/apps/home/tests/test_repodata_helpers.py b/pydis_site/apps/home/tests/test_repodata_helpers.py index a963f733..acf4a817 100644 --- a/pydis_site/apps/home/tests/test_repodata_helpers.py +++ b/pydis_site/apps/home/tests/test_repodata_helpers.py @@ -22,7 +22,7 @@ def mocked_requests_get(*args, **kwargs) -> "MockResponse": # noqa: F821 if args[0] == HomeView.github_api: json_path = Path(__file__).resolve().parent / "mock_github_api_response.json" - with open(json_path, 'r') as json_file: + with open(json_path) as json_file: mock_data = json.load(json_file) return MockResponse(mock_data, 200) diff --git a/pydis_site/apps/home/views.py b/pydis_site/apps/home/views.py index 8a165682..bfa9e02d 100644 --- a/pydis_site/apps/home/views.py +++ b/pydis_site/apps/home/views.py @@ -1,5 +1,4 @@ import logging -from typing import Dict, List import httpx from django.core.handlers.wsgi import WSGIRequest @@ -45,7 +44,7 @@ class HomeView(View): else: self.headers = {} - def _get_api_data(self) -> Dict[str, Dict[str, str]]: + def _get_api_data(self) -> dict[str, dict[str, str]]: """ Call the GitHub API and get information about our repos. @@ -54,7 +53,7 @@ class HomeView(View): repo_dict = {} try: # Fetch the data from the GitHub API - api_data: List[dict] = httpx.get( + api_data: list[dict] = httpx.get( self.github_api, headers=self.headers, timeout=settings.TIMEOUT_PERIOD @@ -89,7 +88,7 @@ class HomeView(View): return repo_dict - def _get_repo_data(self) -> List[RepositoryMetadata]: + def _get_repo_data(self) -> list[RepositoryMetadata]: """Build a list of RepositoryMetadata objects that we can use to populate the front page.""" # First off, load the timestamp of the least recently updated entry. if settings.STATIC_BUILD: @@ -121,8 +120,7 @@ class HomeView(View): if settings.STATIC_BUILD: return data - else: - return RepositoryMetadata.objects.bulk_create(data) # If the data is stale, we should refresh it.
if (timezone.now() - last_update).seconds > self.repository_cache_ttl: @@ -149,8 +147,7 @@ class HomeView(View): return database_repositories # Otherwise, if the data is fresher than 2 minutes old, we should just return it. - else: - return RepositoryMetadata.objects.all() + return RepositoryMetadata.objects.all() def get(self, request: WSGIRequest) -> HttpResponse: """Collect repo data and render the homepage view.""" diff --git a/pydis_site/apps/redirect/urls.py b/pydis_site/apps/redirect/urls.py index 067cccc3..a221ea12 100644 --- a/pydis_site/apps/redirect/urls.py +++ b/pydis_site/apps/redirect/urls.py @@ -83,22 +83,21 @@ def map_redirect(name: str, data: Redirect) -> list[URLPattern]: return paths + redirect_path_name = "pages" if new_app_name == "content" else new_app_name + if len(data.redirect_arguments) > 0: + redirect_arg = data.redirect_arguments[0] else: - redirect_path_name = "pages" if new_app_name == "content" else new_app_name - if len(data.redirect_arguments) > 0: - redirect_arg = data.redirect_arguments[0] - else: - redirect_arg = "resources/" - new_redirect = f"/{redirect_path_name}/{redirect_arg}" + redirect_arg = "resources/" + new_redirect = f"/{redirect_path_name}/{redirect_arg}" - if new_redirect == "/resources/resources/": - new_redirect = "/resources/" + if new_redirect == "/resources/resources/": + new_redirect = "/resources/" - return [distill_path( - data.original_path, - lambda *args: HttpResponse(REDIRECT_TEMPLATE.format(url=new_redirect)), - name=name, - )] + return [distill_path( + data.original_path, + lambda *args: HttpResponse(REDIRECT_TEMPLATE.format(url=new_redirect)), + name=name, + )] urlpatterns = [] diff --git a/pydis_site/apps/redirect/views.py b/pydis_site/apps/redirect/views.py index 21180cdf..374daf2b 100644 --- a/pydis_site/apps/redirect/views.py +++ b/pydis_site/apps/redirect/views.py @@ -1,4 +1,3 @@ -import typing as t from django.views.generic import RedirectView @@ -15,7 +14,7 @@ class CustomRedirectView(RedirectView): """Overwrites original as_view to add static args.""" return super().as_view(**initkwargs) - def get_redirect_url(self, *args, **kwargs) -> t.Optional[str]: + def get_redirect_url(self, *args, **kwargs) -> str | None: """Extends default behaviour to use static args.""" args = self.static_args + args + tuple(kwargs.values()) if self.prefix_redirect: diff --git a/pydis_site/apps/resources/views.py b/pydis_site/apps/resources/views.py index 2375f722..a2cd8d0c 100644 --- a/pydis_site/apps/resources/views.py +++ b/pydis_site/apps/resources/views.py @@ -1,5 +1,4 @@ import json -import typing as t from pathlib import Path import yaml @@ -22,7 +21,7 @@ class ResourceView(View): """Sort a tuple by its key alphabetically, disregarding 'the' as a prefix.""" name, resource = tuple_ name = name.casefold() - if name.startswith("the ") or name.startswith("the_"): + if name.startswith(("the ", "the_")): return name[4:] return name @@ -48,7 +47,7 @@ class ResourceView(View): } for resource_name, resource in self.resources.items(): css_classes = [] - for tag_type in resource_tags.keys(): + for tag_type in resource_tags: # Store the tags into `resource_tags` tags = resource.get("tags", {}).get(tag_type, []) for tag in tags: @@ -102,7 +101,7 @@ class ResourceView(View): "difficulty": [to_kebabcase(tier) for tier in self.filters["Difficulty"]["filters"]], } - def get(self, request: WSGIRequest, resource_type: t.Optional[str] = None) -> HttpResponse: + def get(self, request: WSGIRequest, resource_type: str | None = None) -> HttpResponse: """List 
out all the resources, and any filtering options from the URL.""" # Add type filtering if the request is made to somewhere like /resources/video. # We also convert all spaces to dashes, so they'll correspond with the filters. diff --git a/pydis_site/apps/staff/templatetags/deletedmessage_filters.py b/pydis_site/apps/staff/templatetags/deletedmessage_filters.py index 9d8f1819..c6638a3b 100644 --- a/pydis_site/apps/staff/templatetags/deletedmessage_filters.py +++ b/pydis_site/apps/staff/templatetags/deletedmessage_filters.py @@ -1,5 +1,4 @@ from datetime import datetime -from typing import Union from django import template @@ -7,7 +6,7 @@ register = template.Library() @register.filter -def hex_colour(colour: Union[str, int]) -> str: +def hex_colour(colour: str | int) -> str: """ Converts the given representation of a colour to its RGB hex string. diff --git a/pydis_site/apps/staff/tests/test_deletedmessage_filters.py b/pydis_site/apps/staff/tests/test_deletedmessage_filters.py index 31215784..5e49f103 100644 --- a/pydis_site/apps/staff/tests/test_deletedmessage_filters.py +++ b/pydis_site/apps/staff/tests/test_deletedmessage_filters.py @@ -3,7 +3,7 @@ import enum from django.test import TestCase from django.utils import timezone -from ..templatetags import deletedmessage_filters +from pydis_site.apps.staff.templatetags import deletedmessage_filters class Colour(enum.IntEnum): diff --git a/pyproject.toml b/pyproject.toml index 40461f0b..9019efb0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,18 +29,9 @@ whitenoise = "6.4.0" [tool.poetry.group.dev.dependencies] python-dotenv = "1.0.0" taskipy = "1.10.4" +ruff = "^0.0.265" [tool.poetry.group.lint.dependencies] -flake8 = "6.0.0" -flake8-annotations = "3.0.1" -flake8-bandit = "4.1.1" -flake8-bugbear = "23.5.9" -flake8-docstrings = "1.7.0" -flake8-import-order = "0.18.2" -flake8-tidy-imports = "4.8.0" -flake8-string-format = "0.3.0" -flake8-todo = "0.7" -pep8-naming = "0.13.3" pre-commit = "3.3.1" [tool.poetry.group.test.dependencies] @@ -50,6 +41,34 @@ coverage = "7.2.5" requires = ["poetry-core>=1.2.0"] build-backend = "poetry.core.masonry.api" +[tool.ruff] +target-version = "py310" +extend-exclude = [".cache"] +ignore = [ + "ANN002", "ANN003", "ANN101", "ANN102", "ANN204", "ANN206", "ANN401", + "B904", + "C401", "C408", + "D100", "D104", "D105", "D107", "D203", "D212", "D214", "D215", "D301", + "D400", "D401", "D402", "D404", "D405", "D406", "D407", "D408", "D409", "D410", "D411", "D412", "D413", "D414", "D416", "D417", + "E731", + "RET504", + "RUF005", + "S311", + "SIM102", "SIM108", +] +line-length = 120 +select = ["ANN", "B", "C4", "D", "DTZ", "E", "F", "ISC", "INT", "N", "PGH", "PIE", "RET", "RSE", "RUF", "S", "SIM", "T20", "TID", "UP", "W"] + +[tool.ruff.per-file-ignores] +"pydis_site/apps/**/migrations/*.py" = ["ALL"] +"manage.py" = ["T201"] +"pydis_site/apps/api/tests/base.py" = ["S106"] +"pydis_site/apps/**/tests/test_*.py" = ["ANN", "D"] +"static-builds/netlify_build.py" = ["T201"] +"pydis_site/apps/api/tests/test_off_topic_channel_names.py" = ["RUF001"] +"gunicorn.conf.py" = ["ANN", "D"] +"pydis_site/apps/api/models/bot/off_topic_channel_name.py" = ["RUF001"] + [tool.taskipy.tasks] start = "python manage.py run --debug" makemigrations = "python manage.py makemigrations" diff --git a/static-builds/netlify_build.py b/static-builds/netlify_build.py index 36520c28..2d311a11 100644 --- a/static-builds/netlify_build.py +++ b/static-builds/netlify_build.py @@ -14,15 +14,15 @@ from pathlib import Path from urllib import parse import 
httpx +import contextlib def raise_response(response: httpx.Response) -> None: """Raise an exception from a response if necessary.""" if response.status_code // 100 != 2: - try: + with contextlib.suppress(json.JSONDecodeError): print(response.json()) - except json.JSONDecodeError: - pass + response.raise_for_status() -- cgit v1.2.3 From 509449c1a7d4230c3a4b1e335b681fbecf0313c0 Mon Sep 17 00:00:00 2001 From: Chris Lovering Date: Sat, 13 May 2023 14:31:39 +0100 Subject: Use the new datetime.UTC alias over datetime.timezone.utc --- pydis_site/apps/api/github_utils.py | 6 ++-- pydis_site/apps/api/models/bot/message.py | 2 +- .../apps/api/models/bot/offensive_message.py | 2 +- pydis_site/apps/api/tests/test_deleted_messages.py | 8 ++--- pydis_site/apps/api/tests/test_github_utils.py | 8 ++--- pydis_site/apps/api/tests/test_infractions.py | 40 +++++++++++----------- pydis_site/apps/api/tests/test_models.py | 14 ++++---- pydis_site/apps/api/tests/test_nominations.py | 6 ++-- .../apps/api/tests/test_offensive_message.py | 8 ++--- pydis_site/apps/api/tests/test_reminders.py | 14 ++++---- pydis_site/apps/api/tests/test_validators.py | 6 ++-- pydis_site/apps/api/viewsets/bot/infraction.py | 4 +-- pydis_site/apps/content/tests/test_utils.py | 2 +- pydis_site/apps/content/utils.py | 4 +-- 14 files changed, 62 insertions(+), 62 deletions(-) (limited to 'pydis_site/apps/content/utils.py') diff --git a/pydis_site/apps/api/github_utils.py b/pydis_site/apps/api/github_utils.py index af659195..b1a7d07d 100644 --- a/pydis_site/apps/api/github_utils.py +++ b/pydis_site/apps/api/github_utils.py @@ -82,7 +82,7 @@ def generate_token() -> str: Refer to: https://docs.github.com/en/developers/apps/building-github-apps/authenticating-with-github-apps#authenticating-as-a-github-app """ - now = datetime.datetime.now(tz=datetime.timezone.utc) + now = datetime.datetime.now(tz=datetime.UTC) return jwt.encode( { "iat": math.floor((now - datetime.timedelta(seconds=60)).timestamp()), # Issued at @@ -148,9 +148,9 @@ def check_run_status(run: WorkflowRun) -> str: created_at = ( datetime.datetime .strptime(run.created_at, settings.GITHUB_TIMESTAMP_FORMAT) - .replace(tzinfo=datetime.timezone.utc) + .replace(tzinfo=datetime.UTC) ) - run_time = datetime.datetime.now(tz=datetime.timezone.utc) - created_at + run_time = datetime.datetime.now(tz=datetime.UTC) - created_at if run.status != "completed": if run_time <= MAX_RUN_TIME: diff --git a/pydis_site/apps/api/models/bot/message.py b/pydis_site/apps/api/models/bot/message.py index fb3c47fc..f90f5dd0 100644 --- a/pydis_site/apps/api/models/bot/message.py +++ b/pydis_site/apps/api/models/bot/message.py @@ -68,5 +68,5 @@ class Message(ModelReprMixin, models.Model): """Attribute that represents the message timestamp as derived from the snowflake id.""" return datetime.datetime.fromtimestamp( ((self.id >> 22) + 1420070400000) / 1000, - tz=datetime.timezone.utc, + tz=datetime.UTC, ) diff --git a/pydis_site/apps/api/models/bot/offensive_message.py b/pydis_site/apps/api/models/bot/offensive_message.py index 74dab59b..41805a16 100644 --- a/pydis_site/apps/api/models/bot/offensive_message.py +++ b/pydis_site/apps/api/models/bot/offensive_message.py @@ -9,7 +9,7 @@ from pydis_site.apps.api.models.mixins import ModelReprMixin def future_date_validator(date: datetime.date) -> None: """Raise ValidationError if the date isn't a future date.""" - if date < datetime.datetime.now(datetime.timezone.utc): + if date < datetime.datetime.now(datetime.UTC): raise ValidationError("Date must be a future 
date") diff --git a/pydis_site/apps/api/tests/test_deleted_messages.py b/pydis_site/apps/api/tests/test_deleted_messages.py index 62d17e58..d5501202 100644 --- a/pydis_site/apps/api/tests/test_deleted_messages.py +++ b/pydis_site/apps/api/tests/test_deleted_messages.py @@ -1,4 +1,4 @@ -from datetime import datetime, timezone +from datetime import UTC, datetime from django.urls import reverse @@ -17,7 +17,7 @@ class DeletedMessagesWithoutActorTests(AuthenticatedAPITestCase): cls.data = { 'actor': None, - 'creation': datetime.now(tz=timezone.utc).isoformat(), + 'creation': datetime.now(tz=UTC).isoformat(), 'deletedmessage_set': [ { 'author': cls.author.id, @@ -57,7 +57,7 @@ class DeletedMessagesWithActorTests(AuthenticatedAPITestCase): cls.data = { 'actor': cls.actor.id, - 'creation': datetime.now(tz=timezone.utc).isoformat(), + 'creation': datetime.now(tz=UTC).isoformat(), 'deletedmessage_set': [ { 'author': cls.author.id, @@ -89,7 +89,7 @@ class DeletedMessagesLogURLTests(AuthenticatedAPITestCase): cls.deletion_context = MessageDeletionContext.objects.create( actor=cls.actor, - creation=datetime.now(tz=timezone.utc), + creation=datetime.now(tz=UTC), ) def test_valid_log_url(self): diff --git a/pydis_site/apps/api/tests/test_github_utils.py b/pydis_site/apps/api/tests/test_github_utils.py index 34fae875..d36111c9 100644 --- a/pydis_site/apps/api/tests/test_github_utils.py +++ b/pydis_site/apps/api/tests/test_github_utils.py @@ -39,7 +39,7 @@ class GeneralUtilityTests(unittest.TestCase): delta = datetime.timedelta(minutes=10) self.assertAlmostEqual(decoded["exp"] - decoded["iat"], delta.total_seconds()) - then = datetime.datetime.now(tz=datetime.timezone.utc) + delta + then = datetime.datetime.now(tz=datetime.UTC) + delta self.assertLess(decoded["exp"], then.timestamp()) @@ -51,7 +51,7 @@ class CheckRunTests(unittest.TestCase): "head_sha": "sha", "status": "completed", "conclusion": "success", - "created_at": datetime.datetime.now(tz=datetime.timezone.utc).strftime(settings.GITHUB_TIMESTAMP_FORMAT), + "created_at": datetime.datetime.now(tz=datetime.UTC).strftime(settings.GITHUB_TIMESTAMP_FORMAT), "artifacts_url": "url", } @@ -75,7 +75,7 @@ class CheckRunTests(unittest.TestCase): # Set the creation time to well before the MAX_RUN_TIME # to guarantee the right conclusion kwargs["created_at"] = ( - datetime.datetime.now(tz=datetime.timezone.utc) + datetime.datetime.now(tz=datetime.UTC) - github_utils.MAX_RUN_TIME - datetime.timedelta(minutes=10) ).strftime(settings.GITHUB_TIMESTAMP_FORMAT) @@ -180,7 +180,7 @@ class ArtifactFetcherTests(unittest.TestCase): head_sha="action_sha", created_at=( datetime.datetime - .now(tz=datetime.timezone.utc) + .now(tz=datetime.UTC) .strftime(settings.GITHUB_TIMESTAMP_FORMAT) ), status="completed", diff --git a/pydis_site/apps/api/tests/test_infractions.py b/pydis_site/apps/api/tests/test_infractions.py index 71611ee9..b9b33ff3 100644 --- a/pydis_site/apps/api/tests/test_infractions.py +++ b/pydis_site/apps/api/tests/test_infractions.py @@ -1,5 +1,5 @@ import datetime -from datetime import datetime as dt, timedelta, timezone +from datetime import UTC, datetime as dt, timedelta from unittest.mock import patch from urllib.parse import quote @@ -56,8 +56,8 @@ class InfractionTests(AuthenticatedAPITestCase): type='ban', reason='He terk my jerb!', hidden=True, - inserted_at=dt(2020, 10, 10, 0, 0, 0, tzinfo=timezone.utc), - expires_at=dt(5018, 11, 20, 15, 52, tzinfo=timezone.utc), + inserted_at=dt(2020, 10, 10, 0, 0, 0, tzinfo=UTC), + expires_at=dt(5018, 11, 20, 15, 
52, tzinfo=UTC), active=True, ) cls.ban_inactive = Infraction.objects.create( @@ -66,7 +66,7 @@ class InfractionTests(AuthenticatedAPITestCase): type='ban', reason='James is an ass, and we won\'t be working with him again.', active=False, - inserted_at=dt(2020, 10, 10, 0, 1, 0, tzinfo=timezone.utc), + inserted_at=dt(2020, 10, 10, 0, 1, 0, tzinfo=UTC), ) cls.timeout_permanent = Infraction.objects.create( user_id=cls.user.id, @@ -74,7 +74,7 @@ class InfractionTests(AuthenticatedAPITestCase): type='timeout', reason='He has a filthy mouth and I am his soap.', active=True, - inserted_at=dt(2020, 10, 10, 0, 2, 0, tzinfo=timezone.utc), + inserted_at=dt(2020, 10, 10, 0, 2, 0, tzinfo=UTC), expires_at=None, ) cls.superstar_expires_soon = Infraction.objects.create( @@ -83,8 +83,8 @@ class InfractionTests(AuthenticatedAPITestCase): type='superstar', reason='This one doesn\'t matter anymore.', active=True, - inserted_at=dt(2020, 10, 10, 0, 3, 0, tzinfo=timezone.utc), - expires_at=dt.now(timezone.utc) + datetime.timedelta(hours=5), + inserted_at=dt(2020, 10, 10, 0, 3, 0, tzinfo=UTC), + expires_at=dt.now(UTC) + datetime.timedelta(hours=5), ) cls.voiceban_expires_later = Infraction.objects.create( user_id=cls.user.id, @@ -92,8 +92,8 @@ class InfractionTests(AuthenticatedAPITestCase): type='voice_ban', reason='Jet engine mic', active=True, - inserted_at=dt(2020, 10, 10, 0, 4, 0, tzinfo=timezone.utc), - expires_at=dt.now(timezone.utc) + datetime.timedelta(days=5), + inserted_at=dt(2020, 10, 10, 0, 4, 0, tzinfo=UTC), + expires_at=dt.now(UTC) + datetime.timedelta(days=5), ) def test_list_all(self): @@ -152,7 +152,7 @@ class InfractionTests(AuthenticatedAPITestCase): def test_filter_after(self): url = reverse('api:bot:infraction-list') - target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=5) + target_time = datetime.datetime.now(tz=UTC) + datetime.timedelta(hours=5) response = self.client.get(url, {'type': 'superstar', 'expires_after': target_time.isoformat()}) self.assertEqual(response.status_code, 200) @@ -161,7 +161,7 @@ class InfractionTests(AuthenticatedAPITestCase): def test_filter_before(self): url = reverse('api:bot:infraction-list') - target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=5) + target_time = datetime.datetime.now(tz=UTC) + datetime.timedelta(hours=5) response = self.client.get(url, {'type': 'superstar', 'expires_before': target_time.isoformat()}) self.assertEqual(response.status_code, 200) @@ -185,8 +185,8 @@ class InfractionTests(AuthenticatedAPITestCase): def test_after_before_before(self): url = reverse('api:bot:infraction-list') - target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=4) - target_time_late = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=6) + target_time = datetime.datetime.now(tz=UTC) + datetime.timedelta(hours=4) + target_time_late = datetime.datetime.now(tz=UTC) + datetime.timedelta(hours=6) response = self.client.get( url, {'expires_before': target_time_late.isoformat(), @@ -199,8 +199,8 @@ class InfractionTests(AuthenticatedAPITestCase): def test_after_after_before_invalid(self): url = reverse('api:bot:infraction-list') - target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=5) - target_time_late = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=9) + target_time = datetime.datetime.now(tz=UTC) + datetime.timedelta(hours=5) + target_time_late = datetime.datetime.now(tz=UTC) + datetime.timedelta(hours=9) response = 
self.client.get( url, {'expires_before': target_time.isoformat(), @@ -214,7 +214,7 @@ class InfractionTests(AuthenticatedAPITestCase): def test_permanent_after_invalid(self): url = reverse('api:bot:infraction-list') - target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=5) + target_time = datetime.datetime.now(tz=UTC) + datetime.timedelta(hours=5) response = self.client.get( url, {'permanent': 'true', 'expires_after': target_time.isoformat()}, @@ -226,7 +226,7 @@ class InfractionTests(AuthenticatedAPITestCase): def test_permanent_before_invalid(self): url = reverse('api:bot:infraction-list') - target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=5) + target_time = datetime.datetime.now(tz=UTC) + datetime.timedelta(hours=5) response = self.client.get( url, {'permanent': 'true', 'expires_before': target_time.isoformat()}, @@ -238,7 +238,7 @@ class InfractionTests(AuthenticatedAPITestCase): def test_nonpermanent_before(self): url = reverse('api:bot:infraction-list') - target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=6) + target_time = datetime.datetime.now(tz=UTC) + datetime.timedelta(hours=6) response = self.client.get( url, {'permanent': 'false', 'expires_before': target_time.isoformat()}, @@ -370,7 +370,7 @@ class CreationTests(AuthenticatedAPITestCase): infraction = Infraction.objects.get(id=response.json()['id']) self.assertAlmostEqual( infraction.inserted_at, - dt.now(timezone.utc), + dt.now(UTC), delta=timedelta(seconds=2) ) self.assertEqual(infraction.expires_at.isoformat(), data['expires_at']) @@ -814,7 +814,7 @@ class SerializerTests(AuthenticatedAPITestCase): actor_id=self.user.id, type=_type, reason='A reason.', - expires_at=dt(5018, 11, 20, 15, 52, tzinfo=timezone.utc), + expires_at=dt(5018, 11, 20, 15, 52, tzinfo=UTC), active=active ) diff --git a/pydis_site/apps/api/tests/test_models.py b/pydis_site/apps/api/tests/test_models.py index 1cca133d..456ac408 100644 --- a/pydis_site/apps/api/tests/test_models.py +++ b/pydis_site/apps/api/tests/test_models.py @@ -1,4 +1,4 @@ -from datetime import datetime as dt, timezone +from datetime import UTC, datetime as dt from django.core.exceptions import ValidationError from django.test import SimpleTestCase, TestCase @@ -41,7 +41,7 @@ class NitroMessageLengthTest(TestCase): self.context = MessageDeletionContext.objects.create( id=50, actor=self.user, - creation=dt.now(timezone.utc) + creation=dt.now(UTC) ) def test_create(self): @@ -99,7 +99,7 @@ class StringDunderMethodTests(SimpleTestCase): name='shawn', discriminator=555, ), - creation=dt.now(timezone.utc) + creation=dt.now(UTC) ), embeds=[] ), @@ -118,7 +118,7 @@ class StringDunderMethodTests(SimpleTestCase): OffensiveMessage( id=602951077675139072, channel_id=291284109232308226, - delete_date=dt(3000, 1, 1, tzinfo=timezone.utc) + delete_date=dt(3000, 1, 1, tzinfo=UTC) ), OffTopicChannelName(name='bob-the-builders-playground'), Role( @@ -132,7 +132,7 @@ class StringDunderMethodTests(SimpleTestCase): name='shawn', discriminator=555, ), - creation=dt.now(tz=timezone.utc) + creation=dt.now(tz=UTC) ), User( id=5, @@ -151,7 +151,7 @@ class StringDunderMethodTests(SimpleTestCase): hidden=True, type='kick', reason='He terk my jerb!', - expires_at=dt(5018, 11, 20, 15, 52, tzinfo=timezone.utc) + expires_at=dt(5018, 11, 20, 15, 52, tzinfo=UTC) ), Reminder( author=User( @@ -165,7 +165,7 @@ class StringDunderMethodTests(SimpleTestCase): '267624335836053506/291284109232308226/463087129459949587' ), content="oh no", - 
expiration=dt(5018, 11, 20, 15, 52, tzinfo=timezone.utc) + expiration=dt(5018, 11, 20, 15, 52, tzinfo=UTC) ), NominationEntry( nomination_id=self.nomination.id, diff --git a/pydis_site/apps/api/tests/test_nominations.py b/pydis_site/apps/api/tests/test_nominations.py index ee6b1fbd..7fe2f0a8 100644 --- a/pydis_site/apps/api/tests/test_nominations.py +++ b/pydis_site/apps/api/tests/test_nominations.py @@ -1,4 +1,4 @@ -from datetime import datetime as dt, timedelta, timezone +from datetime import UTC, datetime as dt, timedelta from django.urls import reverse @@ -38,7 +38,7 @@ class CreationTests(AuthenticatedAPITestCase): ) self.assertAlmostEqual( nomination.inserted_at, - dt.now(timezone.utc), + dt.now(UTC), delta=timedelta(seconds=2) ) self.assertEqual(nomination.user.id, data['user']) @@ -319,7 +319,7 @@ class NominationTests(AuthenticatedAPITestCase): self.assertAlmostEqual( nomination.ended_at, - dt.now(timezone.utc), + dt.now(UTC), delta=timedelta(seconds=2) ) self.assertFalse(nomination.active) diff --git a/pydis_site/apps/api/tests/test_offensive_message.py b/pydis_site/apps/api/tests/test_offensive_message.py index 53f9cb48..f45b5a66 100644 --- a/pydis_site/apps/api/tests/test_offensive_message.py +++ b/pydis_site/apps/api/tests/test_offensive_message.py @@ -16,7 +16,7 @@ class CreationTests(AuthenticatedAPITestCase): 'delete_date': delete_at.isoformat()[:-1] } - aware_delete_at = delete_at.replace(tzinfo=datetime.timezone.utc) + aware_delete_at = delete_at.replace(tzinfo=datetime.UTC) response = self.client.post(url, data=data) self.assertEqual(response.status_code, 201) @@ -73,7 +73,7 @@ class ListTests(AuthenticatedAPITestCase): @classmethod def setUpTestData(cls): delete_at = datetime.datetime.now() + datetime.timedelta(days=1) # noqa: DTZ005 - aware_delete_at = delete_at.replace(tzinfo=datetime.timezone.utc) + aware_delete_at = delete_at.replace(tzinfo=datetime.UTC) cls.messages = [ { @@ -111,7 +111,7 @@ class ListTests(AuthenticatedAPITestCase): class DeletionTests(AuthenticatedAPITestCase): @classmethod def setUpTestData(cls): - delete_at = datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta(days=1) + delete_at = datetime.datetime.now(tz=datetime.UTC) + datetime.timedelta(days=1) cls.valid_offensive_message = OffensiveMessage.objects.create( id=602951077675139072, @@ -135,7 +135,7 @@ class DeletionTests(AuthenticatedAPITestCase): class NotAllowedMethodsTests(AuthenticatedAPITestCase): @classmethod def setUpTestData(cls): - delete_at = datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta(days=1) + delete_at = datetime.datetime.now(tz=datetime.UTC) + datetime.timedelta(days=1) cls.valid_offensive_message = OffensiveMessage.objects.create( id=602951077675139072, diff --git a/pydis_site/apps/api/tests/test_reminders.py b/pydis_site/apps/api/tests/test_reminders.py index 9bb5fe4d..98e93bb7 100644 --- a/pydis_site/apps/api/tests/test_reminders.py +++ b/pydis_site/apps/api/tests/test_reminders.py @@ -1,4 +1,4 @@ -from datetime import datetime, timezone +from datetime import UTC, datetime from django.forms.models import model_to_dict from django.urls import reverse @@ -59,7 +59,7 @@ class ReminderCreationTests(AuthenticatedAPITestCase): data = { 'author': self.author.id, 'content': 'Remember to...wait what was it again?', - 'expiration': datetime.now(tz=timezone.utc).isoformat(), + 'expiration': datetime.now(tz=UTC).isoformat(), 'jump_url': "https://www.google.com", 'channel_id': 123, 'mentions': [8888, 9999], @@ -91,7 +91,7 @@ class 
ReminderDeletionTests(AuthenticatedAPITestCase): cls.reminder = Reminder.objects.create( author=cls.author, content="Don't forget to set yourself a reminder", - expiration=datetime.now(timezone.utc), + expiration=datetime.now(UTC), jump_url="https://www.decliningmentalfaculties.com", channel_id=123 ) @@ -122,7 +122,7 @@ class ReminderListTests(AuthenticatedAPITestCase): cls.reminder_one = Reminder.objects.create( author=cls.author, content="We should take Bikini Bottom, and push it somewhere else!", - expiration=datetime.now(timezone.utc), + expiration=datetime.now(UTC), jump_url="https://www.icantseemyforehead.com", channel_id=123 ) @@ -130,7 +130,7 @@ class ReminderListTests(AuthenticatedAPITestCase): cls.reminder_two = Reminder.objects.create( author=cls.author, content="Gahhh-I love being purple!", - expiration=datetime.now(timezone.utc), + expiration=datetime.now(UTC), jump_url="https://www.goofygoobersicecreampartyboat.com", channel_id=123, active=False @@ -176,7 +176,7 @@ class ReminderRetrieveTests(AuthenticatedAPITestCase): cls.reminder = Reminder.objects.create( author=cls.author, content="Reminder content", - expiration=datetime.now(timezone.utc), + expiration=datetime.now(UTC), jump_url="http://example.com/", channel_id=123 ) @@ -204,7 +204,7 @@ class ReminderUpdateTests(AuthenticatedAPITestCase): cls.reminder = Reminder.objects.create( author=cls.author, content="Squash those do-gooders", - expiration=datetime.now(timezone.utc), + expiration=datetime.now(UTC), jump_url="https://www.decliningmentalfaculties.com", channel_id=123 ) diff --git a/pydis_site/apps/api/tests/test_validators.py b/pydis_site/apps/api/tests/test_validators.py index a7ec6e38..abff8f55 100644 --- a/pydis_site/apps/api/tests/test_validators.py +++ b/pydis_site/apps/api/tests/test_validators.py @@ -1,4 +1,4 @@ -from datetime import datetime, timezone +from datetime import UTC, datetime from django.core.exceptions import ValidationError from django.test import TestCase @@ -23,8 +23,8 @@ class BotSettingValidatorTests(TestCase): class OffensiveMessageValidatorsTests(TestCase): def test_accepts_future_date(self): - future_date_validator(datetime(3000, 1, 1, tzinfo=timezone.utc)) + future_date_validator(datetime(3000, 1, 1, tzinfo=UTC)) def test_rejects_non_future_date(self): with self.assertRaises(ValidationError): - future_date_validator(datetime(1000, 1, 1, tzinfo=timezone.utc)) + future_date_validator(datetime(1000, 1, 1, tzinfo=UTC)) diff --git a/pydis_site/apps/api/viewsets/bot/infraction.py b/pydis_site/apps/api/viewsets/bot/infraction.py index ec8b83a1..26cae3ad 100644 --- a/pydis_site/apps/api/viewsets/bot/infraction.py +++ b/pydis_site/apps/api/viewsets/bot/infraction.py @@ -190,7 +190,7 @@ class InfractionViewSet( except ValueError: raise ValidationError({'expires_after': ['failed to convert to datetime']}) additional_filters['expires_at__gte'] = expires_after_parsed.replace( - tzinfo=datetime.timezone.utc + tzinfo=datetime.UTC ) filter_expires_before = self.request.query_params.get('expires_before') @@ -200,7 +200,7 @@ class InfractionViewSet( except ValueError: raise ValidationError({'expires_before': ['failed to convert to datetime']}) additional_filters['expires_at__lte'] = expires_before_parsed.replace( - tzinfo=datetime.timezone.utc + tzinfo=datetime.UTC ) if 'expires_at__lte' in additional_filters and 'expires_at__gte' in additional_filters: diff --git a/pydis_site/apps/content/tests/test_utils.py b/pydis_site/apps/content/tests/test_utils.py index 462818b5..7f7736f9 100644 --- 
a/pydis_site/apps/content/tests/test_utils.py +++ b/pydis_site/apps/content/tests/test_utils.py @@ -17,7 +17,7 @@ from pydis_site.apps.content.tests.helpers import ( BASE_PATH, MockPagesTestCase, PARSED_CATEGORY_INFO, PARSED_HTML, PARSED_METADATA ) -_time = datetime.datetime(2022, 10, 10, 10, 10, 10, tzinfo=datetime.timezone.utc) +_time = datetime.datetime(2022, 10, 10, 10, 10, 10, tzinfo=datetime.UTC) _time_str = _time.strftime(settings.GITHUB_TIMESTAMP_FORMAT) TEST_COMMIT_KWARGS = { "sha": "123", diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py index 347640dd..56f6283d 100644 --- a/pydis_site/apps/content/utils.py +++ b/pydis_site/apps/content/utils.py @@ -132,7 +132,7 @@ def set_tag_commit(tag: Tag) -> None: tag.last_commit = Commit( sha="68da80efc00d9932a209d5cccd8d344cec0f09ea", message="Initial Commit\n\nTHIS IS FAKE DEMO DATA", - date=datetime.datetime(2018, 2, 3, 12, 20, 26, tzinfo=datetime.timezone.utc), + date=datetime.datetime(2018, 2, 3, 12, 20, 26, tzinfo=datetime.UTC), authors=json.dumps([{"name": "Joseph", "email": "joseph@josephbanks.me"}]), ) return @@ -154,7 +154,7 @@ def set_tag_commit(tag: Tag) -> None: date = ( datetime.datetime .strptime(committer["date"], settings.GITHUB_TIMESTAMP_FORMAT) - .replace(tzinfo=datetime.timezone.utc) + .replace(tzinfo=datetime.UTC) ) if author["email"] == committer["email"]: -- cgit v1.2.3 From d0b792b169e1b70f592f0ab01d4407e04fb6fa49 Mon Sep 17 00:00:00 2001 From: Johannes Christ Date: Fri, 21 Jul 2023 20:50:25 +0200 Subject: Soft fail for upstream error on fetching tag commits Closes #1053 --- pydis_site/apps/content/utils.py | 26 +++++++++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) (limited to 'pydis_site/apps/content/utils.py') diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py index 56f6283d..cfd73d67 100644 --- a/pydis_site/apps/content/utils.py +++ b/pydis_site/apps/content/utils.py @@ -1,8 +1,10 @@ import datetime import functools import json +import logging import tarfile import tempfile +from http import HTTPStatus from io import BytesIO from pathlib import Path @@ -18,6 +20,7 @@ from pydis_site import settings from .models import Commit, Tag TAG_CACHE_TTL = datetime.timedelta(hours=1) +log = logging.getLogger(__name__) def github_client(**kwargs) -> httpx.Client: @@ -144,9 +147,26 @@ def set_tag_commit(tag: Tag) -> None: # Fetch and set the commit with github_client() as client: - data = client.get("/repos/python-discord/bot/commits", params={"path": path}) - data.raise_for_status() - data = data.json()[0] + response = client.get("/repos/python-discord/bot/commits", params={"path": path}) + if ( + # We want to hop out early in three cases: + # - We got a forbidden response. (GitHub wrongfully uses this for rate limits.) + # - We got ratelimited. + response.status_code in (HTTPStatus.FORBIDDEN, HTTPStatus.TOO_MANY_REQUESTS) + # - GitHub has unicorn time again and is returning 5xx codes. + or int(response.status_code / 100) == 5 + ): # pragma: no cover + log.warning( + "Received code %d from GitHub for commit history for bot file %r", + response.status_code, path, + ) + # We hop out early because otherwise, these failures may result in the + # overall request to the tag page breaking. + return + + # This should only be permanent issues from here, such as bad requests. + response.raise_for_status() + data = response.json()[0] commit = data["commit"] author, committer = commit["author"], commit["committer"] -- cgit v1.2.3
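
A note on the second patch above: `datetime.UTC` was introduced in Python 3.11 as an alias of `datetime.timezone.utc`, so the substitution is purely cosmetic and behaviour-preserving. A minimal standalone sketch of the equivalence (not taken from the patches; assumes Python 3.11+):

    import datetime

    # datetime.UTC is the very same object as datetime.timezone.utc,
    # just the shorter spelling added in Python 3.11.
    assert datetime.UTC is datetime.timezone.utc

    # Building an aware "now" with the alias:
    now = datetime.datetime.now(tz=datetime.UTC)
    print(now.isoformat())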
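
The final patch's guard can be read in isolation as: treat 403 (which GitHub also uses for rate limits), 429, and any 5xx response as a transient upstream failure, log a warning, and render the tag page without commit data instead of raising. A minimal sketch of that status check, using an illustrative helper name that does not appear in the patch:

    from http import HTTPStatus

    import httpx

    def is_transient_github_error(response: httpx.Response) -> bool:
        # Illustrative helper mirroring the patch's early-exit condition:
        # 403/429 cover (mislabelled) rate limits; 5xx covers GitHub outages.
        return (
            response.status_code in (HTTPStatus.FORBIDDEN, HTTPStatus.TOO_MANY_REQUESTS)
            or response.status_code // 100 == 5
        )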